1 /* Global common subexpression elimination/Partial redundancy elimination
2 and global constant/copy propagation for GNU compiler.
3 Copyright (C) 1997, 1998, 1999, 2000, 2001 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
22 /* TODO
23 - reordering of memory allocation and freeing to be more space efficient
24 - do rough calc of how many regs are needed in each block, and a rough
25 calc of how many regs are available in each class and use that to
26 throttle back the code in cases where RTX_COST is minimal.
27 - a store to the same address as a load does not kill the load if the
28 source of the store is also the destination of the load. Handling this
29 allows more load motion, particularly out of loops.
30 - ability to realloc sbitmap vectors would allow one initial computation
31 of reg_set_in_block with only subsequent additions, rather than
32 recomputing it for each pass
36 /* References searched while implementing this.
38 Compilers Principles, Techniques and Tools
39 Aho, Sethi, Ullman
40 Addison-Wesley, 1988
42 Global Optimization by Suppression of Partial Redundancies
43 E. Morel, C. Renvoise
44    Communications of the ACM, Vol. 22, Num. 2, Feb. 1979
46 A Portable Machine-Independent Global Optimizer - Design and Measurements
47 Frederick Chow
48 Stanford Ph.D. thesis, Dec. 1983
50 A Fast Algorithm for Code Movement Optimization
51 D.M. Dhamdhere
52 SIGPLAN Notices, Vol. 23, Num. 10, Oct. 1988
54 A Solution to a Problem with Morel and Renvoise's
55 Global Optimization by Suppression of Partial Redundancies
56 K-H Drechsler, M.P. Stadel
57 ACM TOPLAS, Vol. 10, Num. 4, Oct. 1988
59 Practical Adaptation of the Global Optimization
60 Algorithm of Morel and Renvoise
61 D.M. Dhamdhere
62 ACM TOPLAS, Vol. 13, Num. 2. Apr. 1991
64 Efficiently Computing Static Single Assignment Form and the Control
65 Dependence Graph
66 R. Cytron, J. Ferrante, B.K. Rosen, M.N. Wegman, and F.K. Zadeck
67 ACM TOPLAS, Vol. 13, Num. 4, Oct. 1991
69 Lazy Code Motion
70 J. Knoop, O. Ruthing, B. Steffen
71 ACM SIGPLAN Notices Vol. 27, Num. 7, Jul. 1992, '92 Conference on PLDI
73 What's In a Region? Or Computing Control Dependence Regions in Near-Linear
74 Time for Reducible Flow Control
75 Thomas Ball
76 ACM Letters on Programming Languages and Systems,
77 Vol. 2, Num. 1-4, Mar-Dec 1993
79 An Efficient Representation for Sparse Sets
80 Preston Briggs, Linda Torczon
81 ACM Letters on Programming Languages and Systems,
82 Vol. 2, Num. 1-4, Mar-Dec 1993
84 A Variation of Knoop, Ruthing, and Steffen's Lazy Code Motion
85 K-H Drechsler, M.P. Stadel
86 ACM SIGPLAN Notices, Vol. 28, Num. 5, May 1993
88 Partial Dead Code Elimination
89 J. Knoop, O. Ruthing, B. Steffen
90 ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994
92 Effective Partial Redundancy Elimination
93 P. Briggs, K.D. Cooper
94 ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994
96 The Program Structure Tree: Computing Control Regions in Linear Time
97 R. Johnson, D. Pearson, K. Pingali
98 ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994
100 Optimal Code Motion: Theory and Practice
101 J. Knoop, O. Ruthing, B. Steffen
102 ACM TOPLAS, Vol. 16, Num. 4, Jul. 1994
104 The power of assignment motion
105 J. Knoop, O. Ruthing, B. Steffen
106 ACM SIGPLAN Notices Vol. 30, Num. 6, Jun. 1995, '95 Conference on PLDI
108 Global code motion / global value numbering
109 C. Click
110 ACM SIGPLAN Notices Vol. 30, Num. 6, Jun. 1995, '95 Conference on PLDI
112 Value Driven Redundancy Elimination
113 L.T. Simpson
114 Rice University Ph.D. thesis, Apr. 1996
116 Value Numbering
117 L.T. Simpson
118 Massively Scalar Compiler Project, Rice University, Sep. 1996
120 High Performance Compilers for Parallel Computing
121 Michael Wolfe
122 Addison-Wesley, 1996
124 Advanced Compiler Design and Implementation
125 Steven Muchnick
126 Morgan Kaufmann, 1997
128 Building an Optimizing Compiler
129 Robert Morgan
130 Digital Press, 1998
132 People wishing to speed up the code here should read:
133 Elimination Algorithms for Data Flow Analysis
134 B.G. Ryder, M.C. Paull
135 ACM Computing Surveys, Vol. 18, Num. 3, Sep. 1986
137 How to Analyze Large Programs Efficiently and Informatively
138 D.M. Dhamdhere, B.K. Rosen, F.K. Zadeck
139 ACM SIGPLAN Notices Vol. 27, Num. 7, Jul. 1992, '92 Conference on PLDI
141 People wishing to do something different can find various possibilities
142 in the above papers and elsewhere.
145 #include "config.h"
146 #include "system.h"
147 #include "toplev.h"
149 #include "rtl.h"
150 #include "tm_p.h"
151 #include "regs.h"
152 #include "hard-reg-set.h"
153 #include "flags.h"
154 #include "real.h"
155 #include "insn-config.h"
156 #include "recog.h"
157 #include "basic-block.h"
158 #include "output.h"
159 #include "function.h"
160 #include "expr.h"
161 #include "ggc.h"
162 #include "params.h"
164 #include "obstack.h"
165 #define obstack_chunk_alloc gmalloc
166 #define obstack_chunk_free free
168 /* Propagate flow information through back edges and thus enable PRE's
169 moving loop invariant calculations out of loops.
171 Originally this tended to create worse overall code, but several
172 improvements during the development of PRE seem to have made following
173 back edges generally a win.
175 Note much of the loop invariant code motion done here would normally
176 be done by loop.c, which has more heuristics for when to move invariants
177 out of loops. At some point we might need to move some of those
178 heuristics into gcse.c. */
179 #define FOLLOW_BACK_EDGES 1
181 /* We support GCSE via Partial Redundancy Elimination. PRE optimizations
182 are a superset of those done by GCSE.
184 We perform the following steps:
186 1) Compute basic block information.
188 2) Compute table of places where registers are set.
190 3) Perform copy/constant propagation.
192 4) Perform global cse.
194 5) Perform another pass of copy/constant propagation.
196 Two passes of copy/constant propagation are done because the first one
197 enables more GCSE and the second one helps to clean up the copies that
198 GCSE creates. This is needed more for PRE than for Classic because Classic
199 GCSE will try to use an existing register containing the common
200 subexpression rather than create a new one. This is harder to do for PRE
201 because of the code motion (which Classic GCSE doesn't do).
203 Expressions we are interested in GCSE-ing are of the form
204 (set (pseudo-reg) (expression)).
205 Function want_to_gcse_p says what these are.
207 PRE handles moving invariant expressions out of loops (by treating them as
208 partially redundant).
210 Eventually it would be nice to replace cse.c/gcse.c with SSA (static single
211 assignment) based GVN (global value numbering). L. T. Simpson's paper
212 (Rice University) on value numbering is a useful reference for this.
214 **********************
216 We used to support multiple passes but there are diminishing returns in
217 doing so. The first pass usually makes 90% of the changes that are doable.
218 A second pass can make a few more changes made possible by the first pass.
219 Experiments show any further passes don't make enough changes to justify
220 the expense.
222 A study of spec92 using an unlimited number of passes:
223 [1 pass] = 1208 substitutions, [2] = 577, [3] = 202, [4] = 192, [5] = 83,
224 [6] = 34, [7] = 17, [8] = 9, [9] = 4, [10] = 4, [11] = 2,
225 [12] = 2, [13] = 1, [15] = 1, [16] = 2, [41] = 1
227 It was found doing copy propagation between each pass enables further
228 substitutions.
230    PRE is quite expensive in complicated functions because the DFA can take
231    a while to converge.  Hence we only perform one pass.  The parameter
232    max-gcse-passes can be modified if one wants to experiment.
234 **********************
236 The steps for PRE are:
238 1) Build the hash table of expressions we wish to GCSE (expr_hash_table).
240 2) Perform the data flow analysis for PRE.
242 3) Delete the redundant instructions
244 4) Insert the required copies [if any] that make the partially
245 redundant instructions fully redundant.
247 5) For other reaching expressions, insert an instruction to copy the value
248 to a newly created pseudo that will reach the redundant instruction.
250 The deletion is done first so that when we do insertions we
251 know which pseudo reg to use.
253 Various papers have argued that PRE DFA is expensive (O(n^2)) and others
254 argue it is not. The number of iterations for the algorithm to converge
255 is typically 2-4 so I don't view it as that expensive (relatively speaking).
257 PRE GCSE depends heavily on the second CSE pass to clean up the copies
258 we create. To make an expression reach the place where it's redundant,
259 the result of the expression is copied to a new register, and the redundant
260 expression is deleted by replacing it with this new register. Classic GCSE
261 doesn't have this problem as much as it computes the reaching defs of
262 each register in each block and thus can try to use an existing register.
264 **********************
266    A fair bit of simplicity is gained by creating small functions for simple
267    tasks, even when the function is only called in one place.  This may
268    measurably slow things down [or may not] by creating more function call
269    overhead than is necessary.  The source is laid out so that it's trivial
270    to make the affected functions inline so that one can measure what speedup,
271    if any, can be achieved, and maybe later, when things settle, the code can
272    be rearranged.
274 Help stamp out big monolithic functions! */
276 /* GCSE global vars. */
278 /* -dG dump file. */
279 static FILE *gcse_file;
281 /* Note whether or not we should run jump optimization after gcse. We
282 want to do this for two cases.
284 * If we changed any jumps via cprop.
286 * If we added any labels via edge splitting. */
288 static int run_jump_opt_after_gcse;
290 /* Bitmaps are normally not included in debugging dumps.
291 However it's useful to be able to print them from GDB.
292 We could create special functions for this, but it's simpler to
293 just allow passing stderr to the dump_foo fns. Since stderr can
294 be a macro, we store a copy here. */
295 static FILE *debug_stderr;
297 /* An obstack for our working variables. */
298 static struct obstack gcse_obstack;
300 /* Non-zero for each mode that supports (set (reg) (reg)).
301 This is trivially true for integer and floating point values.
302 It may or may not be true for condition codes. */
303 static char can_copy_p[(int) NUM_MACHINE_MODES];
305 /* Non-zero if can_copy_p has been initialized. */
306 static int can_copy_init_p;
308 struct reg_use {rtx reg_rtx; };
310 /* Hash table of expressions. */
312 struct expr
314 /* The expression (SET_SRC for expressions, PATTERN for assignments). */
315 rtx expr;
316 /* Index in the available expression bitmaps. */
317 int bitmap_index;
318 /* Next entry with the same hash. */
319 struct expr *next_same_hash;
320 /* List of anticipatable occurrences in basic blocks in the function.
321 An "anticipatable occurrence" is one that is the first occurrence in the
322 basic block, the operands are not modified in the basic block prior
323 to the occurrence and the output is not used between the start of
324 the block and the occurrence. */
325 struct occr *antic_occr;
326 /* List of available occurrence in basic blocks in the function.
327 An "available occurrence" is one that is the last occurrence in the
328 basic block and the operands are not modified by following statements in
329 the basic block [including this insn]. */
330 struct occr *avail_occr;
331 /* Non-null if the computation is PRE redundant.
332 The value is the newly created pseudo-reg to record a copy of the
333 expression in all the places that reach the redundant copy. */
334 rtx reaching_reg;
337 /* Occurrence of an expression.
338 There is one per basic block. If a pattern appears more than once the
339 last appearance is used [or first for anticipatable expressions]. */
341 struct occr
343 /* Next occurrence of this expression. */
344 struct occr *next;
345 /* The insn that computes the expression. */
346 rtx insn;
347 /* Non-zero if this [anticipatable] occurrence has been deleted. */
348 char deleted_p;
349 /* Non-zero if this [available] occurrence has been copied to
350 reaching_reg. */
351 /* ??? This is mutually exclusive with deleted_p, so they could share
352 the same byte. */
353 char copied_p;
356 /* Expression and copy propagation hash tables.
357 Each hash table is an array of buckets.
358 ??? It is known that if it were an array of entries, structure elements
359 `next_same_hash' and `bitmap_index' wouldn't be necessary. However, it is
360 not clear whether in the final analysis a sufficient amount of memory would
361 be saved as the size of the available expression bitmaps would be larger
362 [one could build a mapping table without holes afterwards though].
363 Someday I'll perform the computation and figure it out. */
365 /* Total size of the expression hash table, in elements. */
366 static unsigned int expr_hash_table_size;
368 /* The table itself.
369 This is an array of `expr_hash_table_size' elements. */
370 static struct expr **expr_hash_table;
372 /* Total size of the copy propagation hash table, in elements. */
373 static unsigned int set_hash_table_size;
375 /* The table itself.
376 This is an array of `set_hash_table_size' elements. */
377 static struct expr **set_hash_table;
379 /* Mapping of uids to cuids.
380 Only real insns get cuids. */
381 static int *uid_cuid;
383 /* Highest UID in UID_CUID. */
384 static int max_uid;
386 /* Get the cuid of an insn. */
387 #ifdef ENABLE_CHECKING
388 #define INSN_CUID(INSN) (INSN_UID (INSN) > max_uid ? (abort (), 0) : uid_cuid[INSN_UID (INSN)])
389 #else
390 #define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])
391 #endif
393 /* Number of cuids. */
394 static int max_cuid;
396 /* Mapping of cuids to insns. */
397 static rtx *cuid_insn;
399 /* Get insn from cuid. */
400 #define CUID_INSN(CUID) (cuid_insn[CUID])
402 /* Maximum register number in function prior to doing gcse + 1.
403 Registers created during this pass have regno >= max_gcse_regno.
404 This is named with "gcse" to not collide with global of same name. */
405 static unsigned int max_gcse_regno;
407 /* Maximum number of cse-able expressions found. */
408 static int n_exprs;
410 /* Maximum number of assignments for copy propagation found. */
411 static int n_sets;
413 /* Table of registers that are modified.
415 For each register, each element is a list of places where the pseudo-reg
416 is set.
418 For simplicity, GCSE is done on sets of pseudo-regs only. PRE GCSE only
419 requires knowledge of which blocks kill which regs [and thus could use
420 a bitmap instead of the lists `reg_set_table' uses].
422    `reg_set_table' could be turned into an array of bitmaps (num-bbs x
423 num-regs) [however perhaps it may be useful to keep the data as is]. One
424 advantage of recording things this way is that `reg_set_table' is fairly
425 sparse with respect to pseudo regs but for hard regs could be fairly dense
426 [relatively speaking]. And recording sets of pseudo-regs in lists speeds
427 up functions like compute_transp since in the case of pseudo-regs we only
428 need to iterate over the number of times a pseudo-reg is set, not over the
429 number of basic blocks [clearly there is a bit of a slow down in the cases
430 where a pseudo is set more than once in a block, however it is believed
431 that the net effect is to speed things up]. This isn't done for hard-regs
432 because recording call-clobbered hard-regs in `reg_set_table' at each
433 function call can consume a fair bit of memory, and iterating over
434 hard-regs stored this way in compute_transp will be more expensive. */
436 typedef struct reg_set
438 /* The next setting of this register. */
439 struct reg_set *next;
440 /* The insn where it was set. */
441 rtx insn;
442 } reg_set;
444 static reg_set **reg_set_table;
446 /* Size of `reg_set_table'.
447 The table starts out at max_gcse_regno + slop, and is enlarged as
448 necessary. */
449 static int reg_set_table_size;
451 /* Amount to grow `reg_set_table' by when it's full. */
452 #define REG_SET_TABLE_SLOP 100
454 /* This is a list of expressions which are MEMs and will be used by load
455 or store motion.
456    Load motion tracks MEMs which aren't killed by
457    anything except themselves (i.e., loads and stores to a single location).
458    We can then allow movement of these MEM refs with a little special
459    allowance (all stores copy the same value to the reaching reg used
460    for the loads).  This means all values used to store into memory must have
461 no side effects so we can re-issue the setter value.
462 Store Motion uses this structure as an expression table to track stores
463 which look interesting, and might be moveable towards the exit block. */
465 struct ls_expr
467 struct expr * expr; /* Gcse expression reference for LM. */
468 rtx pattern; /* Pattern of this mem. */
469 rtx loads; /* INSN list of loads seen. */
470 rtx stores; /* INSN list of stores seen. */
471 struct ls_expr * next; /* Next in the list. */
472 int invalid; /* Invalid for some reason. */
473 int index; /* If it maps to a bitmap index. */
474 int hash_index; /* Index when in a hash table. */
475 rtx reaching_reg; /* Register to use when re-writing. */
478 /* Head of the list of load/store memory refs. */
479 static struct ls_expr * pre_ldst_mems = NULL;
481 /* Bitmap containing one bit for each register in the program.
482 Used when performing GCSE to track which registers have been set since
483 the start of the basic block. */
484 static sbitmap reg_set_bitmap;
486 /* For each block, a bitmap of registers set in the block.
487 This is used by expr_killed_p and compute_transp.
488 It is computed during hash table computation and not by compute_sets
489 as it includes registers added since the last pass (or between cprop and
490 gcse) and it's currently not easy to realloc sbitmap vectors. */
491 static sbitmap *reg_set_in_block;
493 /* Array, indexed by basic block number for a list of insns which modify
494 memory within that block. */
495 static rtx * modify_mem_list;
497 /* This array parallels modify_mem_list, but is kept canonicalized. */
498 static rtx * canon_modify_mem_list;
499 /* Various variables for statistics gathering. */
501 /* Memory used in a pass.
502 This isn't intended to be absolutely precise. Its intent is only
503 to keep an eye on memory usage. */
504 static int bytes_used;
506 /* GCSE substitutions made. */
507 static int gcse_subst_count;
508 /* Number of copy instructions created. */
509 static int gcse_create_count;
510 /* Number of constants propagated. */
511 static int const_prop_count;
512 /* Number of copies propagated. */
513 static int copy_prop_count;
515 /* These variables are used by classic GCSE.
516 Normally they'd be defined a bit later, but `rd_gen' needs to
517 be declared sooner. */
519 /* Each block has a bitmap of each type.
520    The length of each block's bitmap is:
522 max_cuid - for reaching definitions
523 n_exprs - for available expressions
525 Thus we view the bitmaps as 2 dimensional arrays. i.e.
526 rd_kill[block_num][cuid_num]
527 ae_kill[block_num][expr_num] */
529 /* For reaching defs */
530 static sbitmap *rd_kill, *rd_gen, *reaching_defs, *rd_out;
532 /* for available exprs */
533 static sbitmap *ae_kill, *ae_gen, *ae_in, *ae_out;
535 /* Objects of this type are passed around by the null-pointer check
536 removal routines. */
537 struct null_pointer_info
539 /* The basic block being processed. */
540 int current_block;
541 /* The first register to be handled in this pass. */
542 unsigned int min_reg;
543 /* One greater than the last register to be handled in this pass. */
544 unsigned int max_reg;
545 sbitmap *nonnull_local;
546 sbitmap *nonnull_killed;
549 static void compute_can_copy PARAMS ((void));
550 static char *gmalloc PARAMS ((unsigned int));
551 static char *grealloc PARAMS ((char *, unsigned int));
552 static char *gcse_alloc PARAMS ((unsigned long));
553 static void alloc_gcse_mem PARAMS ((rtx));
554 static void free_gcse_mem PARAMS ((void));
555 static void alloc_reg_set_mem PARAMS ((int));
556 static void free_reg_set_mem PARAMS ((void));
557 static int get_bitmap_width PARAMS ((int, int, int));
558 static void record_one_set PARAMS ((int, rtx));
559 static void record_set_info PARAMS ((rtx, rtx, void *));
560 static void compute_sets PARAMS ((rtx));
561 static void hash_scan_insn PARAMS ((rtx, int, int));
562 static void hash_scan_set PARAMS ((rtx, rtx, int));
563 static void hash_scan_clobber PARAMS ((rtx, rtx));
564 static void hash_scan_call PARAMS ((rtx, rtx));
565 static int want_to_gcse_p PARAMS ((rtx));
566 static int oprs_unchanged_p PARAMS ((rtx, rtx, int));
567 static int oprs_anticipatable_p PARAMS ((rtx, rtx));
568 static int oprs_available_p PARAMS ((rtx, rtx));
569 static void insert_expr_in_table PARAMS ((rtx, enum machine_mode, rtx,
570 int, int));
571 static void insert_set_in_table PARAMS ((rtx, rtx));
572 static unsigned int hash_expr PARAMS ((rtx, enum machine_mode, int *, int));
573 static unsigned int hash_expr_1 PARAMS ((rtx, enum machine_mode, int *));
574 static unsigned int hash_string_1 PARAMS ((const char *));
575 static unsigned int hash_set PARAMS ((int, int));
576 static int expr_equiv_p PARAMS ((rtx, rtx));
577 static void record_last_reg_set_info PARAMS ((rtx, int));
578 static void record_last_mem_set_info PARAMS ((rtx));
579 static void record_last_set_info PARAMS ((rtx, rtx, void *));
580 static void compute_hash_table PARAMS ((int));
581 static void alloc_set_hash_table PARAMS ((int));
582 static void free_set_hash_table PARAMS ((void));
583 static void compute_set_hash_table PARAMS ((void));
584 static void alloc_expr_hash_table PARAMS ((unsigned int));
585 static void free_expr_hash_table PARAMS ((void));
586 static void compute_expr_hash_table PARAMS ((void));
587 static void dump_hash_table PARAMS ((FILE *, const char *, struct expr **,
588 int, int));
589 static struct expr *lookup_expr PARAMS ((rtx));
590 static struct expr *lookup_set PARAMS ((unsigned int, rtx));
591 static struct expr *next_set PARAMS ((unsigned int, struct expr *));
592 static void reset_opr_set_tables PARAMS ((void));
593 static int oprs_not_set_p PARAMS ((rtx, rtx));
594 static void mark_call PARAMS ((rtx));
595 static void mark_set PARAMS ((rtx, rtx));
596 static void mark_clobber PARAMS ((rtx, rtx));
597 static void mark_oprs_set PARAMS ((rtx));
598 static void alloc_cprop_mem PARAMS ((int, int));
599 static void free_cprop_mem PARAMS ((void));
600 static void compute_transp PARAMS ((rtx, int, sbitmap *, int));
601 static void compute_transpout PARAMS ((void));
602 static void compute_local_properties PARAMS ((sbitmap *, sbitmap *, sbitmap *,
603 int));
604 static void compute_cprop_data PARAMS ((void));
605 static void find_used_regs PARAMS ((rtx *, void *));
606 static int try_replace_reg PARAMS ((rtx, rtx, rtx));
607 static struct expr *find_avail_set PARAMS ((int, rtx));
608 static int cprop_jump PARAMS ((basic_block, rtx, rtx, rtx));
609 #ifdef HAVE_cc0
610 static int cprop_cc0_jump PARAMS ((basic_block, rtx, struct reg_use *, rtx));
611 #endif
612 static void mems_conflict_for_gcse_p PARAMS ((rtx, rtx, void *));
613 static int load_killed_in_block_p PARAMS ((basic_block, int, rtx, int));
614 static void canon_list_insert PARAMS ((rtx, rtx, void *));
615 static int cprop_insn PARAMS ((basic_block, rtx, int));
616 static int cprop PARAMS ((int));
617 static int one_cprop_pass PARAMS ((int, int));
618 static void alloc_pre_mem PARAMS ((int, int));
619 static void free_pre_mem PARAMS ((void));
620 static void compute_pre_data PARAMS ((void));
621 static int pre_expr_reaches_here_p PARAMS ((basic_block, struct expr *,
622 basic_block));
623 static void insert_insn_end_bb PARAMS ((struct expr *, basic_block, int));
624 static void pre_insert_copy_insn PARAMS ((struct expr *, rtx));
625 static void pre_insert_copies PARAMS ((void));
626 static int pre_delete PARAMS ((void));
627 static int pre_gcse PARAMS ((void));
628 static int one_pre_gcse_pass PARAMS ((int));
629 static void add_label_notes PARAMS ((rtx, rtx));
630 static void alloc_code_hoist_mem PARAMS ((int, int));
631 static void free_code_hoist_mem PARAMS ((void));
632 static void compute_code_hoist_vbeinout PARAMS ((void));
633 static void compute_code_hoist_data PARAMS ((void));
634 static int hoist_expr_reaches_here_p PARAMS ((basic_block, int, basic_block,
635 char *));
636 static void hoist_code PARAMS ((void));
637 static int one_code_hoisting_pass PARAMS ((void));
638 static void alloc_rd_mem PARAMS ((int, int));
639 static void free_rd_mem PARAMS ((void));
640 static void handle_rd_kill_set PARAMS ((rtx, int, basic_block));
641 static void compute_kill_rd PARAMS ((void));
642 static void compute_rd PARAMS ((void));
643 static void alloc_avail_expr_mem PARAMS ((int, int));
644 static void free_avail_expr_mem PARAMS ((void));
645 static void compute_ae_gen PARAMS ((void));
646 static int expr_killed_p PARAMS ((rtx, basic_block));
647 static void compute_ae_kill PARAMS ((sbitmap *, sbitmap *));
648 static int expr_reaches_here_p PARAMS ((struct occr *, struct expr *,
649 basic_block, int));
650 static rtx computing_insn PARAMS ((struct expr *, rtx));
651 static int def_reaches_here_p PARAMS ((rtx, rtx));
652 static int can_disregard_other_sets PARAMS ((struct reg_set **, rtx, int));
653 static int handle_avail_expr PARAMS ((rtx, struct expr *));
654 static int classic_gcse PARAMS ((void));
655 static int one_classic_gcse_pass PARAMS ((int));
656 static void invalidate_nonnull_info PARAMS ((rtx, rtx, void *));
657 static void delete_null_pointer_checks_1 PARAMS ((varray_type *, unsigned int *,
658 sbitmap *, sbitmap *,
659 struct null_pointer_info *));
660 static rtx process_insert_insn PARAMS ((struct expr *));
661 static int pre_edge_insert PARAMS ((struct edge_list *, struct expr **));
662 static int expr_reaches_here_p_work PARAMS ((struct occr *, struct expr *,
663 basic_block, int, char *));
664 static int pre_expr_reaches_here_p_work PARAMS ((basic_block, struct expr *,
665 basic_block, char *));
666 static struct ls_expr * ldst_entry PARAMS ((rtx));
667 static void free_ldst_entry PARAMS ((struct ls_expr *));
668 static void free_ldst_mems PARAMS ((void));
669 static void print_ldst_list PARAMS ((FILE *));
670 static struct ls_expr * find_rtx_in_ldst PARAMS ((rtx));
671 static int enumerate_ldsts PARAMS ((void));
672 static inline struct ls_expr * first_ls_expr PARAMS ((void));
673 static inline struct ls_expr * next_ls_expr PARAMS ((struct ls_expr *));
674 static int simple_mem PARAMS ((rtx));
675 static void invalidate_any_buried_refs PARAMS ((rtx));
676 static void compute_ld_motion_mems PARAMS ((void));
677 static void trim_ld_motion_mems PARAMS ((void));
678 static void update_ld_motion_stores PARAMS ((struct expr *));
679 static void reg_set_info PARAMS ((rtx, rtx, void *));
680 static int store_ops_ok PARAMS ((rtx, basic_block));
681 static void find_moveable_store PARAMS ((rtx));
682 static int compute_store_table PARAMS ((void));
683 static int load_kills_store PARAMS ((rtx, rtx));
684 static int find_loads PARAMS ((rtx, rtx));
685 static int store_killed_in_insn PARAMS ((rtx, rtx));
686 static int store_killed_after PARAMS ((rtx, rtx, basic_block));
687 static int store_killed_before PARAMS ((rtx, rtx, basic_block));
688 static void build_store_vectors PARAMS ((void));
689 static void insert_insn_start_bb PARAMS ((rtx, basic_block));
690 static int insert_store PARAMS ((struct ls_expr *, edge));
691 static void replace_store_insn PARAMS ((rtx, rtx, basic_block));
692 static void delete_store PARAMS ((struct ls_expr *,
693 basic_block));
694 static void free_store_memory PARAMS ((void));
695 static void store_motion PARAMS ((void));
697 /* Entry point for global common subexpression elimination.
698 F is the first instruction in the function. */
701 gcse_main (f, file)
702 rtx f;
703 FILE *file;
705 int changed, pass;
706 /* Bytes used at start of pass. */
707 int initial_bytes_used;
708 /* Maximum number of bytes used by a pass. */
709 int max_pass_bytes;
710 /* Point to release obstack data from for each pass. */
711 char *gcse_obstack_bottom;
713 /* Insertion of instructions on edges can create new basic blocks; we
714 need the original basic block count so that we can properly deallocate
715 arrays sized on the number of basic blocks originally in the cfg. */
716 int orig_bb_count;
717 /* We do not construct an accurate cfg in functions which call
718 setjmp, so just punt to be safe. */
719 if (current_function_calls_setjmp)
720 return 0;
722 /* Assume that we do not need to run jump optimizations after gcse. */
723 run_jump_opt_after_gcse = 0;
725 /* For calling dump_foo fns from gdb. */
726 debug_stderr = stderr;
727 gcse_file = file;
729 /* Identify the basic block information for this function, including
730 successors and predecessors. */
731 max_gcse_regno = max_reg_num ();
733 if (file)
734 dump_flow_info (file);
736 orig_bb_count = n_basic_blocks;
737 /* Return if there's nothing to do. */
738 if (n_basic_blocks <= 1)
739 return 0;
741 /* Trying to perform global optimizations on flow graphs which have
742 a high connectivity will take a long time and is unlikely to be
743 particularly useful.
745 In normal circumstances a cfg should have about twice as many edges
746 as blocks. But we do not want to punish small functions which have
747 a couple switch statements. So we require a relatively large number
748 of basic blocks and the ratio of edges to blocks to be high. */
749 if (n_basic_blocks > 1000 && n_edges / n_basic_blocks >= 20)
751 if (warn_disabled_optimization)
752 warning ("GCSE disabled: %d > 1000 basic blocks and %d >= 20 edges/basic block",
753 n_basic_blocks, n_edges / n_basic_blocks);
754 return 0;
757 /* If allocating memory for the cprop bitmap would take up too much
758 storage it's better just to disable the optimization. */
759 if ((n_basic_blocks
760 * SBITMAP_SET_SIZE (max_gcse_regno)
761 * sizeof (SBITMAP_ELT_TYPE)) > MAX_GCSE_MEMORY)
763 if (warn_disabled_optimization)
764 warning ("GCSE disabled: %d basic blocks and %d registers",
765 n_basic_blocks, max_gcse_regno);
767 return 0;
770 /* See what modes support reg/reg copy operations. */
771 if (! can_copy_init_p)
773 compute_can_copy ();
774 can_copy_init_p = 1;
777 gcc_obstack_init (&gcse_obstack);
778 bytes_used = 0;
780 /* We need alias. */
781 init_alias_analysis ();
782 /* Record where pseudo-registers are set. This data is kept accurate
783 during each pass. ??? We could also record hard-reg information here
784 [since it's unchanging], however it is currently done during hash table
785 computation.
787 It may be tempting to compute MEM set information here too, but MEM sets
788 will be subject to code motion one day and thus we need to compute
789 information about memory sets when we build the hash tables. */
791 alloc_reg_set_mem (max_gcse_regno);
792 compute_sets (f);
794 pass = 0;
795 initial_bytes_used = bytes_used;
796 max_pass_bytes = 0;
797 gcse_obstack_bottom = gcse_alloc (1);
798 changed = 1;
799 while (changed && pass < MAX_GCSE_PASSES)
801 changed = 0;
802 if (file)
803 fprintf (file, "GCSE pass %d\n\n", pass + 1);
805 /* Initialize bytes_used to the space for the pred/succ lists,
806 and the reg_set_table data. */
807 bytes_used = initial_bytes_used;
809 /* Each pass may create new registers, so recalculate each time. */
810 max_gcse_regno = max_reg_num ();
812 alloc_gcse_mem (f);
814 /* Don't allow constant propagation to modify jumps
815 during this pass. */
816 changed = one_cprop_pass (pass + 1, 0);
818 if (optimize_size)
819 changed |= one_classic_gcse_pass (pass + 1);
820 else
822 changed |= one_pre_gcse_pass (pass + 1);
823 /* We may have just created new basic blocks. Release and
824 recompute various things which are sized on the number of
825 basic blocks. */
826 if (changed)
828 int i;
830 for (i = 0; i < orig_bb_count; i++)
832 if (modify_mem_list[i])
833 free_INSN_LIST_list (modify_mem_list + i);
834 if (canon_modify_mem_list[i])
835 free_INSN_LIST_list (canon_modify_mem_list + i);
837 modify_mem_list
838 = (rtx *) gmalloc (n_basic_blocks * sizeof (rtx *));
839 canon_modify_mem_list
840 = (rtx *) gmalloc (n_basic_blocks * sizeof (rtx *));
841 memset ((char *) modify_mem_list, 0, n_basic_blocks * sizeof (rtx *));
842 memset ((char *) canon_modify_mem_list, 0, n_basic_blocks * sizeof (rtx *));
843 orig_bb_count = n_basic_blocks;
845 free_reg_set_mem ();
846 alloc_reg_set_mem (max_reg_num ());
847 compute_sets (f);
848 run_jump_opt_after_gcse = 1;
851 if (max_pass_bytes < bytes_used)
852 max_pass_bytes = bytes_used;
854 /* Free up memory, then reallocate for code hoisting. We can
855 not re-use the existing allocated memory because the tables
856 will not have info for the insns or registers created by
857 partial redundancy elimination. */
858 free_gcse_mem ();
860       /* It does not make sense to run code hoisting unless we are optimizing
861          for code size -- it rarely makes programs faster, and can make
862 them bigger if we did partial redundancy elimination (when optimizing
863 for space, we use a classic gcse algorithm instead of partial
864 redundancy algorithms). */
865 if (optimize_size)
867 max_gcse_regno = max_reg_num ();
868 alloc_gcse_mem (f);
869 changed |= one_code_hoisting_pass ();
870 free_gcse_mem ();
872 if (max_pass_bytes < bytes_used)
873 max_pass_bytes = bytes_used;
876 if (file)
878 fprintf (file, "\n");
879 fflush (file);
882 obstack_free (&gcse_obstack, gcse_obstack_bottom);
883 pass++;
886 /* Do one last pass of copy propagation, including cprop into
887 conditional jumps. */
889 max_gcse_regno = max_reg_num ();
890 alloc_gcse_mem (f);
891 /* This time, go ahead and allow cprop to alter jumps. */
892 one_cprop_pass (pass + 1, 1);
893 free_gcse_mem ();
895 if (file)
897 fprintf (file, "GCSE of %s: %d basic blocks, ",
898 current_function_name, n_basic_blocks);
899 fprintf (file, "%d pass%s, %d bytes\n\n",
900 pass, pass > 1 ? "es" : "", max_pass_bytes);
903 obstack_free (&gcse_obstack, NULL);
904 free_reg_set_mem ();
905 /* We are finished with alias. */
906 end_alias_analysis ();
907 allocate_reg_info (max_reg_num (), FALSE, FALSE);
909 if (!optimize_size && flag_gcse_sm)
910 store_motion ();
911 /* Record where pseudo-registers are set. */
912 return run_jump_opt_after_gcse;
915 /* Misc. utilities. */
917 /* Compute which modes support reg/reg copy operations. */
919 static void
920 compute_can_copy ()
922 int i;
923 #ifndef AVOID_CCMODE_COPIES
924 rtx reg,insn;
925 #endif
926 memset (can_copy_p, 0, NUM_MACHINE_MODES);
928 start_sequence ();
929 for (i = 0; i < NUM_MACHINE_MODES; i++)
930 if (GET_MODE_CLASS (i) == MODE_CC)
932 #ifdef AVOID_CCMODE_COPIES
933 can_copy_p[i] = 0;
934 #else
935 reg = gen_rtx_REG ((enum machine_mode) i, LAST_VIRTUAL_REGISTER + 1);
936 insn = emit_insn (gen_rtx_SET (VOIDmode, reg, reg));
937 if (recog (PATTERN (insn), insn, NULL) >= 0)
938 can_copy_p[i] = 1;
939 #endif
941 else
942 can_copy_p[i] = 1;
944 end_sequence ();
947 /* Cover function to xmalloc to record bytes allocated. */
949 static char *
950 gmalloc (size)
951 unsigned int size;
953 bytes_used += size;
954 return xmalloc (size);
957 /* Cover function to xrealloc.
958 We don't record the additional size since we don't know it.
959 It won't affect memory usage stats much anyway. */
961 static char *
962 grealloc (ptr, size)
963 char *ptr;
964 unsigned int size;
966 return xrealloc (ptr, size);
969 /* Cover function to obstack_alloc.
970 We don't need to record the bytes allocated here since
971 obstack_chunk_alloc is set to gmalloc. */
973 static char *
974 gcse_alloc (size)
975 unsigned long size;
977 return (char *) obstack_alloc (&gcse_obstack, size);
980 /* Allocate memory for the cuid mapping array,
981 and reg/memory set tracking tables.
983 This is called at the start of each pass. */
985 static void
986 alloc_gcse_mem (f)
987 rtx f;
989 int i,n;
990 rtx insn;
992 /* Find the largest UID and create a mapping from UIDs to CUIDs.
993 CUIDs are like UIDs except they increase monotonically, have no gaps,
994 and only apply to real insns. */
996 max_uid = get_max_uid ();
997 n = (max_uid + 1) * sizeof (int);
998 uid_cuid = (int *) gmalloc (n);
999 memset ((char *) uid_cuid, 0, n);
1000 for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
1002 if (INSN_P (insn))
1003 uid_cuid[INSN_UID (insn)] = i++;
1004 else
1005 uid_cuid[INSN_UID (insn)] = i;
1008 /* Create a table mapping cuids to insns. */
1010 max_cuid = i;
1011 n = (max_cuid + 1) * sizeof (rtx);
1012 cuid_insn = (rtx *) gmalloc (n);
1013 memset ((char *) cuid_insn, 0, n);
1014 for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
1015 if (INSN_P (insn))
1016 CUID_INSN (i++) = insn;
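  /* For illustration: given the insn stream NOTE (uid 3), INSN (uid 5),
     NOTE (uid 6), INSN (uid 8), the two loops above yield uid_cuid[3] == 0,
     uid_cuid[5] == 0, uid_cuid[6] == 1, uid_cuid[8] == 1, max_cuid == 2,
     and cuid_insn[0] and cuid_insn[1] pointing at the insns with uids 5
     and 8.  */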
1018 /* Allocate vars to track sets of regs. */
1019 reg_set_bitmap = (sbitmap) sbitmap_alloc (max_gcse_regno);
1021 /* Allocate vars to track sets of regs, memory per block. */
1022 reg_set_in_block = (sbitmap *) sbitmap_vector_alloc (n_basic_blocks,
1023 max_gcse_regno);
1024 /* Allocate array to keep a list of insns which modify memory in each
1025 basic block. */
1026 modify_mem_list = (rtx *) gmalloc (n_basic_blocks * sizeof (rtx *));
1027 canon_modify_mem_list = (rtx *) gmalloc (n_basic_blocks * sizeof (rtx *));
1028 memset ((char *) modify_mem_list, 0, n_basic_blocks * sizeof (rtx *));
1029 memset ((char *) canon_modify_mem_list, 0, n_basic_blocks * sizeof (rtx *));
1032 /* Free memory allocated by alloc_gcse_mem. */
1034 static void
1035 free_gcse_mem ()
1037 free (uid_cuid);
1038 free (cuid_insn);
1040 free (reg_set_bitmap);
1042 sbitmap_vector_free (reg_set_in_block);
1043   /* Re-cache (return to the free list) any INSN_LIST nodes we have allocated. */
1045 int i;
1047 for (i = 0; i < n_basic_blocks; i++)
1049 if (modify_mem_list[i])
1050 free_INSN_LIST_list (modify_mem_list + i);
1051 if (canon_modify_mem_list[i])
1052 free_INSN_LIST_list (canon_modify_mem_list + i);
1055 free (modify_mem_list);
1056 free (canon_modify_mem_list);
1057 modify_mem_list = 0;
1058 canon_modify_mem_list = 0;
1062 /* Many of the global optimization algorithms work by solving dataflow
1063 equations for various expressions. Initially, some local value is
1064 computed for each expression in each block. Then, the values across the
1065 various blocks are combined (by following flow graph edges) to arrive at
1066 global values. Conceptually, each set of equations is independent. We
1067 may therefore solve all the equations in parallel, solve them one at a
1068 time, or pick any intermediate approach.
1070 When you're going to need N two-dimensional bitmaps, each X (say, the
1071 number of blocks) by Y (say, the number of expressions), call this
1072 function. It's not important what X and Y represent; only that Y
1073 correspond to the things that can be done in parallel. This function will
1074 return an appropriate chunking factor C; you should solve C sets of
1075 equations in parallel. By going through this function, we can easily
1076 trade space against time; by solving fewer equations in parallel we use
1077 less space. */
1079 static int
1080 get_bitmap_width (n, x, y)
1081 int n;
1082 int x;
1083 int y;
1085 /* It's not really worth figuring out *exactly* how much memory will
1086 be used by a particular choice. The important thing is to get
1087 something approximately right. */
1088 size_t max_bitmap_memory = 10 * 1024 * 1024;
1090 /* The number of bytes we'd use for a single column of minimum
1091 width. */
1092 size_t column_size = n * x * sizeof (SBITMAP_ELT_TYPE);
1094 /* Often, it's reasonable just to solve all the equations in
1095 parallel. */
1096 if (column_size * SBITMAP_SET_SIZE (y) <= max_bitmap_memory)
1097 return y;
1099 /* Otherwise, pick the largest width we can, without going over the
1100 limit. */
1101 return SBITMAP_ELT_BITS * ((max_bitmap_memory + column_size - 1)
1102 / column_size);
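/* For illustration, assuming an 8-byte SBITMAP_ELT_TYPE (SBITMAP_ELT_BITS
   == 64): with n == 2 bitmaps, x == 20000 blocks and y == 4000 expressions,
   column_size is 2 * 20000 * 8 == 320000 bytes and SBITMAP_SET_SIZE (4000)
   is 63, so 320000 * 63 exceeds the 10MB limit and we return
   64 * ((10485760 + 319999) / 320000) == 64 * 33 == 2112; the caller should
   then solve about 2112 equations at a time.  */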
1105 /* Compute the local properties of each recorded expression.
1107 Local properties are those that are defined by the block, irrespective of
1108 other blocks.
1110 An expression is transparent in a block if its operands are not modified
1111 in the block.
1113    An expression is computed (locally available) in a block if it is computed
1114    at least once and the expression would contain the same value if the
1115    computation was moved to the end of the block.
1117    An expression is locally anticipatable in a block if it is computed at
1118    least once and the expression would contain the same value if the
1119    computation was moved to the beginning of the block.
1121 We call this routine for cprop, pre and code hoisting. They all compute
1122 basically the same information and thus can easily share this code.
1124 TRANSP, COMP, and ANTLOC are destination sbitmaps for recording local
1125 properties. If NULL, then it is not necessary to compute or record that
1126 particular property.
1128 SETP controls which hash table to look at. If zero, this routine looks at
1129 the expr hash table; if nonzero this routine looks at the set hash table.
1130 Additionally, TRANSP is computed as ~TRANSP, since this is really cprop's
1131 ABSALTERED. */
1133 static void
1134 compute_local_properties (transp, comp, antloc, setp)
1135 sbitmap *transp;
1136 sbitmap *comp;
1137 sbitmap *antloc;
1138 int setp;
1140 unsigned int i, hash_table_size;
1141 struct expr **hash_table;
1143 /* Initialize any bitmaps that were passed in. */
1144 if (transp)
1146 if (setp)
1147 sbitmap_vector_zero (transp, n_basic_blocks);
1148 else
1149 sbitmap_vector_ones (transp, n_basic_blocks);
1152 if (comp)
1153 sbitmap_vector_zero (comp, n_basic_blocks);
1154 if (antloc)
1155 sbitmap_vector_zero (antloc, n_basic_blocks);
1157 /* We use the same code for cprop, pre and hoisting. For cprop
1158 we care about the set hash table, for pre and hoisting we
1159 care about the expr hash table. */
1160 hash_table_size = setp ? set_hash_table_size : expr_hash_table_size;
1161 hash_table = setp ? set_hash_table : expr_hash_table;
1163 for (i = 0; i < hash_table_size; i++)
1165 struct expr *expr;
1167 for (expr = hash_table[i]; expr != NULL; expr = expr->next_same_hash)
1169 int indx = expr->bitmap_index;
1170 struct occr *occr;
1172 /* The expression is transparent in this block if it is not killed.
1173 We start by assuming all are transparent [none are killed], and
1174 then reset the bits for those that are. */
1175 if (transp)
1176 compute_transp (expr->expr, indx, transp, setp);
1178 /* The occurrences recorded in antic_occr are exactly those that
1179 we want to set to non-zero in ANTLOC. */
1180 if (antloc)
1181 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
1183 SET_BIT (antloc[BLOCK_NUM (occr->insn)], indx);
1185 /* While we're scanning the table, this is a good place to
1186 initialize this. */
1187 occr->deleted_p = 0;
1190 /* The occurrences recorded in avail_occr are exactly those that
1191 we want to set to non-zero in COMP. */
1192 if (comp)
1193 for (occr = expr->avail_occr; occr != NULL; occr = occr->next)
1195 SET_BIT (comp[BLOCK_NUM (occr->insn)], indx);
1197 /* While we're scanning the table, this is a good place to
1198 initialize this. */
1199 occr->copied_p = 0;
1202 /* While we're scanning the table, this is a good place to
1203 initialize this. */
1204 expr->reaching_reg = 0;
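  /* For illustration (the bitmap argument names here are just placeholders):
     the PRE caller computes its local properties with something like
	compute_local_properties (transp, comp, antloc, 0);
     after building the expression hash table, while cprop uses the set hash
     table and the ~TRANSP convention described above:
	compute_local_properties (absaltered, pavloc, NULL, 1);  */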
1209 /* Register set information.
1211 `reg_set_table' records where each register is set or otherwise
1212 modified. */
1214 static struct obstack reg_set_obstack;
1216 static void
1217 alloc_reg_set_mem (n_regs)
1218 int n_regs;
1220 unsigned int n;
1222 reg_set_table_size = n_regs + REG_SET_TABLE_SLOP;
1223 n = reg_set_table_size * sizeof (struct reg_set *);
1224 reg_set_table = (struct reg_set **) gmalloc (n);
1225 memset ((char *) reg_set_table, 0, n);
1227 gcc_obstack_init (&reg_set_obstack);
1230 static void
1231 free_reg_set_mem ()
1233 free (reg_set_table);
1234 obstack_free (&reg_set_obstack, NULL);
1237 /* Record REGNO in the reg_set table. */
1239 static void
1240 record_one_set (regno, insn)
1241 int regno;
1242 rtx insn;
1244 /* Allocate a new reg_set element and link it onto the list. */
1245 struct reg_set *new_reg_info;
1247 /* If the table isn't big enough, enlarge it. */
1248 if (regno >= reg_set_table_size)
1250 int new_size = regno + REG_SET_TABLE_SLOP;
1252 reg_set_table
1253 = (struct reg_set **) grealloc ((char *) reg_set_table,
1254 new_size * sizeof (struct reg_set *));
1255 memset ((char *) (reg_set_table + reg_set_table_size), 0,
1256 (new_size - reg_set_table_size) * sizeof (struct reg_set *));
1257 reg_set_table_size = new_size;
1260 new_reg_info = (struct reg_set *) obstack_alloc (&reg_set_obstack,
1261 sizeof (struct reg_set));
1262 bytes_used += sizeof (struct reg_set);
1263 new_reg_info->insn = insn;
1264 new_reg_info->next = reg_set_table[regno];
1265 reg_set_table[regno] = new_reg_info;
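  /* Note that new settings are pushed on the front of the list, so if
     pseudo 70 is set first by insn A and later by insn B, reg_set_table[70]
     reads B -> A -> NULL, i.e. most recent setter first.  */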
1268 /* Called from compute_sets via note_stores to handle one SET or CLOBBER in
1269 an insn. The DATA is really the instruction in which the SET is
1270 occurring. */
1272 static void
1273 record_set_info (dest, setter, data)
1274 rtx dest, setter ATTRIBUTE_UNUSED;
1275 void *data;
1277 rtx record_set_insn = (rtx) data;
1279 if (GET_CODE (dest) == REG && REGNO (dest) >= FIRST_PSEUDO_REGISTER)
1280 record_one_set (REGNO (dest), record_set_insn);
1283 /* Scan the function and record each set of each pseudo-register.
1285 This is called once, at the start of the gcse pass. See the comments for
1286    `reg_set_table' for further documentation. */
1288 static void
1289 compute_sets (f)
1290 rtx f;
1292 rtx insn;
1294 for (insn = f; insn != 0; insn = NEXT_INSN (insn))
1295 if (INSN_P (insn))
1296 note_stores (PATTERN (insn), record_set_info, insn);
1299 /* Hash table support. */
1301 /* For each register, the cuid of the first/last insn in the block
1302 that set it, or -1 if not set. */
1303 #define NEVER_SET -1
1305 struct reg_avail_info
1307 int last_bb;
1308 int first_set;
1309 int last_set;
1312 static struct reg_avail_info *reg_avail_info;
1313 static int current_bb;
1316 /* See whether X, the source of a set, is something we want to consider for
1317 GCSE. */
1319 static int
1320 want_to_gcse_p (x)
1321 rtx x;
1323 static rtx test_insn = 0;
1324 int num_clobbers = 0;
1325 int icode;
1327 switch (GET_CODE (x))
1329 case REG:
1330 case SUBREG:
1331 case CONST_INT:
1332 case CONST_DOUBLE:
1333 case CALL:
1334 return 0;
1336 default:
1337 break;
1340 /* If this is a valid operand, we are OK. If it's VOIDmode, we aren't. */
1341 if (general_operand (x, GET_MODE (x)))
1342 return 1;
1343 else if (GET_MODE (x) == VOIDmode)
1344 return 0;
1346 /* Otherwise, check if we can make a valid insn from it. First initialize
1347 our test insn if we haven't already. */
1348 if (test_insn == 0)
1350 test_insn
1351 = make_insn_raw (gen_rtx_SET (VOIDmode,
1352 gen_rtx_REG (word_mode,
1353 FIRST_PSEUDO_REGISTER * 2),
1354 const0_rtx));
1355 NEXT_INSN (test_insn) = PREV_INSN (test_insn) = 0;
1356 ggc_add_rtx_root (&test_insn, 1);
1359 /* Now make an insn like the one we would make when GCSE'ing and see if
1360 valid. */
1361 PUT_MODE (SET_DEST (PATTERN (test_insn)), GET_MODE (x));
1362 SET_SRC (PATTERN (test_insn)) = x;
1363 return ((icode = recog (PATTERN (test_insn), test_insn, &num_clobbers)) >= 0
1364 && (num_clobbers == 0 || ! added_clobbers_hard_reg_p (icode)));
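/* For illustration (the outcome of the recog check is target dependent):
   for (set (reg 100) (plus (reg 70) (const_int 4))) the PLUS source is
   neither rejected by the switch above nor a general_operand, so we build
   the test insn and return 1 if the target recognizes a reg = reg + const
   set; for (set (reg 100) (reg 70)) or (set (reg 100) (const_int 4)) the
   switch rejects the source outright and we return 0.  */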
1367 /* Return non-zero if the operands of expression X are unchanged from the
1368 start of INSN's basic block up to but not including INSN (if AVAIL_P == 0),
1369 or from INSN to the end of INSN's basic block (if AVAIL_P != 0). */
1371 static int
1372 oprs_unchanged_p (x, insn, avail_p)
1373 rtx x, insn;
1374 int avail_p;
1376 int i, j;
1377 enum rtx_code code;
1378 const char *fmt;
1380 if (x == 0)
1381 return 1;
1383 code = GET_CODE (x);
1384 switch (code)
1386 case REG:
1388 struct reg_avail_info *info = &reg_avail_info[REGNO (x)];
1390 if (info->last_bb != current_bb)
1391 return 1;
1392 if (avail_p)
1393 return info->last_set < INSN_CUID (insn);
1394 else
1395 return info->first_set >= INSN_CUID (insn);
1398 case MEM:
1399 if (load_killed_in_block_p (BASIC_BLOCK (current_bb), INSN_CUID (insn),
1400 x, avail_p))
1401 return 0;
1402 else
1403 return oprs_unchanged_p (XEXP (x, 0), insn, avail_p);
1405 case PRE_DEC:
1406 case PRE_INC:
1407 case POST_DEC:
1408 case POST_INC:
1409 case PRE_MODIFY:
1410 case POST_MODIFY:
1411 return 0;
1413 case PC:
1414 case CC0: /*FIXME*/
1415 case CONST:
1416 case CONST_INT:
1417 case CONST_DOUBLE:
1418 case SYMBOL_REF:
1419 case LABEL_REF:
1420 case ADDR_VEC:
1421 case ADDR_DIFF_VEC:
1422 return 1;
1424 default:
1425 break;
1428 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
1430 if (fmt[i] == 'e')
1432 /* If we are about to do the last recursive call needed at this
1433 level, change it into iteration. This function is called enough
1434 to be worth it. */
1435 if (i == 0)
1436 return oprs_unchanged_p (XEXP (x, i), insn, avail_p);
1438 else if (! oprs_unchanged_p (XEXP (x, i), insn, avail_p))
1439 return 0;
1441 else if (fmt[i] == 'E')
1442 for (j = 0; j < XVECLEN (x, i); j++)
1443 if (! oprs_unchanged_p (XVECEXP (x, i, j), insn, avail_p))
1444 return 0;
1447 return 1;
1450 /* Used for communication between mems_conflict_for_gcse_p and
1451 load_killed_in_block_p. Nonzero if mems_conflict_for_gcse_p finds a
1452 conflict between two memory references. */
1453 static int gcse_mems_conflict_p;
1455 /* Used for communication between mems_conflict_for_gcse_p and
1456 load_killed_in_block_p. A memory reference for a load instruction,
1457 mems_conflict_for_gcse_p will see if a memory store conflicts with
1458 this memory load. */
1459 static rtx gcse_mem_operand;
1461 /* DEST is the output of an instruction. If it is a memory reference, and
1462 possibly conflicts with the load found in gcse_mem_operand, then set
1463 gcse_mems_conflict_p to a nonzero value. */
1465 static void
1466 mems_conflict_for_gcse_p (dest, setter, data)
1467 rtx dest, setter ATTRIBUTE_UNUSED;
1468 void *data ATTRIBUTE_UNUSED;
1470 while (GET_CODE (dest) == SUBREG
1471 || GET_CODE (dest) == ZERO_EXTRACT
1472 || GET_CODE (dest) == SIGN_EXTRACT
1473 || GET_CODE (dest) == STRICT_LOW_PART)
1474 dest = XEXP (dest, 0);
1476 /* If DEST is not a MEM, then it will not conflict with the load. Note
1477 that function calls are assumed to clobber memory, but are handled
1478 elsewhere. */
1479 if (GET_CODE (dest) != MEM)
1480 return;
1482 /* If we are setting a MEM in our list of specially recognized MEMs,
1483 don't mark as killed this time. */
1485 if (dest == gcse_mem_operand && pre_ldst_mems != NULL)
1487 if (!find_rtx_in_ldst (dest))
1488 gcse_mems_conflict_p = 1;
1489 return;
1492 if (true_dependence (dest, GET_MODE (dest), gcse_mem_operand,
1493 rtx_addr_varies_p))
1494 gcse_mems_conflict_p = 1;
1497 /* Return nonzero if the expression in X (a memory reference) is killed
1498 in block BB before or after the insn with the CUID in UID_LIMIT.
1499 AVAIL_P is nonzero for kills after UID_LIMIT, and zero for kills
1500 before UID_LIMIT.
1502 To check the entire block, set UID_LIMIT to max_uid + 1 and
1503 AVAIL_P to 0. */
1505 static int
1506 load_killed_in_block_p (bb, uid_limit, x, avail_p)
1507 basic_block bb;
1508 int uid_limit;
1509 rtx x;
1510 int avail_p;
1512 rtx list_entry = modify_mem_list[bb->index];
1513 while (list_entry)
1515 rtx setter;
1516 /* Ignore entries in the list that do not apply. */
1517 if ((avail_p
1518 && INSN_CUID (XEXP (list_entry, 0)) < uid_limit)
1519 || (! avail_p
1520 && INSN_CUID (XEXP (list_entry, 0)) > uid_limit))
1522 list_entry = XEXP (list_entry, 1);
1523 continue;
1526 setter = XEXP (list_entry, 0);
1528 /* If SETTER is a call everything is clobbered. Note that calls
1529 to pure functions are never put on the list, so we need not
1530 worry about them. */
1531 if (GET_CODE (setter) == CALL_INSN)
1532 return 1;
1534 /* SETTER must be an INSN of some kind that sets memory. Call
1535 note_stores to examine each hunk of memory that is modified.
1537 The note_stores interface is pretty limited, so we have to
1538 communicate via global variables. Yuk. */
1539 gcse_mem_operand = x;
1540 gcse_mems_conflict_p = 0;
1541 note_stores (PATTERN (setter), mems_conflict_for_gcse_p, NULL);
1542 if (gcse_mems_conflict_p)
1543 return 1;
1544 list_entry = XEXP (list_entry, 1);
1546 return 0;
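/* For illustration: if block BB contains a store through (reg 70) at cuid 12,
   then load_killed_in_block_p (BB, 10, x, 1) asks whether load X is killed
   after cuid 10; the entry for cuid 12 is not skipped (12 >= 10), so
   note_stores runs mems_conflict_for_gcse_p on it, and if that store may
   alias X we return 1.  As noted above, passing max_uid + 1 and AVAIL_P == 0
   checks the whole block.  */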
1549 /* Return non-zero if the operands of expression X are unchanged from
1550 the start of INSN's basic block up to but not including INSN. */
1552 static int
1553 oprs_anticipatable_p (x, insn)
1554 rtx x, insn;
1556 return oprs_unchanged_p (x, insn, 0);
1559 /* Return non-zero if the operands of expression X are unchanged from
1560 INSN to the end of INSN's basic block. */
1562 static int
1563 oprs_available_p (x, insn)
1564 rtx x, insn;
1566 return oprs_unchanged_p (x, insn, 1);
1569 /* Hash expression X.
1571 MODE is only used if X is a CONST_INT. DO_NOT_RECORD_P is a boolean
1572 indicating if a volatile operand is found or if the expression contains
1573 something we don't want to insert in the table.
1575 ??? One might want to merge this with canon_hash. Later. */
1577 static unsigned int
1578 hash_expr (x, mode, do_not_record_p, hash_table_size)
1579 rtx x;
1580 enum machine_mode mode;
1581 int *do_not_record_p;
1582 int hash_table_size;
1584 unsigned int hash;
1586 *do_not_record_p = 0;
1588 hash = hash_expr_1 (x, mode, do_not_record_p);
1589 return hash % hash_table_size;
1592 /* Hash a string. Just add its bytes up. */
1594 static inline unsigned
1595 hash_string_1 (ps)
1596 const char *ps;
1598 unsigned hash = 0;
1599 const unsigned char *p = (const unsigned char *)ps;
1601 if (p)
1602 while (*p)
1603 hash += *p++;
1605 return hash;
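/* For example, hash_string_1 ("ab") is 'a' + 'b' == 195; callers fold this
   into a larger running hash rather than using it as a bucket index
   directly.  */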
1608 /* Subroutine of hash_expr to do the actual work. */
1610 static unsigned int
1611 hash_expr_1 (x, mode, do_not_record_p)
1612 rtx x;
1613 enum machine_mode mode;
1614 int *do_not_record_p;
1616 int i, j;
1617 unsigned hash = 0;
1618 enum rtx_code code;
1619 const char *fmt;
1621 /* Used to turn recursion into iteration. We can't rely on GCC's
1622      tail-recursion elimination since we need to keep accumulating values
1623 in HASH. */
1625 if (x == 0)
1626 return hash;
1628 repeat:
1629 code = GET_CODE (x);
1630 switch (code)
1632 case REG:
1633 hash += ((unsigned int) REG << 7) + REGNO (x);
1634 return hash;
1636 case CONST_INT:
1637 hash += (((unsigned int) CONST_INT << 7) + (unsigned int) mode
1638 + (unsigned int) INTVAL (x));
1639 return hash;
1641 case CONST_DOUBLE:
1642 /* This is like the general case, except that it only counts
1643 the integers representing the constant. */
1644 hash += (unsigned int) code + (unsigned int) GET_MODE (x);
1645 if (GET_MODE (x) != VOIDmode)
1646 for (i = 2; i < GET_RTX_LENGTH (CONST_DOUBLE); i++)
1647 hash += (unsigned int) XWINT (x, i);
1648 else
1649 hash += ((unsigned int) CONST_DOUBLE_LOW (x)
1650 + (unsigned int) CONST_DOUBLE_HIGH (x));
1651 return hash;
1653 /* Assume there is only one rtx object for any given label. */
1654 case LABEL_REF:
1655 /* We don't hash on the address of the CODE_LABEL to avoid bootstrap
1656 differences and differences between each stage's debugging dumps. */
1657 hash += (((unsigned int) LABEL_REF << 7)
1658 + CODE_LABEL_NUMBER (XEXP (x, 0)));
1659 return hash;
1661 case SYMBOL_REF:
1663 /* Don't hash on the symbol's address to avoid bootstrap differences.
1664 Different hash values may cause expressions to be recorded in
1665 different orders and thus different registers to be used in the
1666 final assembler. This also avoids differences in the dump files
1667 between various stages. */
1668 unsigned int h = 0;
1669 const unsigned char *p = (const unsigned char *) XSTR (x, 0);
1671 while (*p)
1672 h += (h << 7) + *p++; /* ??? revisit */
1674 hash += ((unsigned int) SYMBOL_REF << 7) + h;
1675 return hash;
1678 case MEM:
1679 if (MEM_VOLATILE_P (x))
1681 *do_not_record_p = 1;
1682 return 0;
1685 hash += (unsigned int) MEM;
1686 hash += MEM_ALIAS_SET (x);
1687 x = XEXP (x, 0);
1688 goto repeat;
1690 case PRE_DEC:
1691 case PRE_INC:
1692 case POST_DEC:
1693 case POST_INC:
1694 case PC:
1695 case CC0:
1696 case CALL:
1697 case UNSPEC_VOLATILE:
1698 *do_not_record_p = 1;
1699 return 0;
1701 case ASM_OPERANDS:
1702 if (MEM_VOLATILE_P (x))
1704 *do_not_record_p = 1;
1705 return 0;
1707 else
1709 /* We don't want to take the filename and line into account. */
1710 hash += (unsigned) code + (unsigned) GET_MODE (x)
1711 + hash_string_1 (ASM_OPERANDS_TEMPLATE (x))
1712 + hash_string_1 (ASM_OPERANDS_OUTPUT_CONSTRAINT (x))
1713 + (unsigned) ASM_OPERANDS_OUTPUT_IDX (x);
1715 if (ASM_OPERANDS_INPUT_LENGTH (x))
1717 for (i = 1; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
1719 hash += (hash_expr_1 (ASM_OPERANDS_INPUT (x, i),
1720 GET_MODE (ASM_OPERANDS_INPUT (x, i)),
1721 do_not_record_p)
1722 + hash_string_1 (ASM_OPERANDS_INPUT_CONSTRAINT
1723 (x, i)));
1726 hash += hash_string_1 (ASM_OPERANDS_INPUT_CONSTRAINT (x, 0));
1727 x = ASM_OPERANDS_INPUT (x, 0);
1728 mode = GET_MODE (x);
1729 goto repeat;
1731 return hash;
1734 default:
1735 break;
1738 hash += (unsigned) code + (unsigned) GET_MODE (x);
1739 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
1741 if (fmt[i] == 'e')
1743 /* If we are about to do the last recursive call
1744 needed at this level, change it into iteration.
1745 This function is called enough to be worth it. */
1746 if (i == 0)
1748 x = XEXP (x, i);
1749 goto repeat;
1752 hash += hash_expr_1 (XEXP (x, i), 0, do_not_record_p);
1753 if (*do_not_record_p)
1754 return 0;
1757 else if (fmt[i] == 'E')
1758 for (j = 0; j < XVECLEN (x, i); j++)
1760 hash += hash_expr_1 (XVECEXP (x, i, j), 0, do_not_record_p);
1761 if (*do_not_record_p)
1762 return 0;
1765 else if (fmt[i] == 's')
1766 hash += hash_string_1 (XSTR (x, i));
1767 else if (fmt[i] == 'i')
1768 hash += (unsigned int) XINT (x, i);
1769 else
1770 abort ();
1773 return hash;
1776 /* Hash a set of register REGNO.
1778 Sets are hashed on the register that is set. This simplifies the PRE copy
1779 propagation code.
1781 ??? May need to make things more elaborate. Later, as necessary. */
1783 static unsigned int
1784 hash_set (regno, hash_table_size)
1785 int regno;
1786 int hash_table_size;
1788 unsigned int hash;
1790 hash = regno;
1791 return hash % hash_table_size;
1794 /* Return non-zero if exp1 is equivalent to exp2.
1795 ??? Borrowed from cse.c. Might want to remerge with cse.c. Later. */
1797 static int
1798 expr_equiv_p (x, y)
1799 rtx x, y;
1801 register int i, j;
1802 register enum rtx_code code;
1803 register const char *fmt;
1805 if (x == y)
1806 return 1;
1808 if (x == 0 || y == 0)
1809 return x == y;
1811 code = GET_CODE (x);
1812 if (code != GET_CODE (y))
1813 return 0;
1815 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
1816 if (GET_MODE (x) != GET_MODE (y))
1817 return 0;
1819 switch (code)
1821 case PC:
1822 case CC0:
1823 return x == y;
1825 case CONST_INT:
1826 return INTVAL (x) == INTVAL (y);
1828 case LABEL_REF:
1829 return XEXP (x, 0) == XEXP (y, 0);
1831 case SYMBOL_REF:
1832 return XSTR (x, 0) == XSTR (y, 0);
1834 case REG:
1835 return REGNO (x) == REGNO (y);
1837 case MEM:
1838 /* Can't merge two expressions in different alias sets, since we can
1839 decide that the expression is transparent in a block when it isn't,
1840 due to it being set with a different alias set. */
1841 if (MEM_ALIAS_SET (x) != MEM_ALIAS_SET (y))
1842 return 0;
1843 break;
1845 /* For commutative operations, check both orders. */
1846 case PLUS:
1847 case MULT:
1848 case AND:
1849 case IOR:
1850 case XOR:
1851 case NE:
1852 case EQ:
1853 return ((expr_equiv_p (XEXP (x, 0), XEXP (y, 0))
1854 && expr_equiv_p (XEXP (x, 1), XEXP (y, 1)))
1855 || (expr_equiv_p (XEXP (x, 0), XEXP (y, 1))
1856 && expr_equiv_p (XEXP (x, 1), XEXP (y, 0))));
1858 case ASM_OPERANDS:
1859 /* We don't use the generic code below because we want to
1860 disregard filename and line numbers. */
1862 /* A volatile asm isn't equivalent to any other. */
1863 if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
1864 return 0;
1866 if (GET_MODE (x) != GET_MODE (y)
1867 || strcmp (ASM_OPERANDS_TEMPLATE (x), ASM_OPERANDS_TEMPLATE (y))
1868 || strcmp (ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
1869 ASM_OPERANDS_OUTPUT_CONSTRAINT (y))
1870 || ASM_OPERANDS_OUTPUT_IDX (x) != ASM_OPERANDS_OUTPUT_IDX (y)
1871 || ASM_OPERANDS_INPUT_LENGTH (x) != ASM_OPERANDS_INPUT_LENGTH (y))
1872 return 0;
1874 if (ASM_OPERANDS_INPUT_LENGTH (x))
1876 for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
1877 if (! expr_equiv_p (ASM_OPERANDS_INPUT (x, i),
1878 ASM_OPERANDS_INPUT (y, i))
1879 || strcmp (ASM_OPERANDS_INPUT_CONSTRAINT (x, i),
1880 ASM_OPERANDS_INPUT_CONSTRAINT (y, i)))
1881 return 0;
1884 return 1;
1886 default:
1887 break;
1890 /* Compare the elements. If any pair of corresponding elements
1891 fails to match, return 0 for the whole thing. */
1893 fmt = GET_RTX_FORMAT (code);
1894 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1896 switch (fmt[i])
1898 case 'e':
1899 if (! expr_equiv_p (XEXP (x, i), XEXP (y, i)))
1900 return 0;
1901 break;
1903 case 'E':
1904 if (XVECLEN (x, i) != XVECLEN (y, i))
1905 return 0;
1906 for (j = 0; j < XVECLEN (x, i); j++)
1907 if (! expr_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j)))
1908 return 0;
1909 break;
1911 case 's':
1912 if (strcmp (XSTR (x, i), XSTR (y, i)))
1913 return 0;
1914 break;
1916 case 'i':
1917 if (XINT (x, i) != XINT (y, i))
1918 return 0;
1919 break;
1921 case 'w':
1922 if (XWINT (x, i) != XWINT (y, i))
1923 return 0;
1924 break;
1926 case '0':
1927 break;
1929 default:
1930 abort ();
1934 return 1;
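/* Editor's note (illustrative, not part of the original code): because
   both operand orders are tried for the commutative codes above,

       (plus:SI (reg:SI 70) (const_int 4))
       (plus:SI (const_int 4) (reg:SI 70))

   are treated as equivalent, whereas (mult:SI x y) and (mult:HI x y)
   never are, since the mode check rejects them first.  */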
1937 /* Insert expression X in INSN in the hash table.
1938 If it is already present, record it as the last occurrence in INSN's
1939 basic block.
1941 MODE is the mode of the value X is being stored into.
1942 It is only used if X is a CONST_INT.
1944 ANTIC_P is non-zero if X is an anticipatable expression.
1945 AVAIL_P is non-zero if X is an available expression. */
1947 static void
1948 insert_expr_in_table (x, mode, insn, antic_p, avail_p)
1949 rtx x;
1950 enum machine_mode mode;
1951 rtx insn;
1952 int antic_p, avail_p;
1954 int found, do_not_record_p;
1955 unsigned int hash;
1956 struct expr *cur_expr, *last_expr = NULL;
1957 struct occr *antic_occr, *avail_occr;
1958 struct occr *last_occr = NULL;
1960 hash = hash_expr (x, mode, &do_not_record_p, expr_hash_table_size);
1962 /* Do not insert expression in table if it contains volatile operands,
1963 or if hash_expr determines the expression is something we don't want
1964 to or can't handle. */
1965 if (do_not_record_p)
1966 return;
1968 cur_expr = expr_hash_table[hash];
1969 found = 0;
1971 while (cur_expr && 0 == (found = expr_equiv_p (cur_expr->expr, x)))
1973 /* If the expression isn't found, save a pointer to the end of
1974 the list. */
1975 last_expr = cur_expr;
1976 cur_expr = cur_expr->next_same_hash;
1979 if (! found)
1981 cur_expr = (struct expr *) gcse_alloc (sizeof (struct expr));
1982 bytes_used += sizeof (struct expr);
1983 if (expr_hash_table[hash] == NULL)
1984 /* This is the first pattern that hashed to this index. */
1985 expr_hash_table[hash] = cur_expr;
1986 else
1987 /* Add EXPR to end of this hash chain. */
1988 last_expr->next_same_hash = cur_expr;
1990 /* Set the fields of the expr element. */
1991 cur_expr->expr = x;
1992 cur_expr->bitmap_index = n_exprs++;
1993 cur_expr->next_same_hash = NULL;
1994 cur_expr->antic_occr = NULL;
1995 cur_expr->avail_occr = NULL;
1998 /* Now record the occurrence(s). */
1999 if (antic_p)
2001 antic_occr = cur_expr->antic_occr;
2003 /* Search for another occurrence in the same basic block. */
2004 while (antic_occr && BLOCK_NUM (antic_occr->insn) != BLOCK_NUM (insn))
2006 /* If an occurrence isn't found, save a pointer to the end of
2007 the list. */
2008 last_occr = antic_occr;
2009 antic_occr = antic_occr->next;
2012 if (antic_occr)
2013 /* Found another instance of the expression in the same basic block.
2014 Prefer the currently recorded one. We want the first one in the
2015 block and the block is scanned from start to end. */
2016 ; /* nothing to do */
2017 else
2019 /* First occurrence of this expression in this basic block. */
2020 antic_occr = (struct occr *) gcse_alloc (sizeof (struct occr));
2021 bytes_used += sizeof (struct occr);
2022 /* First occurrence of this expression in any block? */
2023 if (cur_expr->antic_occr == NULL)
2024 cur_expr->antic_occr = antic_occr;
2025 else
2026 last_occr->next = antic_occr;
2028 antic_occr->insn = insn;
2029 antic_occr->next = NULL;
2033 if (avail_p)
2035 avail_occr = cur_expr->avail_occr;
2037 /* Search for another occurrence in the same basic block. */
2038 while (avail_occr && BLOCK_NUM (avail_occr->insn) != BLOCK_NUM (insn))
2040 /* If an occurrence isn't found, save a pointer to the end of
2041 the list. */
2042 last_occr = avail_occr;
2043 avail_occr = avail_occr->next;
2046 if (avail_occr)
2047 /* Found another instance of the expression in the same basic block.
2048 Prefer this occurrence to the currently recorded one. We want
2049 the last one in the block and the block is scanned from start
2050 to end. */
2051 avail_occr->insn = insn;
2052 else
2054 /* First occurrence of this expression in this basic block. */
2055 avail_occr = (struct occr *) gcse_alloc (sizeof (struct occr));
2056 bytes_used += sizeof (struct occr);
2058 /* First occurrence of this expression in any block? */
2059 if (cur_expr->avail_occr == NULL)
2060 cur_expr->avail_occr = avail_occr;
2061 else
2062 last_occr->next = avail_occr;
2064 avail_occr->insn = insn;
2065 avail_occr->next = NULL;
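/* Editor's note (illustrative): if a block computes the same expression
   twice, first in insn I1 and again in insn I2, the code above keeps I1
   on the antic_occr list (the first occurrence is what matters for
   anticipatability at block entry) but overwrites the avail_occr entry
   with I2 (the last occurrence is what is still available at block
   exit).  */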
2070 /* Insert pattern X in INSN in the hash table.
2071 X is a SET of a reg to either another reg or a constant.
2072 If it is already present, record it as the last occurrence in INSN's
2073 basic block. */
2075 static void
2076 insert_set_in_table (x, insn)
2077 rtx x;
2078 rtx insn;
2080 int found;
2081 unsigned int hash;
2082 struct expr *cur_expr, *last_expr = NULL;
2083 struct occr *cur_occr, *last_occr = NULL;
2085 if (GET_CODE (x) != SET
2086 || GET_CODE (SET_DEST (x)) != REG)
2087 abort ();
2089 hash = hash_set (REGNO (SET_DEST (x)), set_hash_table_size);
2091 cur_expr = set_hash_table[hash];
2092 found = 0;
2094 while (cur_expr && 0 == (found = expr_equiv_p (cur_expr->expr, x)))
2096 /* If the expression isn't found, save a pointer to the end of
2097 the list. */
2098 last_expr = cur_expr;
2099 cur_expr = cur_expr->next_same_hash;
2102 if (! found)
2104 cur_expr = (struct expr *) gcse_alloc (sizeof (struct expr));
2105 bytes_used += sizeof (struct expr);
2106 if (set_hash_table[hash] == NULL)
2107 /* This is the first pattern that hashed to this index. */
2108 set_hash_table[hash] = cur_expr;
2109 else
2110 /* Add EXPR to end of this hash chain. */
2111 last_expr->next_same_hash = cur_expr;
2113 /* Set the fields of the expr element.
2114 We must copy X because it can be modified when copy propagation is
2115 performed on its operands. */
2116 cur_expr->expr = copy_rtx (x);
2117 cur_expr->bitmap_index = n_sets++;
2118 cur_expr->next_same_hash = NULL;
2119 cur_expr->antic_occr = NULL;
2120 cur_expr->avail_occr = NULL;
2123 /* Now record the occurrence. */
2124 cur_occr = cur_expr->avail_occr;
2126 /* Search for another occurrence in the same basic block. */
2127 while (cur_occr && BLOCK_NUM (cur_occr->insn) != BLOCK_NUM (insn))
2129 /* If an occurrence isn't found, save a pointer to the end of
2130 the list. */
2131 last_occr = cur_occr;
2132 cur_occr = cur_occr->next;
2135 if (cur_occr)
2136 /* Found another instance of the expression in the same basic block.
2137 Prefer this occurrence to the currently recorded one. We want the
2138 last one in the block and the block is scanned from start to end. */
2139 cur_occr->insn = insn;
2140 else
2142 /* First occurrence of this expression in this basic block. */
2143 cur_occr = (struct occr *) gcse_alloc (sizeof (struct occr));
2144 bytes_used += sizeof (struct occr);
2146 /* First occurrence of this expression in any block? */
2147 if (cur_expr->avail_occr == NULL)
2148 cur_expr->avail_occr = cur_occr;
2149 else
2150 last_occr->next = cur_occr;
2152 cur_occr->insn = insn;
2153 cur_occr->next = NULL;
2157 /* Scan pattern PAT of INSN and add an entry to the hash table. If SET_P is
2158 non-zero, this is for the assignment hash table, otherwise it is for the
2159 expression hash table. */
2161 static void
2162 hash_scan_set (pat, insn, set_p)
2163 rtx pat, insn;
2164 int set_p;
2166 rtx src = SET_SRC (pat);
2167 rtx dest = SET_DEST (pat);
2168 rtx note;
2170 if (GET_CODE (src) == CALL)
2171 hash_scan_call (src, insn);
2173 else if (GET_CODE (dest) == REG)
2175 unsigned int regno = REGNO (dest);
2176 rtx tmp;
2178 /* If this is a single set and we are doing constant propagation,
2179 see if a REG_NOTE shows this equivalent to a constant. */
2180 if (set_p && (note = find_reg_equal_equiv_note (insn)) != 0
2181 && CONSTANT_P (XEXP (note, 0)))
2182 src = XEXP (note, 0), pat = gen_rtx_SET (VOIDmode, dest, src);
2184 /* Only record sets of pseudo-regs in the hash table. */
2185 if (! set_p
2186 && regno >= FIRST_PSEUDO_REGISTER
2187 /* Don't GCSE something if we can't do a reg/reg copy. */
2188 && can_copy_p [GET_MODE (dest)]
2189 /* Is SET_SRC something we want to gcse? */
2190 && want_to_gcse_p (src)
2191 /* Don't CSE a nop. */
2192 && ! set_noop_p (pat)
2193 /* Don't GCSE if it has attached REG_EQUIV note.
2194 At this point only function parameters should have
2195 REG_EQUIV notes, and if the argument slot is used somewhere
2196 explicitly, it means the address of the parameter has been taken,
2197 so we should not extend the lifetime of the pseudo. */
2198 && ((note = find_reg_note (insn, REG_EQUIV, NULL_RTX)) == 0
2199 || GET_CODE (XEXP (note, 0)) != MEM))
2201 /* An expression is not anticipatable if its operands are
2202 modified before this insn or if this is not the only SET in
2203 this insn. */
2204 int antic_p = oprs_anticipatable_p (src, insn) && single_set (insn);
2205 /* An expression is not available if its operands are
2206 subsequently modified, including this insn. It's also not
2207 available if this is a branch, because we can't insert
2208 a set after the branch. */
2209 int avail_p = (oprs_available_p (src, insn)
2210 && ! JUMP_P (insn));
2212 insert_expr_in_table (src, GET_MODE (dest), insn, antic_p, avail_p);
2215 /* Record sets for constant/copy propagation. */
2216 else if (set_p
2217 && regno >= FIRST_PSEUDO_REGISTER
2218 && ((GET_CODE (src) == REG
2219 && REGNO (src) >= FIRST_PSEUDO_REGISTER
2220 && can_copy_p [GET_MODE (dest)]
2221 && REGNO (src) != regno)
2222 || GET_CODE (src) == CONST_INT
2223 || GET_CODE (src) == SYMBOL_REF
2224 || GET_CODE (src) == CONST_DOUBLE)
2225 /* A copy is not available if its src or dest is subsequently
2226 modified. Here we want to search from INSN+1 on, but
2227 oprs_available_p searches from INSN on. */
2228 && (insn == BLOCK_END (BLOCK_NUM (insn))
2229 || ((tmp = next_nonnote_insn (insn)) != NULL_RTX
2230 && oprs_available_p (pat, tmp))))
2231 insert_set_in_table (pat, insn);
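/* Editor's note (illustrative, assuming the regs shown are pseudos and
   that want_to_gcse_p accepts the PLUS): under the tests above,
       (set (reg 80) (plus:SI (reg 70) (const_int 4)))
   is recorded when the expression hash table is being built (!SET_P),
   while
       (set (reg 81) (reg 75))   or   (set (reg 82) (const_int 7))
   are recorded when the assignment (set) hash table is being built
   (SET_P), for const/copy propagation.  */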
2235 static void
2236 hash_scan_clobber (x, insn)
2237 rtx x ATTRIBUTE_UNUSED, insn ATTRIBUTE_UNUSED;
2239 /* Currently nothing to do. */
2242 static void
2243 hash_scan_call (x, insn)
2244 rtx x ATTRIBUTE_UNUSED, insn ATTRIBUTE_UNUSED;
2246 /* Currently nothing to do. */
2249 /* Process INSN and add hash table entries as appropriate.
2251 Only available expressions that set a single pseudo-reg are recorded.
2253 Single sets in a PARALLEL could be handled, but it's an extra complication
2254 that isn't dealt with right now. The trick is handling the CLOBBERs that
2255 are also in the PARALLEL. Later.
2257 If SET_P is non-zero, this is for the assignment hash table,
2258 otherwise it is for the expression hash table.
2259 If IN_LIBCALL_BLOCK is nonzero, we are in a libcall block, and should
2260 not record any expressions. */
2262 static void
2263 hash_scan_insn (insn, set_p, in_libcall_block)
2264 rtx insn;
2265 int set_p;
2266 int in_libcall_block;
2268 rtx pat = PATTERN (insn);
2269 int i;
2271 if (in_libcall_block)
2272 return;
2274 /* Pick out the sets of INSN and for other forms of instructions record
2275 what's been modified. */
2277 if (GET_CODE (pat) == SET)
2278 hash_scan_set (pat, insn, set_p);
2279 else if (GET_CODE (pat) == PARALLEL)
2280 for (i = 0; i < XVECLEN (pat, 0); i++)
2282 rtx x = XVECEXP (pat, 0, i);
2284 if (GET_CODE (x) == SET)
2285 hash_scan_set (x, insn, set_p);
2286 else if (GET_CODE (x) == CLOBBER)
2287 hash_scan_clobber (x, insn);
2288 else if (GET_CODE (x) == CALL)
2289 hash_scan_call (x, insn);
2292 else if (GET_CODE (pat) == CLOBBER)
2293 hash_scan_clobber (pat, insn);
2294 else if (GET_CODE (pat) == CALL)
2295 hash_scan_call (pat, insn);
2298 static void
2299 dump_hash_table (file, name, table, table_size, total_size)
2300 FILE *file;
2301 const char *name;
2302 struct expr **table;
2303 int table_size, total_size;
2305 int i;
2306 /* Flattened out table, so it's printed in proper order. */
2307 struct expr **flat_table;
2308 unsigned int *hash_val;
2309 struct expr *expr;
2311 flat_table
2312 = (struct expr **) xcalloc (total_size, sizeof (struct expr *));
2313 hash_val = (unsigned int *) xmalloc (total_size * sizeof (unsigned int));
2315 for (i = 0; i < table_size; i++)
2316 for (expr = table[i]; expr != NULL; expr = expr->next_same_hash)
2318 flat_table[expr->bitmap_index] = expr;
2319 hash_val[expr->bitmap_index] = i;
2322 fprintf (file, "%s hash table (%d buckets, %d entries)\n",
2323 name, table_size, total_size);
2325 for (i = 0; i < total_size; i++)
2326 if (flat_table[i] != 0)
2328 expr = flat_table[i];
2329 fprintf (file, "Index %d (hash value %d)\n ",
2330 expr->bitmap_index, hash_val[i]);
2331 print_rtl (file, expr->expr);
2332 fprintf (file, "\n");
2335 fprintf (file, "\n");
2337 free (flat_table);
2338 free (hash_val);
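/* Editor's note: the dump written above looks roughly like

       Expression hash table (31 buckets, 4 entries)
       Index 0 (hash value 17)
        (plus:SI (reg:SI 70) (const_int 4))
       ...

   where the index is the entry's bitmap_index and the hash value is the
   bucket it hashed to (the numbers here are made up).  */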
2341 /* Record register first/last/block set information for REGNO in INSN.
2343 first_set records the first place in the block where the register
2344 is set and is used to compute "anticipatability".
2346 last_set records the last place in the block where the register
2347 is set and is used to compute "availability".
2349 last_bb records the block for which first_set and last_set are
2350 valid, as a quick test to invalidate them.
2352 reg_set_in_block records whether the register is set in the block
2353 and is used to compute "transparency". */
2355 static void
2356 record_last_reg_set_info (insn, regno)
2357 rtx insn;
2358 int regno;
2360 struct reg_avail_info *info = &reg_avail_info[regno];
2361 int cuid = INSN_CUID (insn);
2363 info->last_set = cuid;
2364 if (info->last_bb != current_bb)
2366 info->last_bb = current_bb;
2367 info->first_set = cuid;
2368 SET_BIT (reg_set_in_block[current_bb], regno);
2373 /* Record all of the canonicalized MEMs of record_last_mem_set_info's insn.
2374 Note we store a pair of elements in the list, so they have to be
2375 taken off pairwise. */
2377 static void
2378 canon_list_insert (dest, unused1, v_insn)
2379 rtx dest ATTRIBUTE_UNUSED;
2380 rtx unused1 ATTRIBUTE_UNUSED;
2381 void * v_insn;
2383 rtx dest_addr, insn;
2385 while (GET_CODE (dest) == SUBREG
2386 || GET_CODE (dest) == ZERO_EXTRACT
2387 || GET_CODE (dest) == SIGN_EXTRACT
2388 || GET_CODE (dest) == STRICT_LOW_PART)
2389 dest = XEXP (dest, 0);
2391 /* If DEST is not a MEM, then it will not conflict with a load. Note
2392 that function calls are assumed to clobber memory, but are handled
2393 elsewhere. */
2395 if (GET_CODE (dest) != MEM)
2396 return;
2398 dest_addr = get_addr (XEXP (dest, 0));
2399 dest_addr = canon_rtx (dest_addr);
2400 insn = (rtx) v_insn;
2402 canon_modify_mem_list[BLOCK_NUM (insn)] =
2403 alloc_INSN_LIST (dest_addr, canon_modify_mem_list[BLOCK_NUM (insn)]);
2404 canon_modify_mem_list[BLOCK_NUM (insn)] =
2405 alloc_INSN_LIST (dest, canon_modify_mem_list[BLOCK_NUM (insn)]);
2408 /* Record memory modification information for INSN. We do not actually care
2409 about the memory location(s) that are set, or even how they are set (consider
2410 a CALL_INSN). We merely need to record which insns modify memory. */
2412 static void
2413 record_last_mem_set_info (insn)
2414 rtx insn;
2416 /* load_killed_in_block_p will handle the case of calls clobbering
2417 everything. */
2418 modify_mem_list[BLOCK_NUM (insn)] =
2419 alloc_INSN_LIST (insn, modify_mem_list[BLOCK_NUM (insn)]);
2421 if (GET_CODE (insn) == CALL_INSN)
2423 /* Note that traversals of this list (other than for freeing)
2424 will break after encountering a CALL_INSN. So, there's no
2425 need to insert a pair of items, as canon_list_insert does. */
2426 canon_modify_mem_list[BLOCK_NUM (insn)] =
2427 alloc_INSN_LIST (insn, canon_modify_mem_list[BLOCK_NUM (insn)]);
2429 else
2430 note_stores (PATTERN (insn), canon_list_insert, (void *) insn);
2433 /* Called from compute_hash_table via note_stores to handle one
2434 SET or CLOBBER in an insn. DATA is really the instruction in which
2435 the SET is taking place. */
2437 static void
2438 record_last_set_info (dest, setter, data)
2439 rtx dest, setter ATTRIBUTE_UNUSED;
2440 void *data;
2442 rtx last_set_insn = (rtx) data;
2444 if (GET_CODE (dest) == SUBREG)
2445 dest = SUBREG_REG (dest);
2447 if (GET_CODE (dest) == REG)
2448 record_last_reg_set_info (last_set_insn, REGNO (dest));
2449 else if (GET_CODE (dest) == MEM
2450 /* Ignore pushes, they clobber nothing. */
2451 && ! push_operand (dest, GET_MODE (dest)))
2452 record_last_mem_set_info (last_set_insn);
2455 /* Top level function to create an expression or assignment hash table.
2457 Expression entries are placed in the hash table if
2458 - they are of the form (set (pseudo-reg) src),
2459 - src is something we want to perform GCSE on,
2460 - none of the operands are subsequently modified in the block
2462 Assignment entries are placed in the hash table if
2463 - they are of the form (set (pseudo-reg) src),
2464 - src is something we want to perform const/copy propagation on,
2465 - none of the operands or target are subsequently modified in the block
2467 Currently src must be a pseudo-reg or a const_int.
2470 SET_P is non-zero for computing the assignment hash table. */
2472 static void
2473 compute_hash_table (set_p)
2474 int set_p;
2476 unsigned int i;
2478 /* While we compute the hash table we also compute a bit array of which
2479 registers are set in which blocks.
2480 ??? This isn't needed during const/copy propagation, but it's cheap to
2481 compute. Later. */
2482 sbitmap_vector_zero (reg_set_in_block, n_basic_blocks);
2484 /* Re-cache any INSN_LIST nodes we have allocated. */
2486 int i;
2487 for (i = 0; i < n_basic_blocks; i++)
2489 if (modify_mem_list[i])
2490 free_INSN_LIST_list (modify_mem_list + i);
2491 if (canon_modify_mem_list[i])
2492 free_INSN_LIST_list (canon_modify_mem_list + i);
2495 /* Some working arrays used to track first and last set in each block. */
2496 reg_avail_info = (struct reg_avail_info*)
2497 gmalloc (max_gcse_regno * sizeof (struct reg_avail_info));
2499 for (i = 0; i < max_gcse_regno; ++i)
2500 reg_avail_info[i].last_bb = NEVER_SET;
2502 for (current_bb = 0; current_bb < n_basic_blocks; current_bb++)
2504 rtx insn;
2505 unsigned int regno;
2506 int in_libcall_block;
2508 /* First pass over the instructions records information used to
2509 determine when registers and memory are first and last set.
2510 ??? hard-reg reg_set_in_block computation
2511 could be moved to compute_sets since they currently don't change. */
2513 for (insn = BLOCK_HEAD (current_bb);
2514 insn && insn != NEXT_INSN (BLOCK_END (current_bb));
2515 insn = NEXT_INSN (insn))
2517 if (! INSN_P (insn))
2518 continue;
2520 if (GET_CODE (insn) == CALL_INSN)
2522 bool clobbers_all = false;
2523 #ifdef NON_SAVING_SETJMP
2524 if (NON_SAVING_SETJMP
2525 && find_reg_note (insn, REG_SETJMP, NULL_RTX))
2526 clobbers_all = true;
2527 #endif
2529 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2530 if (clobbers_all
2531 || TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
2532 record_last_reg_set_info (insn, regno);
2534 mark_call (insn);
2537 note_stores (PATTERN (insn), record_last_set_info, insn);
2540 /* The next pass builds the hash table. */
2542 for (insn = BLOCK_HEAD (current_bb), in_libcall_block = 0;
2543 insn && insn != NEXT_INSN (BLOCK_END (current_bb));
2544 insn = NEXT_INSN (insn))
2545 if (INSN_P (insn))
2547 if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
2548 in_libcall_block = 1;
2549 else if (set_p && find_reg_note (insn, REG_RETVAL, NULL_RTX))
2550 in_libcall_block = 0;
2551 hash_scan_insn (insn, set_p, in_libcall_block);
2552 if (!set_p && find_reg_note (insn, REG_RETVAL, NULL_RTX))
2553 in_libcall_block = 0;
2557 free (reg_avail_info);
2558 reg_avail_info = NULL;
2561 /* Allocate space for the set hash table.
2562 N_INSNS is the number of instructions in the function.
2563 It is used to determine the number of buckets to use. */
2565 static void
2566 alloc_set_hash_table (n_insns)
2567 int n_insns;
2569 int n;
2571 set_hash_table_size = n_insns / 4;
2572 if (set_hash_table_size < 11)
2573 set_hash_table_size = 11;
2575 /* Attempt to maintain efficient use of hash table.
2576 Making it an odd number is simplest for now.
2577 ??? Later take some measurements. */
2578 set_hash_table_size |= 1;
2579 n = set_hash_table_size * sizeof (struct expr *);
2580 set_hash_table = (struct expr **) gmalloc (n);
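/* Editor's note: a worked example of the sizing above.  For a function
   of 1000 insns, 1000 / 4 = 250 buckets, and OR-ing in the low bit gives
   251, an odd bucket count.  The floor of 11 only matters for very small
   functions (fewer than 44 insns).  */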
2583 /* Free things allocated by alloc_set_hash_table. */
2585 static void
2586 free_set_hash_table ()
2588 free (set_hash_table);
2591 /* Compute the hash table for doing copy/const propagation. */
2593 static void
2594 compute_set_hash_table ()
2596 /* Initialize count of number of entries in hash table. */
2597 n_sets = 0;
2598 memset ((char *) set_hash_table, 0,
2599 set_hash_table_size * sizeof (struct expr *));
2601 compute_hash_table (1);
2604 /* Allocate space for the expression hash table.
2605 N_INSNS is the number of instructions in the function.
2606 It is used to determine the number of buckets to use. */
2608 static void
2609 alloc_expr_hash_table (n_insns)
2610 unsigned int n_insns;
2612 int n;
2614 expr_hash_table_size = n_insns / 2;
2615 /* Make sure the amount is usable. */
2616 if (expr_hash_table_size < 11)
2617 expr_hash_table_size = 11;
2619 /* Attempt to maintain efficient use of hash table.
2620 Making it an odd number is simplest for now.
2621 ??? Later take some measurements. */
2622 expr_hash_table_size |= 1;
2623 n = expr_hash_table_size * sizeof (struct expr *);
2624 expr_hash_table = (struct expr **) gmalloc (n);
2627 /* Free things allocated by alloc_expr_hash_table. */
2629 static void
2630 free_expr_hash_table ()
2632 free (expr_hash_table);
2635 /* Compute the hash table for doing GCSE. */
2637 static void
2638 compute_expr_hash_table ()
2640 /* Initialize count of number of entries in hash table. */
2641 n_exprs = 0;
2642 memset ((char *) expr_hash_table, 0,
2643 expr_hash_table_size * sizeof (struct expr *));
2645 compute_hash_table (0);
2648 /* Expression tracking support. */
2650 /* Lookup pattern PAT in the expression table.
2651 The result is a pointer to the table entry, or NULL if not found. */
2653 static struct expr *
2654 lookup_expr (pat)
2655 rtx pat;
2657 int do_not_record_p;
2658 unsigned int hash = hash_expr (pat, GET_MODE (pat), &do_not_record_p,
2659 expr_hash_table_size);
2660 struct expr *expr;
2662 if (do_not_record_p)
2663 return NULL;
2665 expr = expr_hash_table[hash];
2667 while (expr && ! expr_equiv_p (expr->expr, pat))
2668 expr = expr->next_same_hash;
2670 return expr;
2673 /* Lookup REGNO in the set table. If PAT is non-NULL look for the entry that
2674 matches it, otherwise return the first entry for REGNO. The result is a
2675 pointer to the table entry, or NULL if not found. */
2677 static struct expr *
2678 lookup_set (regno, pat)
2679 unsigned int regno;
2680 rtx pat;
2682 unsigned int hash = hash_set (regno, set_hash_table_size);
2683 struct expr *expr;
2685 expr = set_hash_table[hash];
2687 if (pat)
2689 while (expr && ! expr_equiv_p (expr->expr, pat))
2690 expr = expr->next_same_hash;
2692 else
2694 while (expr && REGNO (SET_DEST (expr->expr)) != regno)
2695 expr = expr->next_same_hash;
2698 return expr;
2701 /* Return the next entry for REGNO in list EXPR. */
2703 static struct expr *
2704 next_set (regno, expr)
2705 unsigned int regno;
2706 struct expr *expr;
2709 expr = expr->next_same_hash;
2710 while (expr && REGNO (SET_DEST (expr->expr)) != regno);
2712 return expr;
2715 /* Reset tables used to keep track of what's still available [since the
2716 start of the block]. */
2718 static void
2719 reset_opr_set_tables ()
2721 /* Maintain a bitmap of which regs have been set since beginning of
2722 the block. */
2723 sbitmap_zero (reg_set_bitmap);
2725 /* Also keep a record of the last instruction to modify memory.
2726 For now this is very trivial; we only record whether any memory
2727 location has been modified. */
2729 int i;
2731 /* Re-cache any INSN_LIST nodes we have allocated. */
2732 for (i = 0; i < n_basic_blocks; i++)
2734 if (modify_mem_list[i])
2735 free_INSN_LIST_list (modify_mem_list + i);
2736 if (canon_modify_mem_list[i])
2737 free_INSN_LIST_list (canon_modify_mem_list + i);
2742 /* Return non-zero if the operands of X are not set before INSN in
2743 INSN's basic block. */
2745 static int
2746 oprs_not_set_p (x, insn)
2747 rtx x, insn;
2749 int i, j;
2750 enum rtx_code code;
2751 const char *fmt;
2753 if (x == 0)
2754 return 1;
2756 code = GET_CODE (x);
2757 switch (code)
2759 case PC:
2760 case CC0:
2761 case CONST:
2762 case CONST_INT:
2763 case CONST_DOUBLE:
2764 case SYMBOL_REF:
2765 case LABEL_REF:
2766 case ADDR_VEC:
2767 case ADDR_DIFF_VEC:
2768 return 1;
2770 case MEM:
2771 if (load_killed_in_block_p (BLOCK_FOR_INSN (insn),
2772 INSN_CUID (insn), x, 0))
2773 return 0;
2774 else
2775 return oprs_not_set_p (XEXP (x, 0), insn);
2777 case REG:
2778 return ! TEST_BIT (reg_set_bitmap, REGNO (x));
2780 default:
2781 break;
2784 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
2786 if (fmt[i] == 'e')
2788 /* If we are about to do the last recursive call
2789 needed at this level, change it into iteration.
2790 This function is called enough to be worth it. */
2791 if (i == 0)
2792 return oprs_not_set_p (XEXP (x, i), insn);
2794 if (! oprs_not_set_p (XEXP (x, i), insn))
2795 return 0;
2797 else if (fmt[i] == 'E')
2798 for (j = 0; j < XVECLEN (x, i); j++)
2799 if (! oprs_not_set_p (XVECEXP (x, i, j), insn))
2800 return 0;
2803 return 1;
2806 /* Mark things set by a CALL. */
2808 static void
2809 mark_call (insn)
2810 rtx insn;
2812 if (! CONST_OR_PURE_CALL_P (insn))
2813 record_last_mem_set_info (insn);
2816 /* Mark things set by a SET. */
2818 static void
2819 mark_set (pat, insn)
2820 rtx pat, insn;
2822 rtx dest = SET_DEST (pat);
2824 while (GET_CODE (dest) == SUBREG
2825 || GET_CODE (dest) == ZERO_EXTRACT
2826 || GET_CODE (dest) == SIGN_EXTRACT
2827 || GET_CODE (dest) == STRICT_LOW_PART)
2828 dest = XEXP (dest, 0);
2830 if (GET_CODE (dest) == REG)
2831 SET_BIT (reg_set_bitmap, REGNO (dest));
2832 else if (GET_CODE (dest) == MEM)
2833 record_last_mem_set_info (insn);
2835 if (GET_CODE (SET_SRC (pat)) == CALL)
2836 mark_call (insn);
2839 /* Record things set by a CLOBBER. */
2841 static void
2842 mark_clobber (pat, insn)
2843 rtx pat, insn;
2845 rtx clob = XEXP (pat, 0);
2847 while (GET_CODE (clob) == SUBREG || GET_CODE (clob) == STRICT_LOW_PART)
2848 clob = XEXP (clob, 0);
2850 if (GET_CODE (clob) == REG)
2851 SET_BIT (reg_set_bitmap, REGNO (clob));
2852 else
2853 record_last_mem_set_info (insn);
2856 /* Record things set by INSN.
2857 This data is used by oprs_not_set_p. */
2859 static void
2860 mark_oprs_set (insn)
2861 rtx insn;
2863 rtx pat = PATTERN (insn);
2864 int i;
2866 if (GET_CODE (pat) == SET)
2867 mark_set (pat, insn);
2868 else if (GET_CODE (pat) == PARALLEL)
2869 for (i = 0; i < XVECLEN (pat, 0); i++)
2871 rtx x = XVECEXP (pat, 0, i);
2873 if (GET_CODE (x) == SET)
2874 mark_set (x, insn);
2875 else if (GET_CODE (x) == CLOBBER)
2876 mark_clobber (x, insn);
2877 else if (GET_CODE (x) == CALL)
2878 mark_call (insn);
2881 else if (GET_CODE (pat) == CLOBBER)
2882 mark_clobber (pat, insn);
2883 else if (GET_CODE (pat) == CALL)
2884 mark_call (insn);
2888 /* Classic GCSE reaching definition support. */
2890 /* Allocate reaching def variables. */
2892 static void
2893 alloc_rd_mem (n_blocks, n_insns)
2894 int n_blocks, n_insns;
2896 rd_kill = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_insns);
2897 sbitmap_vector_zero (rd_kill, n_basic_blocks);
2899 rd_gen = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_insns);
2900 sbitmap_vector_zero (rd_gen, n_basic_blocks);
2902 reaching_defs = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_insns);
2903 sbitmap_vector_zero (reaching_defs, n_basic_blocks);
2905 rd_out = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_insns);
2906 sbitmap_vector_zero (rd_out, n_basic_blocks);
2909 /* Free reaching def variables. */
2911 static void
2912 free_rd_mem ()
2914 sbitmap_vector_free (rd_kill);
2915 sbitmap_vector_free (rd_gen);
2916 sbitmap_vector_free (reaching_defs);
2917 sbitmap_vector_free (rd_out);
2920 /* Add INSN to the kills of BB. REGNO, set in BB, is killed by INSN. */
2922 static void
2923 handle_rd_kill_set (insn, regno, bb)
2924 rtx insn;
2925 int regno;
2926 basic_block bb;
2928 struct reg_set *this_reg;
2930 for (this_reg = reg_set_table[regno]; this_reg; this_reg = this_reg ->next)
2931 if (BLOCK_NUM (this_reg->insn) != BLOCK_NUM (insn))
2932 SET_BIT (rd_kill[bb->index], INSN_CUID (this_reg->insn));
2935 /* Compute the set of kills for reaching definitions. */
2937 static void
2938 compute_kill_rd ()
2940 int bb, cuid;
2941 unsigned int regno;
2942 int i;
2944 /* For each block
2945 For each set bit in `gen' of the block (i.e each insn which
2946 generates a definition in the block)
2947 Call the reg set by the insn corresponding to that bit regx
2948 Look at the linked list starting at reg_set_table[regx]
2949 For each setting of regx in the linked list, which is not in
2950 this block
2951 Set the bit in `kill' corresponding to that insn. */
2952 for (bb = 0; bb < n_basic_blocks; bb++)
2953 for (cuid = 0; cuid < max_cuid; cuid++)
2954 if (TEST_BIT (rd_gen[bb], cuid))
2956 rtx insn = CUID_INSN (cuid);
2957 rtx pat = PATTERN (insn);
2959 if (GET_CODE (insn) == CALL_INSN)
2961 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2962 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
2963 handle_rd_kill_set (insn, regno, BASIC_BLOCK (bb));
2966 if (GET_CODE (pat) == PARALLEL)
2968 for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
2970 enum rtx_code code = GET_CODE (XVECEXP (pat, 0, i));
2972 if ((code == SET || code == CLOBBER)
2973 && GET_CODE (XEXP (XVECEXP (pat, 0, i), 0)) == REG)
2974 handle_rd_kill_set (insn,
2975 REGNO (XEXP (XVECEXP (pat, 0, i), 0)),
2976 BASIC_BLOCK (bb));
2979 else if (GET_CODE (pat) == SET && GET_CODE (SET_DEST (pat)) == REG)
2980 /* Each setting of this register outside of this block
2981 must be marked in the set of kills in this block. */
2982 handle_rd_kill_set (insn, REGNO (SET_DEST (pat)), BASIC_BLOCK (bb));
2986 /* Compute the reaching definitions as in
2987 Compilers Principles, Techniques, and Tools. Aho, Sethi, Ullman,
2988 Chapter 10. It is the same algorithm as used for computing available
2989 expressions but applied to the gens and kills of reaching definitions. */
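/* Editor's note: in dataflow terms the loop below iterates

       reaching_defs[b] = UNION over predecessors p of rd_out[p]
       rd_out[b]        = rd_gen[b] UNION (reaching_defs[b] - rd_kill[b])

   until nothing changes; sbitmap_union_of_preds computes the first line,
   sbitmap_union_of_diff the second, and the latter's return value is
   what drives the `changed' flag.  */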
2991 static void
2992 compute_rd ()
2994 int bb, changed, passes;
2996 for (bb = 0; bb < n_basic_blocks; bb++)
2997 sbitmap_copy (rd_out[bb] /*dst*/, rd_gen[bb] /*src*/);
2999 passes = 0;
3000 changed = 1;
3001 while (changed)
3003 changed = 0;
3004 for (bb = 0; bb < n_basic_blocks; bb++)
3006 sbitmap_union_of_preds (reaching_defs[bb], rd_out, bb);
3007 changed |= sbitmap_union_of_diff (rd_out[bb], rd_gen[bb],
3008 reaching_defs[bb], rd_kill[bb]);
3010 passes++;
3013 if (gcse_file)
3014 fprintf (gcse_file, "reaching def computation: %d passes\n", passes);
3017 /* Classic GCSE available expression support. */
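/* Editor's note: the ae_* vectors below feed the standard forward
   "available expressions" problem, which compute_available solves as

       ae_in[b]  = INTERSECTION over predecessors p of ae_out[p]
       ae_out[b] = ae_gen[b] UNION (ae_in[b] - ae_kill[b])

   iterated to a fixed point.  */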
3019 /* Allocate memory for available expression computation. */
3021 static void
3022 alloc_avail_expr_mem (n_blocks, n_exprs)
3023 int n_blocks, n_exprs;
3025 ae_kill = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_exprs);
3026 sbitmap_vector_zero (ae_kill, n_basic_blocks);
3028 ae_gen = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_exprs);
3029 sbitmap_vector_zero (ae_gen, n_basic_blocks);
3031 ae_in = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_exprs);
3032 sbitmap_vector_zero (ae_in, n_basic_blocks);
3034 ae_out = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_exprs);
3035 sbitmap_vector_zero (ae_out, n_basic_blocks);
3038 static void
3039 free_avail_expr_mem ()
3041 sbitmap_vector_free (ae_kill);
3042 sbitmap_vector_free (ae_gen);
3043 sbitmap_vector_free (ae_in);
3044 sbitmap_vector_free (ae_out);
3047 /* Compute the set of available expressions generated in each basic block. */
3049 static void
3050 compute_ae_gen ()
3052 unsigned int i;
3053 struct expr *expr;
3054 struct occr *occr;
3056 /* For each recorded occurrence of each expression, set ae_gen[bb][expr].
3057 This is all we have to do because an expression is not recorded if it
3058 is not available, and the only expressions we want to work with are the
3059 ones that are recorded. */
3060 for (i = 0; i < expr_hash_table_size; i++)
3061 for (expr = expr_hash_table[i]; expr != 0; expr = expr->next_same_hash)
3062 for (occr = expr->avail_occr; occr != 0; occr = occr->next)
3063 SET_BIT (ae_gen[BLOCK_NUM (occr->insn)], expr->bitmap_index);
3066 /* Return non-zero if expression X is killed in BB. */
3068 static int
3069 expr_killed_p (x, bb)
3070 rtx x;
3071 basic_block bb;
3073 int i, j;
3074 enum rtx_code code;
3075 const char *fmt;
3077 if (x == 0)
3078 return 1;
3080 code = GET_CODE (x);
3081 switch (code)
3083 case REG:
3084 return TEST_BIT (reg_set_in_block[bb->index], REGNO (x));
3086 case MEM:
3087 if (load_killed_in_block_p (bb, get_max_uid () + 1, x, 0))
3088 return 1;
3089 else
3090 return expr_killed_p (XEXP (x, 0), bb);
3092 case PC:
3093 case CC0: /*FIXME*/
3094 case CONST:
3095 case CONST_INT:
3096 case CONST_DOUBLE:
3097 case SYMBOL_REF:
3098 case LABEL_REF:
3099 case ADDR_VEC:
3100 case ADDR_DIFF_VEC:
3101 return 0;
3103 default:
3104 break;
3107 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
3109 if (fmt[i] == 'e')
3111 /* If we are about to do the last recursive call
3112 needed at this level, change it into iteration.
3113 This function is called enough to be worth it. */
3114 if (i == 0)
3115 return expr_killed_p (XEXP (x, i), bb);
3116 else if (expr_killed_p (XEXP (x, i), bb))
3117 return 1;
3119 else if (fmt[i] == 'E')
3120 for (j = 0; j < XVECLEN (x, i); j++)
3121 if (expr_killed_p (XVECEXP (x, i, j), bb))
3122 return 1;
3125 return 0;
3128 /* Compute the set of available expressions killed in each basic block. */
3130 static void
3131 compute_ae_kill (ae_gen, ae_kill)
3132 sbitmap *ae_gen, *ae_kill;
3134 int bb;
3135 unsigned int i;
3136 struct expr *expr;
3138 for (bb = 0; bb < n_basic_blocks; bb++)
3139 for (i = 0; i < expr_hash_table_size; i++)
3140 for (expr = expr_hash_table[i]; expr; expr = expr->next_same_hash)
3142 /* Skip EXPR if generated in this block. */
3143 if (TEST_BIT (ae_gen[bb], expr->bitmap_index))
3144 continue;
3146 if (expr_killed_p (expr->expr, BASIC_BLOCK (bb)))
3147 SET_BIT (ae_kill[bb], expr->bitmap_index);
3151 /* Actually perform the Classic GCSE optimizations. */
3153 /* Return non-zero if occurrence OCCR of expression EXPR reaches block BB.
3155 CHECK_SELF_LOOP is non-zero if we should consider a block reaching itself
3156 as a positive reach. We want to do this when there are two computations
3157 of the expression in the block.
3159 VISITED is a pointer to a working buffer for tracking which BB's have
3160 been visited. It is NULL for the top-level call.
3162 We treat reaching expressions that go through blocks containing the same
3163 reaching expression as "not reaching". E.g. if EXPR is generated in blocks
3164 2 and 3, INSN is in block 4, and 2->3->4, we treat the expression in block
3165 2 as not reaching. The intent is to improve the probability of finding
3166 only one reaching expression and to reduce register lifetimes by picking
3167 the closest such expression. */
3169 static int
3170 expr_reaches_here_p_work (occr, expr, bb, check_self_loop, visited)
3171 struct occr *occr;
3172 struct expr *expr;
3173 basic_block bb;
3174 int check_self_loop;
3175 char *visited;
3177 edge pred;
3179 for (pred = bb->pred; pred != NULL; pred = pred->pred_next)
3181 basic_block pred_bb = pred->src;
3183 if (visited[pred_bb->index])
3184 /* This predecessor has already been visited. Nothing to do. */
3186 else if (pred_bb == bb)
3188 /* BB loops on itself. */
3189 if (check_self_loop
3190 && TEST_BIT (ae_gen[pred_bb->index], expr->bitmap_index)
3191 && BLOCK_NUM (occr->insn) == pred_bb->index)
3192 return 1;
3194 visited[pred_bb->index] = 1;
3197 /* Ignore this predecessor if it kills the expression. */
3198 else if (TEST_BIT (ae_kill[pred_bb->index], expr->bitmap_index))
3199 visited[pred_bb->index] = 1;
3201 /* Does this predecessor generate this expression? */
3202 else if (TEST_BIT (ae_gen[pred_bb->index], expr->bitmap_index))
3204 /* Is this the occurrence we're looking for?
3205 Note that there's only one generating occurrence per block
3206 so we just need to check the block number. */
3207 if (BLOCK_NUM (occr->insn) == pred_bb->index)
3208 return 1;
3210 visited[pred_bb->index] = 1;
3213 /* Neither gen nor kill. */
3214 else
3216 visited[pred_bb->index] = 1;
3217 if (expr_reaches_here_p_work (occr, expr, pred_bb, check_self_loop,
3218 visited))
3220 return 1;
3224 /* All paths have been checked. */
3225 return 0;
3228 /* This wrapper for expr_reaches_here_p_work() is to ensure that any
3229 memory allocated for that function is returned. */
3231 static int
3232 expr_reaches_here_p (occr, expr, bb, check_self_loop)
3233 struct occr *occr;
3234 struct expr *expr;
3235 basic_block bb;
3236 int check_self_loop;
3238 int rval;
3239 char *visited = (char *) xcalloc (n_basic_blocks, 1);
3241 rval = expr_reaches_here_p_work (occr, expr, bb, check_self_loop, visited);
3243 free (visited);
3244 return rval;
3247 /* Return the instruction that computes EXPR that reaches INSN's basic block.
3248 If there is more than one such instruction, return NULL.
3250 Called only by handle_avail_expr. */
3252 static rtx
3253 computing_insn (expr, insn)
3254 struct expr *expr;
3255 rtx insn;
3257 basic_block bb = BLOCK_FOR_INSN (insn);
3259 if (expr->avail_occr->next == NULL)
3261 if (BLOCK_FOR_INSN (expr->avail_occr->insn) == bb)
3262 /* The available expression is actually itself
3263 (i.e. a loop in the flow graph) so do nothing. */
3264 return NULL;
3266 /* (FIXME) Case where we found a pattern that was created by
3267 a substitution that took place. */
3268 return expr->avail_occr->insn;
3270 else
3272 /* Pattern is computed more than once.
3273 Search backwards from this insn to see how many of these
3274 computations actually reach this insn. */
3275 struct occr *occr;
3276 rtx insn_computes_expr = NULL;
3277 int can_reach = 0;
3279 for (occr = expr->avail_occr; occr != NULL; occr = occr->next)
3281 if (BLOCK_FOR_INSN (occr->insn) == bb)
3283 /* The expression is generated in this block.
3284 The only time we care about this is when the expression
3285 is generated later in the block [and thus there's a loop].
3286 We let the normal cse pass handle the other cases. */
3287 if (INSN_CUID (insn) < INSN_CUID (occr->insn)
3288 && expr_reaches_here_p (occr, expr, bb, 1))
3290 can_reach++;
3291 if (can_reach > 1)
3292 return NULL;
3294 insn_computes_expr = occr->insn;
3297 else if (expr_reaches_here_p (occr, expr, bb, 0))
3299 can_reach++;
3300 if (can_reach > 1)
3301 return NULL;
3303 insn_computes_expr = occr->insn;
3307 if (insn_computes_expr == NULL)
3308 abort ();
3310 return insn_computes_expr;
3314 /* Return non-zero if the definition in DEF_INSN can reach INSN.
3315 Only called by can_disregard_other_sets. */
3317 static int
3318 def_reaches_here_p (insn, def_insn)
3319 rtx insn, def_insn;
3321 rtx reg;
3323 if (TEST_BIT (reaching_defs[BLOCK_NUM (insn)], INSN_CUID (def_insn)))
3324 return 1;
3326 if (BLOCK_NUM (insn) == BLOCK_NUM (def_insn))
3328 if (INSN_CUID (def_insn) < INSN_CUID (insn))
3330 if (GET_CODE (PATTERN (def_insn)) == PARALLEL)
3331 return 1;
3332 else if (GET_CODE (PATTERN (def_insn)) == CLOBBER)
3333 reg = XEXP (PATTERN (def_insn), 0);
3334 else if (GET_CODE (PATTERN (def_insn)) == SET)
3335 reg = SET_DEST (PATTERN (def_insn));
3336 else
3337 abort ();
3339 return ! reg_set_between_p (reg, NEXT_INSN (def_insn), insn);
3341 else
3342 return 0;
3345 return 0;
3348 /* Return non-zero if *ADDR_THIS_REG can only have one value at INSN. The
3349 value returned is the number of definitions that reach INSN. Returning a
3350 value of zero means that [maybe] more than one definition reaches INSN and
3351 the caller can't perform whatever optimization it is trying to do; i.e., it is
3352 always safe to return zero. */
3354 static int
3355 can_disregard_other_sets (addr_this_reg, insn, for_combine)
3356 struct reg_set **addr_this_reg;
3357 rtx insn;
3358 int for_combine;
3360 int number_of_reaching_defs = 0;
3361 struct reg_set *this_reg;
3363 for (this_reg = *addr_this_reg; this_reg != 0; this_reg = this_reg->next)
3364 if (def_reaches_here_p (insn, this_reg->insn))
3366 number_of_reaching_defs++;
3367 /* Ignore parallels for now. */
3368 if (GET_CODE (PATTERN (this_reg->insn)) == PARALLEL)
3369 return 0;
3371 if (!for_combine
3372 && (GET_CODE (PATTERN (this_reg->insn)) == CLOBBER
3373 || ! rtx_equal_p (SET_SRC (PATTERN (this_reg->insn)),
3374 SET_SRC (PATTERN (insn)))))
3375 /* A setting of the reg to a different value reaches INSN. */
3376 return 0;
3378 if (number_of_reaching_defs > 1)
3380 /* If in this setting the value the register is being set to is
3381 equal to the previous value the register was set to, and this
3382 setting reaches the insn we are trying to do the substitution
3383 on, then we are ok. */
3384 if (GET_CODE (PATTERN (this_reg->insn)) == CLOBBER)
3385 return 0;
3386 else if (! rtx_equal_p (SET_SRC (PATTERN (this_reg->insn)),
3387 SET_SRC (PATTERN (insn))))
3388 return 0;
3391 *addr_this_reg = this_reg;
3394 return number_of_reaching_defs;
3397 /* Expression computed by insn is available and the substitution is legal,
3398 so try to perform the substitution.
3400 The result is non-zero if any changes were made. */
3402 static int
3403 handle_avail_expr (insn, expr)
3404 rtx insn;
3405 struct expr *expr;
3407 rtx pat, insn_computes_expr, expr_set;
3408 rtx to;
3409 struct reg_set *this_reg;
3410 int found_setting, use_src;
3411 int changed = 0;
3413 /* We only handle the case where one computation of the expression
3414 reaches this instruction. */
3415 insn_computes_expr = computing_insn (expr, insn);
3416 if (insn_computes_expr == NULL)
3417 return 0;
3418 expr_set = single_set (insn_computes_expr);
3419 if (!expr_set)
3420 abort ();
3422 found_setting = 0;
3423 use_src = 0;
3425 /* At this point we know only one computation of EXPR outside of this
3426 block reaches this insn. Now try to find a register that the
3427 expression is computed into. */
3428 if (GET_CODE (SET_SRC (expr_set)) == REG)
3430 /* This is the case when the available expression that reaches
3431 here has already been handled as an available expression. */
3432 unsigned int regnum_for_replacing
3433 = REGNO (SET_SRC (expr_set));
3435 /* If the register was created by GCSE we can't use `reg_set_table',
3436 however we know it's set only once. */
3437 if (regnum_for_replacing >= max_gcse_regno
3438 /* If the register the expression is computed into is set only once,
3439 or only one set reaches this insn, we can use it. */
3440 || (((this_reg = reg_set_table[regnum_for_replacing]),
3441 this_reg->next == NULL)
3442 || can_disregard_other_sets (&this_reg, insn, 0)))
3444 use_src = 1;
3445 found_setting = 1;
3449 if (!found_setting)
3451 unsigned int regnum_for_replacing
3452 = REGNO (SET_DEST (expr_set));
3454 /* This shouldn't happen. */
3455 if (regnum_for_replacing >= max_gcse_regno)
3456 abort ();
3458 this_reg = reg_set_table[regnum_for_replacing];
3460 /* If the register the expression is computed into is set only once,
3461 or only one set reaches this insn, use it. */
3462 if (this_reg->next == NULL
3463 || can_disregard_other_sets (&this_reg, insn, 0))
3464 found_setting = 1;
3467 if (found_setting)
3469 pat = PATTERN (insn);
3470 if (use_src)
3471 to = SET_SRC (expr_set);
3472 else
3473 to = SET_DEST (expr_set);
3474 changed = validate_change (insn, &SET_SRC (pat), to, 0);
3476 /* We should be able to ignore the return code from validate_change but
3477 to play it safe we check. */
3478 if (changed)
3480 gcse_subst_count++;
3481 if (gcse_file != NULL)
3483 fprintf (gcse_file, "GCSE: Replacing the source in insn %d with",
3484 INSN_UID (insn));
3485 fprintf (gcse_file, " reg %d %s insn %d\n",
3486 REGNO (to), use_src ? "from" : "set in",
3487 INSN_UID (insn_computes_expr));
3492 /* The register that the expr is computed into is set more than once. */
3493 else if (1 /*expensive_op(this_pattrn->op) && do_expensive_gcse)*/)
3495 /* Insert an insn after insnx that copies the reg set in insnx
3496 into a new pseudo register; call this new register REGN.
3497 From insnb until the end of the basic block, or until REGB is set,
3498 replace all uses of REGB with REGN. */
3499 rtx new_insn;
3501 to = gen_reg_rtx (GET_MODE (SET_DEST (expr_set)));
3503 /* Generate the new insn. */
3504 /* ??? If the change fails, we return 0, even though we created
3505 an insn. I think this is ok. */
3506 new_insn
3507 = emit_insn_after (gen_rtx_SET (VOIDmode, to,
3508 SET_DEST (expr_set)),
3509 insn_computes_expr);
3511 /* Keep register set table up to date. */
3512 record_one_set (REGNO (to), new_insn);
3514 gcse_create_count++;
3515 if (gcse_file != NULL)
3517 fprintf (gcse_file, "GCSE: Creating insn %d to copy value of reg %d",
3518 INSN_UID (NEXT_INSN (insn_computes_expr)),
3519 REGNO (SET_SRC (PATTERN (NEXT_INSN (insn_computes_expr)))));
3520 fprintf (gcse_file, ", computed in insn %d,\n",
3521 INSN_UID (insn_computes_expr));
3522 fprintf (gcse_file, " into newly allocated reg %d\n",
3523 REGNO (to));
3526 pat = PATTERN (insn);
3528 /* Do register replacement for INSN. */
3529 changed = validate_change (insn, &SET_SRC (pat),
3530 SET_DEST (PATTERN
3531 (NEXT_INSN (insn_computes_expr))),
3534 /* We should be able to ignore the return code from validate_change but
3535 to play it safe we check. */
3536 if (changed)
3538 gcse_subst_count++;
3539 if (gcse_file != NULL)
3541 fprintf (gcse_file,
3542 "GCSE: Replacing the source in insn %d with reg %d ",
3543 INSN_UID (insn),
3544 REGNO (SET_DEST (PATTERN (NEXT_INSN
3545 (insn_computes_expr)))));
3546 fprintf (gcse_file, "set in insn %d\n",
3547 INSN_UID (insn_computes_expr));
3552 return changed;
3555 /* Perform classic GCSE. This is called by one_classic_gcse_pass after all
3556 the dataflow analysis has been done.
3558 The result is non-zero if a change was made. */
3560 static int
3561 classic_gcse ()
3563 int bb, changed;
3564 rtx insn;
3566 /* Note we start at block 1. */
3568 changed = 0;
3569 for (bb = 1; bb < n_basic_blocks; bb++)
3571 /* Reset tables used to keep track of what's still valid [since the
3572 start of the block]. */
3573 reset_opr_set_tables ();
3575 for (insn = BLOCK_HEAD (bb);
3576 insn != NULL && insn != NEXT_INSN (BLOCK_END (bb));
3577 insn = NEXT_INSN (insn))
3579 /* Is insn of form (set (pseudo-reg) ...)? */
3580 if (GET_CODE (insn) == INSN
3581 && GET_CODE (PATTERN (insn)) == SET
3582 && GET_CODE (SET_DEST (PATTERN (insn))) == REG
3583 && REGNO (SET_DEST (PATTERN (insn))) >= FIRST_PSEUDO_REGISTER)
3585 rtx pat = PATTERN (insn);
3586 rtx src = SET_SRC (pat);
3587 struct expr *expr;
3589 if (want_to_gcse_p (src)
3590 /* Is the expression recorded? */
3591 && ((expr = lookup_expr (src)) != NULL)
3592 /* Is the expression available [at the start of the
3593 block]? */
3594 && TEST_BIT (ae_in[bb], expr->bitmap_index)
3595 /* Are the operands unchanged since the start of the
3596 block? */
3597 && oprs_not_set_p (src, insn))
3598 changed |= handle_avail_expr (insn, expr);
3601 /* Keep track of everything modified by this insn. */
3602 /* ??? Need to be careful w.r.t. mods done to INSN. */
3603 if (INSN_P (insn))
3604 mark_oprs_set (insn);
3608 return changed;
3611 /* Top level routine to perform one classic GCSE pass.
3613 Return non-zero if a change was made. */
3615 static int
3616 one_classic_gcse_pass (pass)
3617 int pass;
3619 int changed = 0;
3621 gcse_subst_count = 0;
3622 gcse_create_count = 0;
3624 alloc_expr_hash_table (max_cuid);
3625 alloc_rd_mem (n_basic_blocks, max_cuid);
3626 compute_expr_hash_table ();
3627 if (gcse_file)
3628 dump_hash_table (gcse_file, "Expression", expr_hash_table,
3629 expr_hash_table_size, n_exprs);
3631 if (n_exprs > 0)
3633 compute_kill_rd ();
3634 compute_rd ();
3635 alloc_avail_expr_mem (n_basic_blocks, n_exprs);
3636 compute_ae_gen ();
3637 compute_ae_kill (ae_gen, ae_kill);
3638 compute_available (ae_gen, ae_kill, ae_out, ae_in);
3639 changed = classic_gcse ();
3640 free_avail_expr_mem ();
3643 free_rd_mem ();
3644 free_expr_hash_table ();
3646 if (gcse_file)
3648 fprintf (gcse_file, "\n");
3649 fprintf (gcse_file, "GCSE of %s, pass %d: %d bytes needed, %d substs,",
3650 current_function_name, pass, bytes_used, gcse_subst_count);
3651 fprintf (gcse_file, "%d insns created\n", gcse_create_count);
3654 return changed;
3657 /* Compute copy/constant propagation working variables. */
3659 /* Local properties of assignments. */
3660 static sbitmap *cprop_pavloc;
3661 static sbitmap *cprop_absaltered;
3663 /* Global properties of assignments (computed from the local properties). */
3664 static sbitmap *cprop_avin;
3665 static sbitmap *cprop_avout;
3667 /* Allocate vars used for copy/const propagation. N_BLOCKS is the number of
3668 basic blocks. N_SETS is the number of sets. */
3670 static void
3671 alloc_cprop_mem (n_blocks, n_sets)
3672 int n_blocks, n_sets;
3674 cprop_pavloc = sbitmap_vector_alloc (n_blocks, n_sets);
3675 cprop_absaltered = sbitmap_vector_alloc (n_blocks, n_sets);
3677 cprop_avin = sbitmap_vector_alloc (n_blocks, n_sets);
3678 cprop_avout = sbitmap_vector_alloc (n_blocks, n_sets);
3681 /* Free vars used by copy/const propagation. */
3683 static void
3684 free_cprop_mem ()
3686 sbitmap_vector_free (cprop_pavloc);
3687 sbitmap_vector_free (cprop_absaltered);
3688 sbitmap_vector_free (cprop_avin);
3689 sbitmap_vector_free (cprop_avout);
3692 /* For each block, compute whether X is transparent. X is either an
3693 expression or an assignment [though we don't care which, for this context
3694 an assignment is treated as an expression]. For each block where an
3695 element of X is modified, set (SET_P == 1) or reset (SET_P == 0) the INDX
3696 bit in BMAP. */
3698 static void
3699 compute_transp (x, indx, bmap, set_p)
3700 rtx x;
3701 int indx;
3702 sbitmap *bmap;
3703 int set_p;
3705 int bb, i, j;
3706 enum rtx_code code;
3707 reg_set *r;
3708 const char *fmt;
3710 /* repeat is used to turn tail-recursion into iteration since GCC
3711 can't do it when there's no return value. */
3712 repeat:
3714 if (x == 0)
3715 return;
3717 code = GET_CODE (x);
3718 switch (code)
3720 case REG:
3721 if (set_p)
3723 if (REGNO (x) < FIRST_PSEUDO_REGISTER)
3725 for (bb = 0; bb < n_basic_blocks; bb++)
3726 if (TEST_BIT (reg_set_in_block[bb], REGNO (x)))
3727 SET_BIT (bmap[bb], indx);
3729 else
3731 for (r = reg_set_table[REGNO (x)]; r != NULL; r = r->next)
3732 SET_BIT (bmap[BLOCK_NUM (r->insn)], indx);
3735 else
3737 if (REGNO (x) < FIRST_PSEUDO_REGISTER)
3739 for (bb = 0; bb < n_basic_blocks; bb++)
3740 if (TEST_BIT (reg_set_in_block[bb], REGNO (x)))
3741 RESET_BIT (bmap[bb], indx);
3743 else
3745 for (r = reg_set_table[REGNO (x)]; r != NULL; r = r->next)
3746 RESET_BIT (bmap[BLOCK_NUM (r->insn)], indx);
3750 return;
3752 case MEM:
3753 for (bb = 0; bb < n_basic_blocks; bb++)
3755 rtx list_entry = canon_modify_mem_list[bb];
3757 while (list_entry)
3759 rtx dest, dest_addr;
3761 if (GET_CODE (XEXP (list_entry, 0)) == CALL_INSN)
3763 if (set_p)
3764 SET_BIT (bmap[bb], indx);
3765 else
3766 RESET_BIT (bmap[bb], indx);
3767 break;
3769 /* LIST_ENTRY must be an INSN of some kind that sets memory.
3770 Examine each hunk of memory that is modified. */
3772 dest = XEXP (list_entry, 0);
3773 list_entry = XEXP (list_entry, 1);
3774 dest_addr = XEXP (list_entry, 0);
3776 if (canon_true_dependence (dest, GET_MODE (dest), dest_addr,
3777 x, rtx_addr_varies_p))
3779 if (set_p)
3780 SET_BIT (bmap[bb], indx);
3781 else
3782 RESET_BIT (bmap[bb], indx);
3783 break;
3785 list_entry = XEXP (list_entry, 1);
3789 x = XEXP (x, 0);
3790 goto repeat;
3792 case PC:
3793 case CC0: /*FIXME*/
3794 case CONST:
3795 case CONST_INT:
3796 case CONST_DOUBLE:
3797 case SYMBOL_REF:
3798 case LABEL_REF:
3799 case ADDR_VEC:
3800 case ADDR_DIFF_VEC:
3801 return;
3803 default:
3804 break;
3807 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
3809 if (fmt[i] == 'e')
3811 /* If we are about to do the last recursive call
3812 needed at this level, change it into iteration.
3813 This function is called enough to be worth it. */
3814 if (i == 0)
3816 x = XEXP (x, i);
3817 goto repeat;
3820 compute_transp (XEXP (x, i), indx, bmap, set_p);
3822 else if (fmt[i] == 'E')
3823 for (j = 0; j < XVECLEN (x, i); j++)
3824 compute_transp (XVECEXP (x, i, j), indx, bmap, set_p);
3828 /* Top level routine to do the dataflow analysis needed by copy/const
3829 propagation. */
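/* Roughly, compute_available solves the standard forward "available
   expressions" problem over these vectors (a sketch in the usual
   notation, not a new algorithm):

       AVOUT(b) = PAVLOC(b) | (AVIN(b) & ~ABSALTERED(b))
       AVIN(b)  = intersection of AVOUT(p) over all predecessors p,
                  with AVIN of the entry block = 0

   iterated to a fixed point.  A set can then be propagated into a block
   whenever its bit is on in cprop_avin for that block.  */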
3831 static void
3832 compute_cprop_data ()
3834 compute_local_properties (cprop_absaltered, cprop_pavloc, NULL, 1);
3835 compute_available (cprop_pavloc, cprop_absaltered,
3836 cprop_avout, cprop_avin);
3839 /* Copy/constant propagation. */
3841 /* Maximum number of register uses in an insn that we handle. */
3842 #define MAX_USES 8
3844 /* Table of uses found in an insn.
3845 Allocated statically to avoid alloc/free complexity and overhead. */
3846 static struct reg_use reg_use_table[MAX_USES];
3848 /* Index into `reg_use_table' while building it. */
3849 static int reg_use_count;
3851 /* Set up a list of register numbers used in INSN. The found uses are stored
3852 in `reg_use_table'. `reg_use_count' is initialized to zero before entry,
3853 and contains the number of uses in the table upon exit.
3855 ??? If a register appears multiple times we will record it multiple times.
3856 This doesn't hurt anything but it will slow things down. */
3858 static void
3859 find_used_regs (xptr, data)
3860 rtx *xptr;
3861 void *data ATTRIBUTE_UNUSED;
3863 int i, j;
3864 enum rtx_code code;
3865 const char *fmt;
3866 rtx x = *xptr;
3868 /* repeat is used to turn tail-recursion into iteration since GCC
3869 can't do it when there's no return value. */
3870 repeat:
3871 if (x == 0)
3872 return;
3874 code = GET_CODE (x);
3875 if (REG_P (x))
3877 if (reg_use_count == MAX_USES)
3878 return;
3880 reg_use_table[reg_use_count].reg_rtx = x;
3881 reg_use_count++;
3884 /* Recursively scan the operands of this expression. */
3886 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
3888 if (fmt[i] == 'e')
3890 /* If we are about to do the last recursive call
3891 needed at this level, change it into iteration.
3892 This function is called enough to be worth it. */
3893 if (i == 0)
3895 x = XEXP (x, 0);
3896 goto repeat;
3899 find_used_regs (&XEXP (x, i), data);
3901 else if (fmt[i] == 'E')
3902 for (j = 0; j < XVECLEN (x, i); j++)
3903 find_used_regs (&XVECEXP (x, i, j), data);
3907 /* Try to replace all non-SET_DEST occurrences of FROM in INSN with TO.
3908 Returns non-zero if successful. */
3910 static int
3911 try_replace_reg (from, to, insn)
3912 rtx from, to, insn;
3914 rtx note = find_reg_equal_equiv_note (insn);
3915 rtx src = 0;
3916 int success = 0;
3917 rtx set = single_set (insn);
3919 success = validate_replace_src (from, to, insn);
3921 /* If above failed and this is a single set, try to simplify the source of
3922 the set given our substitution. We could perhaps try this for multiple
3923 SETs, but it probably won't buy us anything. */
3924 if (!success && set != 0)
3926 src = simplify_replace_rtx (SET_SRC (set), from, to);
3928 if (!rtx_equal_p (src, SET_SRC (set))
3929 && validate_change (insn, &SET_SRC (set), src, 0))
3930 success = 1;
3933 /* If we've failed to do replacement, have a single SET, and don't already
3934 have a note, add a REG_EQUAL note to not lose information. */
3935 if (!success && note == 0 && set != 0)
3936 note = REG_NOTES (insn)
3937 = gen_rtx_EXPR_LIST (REG_EQUAL, src, REG_NOTES (insn));
3939 /* If there is already a NOTE, update the expression in it with our
3940 replacement. */
3941 else if (note != 0)
3942 XEXP (note, 0) = simplify_replace_rtx (XEXP (note, 0), from, to);
3944 /* REG_EQUAL may get simplified into register.
3945 We don't allow that. Remove that note. This code ought
3946 not to happen, because previous code ought to synthesize
3947 reg-reg move, but be on the safe side. */
3948 if (note && REG_P (XEXP (note, 0)))
3949 remove_note (insn, note);
3951 return success;
3954 /* Find a set of REGNOs that are available on entry to INSN's block. Returns
3955 NULL if no such set is found. */
3957 static struct expr *
3958 find_avail_set (regno, insn)
3959 int regno;
3960 rtx insn;
3962 /* SET1 contains the last set found that can be returned to the caller for
3963 use in a substitution. */
3964 struct expr *set1 = 0;
3966 /* Loops are not possible here. To get a loop we would need two sets
3967 available at the start of the block containing INSN, i.e. we would
3968 need two sets like this available at the start of the block:
3970 (set (reg X) (reg Y))
3971 (set (reg Y) (reg X))
3973 This can not happen since the set of (reg Y) would have killed the
3974 set of (reg X) making it unavailable at the start of this block. */
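  /* An illustrative walk of the loop below (register numbers made up):
     suppose both

         (set (reg 101) (const_int 5))
         (set (reg 102) (reg 101))

     are available at the start of INSN's block and REGNO is 102.  The
     first iteration finds the copy into reg 102, records it in SET1,
     and, its source being a register, continues with REGNO = 101; the
     second iteration finds the constant load and stops, so SET1 ends up
     naming a constant that cprop_insn can substitute for reg 102.  */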
3975 while (1)
3977 rtx src;
3978 struct expr *set = lookup_set (regno, NULL_RTX);
3980 /* Find a set that is available at the start of the block
3981 which contains INSN. */
3982 while (set)
3984 if (TEST_BIT (cprop_avin[BLOCK_NUM (insn)], set->bitmap_index))
3985 break;
3986 set = next_set (regno, set);
3989 /* If no available set was found we've reached the end of the
3990 (possibly empty) copy chain. */
3991 if (set == 0)
3992 break;
3994 if (GET_CODE (set->expr) != SET)
3995 abort ();
3997 src = SET_SRC (set->expr);
3999 /* We know the set is available.
4000 Now check that SRC is ANTLOC (i.e. none of the source operands
4001 have changed since the start of the block).
4003 If the source operand changed, we may still use it for the next
4004 iteration of this loop, but we may not use it for substitutions. */
4006 if (CONSTANT_P (src) || oprs_not_set_p (src, insn))
4007 set1 = set;
4009 /* If the source of the set is anything except a register, then
4010 we have reached the end of the copy chain. */
4011 if (GET_CODE (src) != REG)
4012 break;
4014 /* Follow the copy chain, i.e. start another iteration of the loop
4015 and see if we have an available copy into SRC. */
4016 regno = REGNO (src);
4019 /* SET1 holds the last set that was available and anticipatable at
4020 INSN. */
4021 return set1;
4024 /* Subroutine of cprop_insn that tries to propagate constants into
4025 JUMP_INSNS. INSN must be a conditional jump. FROM is what we will try to
4026 replace, SRC is the constant we will try to substitute for it. Returns
4027 nonzero if a change was made. We know INSN has just a SET. */
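/* An illustrative case (insn contents made up): if INSN is

       (set (pc) (if_then_else (eq (reg 58) (const_int 0))
                               (label_ref 23) (pc)))

   and FROM is (reg 58) while SRC is (const_int 0), simplify_replace_rtx
   folds the condition: SET_SRC becomes (label_ref 23), an unconditional
   jump, or pc_rtx, a jump to the next insn.  The code below then updates
   the insn, LABEL_NUSES and the CFG accordingly.  */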
4029 static int
4030 cprop_jump (bb, insn, from, src)
4031 rtx insn;
4032 rtx from;
4033 rtx src;
4034 basic_block bb;
4036 rtx set = PATTERN (insn);
4037 rtx new = simplify_replace_rtx (SET_SRC (set), from, src);
4039 /* If no simplification can be made, then try the next
4040 register. */
4041 if (rtx_equal_p (new, SET_SRC (set)))
4042 return 0;
4044 /* If this is now a no-op, leave it that way, but update LABEL_NUSES if
4045 necessary. */
4046 if (new == pc_rtx)
4048 SET_SRC (set) = new;
4050 if (JUMP_LABEL (insn) != 0)
4051 --LABEL_NUSES (JUMP_LABEL (insn));
4054 /* Otherwise, this must be a valid instruction. */
4055 else if (! validate_change (insn, &SET_SRC (set), new, 0))
4056 return 0;
4058 /* If this has turned into an unconditional jump,
4059 then put a barrier after it so that the unreachable
4060 code will be deleted. */
4061 if (GET_CODE (SET_SRC (set)) == LABEL_REF)
4062 emit_barrier_after (insn);
4064 run_jump_opt_after_gcse = 1;
4066 const_prop_count++;
4067 if (gcse_file != NULL)
4069 fprintf (gcse_file,
4070 "CONST-PROP: Replacing reg %d in insn %d with constant ",
4071 REGNO (from), INSN_UID (insn));
4072 print_rtl (gcse_file, src);
4073 fprintf (gcse_file, "\n");
4075 purge_dead_edges (bb);
4077 return 1;
4080 #ifdef HAVE_cc0
4082 /* Subroutine of cprop_insn that tries to propagate constants into JUMP_INSNS
4083 for machines that have CC0. INSN is a single set that stores into CC0;
4084 the insn following it is a conditional jump. REG_USED is the use we will
4085 try to replace, SRC is the constant we will try to substitute for it.
4086 Returns nonzero if a change was made. */
4088 static int
4089 cprop_cc0_jump (bb, insn, reg_used, src)
4090 basic_block bb;
4091 rtx insn;
4092 struct reg_use *reg_used;
4093 rtx src;
4095 /* First substitute in the SET_SRC of INSN, then substitute that for
4096 CC0 in JUMP. */
4097 rtx jump = NEXT_INSN (insn);
4098 rtx new_src = simplify_replace_rtx (SET_SRC (PATTERN (insn)),
4099 reg_used->reg_rtx, src);
4101 if (! cprop_jump (bb, jump, cc0_rtx, new_src))
4102 return 0;
4104 /* If we succeeded, delete the cc0 setter. */
4105 PUT_CODE (insn, NOTE);
4106 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
4107 NOTE_SOURCE_FILE (insn) = 0;
4109 return 1;
4111 #endif
4113 /* Perform constant and copy propagation on INSN.
4114 The result is non-zero if a change was made. */
4116 static int
4117 cprop_insn (bb, insn, alter_jumps)
4118 basic_block bb;
4119 rtx insn;
4120 int alter_jumps;
4122 struct reg_use *reg_used;
4123 int changed = 0;
4124 rtx note;
4126 if (!INSN_P (insn))
4127 return 0;
4129 reg_use_count = 0;
4130 note_uses (&PATTERN (insn), find_used_regs, NULL);
4132 note = find_reg_equal_equiv_note (insn);
4134 /* We may win even when propagating constants into notes. */
4135 if (note)
4136 find_used_regs (&XEXP (note, 0), NULL);
4138 for (reg_used = &reg_use_table[0]; reg_use_count > 0;
4139 reg_used++, reg_use_count--)
4141 unsigned int regno = REGNO (reg_used->reg_rtx);
4142 rtx pat, src;
4143 struct expr *set;
4145 /* Ignore registers created by GCSE.
4146 We do this because ... */
4147 if (regno >= max_gcse_regno)
4148 continue;
4150 /* If the register has already been set in this block, there's
4151 nothing we can do. */
4152 if (! oprs_not_set_p (reg_used->reg_rtx, insn))
4153 continue;
4155 /* Find an assignment that sets reg_used and is available
4156 at the start of the block. */
4157 set = find_avail_set (regno, insn);
4158 if (! set)
4159 continue;
4161 pat = set->expr;
4162 /* ??? We might be able to handle PARALLELs. Later. */
4163 if (GET_CODE (pat) != SET)
4164 abort ();
4166 src = SET_SRC (pat);
4168 /* Constant propagation. */
4169 if (GET_CODE (src) == CONST_INT || GET_CODE (src) == CONST_DOUBLE
4170 || GET_CODE (src) == SYMBOL_REF)
4172 /* Handle normal insns first. */
4173 if (GET_CODE (insn) == INSN
4174 && try_replace_reg (reg_used->reg_rtx, src, insn))
4176 changed = 1;
4177 const_prop_count++;
4178 if (gcse_file != NULL)
4180 fprintf (gcse_file, "CONST-PROP: Replacing reg %d in ",
4181 regno);
4182 fprintf (gcse_file, "insn %d with constant ",
4183 INSN_UID (insn));
4184 print_rtl (gcse_file, src);
4185 fprintf (gcse_file, "\n");
4188 /* The original insn setting reg_used may or may not now be
4189 deletable. We leave the deletion to flow. */
4192 /* Try to propagate a CONST_INT into a conditional jump.
4193 We're pretty specific about what we will handle in this
4194 code, we can extend this as necessary over time.
4196 Right now the insn in question must look like
4197 (set (pc) (if_then_else ...)) */
4198 else if (alter_jumps
4199 && GET_CODE (insn) == JUMP_INSN
4200 && condjump_p (insn)
4201 && ! simplejump_p (insn))
4202 changed |= cprop_jump (bb, insn, reg_used->reg_rtx, src);
4204 #ifdef HAVE_cc0
4205 /* Similar code for machines that use a pair of CC0 setter and
4206 conditional jump insn. */
4207 else if (alter_jumps
4208 && GET_CODE (PATTERN (insn)) == SET
4209 && SET_DEST (PATTERN (insn)) == cc0_rtx
4210 && GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
4211 && condjump_p (NEXT_INSN (insn))
4212 && ! simplejump_p (NEXT_INSN (insn))
4213 && cprop_cc0_jump (bb, insn, reg_used, src))
4215 changed = 1;
4216 break;
4218 #endif
4220 else if (GET_CODE (src) == REG
4221 && REGNO (src) >= FIRST_PSEUDO_REGISTER
4222 && REGNO (src) != regno)
4224 if (try_replace_reg (reg_used->reg_rtx, src, insn))
4226 changed = 1;
4227 copy_prop_count++;
4228 if (gcse_file != NULL)
4230 fprintf (gcse_file, "COPY-PROP: Replacing reg %d in insn %d",
4231 regno, INSN_UID (insn));
4232 fprintf (gcse_file, " with reg %d\n", REGNO (src));
4235 /* The original insn setting reg_used may or may not now be
4236 deletable. We leave the deletion to flow. */
4237 /* FIXME: If it turns out that the insn isn't deletable,
4238 then we may have unnecessarily extended register lifetimes
4239 and made things worse. */
4244 return changed;
4247 /* Forward propagate copies. This includes copies and constants. Return
4248 non-zero if a change was made. */
4250 static int
4251 cprop (alter_jumps)
4252 int alter_jumps;
4254 int bb, changed;
4255 rtx insn;
4257 /* Note we start at block 1. */
4259 changed = 0;
4260 for (bb = 1; bb < n_basic_blocks; bb++)
4262 /* Reset tables used to keep track of what's still valid [since the
4263 start of the block]. */
4264 reset_opr_set_tables ();
4266 for (insn = BLOCK_HEAD (bb);
4267 insn != NULL && insn != NEXT_INSN (BLOCK_END (bb));
4268 insn = NEXT_INSN (insn))
4269 if (INSN_P (insn))
4271 changed |= cprop_insn (BASIC_BLOCK (bb), insn, alter_jumps);
4273 /* Keep track of everything modified by this insn. */
4274 /* ??? Need to be careful w.r.t. mods done to INSN. Don't
4275 call mark_oprs_set if we turned the insn into a NOTE. */
4276 if (GET_CODE (insn) != NOTE)
4277 mark_oprs_set (insn);
4281 if (gcse_file != NULL)
4282 fprintf (gcse_file, "\n");
4284 return changed;
4287 /* Perform one copy/constant propagation pass.
4288 F is the first insn in the function.
4289 PASS is the pass count. */
4291 static int
4292 one_cprop_pass (pass, alter_jumps)
4293 int pass;
4294 int alter_jumps;
4296 int changed = 0;
4298 const_prop_count = 0;
4299 copy_prop_count = 0;
4301 alloc_set_hash_table (max_cuid);
4302 compute_set_hash_table ();
4303 if (gcse_file)
4304 dump_hash_table (gcse_file, "SET", set_hash_table, set_hash_table_size,
4305 n_sets);
4306 if (n_sets > 0)
4308 alloc_cprop_mem (n_basic_blocks, n_sets);
4309 compute_cprop_data ();
4310 changed = cprop (alter_jumps);
4311 free_cprop_mem ();
4314 free_set_hash_table ();
4316 if (gcse_file)
4318 fprintf (gcse_file, "CPROP of %s, pass %d: %d bytes needed, ",
4319 current_function_name, pass, bytes_used);
4320 fprintf (gcse_file, "%d const props, %d copy props\n\n",
4321 const_prop_count, copy_prop_count);
4324 return changed;
4327 /* Compute PRE+LCM working variables. */
4329 /* Local properties of expressions. */
4330 /* Nonzero for expressions that are transparent in the block. */
4331 static sbitmap *transp;
4333 /* Nonzero for expressions that are transparent at the end of the block.
4334 This is only zero for expressions killed by an abnormal critical edge
4335 created by a call. */
4336 static sbitmap *transpout;
4338 /* Nonzero for expressions that are computed (available) in the block. */
4339 static sbitmap *comp;
4341 /* Nonzero for expressions that are locally anticipatable in the block. */
4342 static sbitmap *antloc;
4344 /* Nonzero for expressions where this block is an optimal computation
4345 point. */
4346 static sbitmap *pre_optimal;
4348 /* Nonzero for expressions which are redundant in a particular block. */
4349 static sbitmap *pre_redundant;
4351 /* Nonzero for expressions which should be inserted on a specific edge. */
4352 static sbitmap *pre_insert_map;
4354 /* Nonzero for expressions which should be deleted in a specific block. */
4355 static sbitmap *pre_delete_map;
4357 /* Contains the edge_list returned by pre_edge_lcm. */
4358 static struct edge_list *edge_list;
4360 /* Redundant insns. */
4361 static sbitmap pre_redundant_insns;
4363 /* Allocate vars used for PRE analysis. */
4365 static void
4366 alloc_pre_mem (n_blocks, n_exprs)
4367 int n_blocks, n_exprs;
4369 transp = sbitmap_vector_alloc (n_blocks, n_exprs);
4370 comp = sbitmap_vector_alloc (n_blocks, n_exprs);
4371 antloc = sbitmap_vector_alloc (n_blocks, n_exprs);
4373 pre_optimal = NULL;
4374 pre_redundant = NULL;
4375 pre_insert_map = NULL;
4376 pre_delete_map = NULL;
4377 ae_in = NULL;
4378 ae_out = NULL;
4379 ae_kill = sbitmap_vector_alloc (n_blocks, n_exprs);
4381 /* pre_insert and pre_delete are allocated later. */
4384 /* Free vars used for PRE analysis. */
4386 static void
4387 free_pre_mem ()
4389 sbitmap_vector_free (transp);
4390 sbitmap_vector_free (comp);
4392 /* ANTLOC and AE_KILL are freed just after pre_lcm finishes. */
4394 if (pre_optimal)
4395 sbitmap_vector_free (pre_optimal);
4396 if (pre_redundant)
4397 sbitmap_vector_free (pre_redundant);
4398 if (pre_insert_map)
4399 sbitmap_vector_free (pre_insert_map);
4400 if (pre_delete_map)
4401 sbitmap_vector_free (pre_delete_map);
4402 if (ae_in)
4403 sbitmap_vector_free (ae_in);
4404 if (ae_out)
4405 sbitmap_vector_free (ae_out);
4407 transp = comp = NULL;
4408 pre_optimal = pre_redundant = pre_insert_map = pre_delete_map = NULL;
4409 ae_in = ae_out = NULL;
4412 /* Top level routine to do the dataflow analysis needed by PRE. */
4414 static void
4415 compute_pre_data ()
4417 sbitmap trapping_expr;
4418 int i;
4419 unsigned int ui;
4421 compute_local_properties (transp, comp, antloc, 0);
4422 sbitmap_vector_zero (ae_kill, n_basic_blocks);
4424 /* Collect expressions which might trap. */
4425 trapping_expr = sbitmap_alloc (n_exprs);
4426 sbitmap_zero (trapping_expr);
4427 for (ui = 0; ui < expr_hash_table_size; ui++)
4429 struct expr *e;
4430 for (e = expr_hash_table[ui]; e != NULL; e = e->next_same_hash)
4431 if (may_trap_p (e->expr))
4432 SET_BIT (trapping_expr, e->bitmap_index);
4435 /* Compute ae_kill for each basic block using:
4437 ~(TRANSP | COMP)
4439 This is significantly faster than compute_ae_kill. */
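  /* A sketch of why this is equivalent: COMP as computed by
     compute_local_properties means "computed in the block and still
     valid at its end", so an expression fails to survive a block only
     when some operand is modified there (~TRANSP) and no surviving
     local computation re-establishes it (~COMP).  Plugging
     KILL = ~(TRANSP | COMP) into AVOUT = COMP | (AVIN & ~KILL) therefore
     yields the same availability as the precise kill set.  */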
4441 for (i = 0; i < n_basic_blocks; i++)
4443 edge e;
4445 /* If the current block is the destination of an abnormal edge, we
4446 kill all trapping expressions because we won't be able to properly
4447 place the instruction on the edge. So make them neither
4448 anticipatable nor transparent. This is fairly conservative. */
4449 for (e = BASIC_BLOCK (i)->pred; e ; e = e->pred_next)
4450 if (e->flags & EDGE_ABNORMAL)
4452 sbitmap_difference (antloc[i], antloc[i], trapping_expr);
4453 sbitmap_difference (transp[i], transp[i], trapping_expr);
4454 break;
4457 sbitmap_a_or_b (ae_kill[i], transp[i], comp[i]);
4458 sbitmap_not (ae_kill[i], ae_kill[i]);
4461 edge_list = pre_edge_lcm (gcse_file, n_exprs, transp, comp, antloc,
4462 ae_kill, &pre_insert_map, &pre_delete_map);
4463 sbitmap_vector_free (antloc);
4464 antloc = NULL;
4465 sbitmap_vector_free (ae_kill);
4466 ae_kill = NULL;
4467 free (trapping_expr);
4470 /* PRE utilities */
4472 /* Return non-zero if an occurrence of expression EXPR in OCCR_BB would reach
4473 block BB.
4475 VISITED is a pointer to a working buffer for tracking which BB's have
4476 been visited. It is NULL for the top-level call.
4478 We treat reaching expressions that go through blocks containing the same
4479 reaching expression as "not reaching". E.g. if EXPR is generated in blocks
4480 2 and 3, INSN is in block 4, and 2->3->4, we treat the expression in block
4481 2 as not reaching. The intent is to improve the probability of finding
4482 only one reaching expression and to reduce register lifetimes by picking
4483 the closest such expression. */
4485 static int
4486 pre_expr_reaches_here_p_work (occr_bb, expr, bb, visited)
4487 basic_block occr_bb;
4488 struct expr *expr;
4489 basic_block bb;
4490 char *visited;
4492 edge pred;
4494 for (pred = bb->pred; pred != NULL; pred = pred->pred_next)
4496 basic_block pred_bb = pred->src;
4498 if (pred->src == ENTRY_BLOCK_PTR
4499 /* Has this predecessor already been visited? */
4500 || visited[pred_bb->index])
4501 ;/* Nothing to do. */
4503 /* Does this predecessor generate this expression? */
4504 else if (TEST_BIT (comp[pred_bb->index], expr->bitmap_index))
4506 /* Is this the occurrence we're looking for?
4507 Note that there's only one generating occurrence per block
4508 so we just need to check the block number. */
4509 if (occr_bb == pred_bb)
4510 return 1;
4512 visited[pred_bb->index] = 1;
4514 /* Ignore this predecessor if it kills the expression. */
4515 else if (! TEST_BIT (transp[pred_bb->index], expr->bitmap_index))
4516 visited[pred_bb->index] = 1;
4518 /* Neither gen nor kill. */
4519 else
4521 visited[pred_bb->index] = 1;
4522 if (pre_expr_reaches_here_p_work (occr_bb, expr, pred_bb, visited))
4523 return 1;
4527 /* All paths have been checked. */
4528 return 0;
4531 /* The wrapper for pre_expr_reaches_here_p_work that ensures that any
4532 memory allocated for that function is returned. */
4534 static int
4535 pre_expr_reaches_here_p (occr_bb, expr, bb)
4536 basic_block occr_bb;
4537 struct expr *expr;
4538 basic_block bb;
4540 int rval;
4541 char *visited = (char *) xcalloc (n_basic_blocks, 1);
4543 rval = pre_expr_reaches_here_p_work (occr_bb, expr, bb, visited);
4545 free (visited);
4546 return rval;
4550 /* Given an expr, generate RTL which we can insert at the end of a BB,
4551 or on an edge. Set the block number of any insns generated to
4552 the value of BB. */
4554 static rtx
4555 process_insert_insn (expr)
4556 struct expr *expr;
4558 rtx reg = expr->reaching_reg;
4559 rtx exp = copy_rtx (expr->expr);
4560 rtx pat;
4562 start_sequence ();
4564 /* If the expression is something that's an operand, like a constant,
4565 just copy it to a register. */
4566 if (general_operand (exp, GET_MODE (reg)))
4567 emit_move_insn (reg, exp);
4569 /* Otherwise, make a new insn to compute this expression and make sure the
4570 insn will be recognized (this also adds any needed CLOBBERs). Copy the
4571 expression to make sure we don't have any sharing issues. */
4572 else if (insn_invalid_p (emit_insn (gen_rtx_SET (VOIDmode, reg, exp))))
4573 abort ();
4575 pat = gen_sequence ();
4576 end_sequence ();
4578 return pat;
4581 /* Add EXPR to the end of basic block BB.
4583 This is used by both the PRE and code hoisting.
4585 For PRE, we want to verify that the expr is either transparent
4586 or locally anticipatable in the target block. This check makes
4587 no sense for code hoisting. */
4589 static void
4590 insert_insn_end_bb (expr, bb, pre)
4591 struct expr *expr;
4592 basic_block bb;
4593 int pre;
4595 rtx insn = bb->end;
4596 rtx new_insn;
4597 rtx reg = expr->reaching_reg;
4598 int regno = REGNO (reg);
4599 rtx pat;
4600 int i;
4602 pat = process_insert_insn (expr);
4604 /* If the last insn is a jump, insert EXPR in front [taking care to
4605 handle cc0, etc. properly]. */
4607 if (GET_CODE (insn) == JUMP_INSN)
4609 #ifdef HAVE_cc0
4610 rtx note;
4611 #endif
4613 /* If this is a jump table, then we can't insert stuff here. Since
4614 we know the previous real insn must be the tablejump, we insert
4615 the new instruction just before the tablejump. */
4616 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
4617 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
4618 insn = prev_real_insn (insn);
4620 #ifdef HAVE_cc0
4621 /* FIXME: 'twould be nice to call prev_cc0_setter here but it aborts
4622 if cc0 isn't set. */
4623 note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
4624 if (note)
4625 insn = XEXP (note, 0);
4626 else
4628 rtx maybe_cc0_setter = prev_nonnote_insn (insn);
4629 if (maybe_cc0_setter
4630 && INSN_P (maybe_cc0_setter)
4631 && sets_cc0_p (PATTERN (maybe_cc0_setter)))
4632 insn = maybe_cc0_setter;
4634 #endif
4635 /* FIXME: What if something in cc0/jump uses value set in new insn? */
4636 new_insn = emit_insn_before (pat, insn);
4639 /* Likewise if the last insn is a call, as will happen in the presence
4640 of exception handling. */
4641 else if (GET_CODE (insn) == CALL_INSN)
4643 /* Keeping in mind SMALL_REGISTER_CLASSES and parameters in registers,
4644 we search backward and place the instructions before the first
4645 parameter is loaded. Do this for everyone for consistency and a
4646 presumption that we'll get better code elsewhere as well.
4648 It should always be the case that we can put these instructions
4649 anywhere in the basic block when performing PRE optimizations.
4650 Check this. */
4652 if (pre
4653 && !TEST_BIT (antloc[bb->index], expr->bitmap_index)
4654 && !TEST_BIT (transp[bb->index], expr->bitmap_index))
4655 abort ();
4657 /* Since different machines initialize their parameter registers
4658 in different orders, assume nothing. Collect the set of all
4659 parameter registers. */
4660 insn = find_first_parameter_load (insn, bb->head);
4662 /* If we found all the parameter loads, then we want to insert
4663 before the first parameter load.
4665 If we did not find all the parameter loads, then we might have
4666 stopped on the head of the block, which could be a CODE_LABEL.
4667 If we inserted before the CODE_LABEL, then we would be putting
4668 the insn in the wrong basic block. In that case, put the insn
4669 after the CODE_LABEL. Also, respect NOTE_INSN_BASIC_BLOCK. */
4670 while (GET_CODE (insn) == CODE_LABEL
4671 || NOTE_INSN_BASIC_BLOCK_P (insn))
4672 insn = NEXT_INSN (insn);
4674 new_insn = emit_insn_before (pat, insn);
4676 else
4677 new_insn = emit_insn_after (pat, insn);
4679 /* Keep block number table up to date.
4680 Note, PAT could be a multiple-insn sequence; we have to make
4681 sure that each insn in the sequence is handled. */
4682 if (GET_CODE (pat) == SEQUENCE)
4684 for (i = 0; i < XVECLEN (pat, 0); i++)
4686 rtx insn = XVECEXP (pat, 0, i);
4687 if (INSN_P (insn))
4688 add_label_notes (PATTERN (insn), new_insn);
4690 note_stores (PATTERN (insn), record_set_info, insn);
4693 else
4695 add_label_notes (SET_SRC (pat), new_insn);
4697 /* Keep register set table up to date. */
4698 record_one_set (regno, new_insn);
4701 gcse_create_count++;
4703 if (gcse_file)
4705 fprintf (gcse_file, "PRE/HOIST: end of bb %d, insn %d, ",
4706 bb->index, INSN_UID (new_insn));
4707 fprintf (gcse_file, "copying expression %d to reg %d\n",
4708 expr->bitmap_index, regno);
4712 /* Insert partially redundant expressions on edges in the CFG to make
4713 the expressions fully redundant. */
4715 static int
4716 pre_edge_insert (edge_list, index_map)
4717 struct edge_list *edge_list;
4718 struct expr **index_map;
4720 int e, i, j, num_edges, set_size, did_insert = 0;
4721 sbitmap *inserted;
4723 /* Where PRE_INSERT_MAP is nonzero, we add the expression on that edge
4724 if it reaches any of the deleted expressions. */
4726 set_size = pre_insert_map[0]->size;
4727 num_edges = NUM_EDGES (edge_list);
4728 inserted = sbitmap_vector_alloc (num_edges, n_exprs);
4729 sbitmap_vector_zero (inserted, num_edges);
4731 for (e = 0; e < num_edges; e++)
4733 int indx;
4734 basic_block bb = INDEX_EDGE_PRED_BB (edge_list, e);
4736 for (i = indx = 0; i < set_size; i++, indx += SBITMAP_ELT_BITS)
4738 SBITMAP_ELT_TYPE insert = pre_insert_map[e]->elms[i];
4740 for (j = indx; insert && j < n_exprs; j++, insert >>= 1)
4741 if ((insert & 1) != 0 && index_map[j]->reaching_reg != NULL_RTX)
4743 struct expr *expr = index_map[j];
4744 struct occr *occr;
4746 /* Now look at each deleted occurrence of this expression. */
4747 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
4749 if (! occr->deleted_p)
4750 continue;
4752 /* Insert this expression on this edge if it would
4753 reach the deleted occurrence in BB. */
4754 if (!TEST_BIT (inserted[e], j))
4756 rtx insn;
4757 edge eg = INDEX_EDGE (edge_list, e);
4759 /* We can't insert anything on an abnormal and
4760 critical edge, so we insert the insn at the end of
4761 the previous block. There are several alternatives
4761 detailed in Morgan's book, p. 277 (sec 10.5), for
4763 handling this situation. This one is easiest for
4764 now. */
4766 if ((eg->flags & EDGE_ABNORMAL) == EDGE_ABNORMAL)
4767 insert_insn_end_bb (index_map[j], bb, 0);
4768 else
4770 insn = process_insert_insn (index_map[j]);
4771 insert_insn_on_edge (insn, eg);
4774 if (gcse_file)
4776 fprintf (gcse_file, "PRE/HOIST: edge (%d,%d), ",
4777 bb->index,
4778 INDEX_EDGE_SUCC_BB (edge_list, e)->index);
4779 fprintf (gcse_file, "copy expression %d\n",
4780 expr->bitmap_index);
4783 update_ld_motion_stores (expr);
4784 SET_BIT (inserted[e], j);
4785 did_insert = 1;
4786 gcse_create_count++;
4793 sbitmap_vector_free (inserted);
4794 return did_insert;
4797 /* Copy the result of INSN to REG. INDX is the expression number. */
4799 static void
4800 pre_insert_copy_insn (expr, insn)
4801 struct expr *expr;
4802 rtx insn;
4804 rtx reg = expr->reaching_reg;
4805 int regno = REGNO (reg);
4806 int indx = expr->bitmap_index;
4807 rtx set = single_set (insn);
4808 rtx new_insn;
4810 if (!set)
4811 abort ();
4813 new_insn = emit_insn_after (gen_move_insn (reg, SET_DEST (set)), insn);
4815 /* Keep register set table up to date. */
4816 record_one_set (regno, new_insn);
4818 gcse_create_count++;
4820 if (gcse_file)
4821 fprintf (gcse_file,
4822 "PRE: bb %d, insn %d, copy expression %d in insn %d to reg %d\n",
4823 BLOCK_NUM (insn), INSN_UID (new_insn), indx,
4824 INSN_UID (insn), regno);
4825 update_ld_motion_stores (expr);
4828 /* Copy available expressions that reach the redundant expression
4829 to `reaching_reg'. */
4831 static void
4832 pre_insert_copies ()
4834 unsigned int i;
4835 struct expr *expr;
4836 struct occr *occr;
4837 struct occr *avail;
4839 /* For each available expression in the table, copy the result to
4840 `reaching_reg' if the expression reaches a deleted one.
4842 ??? The current algorithm is rather brute force.
4843 Need to do some profiling. */
4845 for (i = 0; i < expr_hash_table_size; i++)
4846 for (expr = expr_hash_table[i]; expr != NULL; expr = expr->next_same_hash)
4848 /* If the basic block isn't reachable, PPOUT will be TRUE. However,
4849 we don't want to insert a copy here because the expression may not
4850 really be redundant. So only insert an insn if the expression was
4851 deleted. This test also avoids further processing if the
4852 expression wasn't deleted anywhere. */
4853 if (expr->reaching_reg == NULL)
4854 continue;
4856 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
4858 if (! occr->deleted_p)
4859 continue;
4861 for (avail = expr->avail_occr; avail != NULL; avail = avail->next)
4863 rtx insn = avail->insn;
4865 /* No need to handle this one if handled already. */
4866 if (avail->copied_p)
4867 continue;
4869 /* Don't handle this one if it's a redundant one. */
4870 if (TEST_BIT (pre_redundant_insns, INSN_CUID (insn)))
4871 continue;
4873 /* Or if the expression doesn't reach the deleted one. */
4874 if (! pre_expr_reaches_here_p (BLOCK_FOR_INSN (avail->insn),
4875 expr,
4876 BLOCK_FOR_INSN (occr->insn)))
4877 continue;
4879 /* Copy the result of avail to reaching_reg. */
4880 pre_insert_copy_insn (expr, insn);
4881 avail->copied_p = 1;
4887 /* Delete redundant computations.
4888 Deletion is done by changing the insn to copy the `reaching_reg' of
4889 the expression into the result of the SET. It is left to later passes
4890 (cprop, cse2, flow, combine, regmove) to propagate the copy or eliminate it.
4892 Returns non-zero if a change is made. */
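/* An illustrative before/after (insn and register numbers made up): if

       (set (reg 65) (plus (reg 60) (reg 61)))

   sits in a block whose pre_delete_map bit is on for this expression,
   the insn is rewritten as

       (set (reg 65) (reg 120))

   where reg 120 is the expression's reaching_reg; pre_insert_copies and
   pre_edge_insert arrange for reg 120 to hold the expression's value on
   every path into that block.  */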
4894 static int
4895 pre_delete ()
4897 unsigned int i;
4898 int changed;
4899 struct expr *expr;
4900 struct occr *occr;
4902 changed = 0;
4903 for (i = 0; i < expr_hash_table_size; i++)
4904 for (expr = expr_hash_table[i]; expr != NULL; expr = expr->next_same_hash)
4906 int indx = expr->bitmap_index;
4908 /* We only need to search antic_occr since we require
4909 ANTLOC != 0. */
4911 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
4913 rtx insn = occr->insn;
4914 rtx set;
4915 basic_block bb = BLOCK_FOR_INSN (insn);
4917 if (TEST_BIT (pre_delete_map[bb->index], indx))
4919 set = single_set (insn);
4920 if (! set)
4921 abort ();
4923 /* Create a pseudo-reg to store the result of reaching
4924 expressions into. Get the mode for the new pseudo from
4925 the mode of the original destination pseudo. */
4926 if (expr->reaching_reg == NULL)
4927 expr->reaching_reg
4928 = gen_reg_rtx (GET_MODE (SET_DEST (set)));
4930 /* In theory this should never fail since we're creating
4931 a reg->reg copy.
4933 However, on the x86 some of the movXX patterns actually
4934 contain clobbers of scratch regs. This may cause the
4935 insn created by validate_change to not match any pattern
4936 and thus cause validate_change to fail. */
4937 if (validate_change (insn, &SET_SRC (set),
4938 expr->reaching_reg, 0))
4940 occr->deleted_p = 1;
4941 SET_BIT (pre_redundant_insns, INSN_CUID (insn));
4942 changed = 1;
4943 gcse_subst_count++;
4946 if (gcse_file)
4948 fprintf (gcse_file,
4949 "PRE: redundant insn %d (expression %d) in ",
4950 INSN_UID (insn), indx);
4951 fprintf (gcse_file, "bb %d, reaching reg is %d\n",
4952 bb->index, REGNO (expr->reaching_reg));
4958 return changed;
4961 /* Perform GCSE optimizations using PRE.
4962 This is called by one_pre_gcse_pass after all the dataflow analysis
4963 has been done.
4965 This is based on the original Morel-Renvoise paper, Fred Chow's thesis, and
4966 lazy code motion from Knoop, Ruthing and Steffen as described in Advanced
4967 Compiler Design and Implementation.
4969 ??? A new pseudo reg is created to hold the reaching expression. The nice
4970 thing about the classical approach is that it would try to use an existing
4971 reg. If the register can't be adequately optimized [i.e. we introduce
4972 reload problems], one could add a pass here to propagate the new register
4973 through the block.
4975 ??? We don't handle single sets in PARALLELs because we're [currently] not
4976 able to copy the rest of the parallel when we insert copies to create full
4977 redundancies from partial redundancies. However, there's no reason why we
4978 can't handle PARALLELs in the cases where there are no partial
4979 redundancies. */
4981 static int
4982 pre_gcse ()
4984 unsigned int i;
4985 int did_insert, changed;
4986 struct expr **index_map;
4987 struct expr *expr;
4989 /* Compute a mapping from expression number (`bitmap_index') to
4990 hash table entry. */
4992 index_map = (struct expr **) xcalloc (n_exprs, sizeof (struct expr *));
4993 for (i = 0; i < expr_hash_table_size; i++)
4994 for (expr = expr_hash_table[i]; expr != NULL; expr = expr->next_same_hash)
4995 index_map[expr->bitmap_index] = expr;
4997 /* Reset bitmap used to track which insns are redundant. */
4998 pre_redundant_insns = sbitmap_alloc (max_cuid);
4999 sbitmap_zero (pre_redundant_insns);
5001 /* Delete the redundant insns first so that
5002 - we know what register to use for the new insns and for the other
5003 ones with reaching expressions
5004 - we know which insns are redundant when we go to create copies */
5006 changed = pre_delete ();
5008 did_insert = pre_edge_insert (edge_list, index_map);
5010 /* In other places with reaching expressions, copy the expression to the
5011 specially allocated pseudo-reg that reaches the redundant expr. */
5012 pre_insert_copies ();
5013 if (did_insert)
5015 commit_edge_insertions ();
5016 changed = 1;
5019 free (index_map);
5020 free (pre_redundant_insns);
5021 return changed;
5024 /* Top level routine to perform one PRE GCSE pass.
5026 Return non-zero if a change was made. */
5028 static int
5029 one_pre_gcse_pass (pass)
5030 int pass;
5032 int changed = 0;
5034 gcse_subst_count = 0;
5035 gcse_create_count = 0;
5037 alloc_expr_hash_table (max_cuid);
5038 add_noreturn_fake_exit_edges ();
5039 if (flag_gcse_lm)
5040 compute_ld_motion_mems ();
5042 compute_expr_hash_table ();
5043 trim_ld_motion_mems ();
5044 if (gcse_file)
5045 dump_hash_table (gcse_file, "Expression", expr_hash_table,
5046 expr_hash_table_size, n_exprs);
5048 if (n_exprs > 0)
5050 alloc_pre_mem (n_basic_blocks, n_exprs);
5051 compute_pre_data ();
5052 changed |= pre_gcse ();
5053 free_edge_list (edge_list);
5054 free_pre_mem ();
5057 free_ldst_mems ();
5058 remove_fake_edges ();
5059 free_expr_hash_table ();
5061 if (gcse_file)
5063 fprintf (gcse_file, "\nPRE GCSE of %s, pass %d: %d bytes needed, ",
5064 current_function_name, pass, bytes_used);
5065 fprintf (gcse_file, "%d substs, %d insns created\n",
5066 gcse_subst_count, gcse_create_count);
5069 return changed;
5072 /* If X contains any LABEL_REF's, add REG_LABEL notes for them to INSN.
5073 If notes are added to an insn which references a CODE_LABEL, the
5074 LABEL_NUSES count is incremented. We have to add REG_LABEL notes,
5075 because the following loop optimization pass requires them. */
5077 /* ??? This is very similar to the loop.c add_label_notes function. We
5078 could probably share code here. */
5080 /* ??? If there was a jump optimization pass after gcse and before loop,
5081 then we would not need to do this here, because jump would add the
5082 necessary REG_LABEL notes. */
5084 static void
5085 add_label_notes (x, insn)
5086 rtx x;
5087 rtx insn;
5089 enum rtx_code code = GET_CODE (x);
5090 int i, j;
5091 const char *fmt;
5093 if (code == LABEL_REF && !LABEL_REF_NONLOCAL_P (x))
5095 /* This code used to ignore labels that referred to dispatch tables to
5096 avoid flow generating (slightly) worse code.
5098 We no longer ignore such label references (see LABEL_REF handling in
5099 mark_jump_label for additional information). */
5101 REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_LABEL, XEXP (x, 0),
5102 REG_NOTES (insn));
5103 if (LABEL_P (XEXP (x, 0)))
5104 LABEL_NUSES (XEXP (x, 0))++;
5105 return;
5108 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
5110 if (fmt[i] == 'e')
5111 add_label_notes (XEXP (x, i), insn);
5112 else if (fmt[i] == 'E')
5113 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
5114 add_label_notes (XVECEXP (x, i, j), insn);
5118 /* Compute transparent outgoing information for each block.
5120 An expression is transparent to an edge unless it is killed by
5121 the edge itself. This can only happen with abnormal control flow,
5122 when the edge is traversed through a call. This happens with
5123 non-local labels and exceptions.
5125 This would not be necessary if we split the edge. While this is
5126 normally impossible for abnormal critical edges, with some effort
5127 it should be possible with exception handling, since we still have
5128 control over which handler should be invoked. But due to increased
5129 EH table sizes, this may not be worthwhile. */
5131 static void
5132 compute_transpout ()
5134 int bb;
5135 unsigned int i;
5136 struct expr *expr;
5138 sbitmap_vector_ones (transpout, n_basic_blocks);
5140 for (bb = 0; bb < n_basic_blocks; ++bb)
5142 /* Note that flow inserted a nop at the end of basic blocks that
5143 end in call instructions for reasons other than abnormal
5144 control flow. */
5145 if (GET_CODE (BLOCK_END (bb)) != CALL_INSN)
5146 continue;
5148 for (i = 0; i < expr_hash_table_size; i++)
5149 for (expr = expr_hash_table[i]; expr ; expr = expr->next_same_hash)
5150 if (GET_CODE (expr->expr) == MEM)
5152 if (GET_CODE (XEXP (expr->expr, 0)) == SYMBOL_REF
5153 && CONSTANT_POOL_ADDRESS_P (XEXP (expr->expr, 0)))
5154 continue;
5156 /* ??? Optimally, we would use interprocedural alias
5157 analysis to determine if this mem is actually killed
5158 by this call. */
5159 RESET_BIT (transpout[bb], expr->bitmap_index);
5164 /* Removal of useless null pointer checks */
5166 /* Called via note_stores. X is set by SETTER. If X is a register we must
5167 invalidate nonnull_local and set nonnull_killed. DATA is really a
5168 `null_pointer_info *'.
5170 We ignore hard registers. */
5172 static void
5173 invalidate_nonnull_info (x, setter, data)
5174 rtx x;
5175 rtx setter ATTRIBUTE_UNUSED;
5176 void *data;
5178 unsigned int regno;
5179 struct null_pointer_info *npi = (struct null_pointer_info *) data;
5181 while (GET_CODE (x) == SUBREG)
5182 x = SUBREG_REG (x);
5184 /* Ignore anything that is not a register or is a hard register. */
5185 if (GET_CODE (x) != REG
5186 || REGNO (x) < npi->min_reg
5187 || REGNO (x) >= npi->max_reg)
5188 return;
5190 regno = REGNO (x) - npi->min_reg;
5192 RESET_BIT (npi->nonnull_local[npi->current_block], regno);
5193 SET_BIT (npi->nonnull_killed[npi->current_block], regno);
5196 /* Do null-pointer check elimination for the registers indicated in
5197 NPI. NONNULL_AVIN and NONNULL_AVOUT are pre-allocated sbitmaps;
5198 they are not our responsibility to free. */
5200 static void
5201 delete_null_pointer_checks_1 (delete_list, block_reg, nonnull_avin,
5202 nonnull_avout, npi)
5203 varray_type *delete_list;
5204 unsigned int *block_reg;
5205 sbitmap *nonnull_avin;
5206 sbitmap *nonnull_avout;
5207 struct null_pointer_info *npi;
5209 int bb;
5210 int current_block;
5211 sbitmap *nonnull_local = npi->nonnull_local;
5212 sbitmap *nonnull_killed = npi->nonnull_killed;
5214 /* Compute local properties, nonnull and killed. A register will have
5215 the nonnull property if at the end of the current block its value is
5216 known to be nonnull. The killed property indicates that somewhere in
5217 the block any information we had about the register is killed.
5219 Note that a register can have both properties in a single block. That
5220 indicates that it's killed, then later in the block a new value is
5221 computed. */
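  /* An illustrative block (register numbers made up):

         (set (reg 70) (mem (reg 66)))     ; load through reg 66
         ...
         (set (reg 66) (reg 71))           ; reg 66 overwritten

     The load marks reg 66 nonnull in nonnull_local; the later set of
     reg 66, seen through note_stores / invalidate_nonnull_info, clears
     that bit again and records the kill in nonnull_killed.  */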
5222 sbitmap_vector_zero (nonnull_local, n_basic_blocks);
5223 sbitmap_vector_zero (nonnull_killed, n_basic_blocks);
5225 for (current_block = 0; current_block < n_basic_blocks; current_block++)
5227 rtx insn, stop_insn;
5229 /* Set the current block for invalidate_nonnull_info. */
5230 npi->current_block = current_block;
5232 /* Scan each insn in the basic block looking for memory references and
5233 register sets. */
5234 stop_insn = NEXT_INSN (BLOCK_END (current_block));
5235 for (insn = BLOCK_HEAD (current_block);
5236 insn != stop_insn;
5237 insn = NEXT_INSN (insn))
5239 rtx set;
5240 rtx reg;
5242 /* Ignore anything that is not a normal insn. */
5243 if (! INSN_P (insn))
5244 continue;
5246 /* Basically ignore anything that is not a simple SET. We do have
5247 to make sure to invalidate nonnull_local and set nonnull_killed
5248 for such insns though. */
5249 set = single_set (insn);
5250 if (!set)
5252 note_stores (PATTERN (insn), invalidate_nonnull_info, npi);
5253 continue;
5256 /* See if we've got a usable memory load. We handle it first
5257 in case it uses its address register as a dest (which kills
5258 the nonnull property). */
5259 if (GET_CODE (SET_SRC (set)) == MEM
5260 && GET_CODE ((reg = XEXP (SET_SRC (set), 0))) == REG
5261 && REGNO (reg) >= npi->min_reg
5262 && REGNO (reg) < npi->max_reg)
5263 SET_BIT (nonnull_local[current_block],
5264 REGNO (reg) - npi->min_reg);
5266 /* Now invalidate stuff clobbered by this insn. */
5267 note_stores (PATTERN (insn), invalidate_nonnull_info, npi);
5269 /* And handle stores, we do these last since any sets in INSN can
5270 not kill the nonnull property if it is derived from a MEM
5271 appearing in a SET_DEST. */
5272 if (GET_CODE (SET_DEST (set)) == MEM
5273 && GET_CODE ((reg = XEXP (SET_DEST (set), 0))) == REG
5274 && REGNO (reg) >= npi->min_reg
5275 && REGNO (reg) < npi->max_reg)
5276 SET_BIT (nonnull_local[current_block],
5277 REGNO (reg) - npi->min_reg);
5281 /* Now compute global properties based on the local properties. This
5282 is a classic global availability algorithm. */
5283 compute_available (nonnull_local, nonnull_killed,
5284 nonnull_avout, nonnull_avin);
5286 /* Now look at each bb and see if it ends with a compare of a value
5287 against zero. */
5288 for (bb = 0; bb < n_basic_blocks; bb++)
5290 rtx last_insn = BLOCK_END (bb);
5291 rtx condition, earliest;
5292 int compare_and_branch;
5294 /* Since MIN_REG is always at least FIRST_PSEUDO_REGISTER, and
5295 since BLOCK_REG[BB] is zero if this block did not end with a
5296 comparison against zero, this condition works. */
5297 if (block_reg[bb] < npi->min_reg
5298 || block_reg[bb] >= npi->max_reg)
5299 continue;
5301 /* LAST_INSN is a conditional jump. Get its condition. */
5302 condition = get_condition (last_insn, &earliest);
5304 /* If we can't determine the condition then skip. */
5305 if (! condition)
5306 continue;
5308 /* Is the register known to have a nonzero value? */
5309 if (!TEST_BIT (nonnull_avout[bb], block_reg[bb] - npi->min_reg))
5310 continue;
5312 /* Try to compute whether the compare/branch at the loop end is one or
5313 two instructions. */
5314 if (earliest == last_insn)
5315 compare_and_branch = 1;
5316 else if (earliest == prev_nonnote_insn (last_insn))
5317 compare_and_branch = 2;
5318 else
5319 continue;
5321 /* We know the register in this comparison is nonnull at exit from
5322 this block. We can optimize this comparison. */
5323 if (GET_CODE (condition) == NE)
5325 rtx new_jump;
5327 new_jump = emit_jump_insn_before (gen_jump (JUMP_LABEL (last_insn)),
5328 last_insn);
5329 JUMP_LABEL (new_jump) = JUMP_LABEL (last_insn);
5330 LABEL_NUSES (JUMP_LABEL (new_jump))++;
5331 emit_barrier_after (new_jump);
5333 if (!*delete_list)
5334 VARRAY_RTX_INIT (*delete_list, 10, "delete_list");
5336 VARRAY_PUSH_RTX (*delete_list, last_insn);
5337 if (compare_and_branch == 2)
5338 VARRAY_PUSH_RTX (*delete_list, earliest);
5340 /* Don't check this block again. (Note that BLOCK_END is
5341 invalid here; we deleted the last instruction in the
5342 block.) */
5343 block_reg[bb] = 0;
5347 /* Find EQ/NE comparisons against zero which can be (indirectly) evaluated
5348 at compile time.
5350 This is conceptually similar to global constant/copy propagation and
5351 classic global CSE (it even uses the same dataflow equations as cprop).
5353 If a register is used as memory address with the form (mem (reg)), then we
5354 know that REG cannot be zero at that point in the program. Any instruction
5355 which sets REG "kills" this property.
5357 So, if every path leading to a conditional branch has an available memory
5358 reference of that form, then we know the register can not have the value
5359 zero at the conditional branch.
5361 So we merely need to compute the local properties and propagate that data
5362 around the cfg, then optimize where possible.
5364 We run this pass two times. Once before CSE, then again after CSE. This
5365 has proven to be the most profitable approach. It is rare for new
5366 optimization opportunities of this nature to appear after the first CSE
5367 pass.
5369 This could probably be integrated with global cprop with a little work. */
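/* An illustrative source-level fragment (made up):

       *p = x;
       if (p == 0)
         abort ();

   The store through p makes the pseudo holding p nonnull on that path.
   If such a dereference is available on every path into the comparison,
   the branch is decidable: a jump taken on EQ against zero is simply
   deleted, and one taken on NE is replaced by an unconditional jump, as
   done in delete_null_pointer_checks_1 above.  */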
5371 void
5372 delete_null_pointer_checks (f)
5373 rtx f ATTRIBUTE_UNUSED;
5375 sbitmap *nonnull_avin, *nonnull_avout;
5376 unsigned int *block_reg;
5377 varray_type delete_list = NULL;
5378 int bb;
5379 int reg;
5380 int regs_per_pass;
5381 int max_reg;
5382 unsigned int i;
5383 struct null_pointer_info npi;
5385 /* If we have only a single block, then there's nothing to do. */
5386 if (n_basic_blocks <= 1)
5387 return;
5389 /* Trying to perform global optimizations on flow graphs which have
5390 a high connectivity will take a long time and is unlikely to be
5391 particularly useful.
5393 In normal circumstances a cfg should have about twice as many edges
5394 as blocks. But we do not want to punish small functions which have
5395 a couple switch statements. So we require a relatively large number
5396 of basic blocks and the ratio of edges to blocks to be high. */
5397 if (n_basic_blocks > 1000 && n_edges / n_basic_blocks >= 20)
5398 return;
5400 /* We need four bitmaps, each with a bit for each register in each
5401 basic block. */
5402 max_reg = max_reg_num ();
5403 regs_per_pass = get_bitmap_width (4, n_basic_blocks, max_reg);
5405 /* Allocate bitmaps to hold local and global properties. */
5406 npi.nonnull_local = sbitmap_vector_alloc (n_basic_blocks, regs_per_pass);
5407 npi.nonnull_killed = sbitmap_vector_alloc (n_basic_blocks, regs_per_pass);
5408 nonnull_avin = sbitmap_vector_alloc (n_basic_blocks, regs_per_pass);
5409 nonnull_avout = sbitmap_vector_alloc (n_basic_blocks, regs_per_pass);
5411 /* Go through the basic blocks, seeing whether or not each block
5412 ends with a conditional branch whose condition is a comparison
5413 against zero. Record the register compared in BLOCK_REG. */
5414 block_reg = (unsigned int *) xcalloc (n_basic_blocks, sizeof (int));
5415 for (bb = 0; bb < n_basic_blocks; bb++)
5417 rtx last_insn = BLOCK_END (bb);
5418 rtx condition, earliest, reg;
5420 /* We only want conditional branches. */
5421 if (GET_CODE (last_insn) != JUMP_INSN
5422 || !any_condjump_p (last_insn)
5423 || !onlyjump_p (last_insn))
5424 continue;
5426 /* LAST_INSN is a conditional jump. Get its condition. */
5427 condition = get_condition (last_insn, &earliest);
5429 /* If we were unable to get the condition, or it is not an equality
5430 comparison against zero, then there's nothing we can do. */
5431 if (!condition
5432 || (GET_CODE (condition) != NE && GET_CODE (condition) != EQ)
5433 || GET_CODE (XEXP (condition, 1)) != CONST_INT
5434 || (XEXP (condition, 1)
5435 != CONST0_RTX (GET_MODE (XEXP (condition, 0)))))
5436 continue;
5438 /* We must be checking a register against zero. */
5439 reg = XEXP (condition, 0);
5440 if (GET_CODE (reg) != REG)
5441 continue;
5443 block_reg[bb] = REGNO (reg);
5446 /* Go through the algorithm for each block of registers. */
5447 for (reg = FIRST_PSEUDO_REGISTER; reg < max_reg; reg += regs_per_pass)
5449 npi.min_reg = reg;
5450 npi.max_reg = MIN (reg + regs_per_pass, max_reg);
5451 delete_null_pointer_checks_1 (&delete_list, block_reg, nonnull_avin,
5452 nonnull_avout, &npi);
5455 /* Now delete the instructions all at once. This breaks the CFG. */
5456 if (delete_list)
5458 for (i = 0; i < VARRAY_ACTIVE_SIZE (delete_list); i++)
5459 delete_insn (VARRAY_RTX (delete_list, i));
5460 VARRAY_FREE (delete_list);
5463 /* Free the table of registers compared at the end of every block. */
5464 free (block_reg);
5466 /* Free bitmaps. */
5467 sbitmap_vector_free (npi.nonnull_local);
5468 sbitmap_vector_free (npi.nonnull_killed);
5469 sbitmap_vector_free (nonnull_avin);
5470 sbitmap_vector_free (nonnull_avout);
5473 /* Code Hoisting variables and subroutines. */
5475 /* Very busy expressions. */
5476 static sbitmap *hoist_vbein;
5477 static sbitmap *hoist_vbeout;
5479 /* Hoistable expressions. */
5480 static sbitmap *hoist_exprs;
5482 /* Dominator bitmaps. */
5483 static sbitmap *dominators;
5485 /* ??? We could compute post dominators and run this algorithm in
5486 reverse to perform tail merging; doing so would probably be
5487 more effective than the tail merging code in jump.c.
5489 It's unclear if tail merging could be run in parallel with
5490 code hoisting. It would be nice. */
5492 /* Allocate vars used for code hoisting analysis. */
5494 static void
5495 alloc_code_hoist_mem (n_blocks, n_exprs)
5496 int n_blocks, n_exprs;
5498 antloc = sbitmap_vector_alloc (n_blocks, n_exprs);
5499 transp = sbitmap_vector_alloc (n_blocks, n_exprs);
5500 comp = sbitmap_vector_alloc (n_blocks, n_exprs);
5502 hoist_vbein = sbitmap_vector_alloc (n_blocks, n_exprs);
5503 hoist_vbeout = sbitmap_vector_alloc (n_blocks, n_exprs);
5504 hoist_exprs = sbitmap_vector_alloc (n_blocks, n_exprs);
5505 transpout = sbitmap_vector_alloc (n_blocks, n_exprs);
5507 dominators = sbitmap_vector_alloc (n_blocks, n_blocks);
5510 /* Free vars used for code hoisting analysis. */
5512 static void
5513 free_code_hoist_mem ()
5515 sbitmap_vector_free (antloc);
5516 sbitmap_vector_free (transp);
5517 sbitmap_vector_free (comp);
5519 sbitmap_vector_free (hoist_vbein);
5520 sbitmap_vector_free (hoist_vbeout);
5521 sbitmap_vector_free (hoist_exprs);
5522 sbitmap_vector_free (transpout);
5524 sbitmap_vector_free (dominators);
5527 /* Compute the very busy expressions at entry/exit from each block.
5529 An expression is very busy if all paths from a given point
5530 compute the expression. */
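/* The equations solved below (a sketch in the usual notation):

       VBEIN(b)  = ANTLOC(b) | (VBEOUT(b) & TRANSP(b))
       VBEOUT(b) = intersection of VBEIN(s) over all successors s,
                   with VBEOUT of the last block left at 0

   iterated over the blocks in reverse order until nothing changes; this
   is the backward analogue of the availability computation.  */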
5532 static void
5533 compute_code_hoist_vbeinout ()
5535 int bb, changed, passes;
5537 sbitmap_vector_zero (hoist_vbeout, n_basic_blocks);
5538 sbitmap_vector_zero (hoist_vbein, n_basic_blocks);
5540 passes = 0;
5541 changed = 1;
5543 while (changed)
5545 changed = 0;
5547 /* We scan the blocks in the reverse order to speed up
5548 the convergence. */
5549 for (bb = n_basic_blocks - 1; bb >= 0; bb--)
5551 changed |= sbitmap_a_or_b_and_c (hoist_vbein[bb], antloc[bb],
5552 hoist_vbeout[bb], transp[bb]);
5553 if (bb != n_basic_blocks - 1)
5554 sbitmap_intersection_of_succs (hoist_vbeout[bb], hoist_vbein, bb);
5557 passes++;
5560 if (gcse_file)
5561 fprintf (gcse_file, "hoisting vbeinout computation: %d passes\n", passes);
5564 /* Top level routine to do the dataflow analysis needed by code hoisting. */
5566 static void
5567 compute_code_hoist_data ()
5569 compute_local_properties (transp, comp, antloc, 0);
5570 compute_transpout ();
5571 compute_code_hoist_vbeinout ();
5572 calculate_dominance_info (NULL, dominators, CDI_DOMINATORS);
5573 if (gcse_file)
5574 fprintf (gcse_file, "\n");
5577 /* Determine if the expression identified by EXPR_INDEX would
5578 reach BB unimpaired if it was placed at the end of EXPR_BB.
5580 It's unclear exactly what Muchnick meant by "unimpaired". It seems
5581 to me that the expression must either be computed or transparent in
5582 *every* block in the path(s) from EXPR_BB to BB. Any other definition
5583 would allow the expression to be hoisted out of loops, even if
5584 the expression wasn't a loop invariant.
5586 Contrast this to reachability for PRE where an expression is
5587 considered reachable if *any* path reaches instead of *all*
5588 paths. */
5590 static int
5591 hoist_expr_reaches_here_p (expr_bb, expr_index, bb, visited)
5592 basic_block expr_bb;
5593 int expr_index;
5594 basic_block bb;
5595 char *visited;
5597 edge pred;
5598 int visited_allocated_locally = 0;
5601 if (visited == NULL)
5603 visited_allocated_locally = 1;
5604 visited = xcalloc (n_basic_blocks, 1);
5607 for (pred = bb->pred; pred != NULL; pred = pred->pred_next)
5609 basic_block pred_bb = pred->src;
5611 if (pred->src == ENTRY_BLOCK_PTR)
5612 break;
5613 else if (visited[pred_bb->index])
5614 continue;
5616 /* Does this predecessor generate this expression? */
5617 else if (TEST_BIT (comp[pred_bb->index], expr_index))
5618 break;
5619 else if (! TEST_BIT (transp[pred_bb->index], expr_index))
5620 break;
5622 /* Not killed. */
5623 else
5625 visited[pred_bb->index] = 1;
5626 if (! hoist_expr_reaches_here_p (expr_bb, expr_index,
5627 pred_bb, visited))
5628 break;
5631 if (visited_allocated_locally)
5632 free (visited);
5634 return (pred == NULL);
5637 /* Actually perform code hoisting. */
5639 static void
5640 hoist_code ()
5642 int bb, dominated;
5643 unsigned int i;
5644 struct expr **index_map;
5645 struct expr *expr;
5647 sbitmap_vector_zero (hoist_exprs, n_basic_blocks);
5649 /* Compute a mapping from expression number (`bitmap_index') to
5650 hash table entry. */
5652 index_map = (struct expr **) xcalloc (n_exprs, sizeof (struct expr *));
5653 for (i = 0; i < expr_hash_table_size; i++)
5654 for (expr = expr_hash_table[i]; expr != NULL; expr = expr->next_same_hash)
5655 index_map[expr->bitmap_index] = expr;
5657 /* Walk over each basic block looking for potentially hoistable
5658 expressions, nothing gets hoisted from the entry block. */
5659 for (bb = 0; bb < n_basic_blocks; bb++)
5661 int found = 0;
5662 int insn_inserted_p;
5664 /* Examine each expression that is very busy at the exit of this
5665 block. These are the potentially hoistable expressions. */
5666 for (i = 0; i < hoist_vbeout[bb]->n_bits; i++)
5668 int hoistable = 0;
5670 if (TEST_BIT (hoist_vbeout[bb], i) && TEST_BIT (transpout[bb], i))
5672 /* We've found a potentially hoistable expression, now
5673 we look at every block BB dominates to see if it
5674 computes the expression. */
5675 for (dominated = 0; dominated < n_basic_blocks; dominated++)
5677 /* Ignore self dominance. */
5678 if (bb == dominated
5679 || ! TEST_BIT (dominators[dominated], bb))
5680 continue;
5682 /* We've found a dominated block, now see if it computes
5683 the busy expression and whether or not moving that
5684 expression to the "beginning" of that block is safe. */
5685 if (!TEST_BIT (antloc[dominated], i))
5686 continue;
5688 /* Note if the expression would reach the dominated block
5689 unimpaired if it was placed at the end of BB.
5691 Keep track of how many times this expression is hoistable
5692 from a dominated block into BB. */
5693 if (hoist_expr_reaches_here_p (BASIC_BLOCK (bb), i,
5694 BASIC_BLOCK (dominated), NULL))
5695 hoistable++;
5698 /* If we found more than one hoistable occurrence of this
5699 expression, then note it in the bitmap of expressions to
5700 hoist. It makes no sense to hoist things which are computed
5701 in only one BB, and doing so tends to pessimize register
5702 allocation. One could increase this value to try harder
5703 to avoid any possible code expansion due to register
5704 allocation issues; however experiments have shown that
5705 the vast majority of hoistable expressions are only movable
5706 from two successors, so raising this threshold is likely
5707 to nullify any benefit we get from code hoisting. */
5708 if (hoistable > 1)
5710 SET_BIT (hoist_exprs[bb], i);
5711 found = 1;
5716 /* If we found nothing to hoist, then quit now. */
5717 if (! found)
5718 continue;
5720 /* Loop over all the hoistable expressions. */
5721 for (i = 0; i < hoist_exprs[bb]->n_bits; i++)
5723 /* We want to insert the expression into BB only once, so
5724 note when we've inserted it. */
5725 insn_inserted_p = 0;
5727 /* These tests should be the same as the tests above. */
5728 if (TEST_BIT (hoist_vbeout[bb], i))
5730 /* We've found a potentially hoistable expression, now
5731 we look at every block BB dominates to see if it
5732 computes the expression. */
5733 for (dominated = 0; dominated < n_basic_blocks; dominated++)
5735 /* Ignore self dominance. */
5736 if (bb == dominated
5737 || ! TEST_BIT (dominators[dominated], bb))
5738 continue;
5740 /* We've found a dominated block, now see if it computes
5741 the busy expression and whether or not moving that
5742 expression to the "beginning" of that block is safe. */
5743 if (!TEST_BIT (antloc[dominated], i))
5744 continue;
5746 /* The expression is computed in the dominated block and
5747 it would be safe to compute it at the start of the
5748 dominated block. Now we have to determine if the
5749 expression would reach the dominated block if it was
5750 placed at the end of BB. */
5751 if (hoist_expr_reaches_here_p (BASIC_BLOCK (bb), i,
5752 BASIC_BLOCK (dominated), NULL))
5754 struct expr *expr = index_map[i];
5755 struct occr *occr = expr->antic_occr;
5756 rtx insn;
5757 rtx set;
5759 /* Find the right occurrence of this expression. */
5760 while (occr && BLOCK_NUM (occr->insn) != dominated)
5761 occr = occr->next;
5763 /* Should never happen. */
5764 if (!occr)
5765 abort ();
5767 insn = occr->insn;
5769 set = single_set (insn);
5770 if (! set)
5771 abort ();
5773 /* Create a pseudo-reg to store the result of reaching
5774 expressions into. Get the mode for the new pseudo
5775 from the mode of the original destination pseudo. */
5776 if (expr->reaching_reg == NULL)
5777 expr->reaching_reg
5778 = gen_reg_rtx (GET_MODE (SET_DEST (set)));
5780 /* In theory this should never fail since we're creating
5781 a reg->reg copy.
5783 However, on the x86 some of the movXX patterns
5784 actually contain clobbers of scratch regs. This may
5785 cause the insn created by validate_change to not
5786 match any pattern and thus cause validate_change to
5787 fail. */
5788 if (validate_change (insn, &SET_SRC (set),
5789 expr->reaching_reg, 0))
5791 occr->deleted_p = 1;
5792 if (!insn_inserted_p)
5794 insert_insn_end_bb (index_map[i],
5795 BASIC_BLOCK (bb), 0);
5796 insn_inserted_p = 1;
5805 free (index_map);
5808 /* Top level routine to perform one code hoisting (aka unification) pass.
5810 Return non-zero if a change was made. */
5812 static int
5813 one_code_hoisting_pass ()
5815 int changed = 0;
5817 alloc_expr_hash_table (max_cuid);
5818 compute_expr_hash_table ();
5819 if (gcse_file)
5820 dump_hash_table (gcse_file, "Code Hosting Expressions", expr_hash_table,
5821 expr_hash_table_size, n_exprs);
5823 if (n_exprs > 0)
5825 alloc_code_hoist_mem (n_basic_blocks, n_exprs);
5826 compute_code_hoist_data ();
5827 hoist_code ();
5828 free_code_hoist_mem ();
5831 free_expr_hash_table ();
5833 return changed;
5836 /* Here we provide the things required to do store motion towards
5837 the exit. In order for this to be effective, gcse also needed to
5838 be taught how to move a load when it is killed only by a store to itself.
5840 int i;
5841 float a[10];
5843 void foo(float scale)
5845 for (i=0; i<10; i++)
5846 a[i] *= scale;
5849 'i' is both loaded and stored to in the loop. Normally, gcse cannot move
5850 the load out since it's live around the loop and stored at the bottom
5851 of the loop.
5853 The 'Load Motion' referred to and implemented in this file is
5854 an enhancement to gcse which when using edge based lcm, recognizes
5855 this situation and allows gcse to move the load out of the loop.
5857 Once gcse has hoisted the load, store motion can then push this
5858 load towards the exit, and we end up with no loads or stores of 'i'
5859 in the loop. */
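/* For the example above, the intended end result is, in source terms
   (illustrative only; `i_reg' stands for the pseudo-register that the
   optimizers actually introduce):

     void
     foo (float scale)
     {
       int i_reg;

       for (i_reg = 0; i_reg < 10; i_reg++)
         a[i_reg] *= scale;
       i = i_reg;
     }

   The loads of `i' inside the loop have been replaced by uses of the
   reaching register and the store of `i' has been pushed down to the
   loop exit, so the loop body touches memory only for `a'.  */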
5861 /* This will search the ldst list for a matching expression. If it
5862 doesn't find one, we create one and initialize it. */
5864 static struct ls_expr *
5865 ldst_entry (x)
5866 rtx x;
5868 struct ls_expr * ptr;
5870 for (ptr = first_ls_expr(); ptr != NULL; ptr = next_ls_expr (ptr))
5871 if (expr_equiv_p (ptr->pattern, x))
5872 break;
5874 if (!ptr)
5876 ptr = (struct ls_expr *) xmalloc (sizeof (struct ls_expr));
5878 ptr->next = pre_ldst_mems;
5879 ptr->expr = NULL;
5880 ptr->pattern = x;
5881 ptr->loads = NULL_RTX;
5882 ptr->stores = NULL_RTX;
5883 ptr->reaching_reg = NULL_RTX;
5884 ptr->invalid = 0;
5885 ptr->index = 0;
5886 ptr->hash_index = 0;
5887 pre_ldst_mems = ptr;
5890 return ptr;
5893 /* Free up an individual ldst entry. */
5895 static void
5896 free_ldst_entry (ptr)
5897 struct ls_expr * ptr;
5899 free_INSN_LIST_list (& ptr->loads);
5900 free_INSN_LIST_list (& ptr->stores);
5902 free (ptr);
5905 /* Free up all memory associated with the ldst list. */
5907 static void
5908 free_ldst_mems ()
5910 while (pre_ldst_mems)
5912 struct ls_expr * tmp = pre_ldst_mems;
5914 pre_ldst_mems = pre_ldst_mems->next;
5916 free_ldst_entry (tmp);
5919 pre_ldst_mems = NULL;
5922 /* Dump debugging info about the ldst list. */
5924 static void
5925 print_ldst_list (file)
5926 FILE * file;
5928 struct ls_expr * ptr;
5930 fprintf (file, "LDST list: \n");
5932 for (ptr = first_ls_expr(); ptr != NULL; ptr = next_ls_expr (ptr))
5934 fprintf (file, " Pattern (%3d): ", ptr->index);
5936 print_rtl (file, ptr->pattern);
5938 fprintf (file, "\n Loads : ");
5940 if (ptr->loads)
5941 print_rtl (file, ptr->loads);
5942 else
5943 fprintf (file, "(nil)");
5945 fprintf (file, "\n Stores : ");
5947 if (ptr->stores)
5948 print_rtl (file, ptr->stores);
5949 else
5950 fprintf (file, "(nil)");
5952 fprintf (file, "\n\n");
5955 fprintf (file, "\n");
5958 /* Return the entry in the ldst list whose pattern matches X and has not been invalidated, or NULL if there is none. */
5960 static struct ls_expr *
5961 find_rtx_in_ldst (x)
5962 rtx x;
5964 struct ls_expr * ptr;
5966 for (ptr = pre_ldst_mems; ptr != NULL; ptr = ptr->next)
5967 if (expr_equiv_p (ptr->pattern, x) && ! ptr->invalid)
5968 return ptr;
5970 return NULL;
5973 /* Assign each element of the list of mems a monotonically increasing value. */
5975 static int
5976 enumerate_ldsts ()
5978 struct ls_expr * ptr;
5979 int n = 0;
5981 for (ptr = pre_ldst_mems; ptr != NULL; ptr = ptr->next)
5982 ptr->index = n++;
5984 return n;
5987 /* Return first item in the list. */
5989 static inline struct ls_expr *
5990 first_ls_expr ()
5992 return pre_ldst_mems;
5995 /* Return the next item in the list after the specified one. */
5997 static inline struct ls_expr *
5998 next_ls_expr (ptr)
5999 struct ls_expr * ptr;
6001 return ptr->next;
6004 /* Load Motion for loads which only kill themselves. */
6006 /* Return true if x is a simple MEM operation, with no registers or
6007 side effects. These are the types of loads we consider for the
6008 ld_motion list; otherwise we let the usual aliasing take care of it. */
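/* For instance (illustrative RTL, not generated here):

     (mem:SI (symbol_ref:SI ("i")))

   is a simple MEM: it is not volatile, not BLKmode, and its address
   cannot vary.  By contrast

     (mem:SI (plus:SI (reg:SI 58) (const_int 4)))

   is not simple, because its address depends on a register, so it is
   left to the normal aliasing machinery.  */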
6010 static int
6011 simple_mem (x)
6012 rtx x;
6014 if (GET_CODE (x) != MEM)
6015 return 0;
6017 if (MEM_VOLATILE_P (x))
6018 return 0;
6020 if (GET_MODE (x) == BLKmode)
6021 return 0;
6023 if (!rtx_varies_p (XEXP (x, 0), 0))
6024 return 1;
6026 return 0;
6029 /* Make sure there isn't a buried reference in this pattern anywhere.
6030 If there is, invalidate the entry for it since we're not capable
6031 of fixing it up just yet. We have to be sure we know about ALL
6032 loads, since the aliasing code will allow all entries in the
6033 ld_motion list to not alias each other. If we miss a load, we will get
6034 the wrong value since gcse might common it and we won't know to
6035 fix it up. */
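/* For example (illustrative RTL), in

     (set (reg:SI 60)
          (plus:SI (mem:SI (symbol_ref:SI ("x")))
                   (reg:SI 61)))

   the load of "x" is buried inside the PLUS rather than being the
   whole SET_SRC, so the ldst entry for "x" is looked up (or created)
   and marked invalid here.  */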
6037 static void
6038 invalidate_any_buried_refs (x)
6039 rtx x;
6041 const char * fmt;
6042 int i,j;
6043 struct ls_expr * ptr;
6045 /* Invalidate it in the list. */
6046 if (GET_CODE (x) == MEM && simple_mem (x))
6048 ptr = ldst_entry (x);
6049 ptr->invalid = 1;
6052 /* Recursively process the insn. */
6053 fmt = GET_RTX_FORMAT (GET_CODE (x));
6055 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
6057 if (fmt[i] == 'e')
6058 invalidate_any_buried_refs (XEXP (x, i));
6059 else if (fmt[i] == 'E')
6060 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
6061 invalidate_any_buried_refs (XVECEXP (x, i, j));
6065 /* Find all the 'simple' MEMs which are used in LOADs and STORES. Simple
6066 here means MEM loads and stores to symbols, with no
6067 side effects and no registers in the expression. If there are any
6068 uses/defs which don't match these criteria, the entry is invalidated and
6069 trimmed out later. */
6071 static void
6072 compute_ld_motion_mems ()
6074 struct ls_expr * ptr;
6075 int bb;
6076 rtx insn;
6078 pre_ldst_mems = NULL;
6080 for (bb = 0; bb < n_basic_blocks; bb++)
6082 for (insn = BLOCK_HEAD (bb);
6083 insn && insn != NEXT_INSN (BLOCK_END (bb));
6084 insn = NEXT_INSN (insn))
6086 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
6088 if (GET_CODE (PATTERN (insn)) == SET)
6090 rtx src = SET_SRC (PATTERN (insn));
6091 rtx dest = SET_DEST (PATTERN (insn));
6093 /* Check for a simple LOAD... */
6094 if (GET_CODE (src) == MEM && simple_mem (src))
6096 ptr = ldst_entry (src);
6097 if (GET_CODE (dest) == REG)
6098 ptr->loads = alloc_INSN_LIST (insn, ptr->loads);
6099 else
6100 ptr->invalid = 1;
6102 else
6104 /* Make sure there isn't a buried load somewhere. */
6105 invalidate_any_buried_refs (src);
6108 /* Check for stores. Don't worry about aliased ones, they
6109 will block any movement we might do later. We only care
6110 about this exact pattern since those are the only
6111 circumstance that we will ignore the aliasing info. */
6112 if (GET_CODE (dest) == MEM && simple_mem (dest))
6114 ptr = ldst_entry (dest);
6116 if (GET_CODE (src) != MEM
6117 && GET_CODE (src) != ASM_OPERANDS)
6118 ptr->stores = alloc_INSN_LIST (insn, ptr->stores);
6119 else
6120 ptr->invalid = 1;
6123 else
6124 invalidate_any_buried_refs (PATTERN (insn));
6130 /* Remove any references that have been either invalidated or are not in the
6131 expression list for pre gcse. */
6133 static void
6134 trim_ld_motion_mems ()
6136 struct ls_expr * last = NULL;
6137 struct ls_expr * ptr = first_ls_expr ();
6139 while (ptr != NULL)
6141 int del = ptr->invalid;
6142 struct expr * expr = NULL;
6144 /* Delete if entry has been made invalid. */
6145 if (!del)
6147 unsigned int i;
6149 del = 1;
6150 /* Delete if we cannot find this mem in the expression list. */
6151 for (i = 0; i < expr_hash_table_size && del; i++)
6153 for (expr = expr_hash_table[i];
6154 expr != NULL;
6155 expr = expr->next_same_hash)
6156 if (expr_equiv_p (expr->expr, ptr->pattern))
6158 del = 0;
6159 break;
6164 if (del)
6166 if (last != NULL)
6168 last->next = ptr->next;
6169 free_ldst_entry (ptr);
6170 ptr = last->next;
6172 else
6174 pre_ldst_mems = pre_ldst_mems->next;
6175 free_ldst_entry (ptr);
6176 ptr = pre_ldst_mems;
6179 else
6181 /* Set the expression field if we are keeping it. */
6182 last = ptr;
6183 ptr->expr = expr;
6184 ptr = ptr->next;
6188 /* Show the world what we've found. */
6189 if (gcse_file && pre_ldst_mems != NULL)
6190 print_ldst_list (gcse_file);
6193 /* This routine will take an expression which we are replacing with
6194 a reaching register, and update any stores that are needed if
6195 that expression is in the ld_motion list. Stores are updated by
6196 copying their SRC to the reaching register, and then storing
6197 the reaching register into the store location. This keeps the
6198 correct value in the reaching register for the loads. */
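/* In source terms (illustrative only; `i_reg' stands for whatever
   pseudo was chosen as the reaching register for `i'):

     i = j + 1;

   becomes

     i_reg = j + 1;
     i = i_reg;

   so any load of `i' that has been rewritten to use the reaching
   register still sees the freshly stored value.  */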
6200 static void
6201 update_ld_motion_stores (expr)
6202 struct expr * expr;
6204 struct ls_expr * mem_ptr;
6206 if ((mem_ptr = find_rtx_in_ldst (expr->expr)))
6208 /* We can try to find just the REACHED stores, but it shouldn't
6209 matter to set the reaching reg everywhere... some might be
6210 dead and should be eliminated later. */
6212 /* We replace SET mem = expr with
6213 SET reg = expr
6214 SET mem = reg , where reg is the
6215 reaching reg used in the load. */
6216 rtx list = mem_ptr->stores;
6218 for ( ; list != NULL_RTX; list = XEXP (list, 1))
6220 rtx insn = XEXP (list, 0);
6221 rtx pat = PATTERN (insn);
6222 rtx src = SET_SRC (pat);
6223 rtx reg = expr->reaching_reg;
6224 rtx copy, new;
6226 /* If we've already copied it, continue. */
6227 if (expr->reaching_reg == src)
6228 continue;
6230 if (gcse_file)
6232 fprintf (gcse_file, "PRE: store updated with reaching reg ");
6233 print_rtl (gcse_file, expr->reaching_reg);
6234 fprintf (gcse_file, ":\n ");
6235 print_inline_rtx (gcse_file, insn, 8);
6236 fprintf (gcse_file, "\n");
6239 copy = gen_move_insn ( reg, SET_SRC (pat));
6240 new = emit_insn_before (copy, insn);
6241 record_one_set (REGNO (reg), new);
6242 SET_SRC (pat) = reg;
6244 /* Un-recognize this pattern since it's probably different now. */
6245 INSN_CODE (insn) = -1;
6246 gcse_create_count++;
6251 /* Store motion code. */
6253 /* This is used to communicate the target bitvector we want to use in the
6254 reg_set_info routine when called via the note_stores mechanism. */
6255 static sbitmap * regvec;
6257 /* Used in computing the reverse edge graph bit vectors. */
6258 static sbitmap * st_antloc;
6260 /* Global holding the number of store expressions we are dealing with. */
6261 static int num_stores;
6263 /* Record in the regvec bitmap any register that is set. Called from note_stores. */
6265 static void
6266 reg_set_info (dest, setter, data)
6267 rtx dest, setter ATTRIBUTE_UNUSED;
6268 void * data ATTRIBUTE_UNUSED;
6270 if (GET_CODE (dest) == SUBREG)
6271 dest = SUBREG_REG (dest);
6273 if (GET_CODE (dest) == REG)
6274 SET_BIT (*regvec, REGNO (dest));
6277 /* Return non-zero if the register operands of expression X are not
6278 clobbered anywhere in basic block BB, i.e. the store's operands are OK. */
6280 static int
6281 store_ops_ok (x, bb)
6282 rtx x;
6283 basic_block bb;
6285 int i;
6286 enum rtx_code code;
6287 const char * fmt;
6289 /* Repeat is used to turn tail-recursion into iteration. */
6290 repeat:
6292 if (x == 0)
6293 return 1;
6295 code = GET_CODE (x);
6296 switch (code)
6298 case REG:
6299 /* If the reg is changed anywhere in this block,
6300 the operand has been killed and the store's operands are not OK. */
6301 return ! TEST_BIT (reg_set_in_block[bb->index], REGNO (x));
6303 case MEM:
6304 x = XEXP (x, 0);
6305 goto repeat;
6307 case PRE_DEC:
6308 case PRE_INC:
6309 case POST_DEC:
6310 case POST_INC:
6311 return 0;
6313 case PC:
6314 case CC0: /*FIXME*/
6315 case CONST:
6316 case CONST_INT:
6317 case CONST_DOUBLE:
6318 case SYMBOL_REF:
6319 case LABEL_REF:
6320 case ADDR_VEC:
6321 case ADDR_DIFF_VEC:
6322 return 1;
6324 default:
6325 break;
6328 i = GET_RTX_LENGTH (code) - 1;
6329 fmt = GET_RTX_FORMAT (code);
6331 for (; i >= 0; i--)
6333 if (fmt[i] == 'e')
6335 rtx tem = XEXP (x, i);
6337 /* If we are about to do the last recursive call
6338 needed at this level, change it into iteration.
6339 This function is called enough to be worth it. */
6340 if (i == 0)
6342 x = tem;
6343 goto repeat;
6346 if (! store_ops_ok (tem, bb))
6347 return 0;
6349 else if (fmt[i] == 'E')
6351 int j;
6353 for (j = 0; j < XVECLEN (x, i); j++)
6355 if (! store_ops_ok (XVECEXP (x, i, j), bb))
6356 return 0;
6361 return 1;
6364 /* Determine whether INSN is a MEM store pattern that we will consider moving. */
6366 static void
6367 find_moveable_store (insn)
6368 rtx insn;
6370 struct ls_expr * ptr;
6371 rtx dest = PATTERN (insn);
6373 if (GET_CODE (dest) != SET
6374 || GET_CODE (SET_SRC (dest)) == ASM_OPERANDS)
6375 return;
6377 dest = SET_DEST (dest);
6379 if (GET_CODE (dest) != MEM || MEM_VOLATILE_P (dest)
6380 || GET_MODE (dest) == BLKmode)
6381 return;
6383 if (GET_CODE (XEXP (dest, 0)) != SYMBOL_REF)
6384 return;
6386 if (rtx_varies_p (XEXP (dest, 0), 0))
6387 return;
6389 ptr = ldst_entry (dest);
6390 ptr->stores = alloc_INSN_LIST (insn, ptr->stores);
6393 /* Build the store table: record which registers are set in each block and
6394 collect the MEM stores we may consider moving. Return the number of stores found. */
6396 static int
6397 compute_store_table ()
6399 int bb, ret;
6400 unsigned regno;
6401 rtx insn, pat;
6403 max_gcse_regno = max_reg_num ();
6405 reg_set_in_block = (sbitmap *) sbitmap_vector_alloc (n_basic_blocks,
6406 max_gcse_regno);
6407 sbitmap_vector_zero (reg_set_in_block, n_basic_blocks);
6408 pre_ldst_mems = 0;
6410 /* Find all the stores we care about. */
6411 for (bb = 0; bb < n_basic_blocks; bb++)
6413 regvec = & (reg_set_in_block[bb]);
6414 for (insn = BLOCK_END (bb);
6415 insn && insn != PREV_INSN (BLOCK_HEAD (bb));
6416 insn = PREV_INSN (insn))
6418 /* Ignore anything that is not a normal insn. */
6419 if (! INSN_P (insn))
6420 continue;
6422 if (GET_CODE (insn) == CALL_INSN)
6424 bool clobbers_all = false;
6425 #ifdef NON_SAVING_SETJMP
6426 if (NON_SAVING_SETJMP
6427 && find_reg_note (insn, REG_SETJMP, NULL_RTX))
6428 clobbers_all = true;
6429 #endif
6431 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
6432 if (clobbers_all
6433 || TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
6434 SET_BIT (reg_set_in_block[bb], regno);
6437 pat = PATTERN (insn);
6438 note_stores (pat, reg_set_info, NULL);
6440 /* Now that we've marked regs, look for stores. */
6441 if (GET_CODE (pat) == SET)
6442 find_moveable_store (insn);
6446 ret = enumerate_ldsts ();
6448 if (gcse_file)
6450 fprintf (gcse_file, "Store Motion Expressions.\n");
6451 print_ldst_list (gcse_file);
6454 return ret;
6457 /* Check to see if the load X is aliased with STORE_PATTERN. */
6459 static int
6460 load_kills_store (x, store_pattern)
6461 rtx x, store_pattern;
6463 if (true_dependence (x, GET_MODE (x), store_pattern, rtx_addr_varies_p))
6464 return 1;
6465 return 0;
6468 /* Go through the entire insn X, looking for any loads which might alias
6469 STORE_PATTERN. Return 1 if found. */
6471 static int
6472 find_loads (x, store_pattern)
6473 rtx x, store_pattern;
6475 const char * fmt;
6476 int i,j;
6477 int ret = 0;
6479 if (!x)
6480 return 0;
6482 if (GET_CODE (x) == SET)
6483 x = SET_SRC (x);
6485 if (GET_CODE (x) == MEM)
6487 if (load_kills_store (x, store_pattern))
6488 return 1;
6491 /* Recursively process the insn. */
6492 fmt = GET_RTX_FORMAT (GET_CODE (x));
6494 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0 && !ret; i--)
6496 if (fmt[i] == 'e')
6497 ret |= find_loads (XEXP (x, i), store_pattern);
6498 else if (fmt[i] == 'E')
6499 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
6500 ret |= find_loads (XVECEXP (x, i, j), store_pattern);
6502 return ret;
6505 /* Check if INSN kills the store pattern X (is aliased with it).
6506 Return 1 if it does. */
6508 static int
6509 store_killed_in_insn (x, insn)
6510 rtx x, insn;
6512 if (GET_RTX_CLASS (GET_CODE (insn)) != 'i')
6513 return 0;
6515 if (GET_CODE (insn) == CALL_INSN)
6517 if (CONST_OR_PURE_CALL_P (insn))
6518 return 0;
6519 else
6520 return 1;
6523 if (GET_CODE (PATTERN (insn)) == SET)
6525 rtx pat = PATTERN (insn);
6526 /* Check for memory stores to aliased objects. */
6527 if (GET_CODE (SET_DEST (pat)) == MEM && !expr_equiv_p (SET_DEST (pat), x))
6528 /* Pretend it's a load and check for aliasing. */
6529 if (find_loads (SET_DEST (pat), x))
6530 return 1;
6531 return find_loads (SET_SRC (pat), x);
6533 else
6534 return find_loads (PATTERN (insn), x);
6537 /* Returns 1 if the expression X is loaded or clobbered on or after INSN
6538 within basic block BB. */
6540 static int
6541 store_killed_after (x, insn, bb)
6542 rtx x, insn;
6543 basic_block bb;
6545 rtx last = bb->end;
6547 if (insn == last)
6548 return 0;
6550 /* Check if the register operands of the store are OK in this block.
6551 Note that if registers are changed ANYWHERE in the block, we'll
6552 decide we can't move it, regardless of whether it changed above
6553 or below the store. This could be improved by checking the register
6554 operands while looking for aliasing in each insn. */
6555 if (!store_ops_ok (XEXP (x, 0), bb))
6556 return 1;
6558 for ( ; insn && insn != NEXT_INSN (last); insn = NEXT_INSN (insn))
6559 if (store_killed_in_insn (x, insn))
6560 return 1;
6562 return 0;
6565 /* Returns 1 if the expression X is loaded or clobbered on or before INSN
6566 within basic block BB. */
6567 static int
6568 store_killed_before (x, insn, bb)
6569 rtx x, insn;
6570 basic_block bb;
6572 rtx first = bb->head;
6574 if (insn == first)
6575 return store_killed_in_insn (x, insn);
6577 /* Check if the register operands of the store are OK in this block.
6578 Note that if registers are changed ANYWHERE in the block, we'll
6579 decide we can't move it, regardless of whether it changed above
6580 or below the store. This could be improved by checking the register
6581 operands while looking for aliasing in each insn. */
6582 if (!store_ops_ok (XEXP (x, 0), bb))
6583 return 1;
6585 for ( ; insn && insn != PREV_INSN (first); insn = PREV_INSN (insn))
6586 if (store_killed_in_insn (x, insn))
6587 return 1;
6589 return 0;
6592 #define ANTIC_STORE_LIST(x) ((x)->loads)
6593 #define AVAIL_STORE_LIST(x) ((x)->stores)
6595 /* Given the table of available store insns at the end of blocks,
6596 determine which ones are not killed by aliasing, and generate
6597 the appropriate vectors for gen and killed. */
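/* As a source-level illustration (hypothetical; it assumes the
   registers involved are not set elsewhere in the block): in a block
   containing

     x = MEMa;
     ...
     MEMa = y;

   the store to MEMa is killed before it by the load, so it is not
   anticipatable at the block entry (no st_antloc bit), but nothing
   kills it afterwards, so it is available at the block exit (its
   ae_gen bit is set).  With the order reversed,

     MEMa = y;
     ...
     x = MEMa;

   the store is anticipatable but not available, since the later load
   must see the stored value and so blocks sinking the store past it.  */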
6598 static void
6599 build_store_vectors ()
6601 basic_block bb;
6602 int b;
6603 rtx insn, st;
6604 struct ls_expr * ptr;
6606 /* Build the gen_vector. This is any store in the table which is not killed
6607 by aliasing later in its block. */
6608 ae_gen = (sbitmap *) sbitmap_vector_alloc (n_basic_blocks, num_stores);
6609 sbitmap_vector_zero (ae_gen, n_basic_blocks);
6611 st_antloc = (sbitmap *) sbitmap_vector_alloc (n_basic_blocks, num_stores);
6612 sbitmap_vector_zero (st_antloc, n_basic_blocks);
6614 for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
6616 /* Put all the stores into either the antic list, or the avail list,
6617 or both. */
6618 rtx store_list = ptr->stores;
6619 ptr->stores = NULL_RTX;
6621 for (st = store_list; st != NULL; st = XEXP (st, 1))
6623 insn = XEXP (st, 0);
6624 bb = BLOCK_FOR_INSN (insn);
6626 if (!store_killed_after (ptr->pattern, insn, bb))
6628 /* If we've already seen an available expression in this block,
6629 we can delete the one we saw already (it occurs earlier in
6630 the block) and replace it with this one. We'll copy the
6631 old SRC expression to an unused register in case there
6632 are any side effects. */
6633 if (TEST_BIT (ae_gen[bb->index], ptr->index))
6635 /* Find previous store. */
6636 rtx st;
6637 for (st = AVAIL_STORE_LIST (ptr); st ; st = XEXP (st, 1))
6638 if (BLOCK_FOR_INSN (XEXP (st, 0)) == bb)
6639 break;
6640 if (st)
6642 rtx r = gen_reg_rtx (GET_MODE (ptr->pattern));
6643 if (gcse_file)
6644 fprintf (gcse_file, "Removing redundant store:\n");
6645 replace_store_insn (r, XEXP (st, 0), bb);
6646 XEXP (st, 0) = insn;
6647 continue;
6650 SET_BIT (ae_gen[bb->index], ptr->index);
6651 AVAIL_STORE_LIST (ptr) = alloc_INSN_LIST (insn,
6652 AVAIL_STORE_LIST (ptr));
6655 if (!store_killed_before (ptr->pattern, insn, bb))
6657 SET_BIT (st_antloc[BLOCK_NUM (insn)], ptr->index);
6658 ANTIC_STORE_LIST (ptr) = alloc_INSN_LIST (insn,
6659 ANTIC_STORE_LIST (ptr));
6663 /* Free the original list of store insns. */
6664 free_INSN_LIST_list (&store_list);
6667 ae_kill = (sbitmap *) sbitmap_vector_alloc (n_basic_blocks, num_stores);
6668 sbitmap_vector_zero (ae_kill, n_basic_blocks);
6670 transp = (sbitmap *) sbitmap_vector_alloc (n_basic_blocks, num_stores);
6671 sbitmap_vector_zero (transp, n_basic_blocks);
6673 for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
6674 for (b = 0; b < n_basic_blocks; b++)
6676 if (store_killed_after (ptr->pattern, BLOCK_HEAD (b), BASIC_BLOCK (b)))
6678 /* The anticipatable expression is not killed if it's gen'd. */
6680 We leave this check out for now. If we have a code sequence
6681 in a block which looks like:
6682 ST MEMa = x
6683 L y = MEMa
6684 ST MEMa = z
6685 We should flag this as having an ANTIC expression, NOT
6686 transparent, NOT killed, and AVAIL.
6687 Unfortunately, since we haven't re-written all loads to
6688 use the reaching reg, we'll end up doing an incorrect
6689 Load in the middle here if we push the store down. It happens in
6690 gcc.c-torture/execute/960311-1.c with -O3
6691 If we always kill it in this case, we'll sometimes do
6692 unnecessary work, but it shouldn't actually hurt anything.
6693 if (!TEST_BIT (ae_gen[b], ptr->index)). */
6694 SET_BIT (ae_kill[b], ptr->index);
6696 else
6697 SET_BIT (transp[b], ptr->index);
6700 /* Any block with no exits calls some non-returning function, so
6701 we had better mark the store killed here, or we might not store to
6702 it at all. If we knew it was abort, we wouldn't have to store,
6703 but we don't know that for sure. */
6704 if (gcse_file)
6706 fprintf (gcse_file, "ST_avail and ST_antic (shown under loads..)\n");
6707 print_ldst_list (gcse_file);
6708 dump_sbitmap_vector (gcse_file, "st_antloc", "", st_antloc, n_basic_blocks);
6709 dump_sbitmap_vector (gcse_file, "st_kill", "", ae_kill, n_basic_blocks);
6710 dump_sbitmap_vector (gcse_file, "Transpt", "", transp, n_basic_blocks);
6711 dump_sbitmap_vector (gcse_file, "st_avloc", "", ae_gen, n_basic_blocks);
6715 /* Insert an instruction at the beginning of a basic block, and update
6716 the BLOCK_HEAD if needed. */
6718 static void
6719 insert_insn_start_bb (insn, bb)
6720 rtx insn;
6721 basic_block bb;
6723 /* Insert at start of successor block. */
6724 rtx prev = PREV_INSN (bb->head);
6725 rtx before = bb->head;
6726 while (before != 0)
6728 if (GET_CODE (before) != CODE_LABEL
6729 && (GET_CODE (before) != NOTE
6730 || NOTE_LINE_NUMBER (before) != NOTE_INSN_BASIC_BLOCK))
6731 break;
6732 prev = before;
6733 if (prev == bb->end)
6734 break;
6735 before = NEXT_INSN (before);
6738 insn = emit_insn_after (insn, prev);
6740 if (gcse_file)
6742 fprintf (gcse_file, "STORE_MOTION insert store at start of BB %d:\n",
6743 bb->index);
6744 print_inline_rtx (gcse_file, insn, 6);
6745 fprintf (gcse_file, "\n");
6749 /* This routine will insert a store on an edge. EXPR is the ldst entry for
6750 the memory reference, and E is the edge to insert it on. Returns non-zero
6751 if an edge insertion was performed. */
6753 static int
6754 insert_store (expr, e)
6755 struct ls_expr * expr;
6756 edge e;
6758 rtx reg, insn;
6759 basic_block bb;
6760 edge tmp;
6762 /* We did all the deletes before this insert, so if we didn't delete a
6763 store, then we haven't set the reaching reg yet either. */
6764 if (expr->reaching_reg == NULL_RTX)
6765 return 0;
6767 reg = expr->reaching_reg;
6768 insn = gen_move_insn (expr->pattern, reg);
6770 /* If we are inserting this expression on ALL predecessor edges of a BB,
6771 insert it at the start of the BB, and reset the insert bits on the other
6772 edges so we don't try to insert it on them as well. */
6773 bb = e->dest;
6774 for (tmp = e->dest->pred; tmp ; tmp = tmp->pred_next)
6776 int index = EDGE_INDEX (edge_list, tmp->src, tmp->dest);
6777 if (index == EDGE_INDEX_NO_EDGE)
6778 abort ();
6779 if (! TEST_BIT (pre_insert_map[index], expr->index))
6780 break;
6783 /* If tmp is NULL, we found an insertion on every edge, blank the
6784 insertion vector for these edges, and insert at the start of the BB. */
6785 if (!tmp && bb != EXIT_BLOCK_PTR)
6787 for (tmp = e->dest->pred; tmp ; tmp = tmp->pred_next)
6789 int index = EDGE_INDEX (edge_list, tmp->src, tmp->dest);
6790 RESET_BIT (pre_insert_map[index], expr->index);
6792 insert_insn_start_bb (insn, bb);
6793 return 0;
6796 /* We can't insert on this edge, so we'll insert at the head of the
6797 successor's block. See Morgan, sec 10.5. */
6798 if ((e->flags & EDGE_ABNORMAL) == EDGE_ABNORMAL)
6800 insert_insn_start_bb (insn, bb);
6801 return 0;
6804 insert_insn_on_edge (insn, e);
6806 if (gcse_file)
6808 fprintf (gcse_file, "STORE_MOTION insert insn on edge (%d, %d):\n",
6809 e->src->index, e->dest->index);
6810 print_inline_rtx (gcse_file, insn, 6);
6811 fprintf (gcse_file, "\n");
6814 return 1;
6817 /* This routine will replace a store with a SET to a specified register. */
6819 static void
6820 replace_store_insn (reg, del, bb)
6821 rtx reg, del;
6822 basic_block bb;
6824 rtx insn;
6826 insn = gen_move_insn (reg, SET_SRC (PATTERN (del)));
6827 insn = emit_insn_after (insn, del);
6829 if (gcse_file)
6831 fprintf (gcse_file,
6832 "STORE_MOTION delete insn in BB %d:\n ", bb->index);
6833 print_inline_rtx (gcse_file, del, 6);
6834 fprintf (gcse_file, "\nSTORE_MOTION replaced with insn:\n ");
6835 print_inline_rtx (gcse_file, insn, 6);
6836 fprintf (gcse_file, "\n");
6839 delete_insn (del);
6843 /* Delete a store, but copy the value that would have been stored into
6844 the reaching_reg for later storing. */
6846 static void
6847 delete_store (expr, bb)
6848 struct ls_expr * expr;
6849 basic_block bb;
6851 rtx reg, i, del;
6853 if (expr->reaching_reg == NULL_RTX)
6854 expr->reaching_reg = gen_reg_rtx (GET_MODE (expr->pattern));
6857 /* If there is more than 1 store, the earlier ones will be dead,
6858 but it doesn't hurt to replace them here. */
6859 reg = expr->reaching_reg;
6861 for (i = AVAIL_STORE_LIST (expr); i; i = XEXP (i, 1))
6863 del = XEXP (i, 0);
6864 if (BLOCK_FOR_INSN (del) == bb)
6866 /* We know there is only one since we deleted redundant
6867 ones during the available computation. */
6868 replace_store_insn (reg, del, bb);
6869 break;
6874 /* Free memory used by store motion. */
6876 static void
6877 free_store_memory ()
6879 free_ldst_mems ();
6881 if (ae_gen)
6882 sbitmap_vector_free (ae_gen);
6883 if (ae_kill)
6884 sbitmap_vector_free (ae_kill);
6885 if (transp)
6886 sbitmap_vector_free (transp);
6887 if (st_antloc)
6888 sbitmap_vector_free (st_antloc);
6889 if (pre_insert_map)
6890 sbitmap_vector_free (pre_insert_map);
6891 if (pre_delete_map)
6892 sbitmap_vector_free (pre_delete_map);
6893 if (reg_set_in_block)
6894 sbitmap_vector_free (reg_set_in_block);
6896 ae_gen = ae_kill = transp = st_antloc = NULL;
6897 pre_insert_map = pre_delete_map = reg_set_in_block = NULL;
6900 /* Perform store motion. Much like gcse, except we move expressions the
6901 other way by looking at the flowgraph in reverse. */
6903 static void
6904 store_motion ()
6906 int x;
6907 struct ls_expr * ptr;
6908 int update_flow = 0;
6910 if (gcse_file)
6912 fprintf (gcse_file, "before store motion\n");
6913 print_rtl (gcse_file, get_insns ());
6917 init_alias_analysis ();
6919 /* Find all the stores that are live to the end of their block. */
6920 num_stores = compute_store_table ();
6921 if (num_stores == 0)
6923 sbitmap_vector_free (reg_set_in_block);
6924 end_alias_analysis ();
6925 return;
6928 /* Now compute what's actually available to move. */
6929 add_noreturn_fake_exit_edges ();
6930 build_store_vectors ();
6932 edge_list = pre_edge_rev_lcm (gcse_file, num_stores, transp, ae_gen,
6933 st_antloc, ae_kill, &pre_insert_map,
6934 &pre_delete_map);
6936 /* Now we want to insert the new stores which are going to be needed. */
6937 for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
6939 for (x = 0; x < n_basic_blocks; x++)
6940 if (TEST_BIT (pre_delete_map[x], ptr->index))
6941 delete_store (ptr, BASIC_BLOCK (x));
6943 for (x = 0; x < NUM_EDGES (edge_list); x++)
6944 if (TEST_BIT (pre_insert_map[x], ptr->index))
6945 update_flow |= insert_store (ptr, INDEX_EDGE (edge_list, x));
6948 if (update_flow)
6949 commit_edge_insertions ();
6951 free_store_memory ();
6952 free_edge_list (edge_list);
6953 remove_fake_edges ();
6954 end_alias_analysis ();