1 /* Global common subexpression elimination/Partial redundancy elimination
2 and global constant/copy propagation for GNU compiler.
3 Copyright (C) 1997, 1998, 1999, 2000, 2001, 2002
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 2, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the Free
20 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
21 02111-1307, USA. */
23 /* TODO
24 - reordering of memory allocation and freeing to be more space efficient
25 - do rough calc of how many regs are needed in each block, and a rough
26 calc of how many regs are available in each class and use that to
27 throttle back the code in cases where RTX_COST is minimal.
28 - a store to the same address as a load does not kill the load if the
29 source of the store is also the destination of the load. Handling this
30 allows more load motion, particularly out of loops.
31 - ability to realloc sbitmap vectors would allow one initial computation
32 of reg_set_in_block with only subsequent additions, rather than
33 recomputing it for each pass
37 /* References searched while implementing this.
39 Compilers Principles, Techniques and Tools
40 Aho, Sethi, Ullman
41 Addison-Wesley, 1988
43 Global Optimization by Suppression of Partial Redundancies
44 E. Morel, C. Renvoise
45          Communications of the ACM, Vol. 22, Num. 2, Feb. 1979
47 A Portable Machine-Independent Global Optimizer - Design and Measurements
48 Frederick Chow
49 Stanford Ph.D. thesis, Dec. 1983
51 A Fast Algorithm for Code Movement Optimization
52 D.M. Dhamdhere
53 SIGPLAN Notices, Vol. 23, Num. 10, Oct. 1988
55 A Solution to a Problem with Morel and Renvoise's
56 Global Optimization by Suppression of Partial Redundancies
57 K-H Drechsler, M.P. Stadel
58 ACM TOPLAS, Vol. 10, Num. 4, Oct. 1988
60 Practical Adaptation of the Global Optimization
61 Algorithm of Morel and Renvoise
62 D.M. Dhamdhere
63 ACM TOPLAS, Vol. 13, Num. 2. Apr. 1991
65 Efficiently Computing Static Single Assignment Form and the Control
66 Dependence Graph
67 R. Cytron, J. Ferrante, B.K. Rosen, M.N. Wegman, and F.K. Zadeck
68 ACM TOPLAS, Vol. 13, Num. 4, Oct. 1991
70 Lazy Code Motion
71 J. Knoop, O. Ruthing, B. Steffen
72 ACM SIGPLAN Notices Vol. 27, Num. 7, Jul. 1992, '92 Conference on PLDI
74 What's In a Region? Or Computing Control Dependence Regions in Near-Linear
75 Time for Reducible Flow Control
76 Thomas Ball
77 ACM Letters on Programming Languages and Systems,
78 Vol. 2, Num. 1-4, Mar-Dec 1993
80 An Efficient Representation for Sparse Sets
81 Preston Briggs, Linda Torczon
82 ACM Letters on Programming Languages and Systems,
83 Vol. 2, Num. 1-4, Mar-Dec 1993
85 A Variation of Knoop, Ruthing, and Steffen's Lazy Code Motion
86 K-H Drechsler, M.P. Stadel
87 ACM SIGPLAN Notices, Vol. 28, Num. 5, May 1993
89 Partial Dead Code Elimination
90 J. Knoop, O. Ruthing, B. Steffen
91 ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994
93 Effective Partial Redundancy Elimination
94 P. Briggs, K.D. Cooper
95 ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994
97 The Program Structure Tree: Computing Control Regions in Linear Time
98 R. Johnson, D. Pearson, K. Pingali
99 ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994
101 Optimal Code Motion: Theory and Practice
102 J. Knoop, O. Ruthing, B. Steffen
103 ACM TOPLAS, Vol. 16, Num. 4, Jul. 1994
105 The power of assignment motion
106 J. Knoop, O. Ruthing, B. Steffen
107 ACM SIGPLAN Notices Vol. 30, Num. 6, Jun. 1995, '95 Conference on PLDI
109 Global code motion / global value numbering
110 C. Click
111 ACM SIGPLAN Notices Vol. 30, Num. 6, Jun. 1995, '95 Conference on PLDI
113 Value Driven Redundancy Elimination
114 L.T. Simpson
115 Rice University Ph.D. thesis, Apr. 1996
117 Value Numbering
118 L.T. Simpson
119 Massively Scalar Compiler Project, Rice University, Sep. 1996
121 High Performance Compilers for Parallel Computing
122 Michael Wolfe
123 Addison-Wesley, 1996
125 Advanced Compiler Design and Implementation
126 Steven Muchnick
127 Morgan Kaufmann, 1997
129 Building an Optimizing Compiler
130 Robert Morgan
131 Digital Press, 1998
133 People wishing to speed up the code here should read:
134 Elimination Algorithms for Data Flow Analysis
135 B.G. Ryder, M.C. Paull
136 ACM Computing Surveys, Vol. 18, Num. 3, Sep. 1986
138 How to Analyze Large Programs Efficiently and Informatively
139 D.M. Dhamdhere, B.K. Rosen, F.K. Zadeck
140 ACM SIGPLAN Notices Vol. 27, Num. 7, Jul. 1992, '92 Conference on PLDI
142 People wishing to do something different can find various possibilities
143 in the above papers and elsewhere.
146 #include "config.h"
147 #include "system.h"
148 #include "toplev.h"
150 #include "rtl.h"
151 #include "tm_p.h"
152 #include "regs.h"
153 #include "hard-reg-set.h"
154 #include "flags.h"
155 #include "real.h"
156 #include "insn-config.h"
157 #include "recog.h"
158 #include "basic-block.h"
159 #include "output.h"
160 #include "function.h"
161 #include "expr.h"
162 #include "except.h"
163 #include "ggc.h"
164 #include "params.h"
165 #include "cselib.h"
167 #include "obstack.h"
169 /* Propagate flow information through back edges and thus enable PRE's
170 moving loop invariant calculations out of loops.
172 Originally this tended to create worse overall code, but several
173 improvements during the development of PRE seem to have made following
174 back edges generally a win.
176 Note much of the loop invariant code motion done here would normally
177 be done by loop.c, which has more heuristics for when to move invariants
178 out of loops. At some point we might need to move some of those
179 heuristics into gcse.c. */
181 /* We support GCSE via Partial Redundancy Elimination. PRE optimizations
182 are a superset of those done by GCSE.
184 We perform the following steps:
186 1) Compute basic block information.
188 2) Compute table of places where registers are set.
190 3) Perform copy/constant propagation.
192 4) Perform global cse.
194 5) Perform another pass of copy/constant propagation.
196 Two passes of copy/constant propagation are done because the first one
197 enables more GCSE and the second one helps to clean up the copies that
198 GCSE creates. This is needed more for PRE than for Classic because Classic
199 GCSE will try to use an existing register containing the common
200 subexpression rather than create a new one. This is harder to do for PRE
201 because of the code motion (which Classic GCSE doesn't do).
203 Expressions we are interested in GCSE-ing are of the form
204 (set (pseudo-reg) (expression)).
205 Function want_to_gcse_p says what these are.
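   As a hedged illustration only (pseudo-register numbers invented for
   exposition), a typical candidate insn looks like

     (set (reg:SI 100) (plus:SI (reg:SI 101) (reg:SI 102)))

   whereas plain register copies, constant loads, subregs and calls are
   rejected by want_to_gcse_p and never entered in the expression hash
   table.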
207 PRE handles moving invariant expressions out of loops (by treating them as
208 partially redundant).
210 Eventually it would be nice to replace cse.c/gcse.c with SSA (static single
211 assignment) based GVN (global value numbering). L. T. Simpson's paper
212 (Rice University) on value numbering is a useful reference for this.
214 **********************
216 We used to support multiple passes but there are diminishing returns in
217 doing so. The first pass usually makes 90% of the changes that are doable.
218 A second pass can make a few more changes made possible by the first pass.
219 Experiments show any further passes don't make enough changes to justify
220 the expense.
222 A study of spec92 using an unlimited number of passes:
223 [1 pass] = 1208 substitutions, [2] = 577, [3] = 202, [4] = 192, [5] = 83,
224 [6] = 34, [7] = 17, [8] = 9, [9] = 4, [10] = 4, [11] = 2,
225 [12] = 2, [13] = 1, [15] = 1, [16] = 2, [41] = 1
227    It was found that doing copy propagation between each pass enables further
228    substitutions.
230 PRE is quite expensive in complicated functions because the DFA can take
231    a while to converge.  Hence we only perform one pass.  The parameter max-gcse-passes can
232 be modified if one wants to experiment.
234 **********************
236 The steps for PRE are:
238 1) Build the hash table of expressions we wish to GCSE (expr_hash_table).
240 2) Perform the data flow analysis for PRE.
242 3) Delete the redundant instructions
244 4) Insert the required copies [if any] that make the partially
245 redundant instructions fully redundant.
247 5) For other reaching expressions, insert an instruction to copy the value
248 to a newly created pseudo that will reach the redundant instruction.
250 The deletion is done first so that when we do insertions we
251 know which pseudo reg to use.
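   A small hypothetical source-level sketch of what these steps achieve
   (variable names invented purely for exposition):

     before                        after
     ------                        -----
     if (cond)                     if (cond)
       x = a + b;                    { x = a + b; t = x; }   [step 5]
     else                          else
       ;                             t = a + b;              [step 4]
     y = a + b;                    y = t;                    [step 3]

   The final computation of a + b is only partially redundant; inserting a
   copy on the path where it was missing makes it fully redundant, after
   which it is replaced by the new pseudo t.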
253 Various papers have argued that PRE DFA is expensive (O(n^2)) and others
254 argue it is not. The number of iterations for the algorithm to converge
255 is typically 2-4 so I don't view it as that expensive (relatively speaking).
257 PRE GCSE depends heavily on the second CSE pass to clean up the copies
258 we create. To make an expression reach the place where it's redundant,
259 the result of the expression is copied to a new register, and the redundant
260 expression is deleted by replacing it with this new register. Classic GCSE
261    doesn't have this problem as much, since it computes the reaching defs of
262 each register in each block and thus can try to use an existing register.
264 **********************
266    A fair bit of simplicity is gained by using small functions for simple
267    tasks, even when the function is only called in one place.  This may
268    measurably slow things down [or may not] by creating more function call
269    overhead than is necessary.  The source is laid out so that it's trivial
270    to make the affected functions inline so that one can measure what speed
271    up, if any, can be achieved, and maybe later, when things settle down,
272    things can be rearranged.
274 Help stamp out big monolithic functions! */
276 /* GCSE global vars. */
278 /* -dG dump file. */
279 static FILE *gcse_file;
281 /* Note whether or not we should run jump optimization after gcse. We
282 want to do this for two cases.
284 * If we changed any jumps via cprop.
286 * If we added any labels via edge splitting. */
288 static int run_jump_opt_after_gcse;
290 /* Bitmaps are normally not included in debugging dumps.
291 However it's useful to be able to print them from GDB.
292 We could create special functions for this, but it's simpler to
293 just allow passing stderr to the dump_foo fns. Since stderr can
294 be a macro, we store a copy here. */
295 static FILE *debug_stderr;
297 /* An obstack for our working variables. */
298 static struct obstack gcse_obstack;
300 /* Non-zero for each mode that supports (set (reg) (reg)).
301 This is trivially true for integer and floating point values.
302 It may or may not be true for condition codes. */
303 static char can_copy_p[(int) NUM_MACHINE_MODES];
305 /* Non-zero if can_copy_p has been initialized. */
306 static int can_copy_init_p;
308 struct reg_use { rtx reg_rtx; };
310 /* Hash table of expressions. */
312 struct expr
314 /* The expression (SET_SRC for expressions, PATTERN for assignments). */
315 rtx expr;
316 /* Index in the available expression bitmaps. */
317 int bitmap_index;
318 /* Next entry with the same hash. */
319 struct expr *next_same_hash;
320 /* List of anticipatable occurrences in basic blocks in the function.
321 An "anticipatable occurrence" is one that is the first occurrence in the
322 basic block, the operands are not modified in the basic block prior
323 to the occurrence and the output is not used between the start of
324 the block and the occurrence. */
325 struct occr *antic_occr;
326 /* List of available occurrence in basic blocks in the function.
327 An "available occurrence" is one that is the last occurrence in the
328 basic block and the operands are not modified by following statements in
329 the basic block [including this insn]. */
330 struct occr *avail_occr;
331 /* Non-null if the computation is PRE redundant.
332 The value is the newly created pseudo-reg to record a copy of the
333 expression in all the places that reach the redundant copy. */
334 rtx reaching_reg;
337 /* Occurrence of an expression.
338 There is one per basic block. If a pattern appears more than once the
339 last appearance is used [or first for anticipatable expressions]. */
341 struct occr
343 /* Next occurrence of this expression. */
344 struct occr *next;
345 /* The insn that computes the expression. */
346 rtx insn;
347 /* Non-zero if this [anticipatable] occurrence has been deleted. */
348 char deleted_p;
349 /* Non-zero if this [available] occurrence has been copied to
350 reaching_reg. */
351 /* ??? This is mutually exclusive with deleted_p, so they could share
352 the same byte. */
353 char copied_p;
356 /* Expression and copy propagation hash tables.
357 Each hash table is an array of buckets.
358 ??? It is known that if it were an array of entries, structure elements
359 `next_same_hash' and `bitmap_index' wouldn't be necessary. However, it is
360 not clear whether in the final analysis a sufficient amount of memory would
361 be saved as the size of the available expression bitmaps would be larger
362 [one could build a mapping table without holes afterwards though].
363 Someday I'll perform the computation and figure it out. */
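/* For instance, two expressions whose hash values collide end up chained
   through their next_same_hash fields off the same bucket, i.e.
   table[h] -> expr_a -> expr_b -> NULL.  (Illustration only.)  */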
365 struct hash_table
367 /* The table itself.
368 This is an array of `expr_hash_table_size' elements. */
369 struct expr **table;
371 /* Size of the hash table, in elements. */
372 unsigned int size;
374 /* Number of hash table elements. */
375 unsigned int n_elems;
377   /* Whether the table is an expression table or a copy propagation (set) one.  */
378 int set_p;
381 /* Expression hash table. */
382 static struct hash_table expr_hash_table;
384 /* Copy propagation hash table. */
385 static struct hash_table set_hash_table;
387 /* Mapping of uids to cuids.
388 Only real insns get cuids. */
389 static int *uid_cuid;
391 /* Highest UID in UID_CUID. */
392 static int max_uid;
394 /* Get the cuid of an insn. */
395 #ifdef ENABLE_CHECKING
396 #define INSN_CUID(INSN) (INSN_UID (INSN) > max_uid ? (abort (), 0) : uid_cuid[INSN_UID (INSN)])
397 #else
398 #define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])
399 #endif
401 /* Number of cuids. */
402 static int max_cuid;
404 /* Mapping of cuids to insns. */
405 static rtx *cuid_insn;
407 /* Get insn from cuid. */
408 #define CUID_INSN(CUID) (cuid_insn[CUID])
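/* For example (UID values invented for illustration): if the real insns in
   the stream have UIDs 3, 7 and 12, they are assigned CUIDs 0, 1 and 2, so
   INSN_CUID gives a dense ordering for comparing the relative position of
   two insns, and CUID_INSN (1) maps back to the insn with UID 7.  */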
410 /* Maximum register number in function prior to doing gcse + 1.
411 Registers created during this pass have regno >= max_gcse_regno.
412    This is named with "gcse" to not collide with a global of the same name.  */
413 static unsigned int max_gcse_regno;
415 /* Table of registers that are modified.
417 For each register, each element is a list of places where the pseudo-reg
418 is set.
420 For simplicity, GCSE is done on sets of pseudo-regs only. PRE GCSE only
421 requires knowledge of which blocks kill which regs [and thus could use
422 a bitmap instead of the lists `reg_set_table' uses].
424    `reg_set_table' could be turned into an array of bitmaps (num-bbs x
425 num-regs) [however perhaps it may be useful to keep the data as is]. One
426 advantage of recording things this way is that `reg_set_table' is fairly
427 sparse with respect to pseudo regs but for hard regs could be fairly dense
428 [relatively speaking]. And recording sets of pseudo-regs in lists speeds
429 up functions like compute_transp since in the case of pseudo-regs we only
430 need to iterate over the number of times a pseudo-reg is set, not over the
431 number of basic blocks [clearly there is a bit of a slow down in the cases
432 where a pseudo is set more than once in a block, however it is believed
433 that the net effect is to speed things up]. This isn't done for hard-regs
434 because recording call-clobbered hard-regs in `reg_set_table' at each
435 function call can consume a fair bit of memory, and iterating over
436 hard-regs stored this way in compute_transp will be more expensive. */
438 typedef struct reg_set
440 /* The next setting of this register. */
441 struct reg_set *next;
442 /* The insn where it was set. */
443 rtx insn;
444 } reg_set;
446 static reg_set **reg_set_table;
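/* A minimal sketch of how the table is typically consulted (for
   illustration only):

     struct reg_set *r;
     for (r = reg_set_table[regno]; r != NULL; r = r->next)
       ... r->insn is one place where pseudo-reg REGNO is set ...

   so we walk the set sites of a pseudo rather than every basic block.  */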
448 /* Size of `reg_set_table'.
449 The table starts out at max_gcse_regno + slop, and is enlarged as
450 necessary. */
451 static int reg_set_table_size;
453 /* Amount to grow `reg_set_table' by when it's full. */
454 #define REG_SET_TABLE_SLOP 100
456 /* This is a list of expressions which are MEMs and will be used by load
457 or store motion.
458    Load motion tracks MEMs which aren't killed by
459    anything except themselves (i.e., loads and stores to a single location).
460    We can then allow movement of these MEM refs with a little special
461    allowance (all stores copy the same value to the reaching reg used
462    for the loads).  This means all values used to store into memory must have
463 no side effects so we can re-issue the setter value.
464 Store Motion uses this structure as an expression table to track stores
465 which look interesting, and might be moveable towards the exit block. */
467 struct ls_expr
469 struct expr * expr; /* Gcse expression reference for LM. */
470 rtx pattern; /* Pattern of this mem. */
471 rtx loads; /* INSN list of loads seen. */
472 rtx stores; /* INSN list of stores seen. */
473 struct ls_expr * next; /* Next in the list. */
474 int invalid; /* Invalid for some reason. */
475 int index; /* If it maps to a bitmap index. */
476 int hash_index; /* Index when in a hash table. */
477 rtx reaching_reg; /* Register to use when re-writing. */
480 /* Head of the list of load/store memory refs. */
481 static struct ls_expr * pre_ldst_mems = NULL;
483 /* Bitmap containing one bit for each register in the program.
484 Used when performing GCSE to track which registers have been set since
485 the start of the basic block. */
486 static regset reg_set_bitmap;
488 /* For each block, a bitmap of registers set in the block.
489 This is used by expr_killed_p and compute_transp.
490 It is computed during hash table computation and not by compute_sets
491 as it includes registers added since the last pass (or between cprop and
492 gcse) and it's currently not easy to realloc sbitmap vectors. */
493 static sbitmap *reg_set_in_block;
495 /* Array, indexed by basic block number for a list of insns which modify
496 memory within that block. */
497 static rtx * modify_mem_list;
498 bitmap modify_mem_list_set;
500 /* This array parallels modify_mem_list, but is kept canonicalized. */
501 static rtx * canon_modify_mem_list;
502 bitmap canon_modify_mem_list_set;
503 /* Various variables for statistics gathering. */
505 /* Memory used in a pass.
506 This isn't intended to be absolutely precise. Its intent is only
507 to keep an eye on memory usage. */
508 static int bytes_used;
510 /* GCSE substitutions made. */
511 static int gcse_subst_count;
512 /* Number of copy instructions created. */
513 static int gcse_create_count;
514 /* Number of constants propagated. */
515 static int const_prop_count;
516 /* Number of copies propagated.  */
517 static int copy_prop_count;
519 /* These variables are used by classic GCSE.
520 Normally they'd be defined a bit later, but `rd_gen' needs to
521 be declared sooner. */
523 /* Each block has a bitmap of each type.
524    The length of each block's bitmap is:
526 max_cuid - for reaching definitions
527 n_exprs - for available expressions
529    Thus we view the bitmaps as two-dimensional arrays, i.e.,
530 rd_kill[block_num][cuid_num]
531 ae_kill[block_num][expr_num] */
533 /* For reaching defs */
534 static sbitmap *rd_kill, *rd_gen, *reaching_defs, *rd_out;
536 /* for available exprs */
537 static sbitmap *ae_kill, *ae_gen, *ae_in, *ae_out;
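/* For example, TEST_BIT (ae_gen[bb->index], expr->bitmap_index) asks whether
   EXPR is made available by block BB, and the corresponding bit in ae_kill
   asks whether the block kills it.  (Illustration only.)  */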
539 /* Objects of this type are passed around by the null-pointer check
540 removal routines. */
541 struct null_pointer_info
543 /* The basic block being processed. */
544 basic_block current_block;
545 /* The first register to be handled in this pass. */
546 unsigned int min_reg;
547 /* One greater than the last register to be handled in this pass. */
548 unsigned int max_reg;
549 sbitmap *nonnull_local;
550 sbitmap *nonnull_killed;
553 static void compute_can_copy PARAMS ((void));
554 static char *gmalloc PARAMS ((unsigned int));
555 static char *grealloc PARAMS ((char *, unsigned int));
556 static char *gcse_alloc PARAMS ((unsigned long));
557 static void alloc_gcse_mem PARAMS ((rtx));
558 static void free_gcse_mem PARAMS ((void));
559 static void alloc_reg_set_mem PARAMS ((int));
560 static void free_reg_set_mem PARAMS ((void));
561 static int get_bitmap_width PARAMS ((int, int, int));
562 static void record_one_set PARAMS ((int, rtx));
563 static void record_set_info PARAMS ((rtx, rtx, void *));
564 static void compute_sets PARAMS ((rtx));
565 static void hash_scan_insn PARAMS ((rtx, struct hash_table *, int));
566 static void hash_scan_set PARAMS ((rtx, rtx, struct hash_table *));
567 static void hash_scan_clobber PARAMS ((rtx, rtx, struct hash_table *));
568 static void hash_scan_call PARAMS ((rtx, rtx, struct hash_table *));
569 static int want_to_gcse_p PARAMS ((rtx));
570 static int oprs_unchanged_p PARAMS ((rtx, rtx, int));
571 static int oprs_anticipatable_p PARAMS ((rtx, rtx));
572 static int oprs_available_p PARAMS ((rtx, rtx));
573 static void insert_expr_in_table PARAMS ((rtx, enum machine_mode, rtx,
574 int, int, struct hash_table *));
575 static void insert_set_in_table PARAMS ((rtx, rtx, struct hash_table *));
576 static unsigned int hash_expr PARAMS ((rtx, enum machine_mode, int *, int));
577 static unsigned int hash_expr_1 PARAMS ((rtx, enum machine_mode, int *));
578 static unsigned int hash_string_1 PARAMS ((const char *));
579 static unsigned int hash_set PARAMS ((int, int));
580 static int expr_equiv_p PARAMS ((rtx, rtx));
581 static void record_last_reg_set_info PARAMS ((rtx, int));
582 static void record_last_mem_set_info PARAMS ((rtx));
583 static void record_last_set_info PARAMS ((rtx, rtx, void *));
584 static void compute_hash_table PARAMS ((struct hash_table *));
585 static void alloc_hash_table PARAMS ((int, struct hash_table *, int));
586 static void free_hash_table PARAMS ((struct hash_table *));
587 static void compute_hash_table_work PARAMS ((struct hash_table *));
588 static void dump_hash_table PARAMS ((FILE *, const char *,
589 struct hash_table *));
590 static struct expr *lookup_expr PARAMS ((rtx, struct hash_table *));
591 static struct expr *lookup_set PARAMS ((unsigned int, rtx, struct hash_table *));
592 static struct expr *next_set PARAMS ((unsigned int, struct expr *));
593 static void reset_opr_set_tables PARAMS ((void));
594 static int oprs_not_set_p PARAMS ((rtx, rtx));
595 static void mark_call PARAMS ((rtx));
596 static void mark_set PARAMS ((rtx, rtx));
597 static void mark_clobber PARAMS ((rtx, rtx));
598 static void mark_oprs_set PARAMS ((rtx));
599 static void alloc_cprop_mem PARAMS ((int, int));
600 static void free_cprop_mem PARAMS ((void));
601 static void compute_transp PARAMS ((rtx, int, sbitmap *, int));
602 static void compute_transpout PARAMS ((void));
603 static void compute_local_properties PARAMS ((sbitmap *, sbitmap *, sbitmap *,
604 struct hash_table *));
605 static void compute_cprop_data PARAMS ((void));
606 static void find_used_regs PARAMS ((rtx *, void *));
607 static int try_replace_reg PARAMS ((rtx, rtx, rtx));
608 static struct expr *find_avail_set PARAMS ((int, rtx));
609 static int cprop_jump PARAMS ((basic_block, rtx, rtx, rtx, rtx));
610 static void mems_conflict_for_gcse_p PARAMS ((rtx, rtx, void *));
611 static int load_killed_in_block_p PARAMS ((basic_block, int, rtx, int));
612 static void canon_list_insert PARAMS ((rtx, rtx, void *));
613 static int cprop_insn PARAMS ((rtx, int));
614 static int cprop PARAMS ((int));
615 static int one_cprop_pass PARAMS ((int, int));
616 static bool constprop_register PARAMS ((rtx, rtx, rtx, int));
617 static struct expr *find_bypass_set PARAMS ((int, int));
618 static int bypass_block PARAMS ((basic_block, rtx, rtx));
619 static int bypass_conditional_jumps PARAMS ((void));
620 static void alloc_pre_mem PARAMS ((int, int));
621 static void free_pre_mem PARAMS ((void));
622 static void compute_pre_data PARAMS ((void));
623 static int pre_expr_reaches_here_p PARAMS ((basic_block, struct expr *,
624 basic_block));
625 static void insert_insn_end_bb PARAMS ((struct expr *, basic_block, int));
626 static void pre_insert_copy_insn PARAMS ((struct expr *, rtx));
627 static void pre_insert_copies PARAMS ((void));
628 static int pre_delete PARAMS ((void));
629 static int pre_gcse PARAMS ((void));
630 static int one_pre_gcse_pass PARAMS ((int));
631 static void add_label_notes PARAMS ((rtx, rtx));
632 static void alloc_code_hoist_mem PARAMS ((int, int));
633 static void free_code_hoist_mem PARAMS ((void));
634 static void compute_code_hoist_vbeinout PARAMS ((void));
635 static void compute_code_hoist_data PARAMS ((void));
636 static int hoist_expr_reaches_here_p PARAMS ((basic_block, int, basic_block,
637 char *));
638 static void hoist_code PARAMS ((void));
639 static int one_code_hoisting_pass PARAMS ((void));
640 static void alloc_rd_mem PARAMS ((int, int));
641 static void free_rd_mem PARAMS ((void));
642 static void handle_rd_kill_set PARAMS ((rtx, int, basic_block));
643 static void compute_kill_rd PARAMS ((void));
644 static void compute_rd PARAMS ((void));
645 static void alloc_avail_expr_mem PARAMS ((int, int));
646 static void free_avail_expr_mem PARAMS ((void));
647 static void compute_ae_gen PARAMS ((struct hash_table *));
648 static int expr_killed_p PARAMS ((rtx, basic_block));
649 static void compute_ae_kill PARAMS ((sbitmap *, sbitmap *, struct hash_table *));
650 static int expr_reaches_here_p PARAMS ((struct occr *, struct expr *,
651 basic_block, int));
652 static rtx computing_insn PARAMS ((struct expr *, rtx));
653 static int def_reaches_here_p PARAMS ((rtx, rtx));
654 static int can_disregard_other_sets PARAMS ((struct reg_set **, rtx, int));
655 static int handle_avail_expr PARAMS ((rtx, struct expr *));
656 static int classic_gcse PARAMS ((void));
657 static int one_classic_gcse_pass PARAMS ((int));
658 static void invalidate_nonnull_info PARAMS ((rtx, rtx, void *));
659 static int delete_null_pointer_checks_1 PARAMS ((unsigned int *,
660 sbitmap *, sbitmap *,
661 struct null_pointer_info *));
662 static rtx process_insert_insn PARAMS ((struct expr *));
663 static int pre_edge_insert PARAMS ((struct edge_list *, struct expr **));
664 static int expr_reaches_here_p_work PARAMS ((struct occr *, struct expr *,
665 basic_block, int, char *));
666 static int pre_expr_reaches_here_p_work PARAMS ((basic_block, struct expr *,
667 basic_block, char *));
668 static struct ls_expr * ldst_entry PARAMS ((rtx));
669 static void free_ldst_entry PARAMS ((struct ls_expr *));
670 static void free_ldst_mems PARAMS ((void));
671 static void print_ldst_list PARAMS ((FILE *));
672 static struct ls_expr * find_rtx_in_ldst PARAMS ((rtx));
673 static int enumerate_ldsts PARAMS ((void));
674 static inline struct ls_expr * first_ls_expr PARAMS ((void));
675 static inline struct ls_expr * next_ls_expr PARAMS ((struct ls_expr *));
676 static int simple_mem PARAMS ((rtx));
677 static void invalidate_any_buried_refs PARAMS ((rtx));
678 static void compute_ld_motion_mems PARAMS ((void));
679 static void trim_ld_motion_mems PARAMS ((void));
680 static void update_ld_motion_stores PARAMS ((struct expr *));
681 static void reg_set_info PARAMS ((rtx, rtx, void *));
682 static int store_ops_ok PARAMS ((rtx, basic_block));
683 static void find_moveable_store PARAMS ((rtx));
684 static int compute_store_table PARAMS ((void));
685 static int load_kills_store PARAMS ((rtx, rtx));
686 static int find_loads PARAMS ((rtx, rtx));
687 static int store_killed_in_insn PARAMS ((rtx, rtx));
688 static int store_killed_after PARAMS ((rtx, rtx, basic_block));
689 static int store_killed_before PARAMS ((rtx, rtx, basic_block));
690 static void build_store_vectors PARAMS ((void));
691 static void insert_insn_start_bb PARAMS ((rtx, basic_block));
692 static int insert_store PARAMS ((struct ls_expr *, edge));
693 static void replace_store_insn PARAMS ((rtx, rtx, basic_block));
694 static void delete_store PARAMS ((struct ls_expr *,
695 basic_block));
696 static void free_store_memory PARAMS ((void));
697 static void store_motion PARAMS ((void));
698 static void free_insn_expr_list_list PARAMS ((rtx *));
699 static void clear_modify_mem_tables PARAMS ((void));
700 static void free_modify_mem_tables PARAMS ((void));
701 static rtx gcse_emit_move_after PARAMS ((rtx, rtx, rtx));
702 static bool do_local_cprop PARAMS ((rtx, rtx, int));
703 static void local_cprop_pass PARAMS ((int));
705 /* Entry point for global common subexpression elimination.
706 F is the first instruction in the function. */
709 gcse_main (f, file)
710 rtx f;
711 FILE *file;
713 int changed, pass;
714 /* Bytes used at start of pass. */
715 int initial_bytes_used;
716 /* Maximum number of bytes used by a pass. */
717 int max_pass_bytes;
718 /* Point to release obstack data from for each pass. */
719 char *gcse_obstack_bottom;
721 /* Insertion of instructions on edges can create new basic blocks; we
722 need the original basic block count so that we can properly deallocate
723 arrays sized on the number of basic blocks originally in the cfg. */
724 int orig_bb_count;
725 /* We do not construct an accurate cfg in functions which call
726 setjmp, so just punt to be safe. */
727 if (current_function_calls_setjmp)
728 return 0;
730 /* Assume that we do not need to run jump optimizations after gcse. */
731 run_jump_opt_after_gcse = 0;
733 /* For calling dump_foo fns from gdb. */
734 debug_stderr = stderr;
735 gcse_file = file;
737 /* Identify the basic block information for this function, including
738 successors and predecessors. */
739 max_gcse_regno = max_reg_num ();
741 if (file)
742 dump_flow_info (file);
744 orig_bb_count = n_basic_blocks;
745 /* Return if there's nothing to do. */
746 if (n_basic_blocks <= 1)
747 return 0;
749 /* Trying to perform global optimizations on flow graphs which have
750 a high connectivity will take a long time and is unlikely to be
751 particularly useful.
753 In normal circumstances a cfg should have about twice as many edges
754 as blocks. But we do not want to punish small functions which have
755 a couple switch statements. So we require a relatively large number
756 of basic blocks and the ratio of edges to blocks to be high. */
757 if (n_basic_blocks > 1000 && n_edges / n_basic_blocks >= 20)
759 if (warn_disabled_optimization)
760 warning ("GCSE disabled: %d > 1000 basic blocks and %d >= 20 edges/basic block",
761 n_basic_blocks, n_edges / n_basic_blocks);
762 return 0;
765 /* If allocating memory for the cprop bitmap would take up too much
766 storage it's better just to disable the optimization. */
767 if ((n_basic_blocks
768 * SBITMAP_SET_SIZE (max_gcse_regno)
769 * sizeof (SBITMAP_ELT_TYPE)) > MAX_GCSE_MEMORY)
771 if (warn_disabled_optimization)
772 warning ("GCSE disabled: %d basic blocks and %d registers",
773 n_basic_blocks, max_gcse_regno);
775 return 0;
778 /* See what modes support reg/reg copy operations. */
779 if (! can_copy_init_p)
781 compute_can_copy ();
782 can_copy_init_p = 1;
785 gcc_obstack_init (&gcse_obstack);
786 bytes_used = 0;
788 /* We need alias. */
789 init_alias_analysis ();
790 /* Record where pseudo-registers are set. This data is kept accurate
791 during each pass. ??? We could also record hard-reg information here
792 [since it's unchanging], however it is currently done during hash table
793 computation.
795 It may be tempting to compute MEM set information here too, but MEM sets
796 will be subject to code motion one day and thus we need to compute
797 information about memory sets when we build the hash tables. */
799 alloc_reg_set_mem (max_gcse_regno);
800 compute_sets (f);
802 pass = 0;
803 initial_bytes_used = bytes_used;
804 max_pass_bytes = 0;
805 gcse_obstack_bottom = gcse_alloc (1);
806 changed = 1;
807 while (changed && pass < MAX_GCSE_PASSES)
809 changed = 0;
810 if (file)
811 fprintf (file, "GCSE pass %d\n\n", pass + 1);
813 /* Initialize bytes_used to the space for the pred/succ lists,
814 and the reg_set_table data. */
815 bytes_used = initial_bytes_used;
817 /* Each pass may create new registers, so recalculate each time. */
818 max_gcse_regno = max_reg_num ();
820 alloc_gcse_mem (f);
822 /* Don't allow constant propagation to modify jumps
823 during this pass. */
824 changed = one_cprop_pass (pass + 1, 0);
826 if (optimize_size)
827 changed |= one_classic_gcse_pass (pass + 1);
828 else
830 changed |= one_pre_gcse_pass (pass + 1);
831 /* We may have just created new basic blocks. Release and
832 recompute various things which are sized on the number of
833 basic blocks. */
834 if (changed)
836 free_modify_mem_tables ();
837 modify_mem_list
838 = (rtx *) gmalloc (last_basic_block * sizeof (rtx));
839 canon_modify_mem_list
840 = (rtx *) gmalloc (last_basic_block * sizeof (rtx));
841 memset ((char *) modify_mem_list, 0, last_basic_block * sizeof (rtx));
842 memset ((char *) canon_modify_mem_list, 0, last_basic_block * sizeof (rtx));
843 orig_bb_count = n_basic_blocks;
845 free_reg_set_mem ();
846 alloc_reg_set_mem (max_reg_num ());
847 compute_sets (f);
848 run_jump_opt_after_gcse = 1;
851 if (max_pass_bytes < bytes_used)
852 max_pass_bytes = bytes_used;
854 /* Free up memory, then reallocate for code hoisting. We can
855 not re-use the existing allocated memory because the tables
856 will not have info for the insns or registers created by
857 partial redundancy elimination. */
858 free_gcse_mem ();
860   /* It does not make sense to run code hoisting unless we are optimizing
861 for code size -- it rarely makes programs faster, and can make
862 them bigger if we did partial redundancy elimination (when optimizing
863 for space, we use a classic gcse algorithm instead of partial
864 redundancy algorithms). */
865 if (optimize_size)
867 max_gcse_regno = max_reg_num ();
868 alloc_gcse_mem (f);
869 changed |= one_code_hoisting_pass ();
870 free_gcse_mem ();
872 if (max_pass_bytes < bytes_used)
873 max_pass_bytes = bytes_used;
876 if (file)
878 fprintf (file, "\n");
879 fflush (file);
882 obstack_free (&gcse_obstack, gcse_obstack_bottom);
883 pass++;
886 /* Do one last pass of copy propagation, including cprop into
887 conditional jumps. */
889 max_gcse_regno = max_reg_num ();
890 alloc_gcse_mem (f);
891 /* This time, go ahead and allow cprop to alter jumps. */
892 one_cprop_pass (pass + 1, 1);
893 free_gcse_mem ();
895 if (file)
897 fprintf (file, "GCSE of %s: %d basic blocks, ",
898 current_function_name, n_basic_blocks);
899 fprintf (file, "%d pass%s, %d bytes\n\n",
900 pass, pass > 1 ? "es" : "", max_pass_bytes);
903 obstack_free (&gcse_obstack, NULL);
904 free_reg_set_mem ();
905 /* We are finished with alias. */
906 end_alias_analysis ();
907 allocate_reg_info (max_reg_num (), FALSE, FALSE);
909 /* Store motion disabled until it is fixed. */
910 if (0 && !optimize_size && flag_gcse_sm)
911 store_motion ();
912 /* Record where pseudo-registers are set. */
913 return run_jump_opt_after_gcse;
916 /* Misc. utilities. */
918 /* Compute which modes support reg/reg copy operations. */
920 static void
921 compute_can_copy ()
923 int i;
924 #ifndef AVOID_CCMODE_COPIES
925 rtx reg, insn;
926 #endif
927 memset (can_copy_p, 0, NUM_MACHINE_MODES);
929 start_sequence ();
930 for (i = 0; i < NUM_MACHINE_MODES; i++)
931 if (GET_MODE_CLASS (i) == MODE_CC)
933 #ifdef AVOID_CCMODE_COPIES
934 can_copy_p[i] = 0;
935 #else
936 reg = gen_rtx_REG ((enum machine_mode) i, LAST_VIRTUAL_REGISTER + 1);
937 insn = emit_insn (gen_rtx_SET (VOIDmode, reg, reg));
938 if (recog (PATTERN (insn), insn, NULL) >= 0)
939 can_copy_p[i] = 1;
940 #endif
942 else
943 can_copy_p[i] = 1;
945 end_sequence ();
948 /* Cover function to xmalloc to record bytes allocated. */
950 static char *
951 gmalloc (size)
952 unsigned int size;
954 bytes_used += size;
955 return xmalloc (size);
958 /* Cover function to xrealloc.
959 We don't record the additional size since we don't know it.
960 It won't affect memory usage stats much anyway. */
962 static char *
963 grealloc (ptr, size)
964 char *ptr;
965 unsigned int size;
967 return xrealloc (ptr, size);
970 /* Cover function to obstack_alloc. */
972 static char *
973 gcse_alloc (size)
974 unsigned long size;
976 bytes_used += size;
977 return (char *) obstack_alloc (&gcse_obstack, size);
980 /* Allocate memory for the cuid mapping array,
981 and reg/memory set tracking tables.
983 This is called at the start of each pass. */
985 static void
986 alloc_gcse_mem (f)
987 rtx f;
989 int i, n;
990 rtx insn;
992 /* Find the largest UID and create a mapping from UIDs to CUIDs.
993 CUIDs are like UIDs except they increase monotonically, have no gaps,
994 and only apply to real insns. */
996 max_uid = get_max_uid ();
997 n = (max_uid + 1) * sizeof (int);
998 uid_cuid = (int *) gmalloc (n);
999 memset ((char *) uid_cuid, 0, n);
1000 for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
1002 if (INSN_P (insn))
1003 uid_cuid[INSN_UID (insn)] = i++;
1004 else
1005 uid_cuid[INSN_UID (insn)] = i;
1008 /* Create a table mapping cuids to insns. */
1010 max_cuid = i;
1011 n = (max_cuid + 1) * sizeof (rtx);
1012 cuid_insn = (rtx *) gmalloc (n);
1013 memset ((char *) cuid_insn, 0, n);
1014 for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
1015 if (INSN_P (insn))
1016 CUID_INSN (i++) = insn;
1018 /* Allocate vars to track sets of regs. */
1019 reg_set_bitmap = BITMAP_XMALLOC ();
1021 /* Allocate vars to track sets of regs, memory per block. */
1022 reg_set_in_block = (sbitmap *) sbitmap_vector_alloc (last_basic_block,
1023 max_gcse_regno);
1024 /* Allocate array to keep a list of insns which modify memory in each
1025 basic block. */
1026 modify_mem_list = (rtx *) gmalloc (last_basic_block * sizeof (rtx));
1027 canon_modify_mem_list = (rtx *) gmalloc (last_basic_block * sizeof (rtx));
1028 memset ((char *) modify_mem_list, 0, last_basic_block * sizeof (rtx));
1029 memset ((char *) canon_modify_mem_list, 0, last_basic_block * sizeof (rtx));
1030 modify_mem_list_set = BITMAP_XMALLOC ();
1031 canon_modify_mem_list_set = BITMAP_XMALLOC ();
1034 /* Free memory allocated by alloc_gcse_mem. */
1036 static void
1037 free_gcse_mem ()
1039 free (uid_cuid);
1040 free (cuid_insn);
1042 BITMAP_XFREE (reg_set_bitmap);
1044 sbitmap_vector_free (reg_set_in_block);
1045 free_modify_mem_tables ();
1046 BITMAP_XFREE (modify_mem_list_set);
1047 BITMAP_XFREE (canon_modify_mem_list_set);
1050 /* Many of the global optimization algorithms work by solving dataflow
1051 equations for various expressions. Initially, some local value is
1052 computed for each expression in each block. Then, the values across the
1053 various blocks are combined (by following flow graph edges) to arrive at
1054 global values. Conceptually, each set of equations is independent. We
1055 may therefore solve all the equations in parallel, solve them one at a
1056 time, or pick any intermediate approach.
1058 When you're going to need N two-dimensional bitmaps, each X (say, the
1059 number of blocks) by Y (say, the number of expressions), call this
1060 function. It's not important what X and Y represent; only that Y
1061 correspond to the things that can be done in parallel. This function will
1062 return an appropriate chunking factor C; you should solve C sets of
1063 equations in parallel. By going through this function, we can easily
1064 trade space against time; by solving fewer equations in parallel we use
1065 less space. */
1067 static int
1068 get_bitmap_width (n, x, y)
1069 int n;
1070 int x;
1071 int y;
1073 /* It's not really worth figuring out *exactly* how much memory will
1074 be used by a particular choice. The important thing is to get
1075 something approximately right. */
1076 size_t max_bitmap_memory = 10 * 1024 * 1024;
1078 /* The number of bytes we'd use for a single column of minimum
1079 width. */
1080 size_t column_size = n * x * sizeof (SBITMAP_ELT_TYPE);
1082 /* Often, it's reasonable just to solve all the equations in
1083 parallel. */
1084 if (column_size * SBITMAP_SET_SIZE (y) <= max_bitmap_memory)
1085 return y;
1087 /* Otherwise, pick the largest width we can, without going over the
1088 limit. */
1089 return SBITMAP_ELT_BITS * ((max_bitmap_memory + column_size - 1)
1090 / column_size);
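/* A worked example of the chunking above, assuming 8-byte sbitmap elements
   (host dependent, for illustration only): with n == 2, x == 500 blocks and
   y == 10000 expressions, column_size is 8000 bytes and solving everything
   in parallel needs roughly 8000 * 157 bytes, well under the 10MB cap, so
   all 10000 equations are solved at once.  With y == 200000 the full solve
   would need about 25MB, so instead a chunk of
   SBITMAP_ELT_BITS * ceil (10MB / 8000) equations is solved at a time.  */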
1093 /* Compute the local properties of each recorded expression.
1095 Local properties are those that are defined by the block, irrespective of
1096 other blocks.
1098 An expression is transparent in a block if its operands are not modified
1099 in the block.
1101    An expression is computed (locally available) in a block if it is computed
1102    at least once and the expression would contain the same value if the
1103    computation was moved to the end of the block.
1105    An expression is locally anticipatable in a block if it is computed at
1106    least once and the expression would contain the same value if the
1107    computation was moved to the beginning of the block.
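   A tiny hypothetical illustration: in a block containing

     r1 = r2 + r3;
     r2 = ...;

   the expression r2 + r3 is locally anticipatable (computed first, with its
   operands unmodified before that point), but it is neither transparent nor
   computed/available, because r2 is modified later in the block.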
1109 We call this routine for cprop, pre and code hoisting. They all compute
1110 basically the same information and thus can easily share this code.
1112 TRANSP, COMP, and ANTLOC are destination sbitmaps for recording local
1113 properties. If NULL, then it is not necessary to compute or record that
1114 particular property.
1116    TABLE controls which hash table to look at.  If it is the set hash table,
1117    TRANSP is additionally computed as ~TRANSP, since this is really cprop's
1118    ABSALTERED.  */
1120 static void
1121 compute_local_properties (transp, comp, antloc, table)
1122 sbitmap *transp;
1123 sbitmap *comp;
1124 sbitmap *antloc;
1125 struct hash_table *table;
1127 unsigned int i;
1129 /* Initialize any bitmaps that were passed in. */
1130 if (transp)
1132 if (table->set_p)
1133 sbitmap_vector_zero (transp, last_basic_block);
1134 else
1135 sbitmap_vector_ones (transp, last_basic_block);
1138 if (comp)
1139 sbitmap_vector_zero (comp, last_basic_block);
1140 if (antloc)
1141 sbitmap_vector_zero (antloc, last_basic_block);
1143 for (i = 0; i < table->size; i++)
1145 struct expr *expr;
1147 for (expr = table->table[i]; expr != NULL; expr = expr->next_same_hash)
1149 int indx = expr->bitmap_index;
1150 struct occr *occr;
1152 /* The expression is transparent in this block if it is not killed.
1153 We start by assuming all are transparent [none are killed], and
1154 then reset the bits for those that are. */
1155 if (transp)
1156 compute_transp (expr->expr, indx, transp, table->set_p);
1158 /* The occurrences recorded in antic_occr are exactly those that
1159 we want to set to non-zero in ANTLOC. */
1160 if (antloc)
1161 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
1163 SET_BIT (antloc[BLOCK_NUM (occr->insn)], indx);
1165 /* While we're scanning the table, this is a good place to
1166 initialize this. */
1167 occr->deleted_p = 0;
1170 /* The occurrences recorded in avail_occr are exactly those that
1171 we want to set to non-zero in COMP. */
1172 if (comp)
1173 for (occr = expr->avail_occr; occr != NULL; occr = occr->next)
1175 SET_BIT (comp[BLOCK_NUM (occr->insn)], indx);
1177 /* While we're scanning the table, this is a good place to
1178 initialize this. */
1179 occr->copied_p = 0;
1182 /* While we're scanning the table, this is a good place to
1183 initialize this. */
1184 expr->reaching_reg = 0;
1189 /* Register set information.
1191 `reg_set_table' records where each register is set or otherwise
1192 modified. */
1194 static struct obstack reg_set_obstack;
1196 static void
1197 alloc_reg_set_mem (n_regs)
1198 int n_regs;
1200 unsigned int n;
1202 reg_set_table_size = n_regs + REG_SET_TABLE_SLOP;
1203 n = reg_set_table_size * sizeof (struct reg_set *);
1204 reg_set_table = (struct reg_set **) gmalloc (n);
1205 memset ((char *) reg_set_table, 0, n);
1207 gcc_obstack_init (&reg_set_obstack);
1210 static void
1211 free_reg_set_mem ()
1213 free (reg_set_table);
1214 obstack_free (&reg_set_obstack, NULL);
1217 /* Record REGNO in the reg_set table. */
1219 static void
1220 record_one_set (regno, insn)
1221 int regno;
1222 rtx insn;
1224 /* Allocate a new reg_set element and link it onto the list. */
1225 struct reg_set *new_reg_info;
1227 /* If the table isn't big enough, enlarge it. */
1228 if (regno >= reg_set_table_size)
1230 int new_size = regno + REG_SET_TABLE_SLOP;
1232 reg_set_table
1233 = (struct reg_set **) grealloc ((char *) reg_set_table,
1234 new_size * sizeof (struct reg_set *));
1235 memset ((char *) (reg_set_table + reg_set_table_size), 0,
1236 (new_size - reg_set_table_size) * sizeof (struct reg_set *));
1237 reg_set_table_size = new_size;
1240 new_reg_info = (struct reg_set *) obstack_alloc (&reg_set_obstack,
1241 sizeof (struct reg_set));
1242 bytes_used += sizeof (struct reg_set);
1243 new_reg_info->insn = insn;
1244 new_reg_info->next = reg_set_table[regno];
1245 reg_set_table[regno] = new_reg_info;
1248 /* Called from compute_sets via note_stores to handle one SET or CLOBBER in
1249 an insn. The DATA is really the instruction in which the SET is
1250 occurring. */
1252 static void
1253 record_set_info (dest, setter, data)
1254 rtx dest, setter ATTRIBUTE_UNUSED;
1255 void *data;
1257 rtx record_set_insn = (rtx) data;
1259 if (GET_CODE (dest) == REG && REGNO (dest) >= FIRST_PSEUDO_REGISTER)
1260 record_one_set (REGNO (dest), record_set_insn);
1263 /* Scan the function and record each set of each pseudo-register.
1265 This is called once, at the start of the gcse pass. See the comments for
1266    `reg_set_table' for further documentation.  */
1268 static void
1269 compute_sets (f)
1270 rtx f;
1272 rtx insn;
1274 for (insn = f; insn != 0; insn = NEXT_INSN (insn))
1275 if (INSN_P (insn))
1276 note_stores (PATTERN (insn), record_set_info, insn);
1279 /* Hash table support. */
1281 struct reg_avail_info
1283 basic_block last_bb;
1284 int first_set;
1285 int last_set;
1288 static struct reg_avail_info *reg_avail_info;
1289 static basic_block current_bb;
1292 /* See whether X, the source of a set, is something we want to consider for
1293 GCSE. */
1295 static GTY(()) rtx test_insn;
1296 static int
1297 want_to_gcse_p (x)
1298 rtx x;
1300 int num_clobbers = 0;
1301 int icode;
1303 switch (GET_CODE (x))
1305 case REG:
1306 case SUBREG:
1307 case CONST_INT:
1308 case CONST_DOUBLE:
1309 case CONST_VECTOR:
1310 case CALL:
1311 return 0;
1313 default:
1314 break;
1317 /* If this is a valid operand, we are OK. If it's VOIDmode, we aren't. */
1318 if (general_operand (x, GET_MODE (x)))
1319 return 1;
1320 else if (GET_MODE (x) == VOIDmode)
1321 return 0;
1323 /* Otherwise, check if we can make a valid insn from it. First initialize
1324 our test insn if we haven't already. */
1325 if (test_insn == 0)
1327 test_insn
1328 = make_insn_raw (gen_rtx_SET (VOIDmode,
1329 gen_rtx_REG (word_mode,
1330 FIRST_PSEUDO_REGISTER * 2),
1331 const0_rtx));
1332 NEXT_INSN (test_insn) = PREV_INSN (test_insn) = 0;
1335 /* Now make an insn like the one we would make when GCSE'ing and see if
1336 valid. */
1337 PUT_MODE (SET_DEST (PATTERN (test_insn)), GET_MODE (x));
1338 SET_SRC (PATTERN (test_insn)) = x;
1339 return ((icode = recog (PATTERN (test_insn), test_insn, &num_clobbers)) >= 0
1340 && (num_clobbers == 0 || ! added_clobbers_hard_reg_p (icode)));
1343 /* Return non-zero if the operands of expression X are unchanged from the
1344 start of INSN's basic block up to but not including INSN (if AVAIL_P == 0),
1345 or from INSN to the end of INSN's basic block (if AVAIL_P != 0). */
1347 static int
1348 oprs_unchanged_p (x, insn, avail_p)
1349 rtx x, insn;
1350 int avail_p;
1352 int i, j;
1353 enum rtx_code code;
1354 const char *fmt;
1356 if (x == 0)
1357 return 1;
1359 code = GET_CODE (x);
1360 switch (code)
1362 case REG:
1364 struct reg_avail_info *info = &reg_avail_info[REGNO (x)];
1366 if (info->last_bb != current_bb)
1367 return 1;
1368 if (avail_p)
1369 return info->last_set < INSN_CUID (insn);
1370 else
1371 return info->first_set >= INSN_CUID (insn);
1374 case MEM:
1375 if (load_killed_in_block_p (current_bb, INSN_CUID (insn),
1376 x, avail_p))
1377 return 0;
1378 else
1379 return oprs_unchanged_p (XEXP (x, 0), insn, avail_p);
1381 case PRE_DEC:
1382 case PRE_INC:
1383 case POST_DEC:
1384 case POST_INC:
1385 case PRE_MODIFY:
1386 case POST_MODIFY:
1387 return 0;
1389 case PC:
1390 case CC0: /*FIXME*/
1391 case CONST:
1392 case CONST_INT:
1393 case CONST_DOUBLE:
1394 case CONST_VECTOR:
1395 case SYMBOL_REF:
1396 case LABEL_REF:
1397 case ADDR_VEC:
1398 case ADDR_DIFF_VEC:
1399 return 1;
1401 default:
1402 break;
1405 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
1407 if (fmt[i] == 'e')
1409 /* If we are about to do the last recursive call needed at this
1410 level, change it into iteration. This function is called enough
1411 to be worth it. */
1412 if (i == 0)
1413 return oprs_unchanged_p (XEXP (x, i), insn, avail_p);
1415 else if (! oprs_unchanged_p (XEXP (x, i), insn, avail_p))
1416 return 0;
1418 else if (fmt[i] == 'E')
1419 for (j = 0; j < XVECLEN (x, i); j++)
1420 if (! oprs_unchanged_p (XVECEXP (x, i, j), insn, avail_p))
1421 return 0;
1424 return 1;
1427 /* Used for communication between mems_conflict_for_gcse_p and
1428 load_killed_in_block_p. Nonzero if mems_conflict_for_gcse_p finds a
1429 conflict between two memory references. */
1430 static int gcse_mems_conflict_p;
1432 /* Used for communication between mems_conflict_for_gcse_p and
1433 load_killed_in_block_p. A memory reference for a load instruction,
1434 mems_conflict_for_gcse_p will see if a memory store conflicts with
1435 this memory load. */
1436 static rtx gcse_mem_operand;
1438 /* DEST is the output of an instruction. If it is a memory reference, and
1439 possibly conflicts with the load found in gcse_mem_operand, then set
1440 gcse_mems_conflict_p to a nonzero value. */
1442 static void
1443 mems_conflict_for_gcse_p (dest, setter, data)
1444 rtx dest, setter ATTRIBUTE_UNUSED;
1445 void *data ATTRIBUTE_UNUSED;
1447 while (GET_CODE (dest) == SUBREG
1448 || GET_CODE (dest) == ZERO_EXTRACT
1449 || GET_CODE (dest) == SIGN_EXTRACT
1450 || GET_CODE (dest) == STRICT_LOW_PART)
1451 dest = XEXP (dest, 0);
1453 /* If DEST is not a MEM, then it will not conflict with the load. Note
1454 that function calls are assumed to clobber memory, but are handled
1455 elsewhere. */
1456 if (GET_CODE (dest) != MEM)
1457 return;
1459   /* If we are setting a MEM in our list of specially recognized MEMs,
1460      don't mark it as killed this time.
1462 if (dest == gcse_mem_operand && pre_ldst_mems != NULL)
1464 if (!find_rtx_in_ldst (dest))
1465 gcse_mems_conflict_p = 1;
1466 return;
1469 if (true_dependence (dest, GET_MODE (dest), gcse_mem_operand,
1470 rtx_addr_varies_p))
1471 gcse_mems_conflict_p = 1;
1474 /* Return nonzero if the expression in X (a memory reference) is killed
1475 in block BB before or after the insn with the CUID in UID_LIMIT.
1476 AVAIL_P is nonzero for kills after UID_LIMIT, and zero for kills
1477 before UID_LIMIT.
1479 To check the entire block, set UID_LIMIT to max_uid + 1 and
1480 AVAIL_P to 0. */
1482 static int
1483 load_killed_in_block_p (bb, uid_limit, x, avail_p)
1484 basic_block bb;
1485 int uid_limit;
1486 rtx x;
1487 int avail_p;
1489 rtx list_entry = modify_mem_list[bb->index];
1490 while (list_entry)
1492 rtx setter;
1493 /* Ignore entries in the list that do not apply. */
1494 if ((avail_p
1495 && INSN_CUID (XEXP (list_entry, 0)) < uid_limit)
1496 || (! avail_p
1497 && INSN_CUID (XEXP (list_entry, 0)) > uid_limit))
1499 list_entry = XEXP (list_entry, 1);
1500 continue;
1503 setter = XEXP (list_entry, 0);
1505 /* If SETTER is a call everything is clobbered. Note that calls
1506 to pure functions are never put on the list, so we need not
1507 worry about them. */
1508 if (GET_CODE (setter) == CALL_INSN)
1509 return 1;
1511 /* SETTER must be an INSN of some kind that sets memory. Call
1512 note_stores to examine each hunk of memory that is modified.
1514 The note_stores interface is pretty limited, so we have to
1515 communicate via global variables. Yuk. */
1516 gcse_mem_operand = x;
1517 gcse_mems_conflict_p = 0;
1518 note_stores (PATTERN (setter), mems_conflict_for_gcse_p, NULL);
1519 if (gcse_mems_conflict_p)
1520 return 1;
1521 list_entry = XEXP (list_entry, 1);
1523 return 0;
1526 /* Return non-zero if the operands of expression X are unchanged from
1527 the start of INSN's basic block up to but not including INSN. */
1529 static int
1530 oprs_anticipatable_p (x, insn)
1531 rtx x, insn;
1533 return oprs_unchanged_p (x, insn, 0);
1536 /* Return non-zero if the operands of expression X are unchanged from
1537 INSN to the end of INSN's basic block. */
1539 static int
1540 oprs_available_p (x, insn)
1541 rtx x, insn;
1543 return oprs_unchanged_p (x, insn, 1);
1546 /* Hash expression X.
1548 MODE is only used if X is a CONST_INT. DO_NOT_RECORD_P is a boolean
1549 indicating if a volatile operand is found or if the expression contains
1550 something we don't want to insert in the table.
1552 ??? One might want to merge this with canon_hash. Later. */
1554 static unsigned int
1555 hash_expr (x, mode, do_not_record_p, hash_table_size)
1556 rtx x;
1557 enum machine_mode mode;
1558 int *do_not_record_p;
1559 int hash_table_size;
1561 unsigned int hash;
1563 *do_not_record_p = 0;
1565 hash = hash_expr_1 (x, mode, do_not_record_p);
1566 return hash % hash_table_size;
1569 /* Hash a string. Just add its bytes up. */
1571 static inline unsigned
1572 hash_string_1 (ps)
1573 const char *ps;
1575 unsigned hash = 0;
1576 const unsigned char *p = (const unsigned char *) ps;
1578 if (p)
1579 while (*p)
1580 hash += *p++;
1582 return hash;
1585 /* Subroutine of hash_expr to do the actual work. */
1587 static unsigned int
1588 hash_expr_1 (x, mode, do_not_record_p)
1589 rtx x;
1590 enum machine_mode mode;
1591 int *do_not_record_p;
1593 int i, j;
1594 unsigned hash = 0;
1595 enum rtx_code code;
1596 const char *fmt;
1598 /* Used to turn recursion into iteration. We can't rely on GCC's
1599      tail-recursion elimination since we need to keep accumulating values
1600 in HASH. */
1602 if (x == 0)
1603 return hash;
1605 repeat:
1606 code = GET_CODE (x);
1607 switch (code)
1609 case REG:
1610 hash += ((unsigned int) REG << 7) + REGNO (x);
1611 return hash;
1613 case CONST_INT:
1614 hash += (((unsigned int) CONST_INT << 7) + (unsigned int) mode
1615 + (unsigned int) INTVAL (x));
1616 return hash;
1618 case CONST_DOUBLE:
1619 /* This is like the general case, except that it only counts
1620 the integers representing the constant. */
1621 hash += (unsigned int) code + (unsigned int) GET_MODE (x);
1622 if (GET_MODE (x) != VOIDmode)
1623 for (i = 2; i < GET_RTX_LENGTH (CONST_DOUBLE); i++)
1624 hash += (unsigned int) XWINT (x, i);
1625 else
1626 hash += ((unsigned int) CONST_DOUBLE_LOW (x)
1627 + (unsigned int) CONST_DOUBLE_HIGH (x));
1628 return hash;
1630 case CONST_VECTOR:
1632 int units;
1633 rtx elt;
1635 units = CONST_VECTOR_NUNITS (x);
1637 for (i = 0; i < units; ++i)
1639 elt = CONST_VECTOR_ELT (x, i);
1640 hash += hash_expr_1 (elt, GET_MODE (elt), do_not_record_p);
1643 return hash;
1646 /* Assume there is only one rtx object for any given label. */
1647 case LABEL_REF:
1648 /* We don't hash on the address of the CODE_LABEL to avoid bootstrap
1649 differences and differences between each stage's debugging dumps. */
1650 hash += (((unsigned int) LABEL_REF << 7)
1651 + CODE_LABEL_NUMBER (XEXP (x, 0)));
1652 return hash;
1654 case SYMBOL_REF:
1656 /* Don't hash on the symbol's address to avoid bootstrap differences.
1657 Different hash values may cause expressions to be recorded in
1658 different orders and thus different registers to be used in the
1659 final assembler. This also avoids differences in the dump files
1660 between various stages. */
1661 unsigned int h = 0;
1662 const unsigned char *p = (const unsigned char *) XSTR (x, 0);
1664 while (*p)
1665 h += (h << 7) + *p++; /* ??? revisit */
1667 hash += ((unsigned int) SYMBOL_REF << 7) + h;
1668 return hash;
1671 case MEM:
1672 if (MEM_VOLATILE_P (x))
1674 *do_not_record_p = 1;
1675 return 0;
1678 hash += (unsigned int) MEM;
1679       /* We used to hash on the alias set, but this is not good, since the alias
1680          set may differ between -fprofile-arcs and -fbranch-probabilities compilations,
1681          causing the profiles to fail to match.  */
1682 x = XEXP (x, 0);
1683 goto repeat;
1685 case PRE_DEC:
1686 case PRE_INC:
1687 case POST_DEC:
1688 case POST_INC:
1689 case PC:
1690 case CC0:
1691 case CALL:
1692 case UNSPEC_VOLATILE:
1693 *do_not_record_p = 1;
1694 return 0;
1696 case ASM_OPERANDS:
1697 if (MEM_VOLATILE_P (x))
1699 *do_not_record_p = 1;
1700 return 0;
1702 else
1704 /* We don't want to take the filename and line into account. */
1705 hash += (unsigned) code + (unsigned) GET_MODE (x)
1706 + hash_string_1 (ASM_OPERANDS_TEMPLATE (x))
1707 + hash_string_1 (ASM_OPERANDS_OUTPUT_CONSTRAINT (x))
1708 + (unsigned) ASM_OPERANDS_OUTPUT_IDX (x);
1710 if (ASM_OPERANDS_INPUT_LENGTH (x))
1712 for (i = 1; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
1714 hash += (hash_expr_1 (ASM_OPERANDS_INPUT (x, i),
1715 GET_MODE (ASM_OPERANDS_INPUT (x, i)),
1716 do_not_record_p)
1717 + hash_string_1 (ASM_OPERANDS_INPUT_CONSTRAINT
1718 (x, i)));
1721 hash += hash_string_1 (ASM_OPERANDS_INPUT_CONSTRAINT (x, 0));
1722 x = ASM_OPERANDS_INPUT (x, 0);
1723 mode = GET_MODE (x);
1724 goto repeat;
1726 return hash;
1729 default:
1730 break;
1733 hash += (unsigned) code + (unsigned) GET_MODE (x);
1734 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
1736 if (fmt[i] == 'e')
1738 /* If we are about to do the last recursive call
1739 needed at this level, change it into iteration.
1740 This function is called enough to be worth it. */
1741 if (i == 0)
1743 x = XEXP (x, i);
1744 goto repeat;
1747 hash += hash_expr_1 (XEXP (x, i), 0, do_not_record_p);
1748 if (*do_not_record_p)
1749 return 0;
1752 else if (fmt[i] == 'E')
1753 for (j = 0; j < XVECLEN (x, i); j++)
1755 hash += hash_expr_1 (XVECEXP (x, i, j), 0, do_not_record_p);
1756 if (*do_not_record_p)
1757 return 0;
1760 else if (fmt[i] == 's')
1761 hash += hash_string_1 (XSTR (x, i));
1762 else if (fmt[i] == 'i')
1763 hash += (unsigned int) XINT (x, i);
1764 else
1765 abort ();
1768 return hash;
1771 /* Hash a set of register REGNO.
1773 Sets are hashed on the register that is set. This simplifies the PRE copy
1774 propagation code.
1776 ??? May need to make things more elaborate. Later, as necessary. */
1778 static unsigned int
1779 hash_set (regno, hash_table_size)
1780 int regno;
1781 int hash_table_size;
1783 unsigned int hash;
1785 hash = regno;
1786 return hash % hash_table_size;
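/* Illustration (a worked example with invented numbers): with a 31-bucket
   table, a set of pseudo-register 72 lands in bucket 72 % 31 = 10, so every
   set of the same register chains into the same bucket regardless of the
   value being stored.  */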
1789 /* Return non-zero if exp1 is equivalent to exp2.
1790 ??? Borrowed from cse.c. Might want to remerge with cse.c. Later. */
1792 static int
1793 expr_equiv_p (x, y)
1794 rtx x, y;
1796 int i, j;
1797 enum rtx_code code;
1798 const char *fmt;
1800 if (x == y)
1801 return 1;
1803 if (x == 0 || y == 0)
1804 return x == y;
1806 code = GET_CODE (x);
1807 if (code != GET_CODE (y))
1808 return 0;
1810 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
1811 if (GET_MODE (x) != GET_MODE (y))
1812 return 0;
1814 switch (code)
1816 case PC:
1817 case CC0:
1818 return x == y;
1820 case CONST_INT:
1821 return INTVAL (x) == INTVAL (y);
1823 case LABEL_REF:
1824 return XEXP (x, 0) == XEXP (y, 0);
1826 case SYMBOL_REF:
1827 return XSTR (x, 0) == XSTR (y, 0);
1829 case REG:
1830 return REGNO (x) == REGNO (y);
1832 case MEM:
1833 /* Can't merge two expressions in different alias sets, since we can
1834 decide that the expression is transparent in a block when it isn't,
1835 due to it being set with the different alias set. */
1836 if (MEM_ALIAS_SET (x) != MEM_ALIAS_SET (y))
1837 return 0;
1838 break;
1840 /* For commutative operations, check both orders. */
1841 case PLUS:
1842 case MULT:
1843 case AND:
1844 case IOR:
1845 case XOR:
1846 case NE:
1847 case EQ:
1848 return ((expr_equiv_p (XEXP (x, 0), XEXP (y, 0))
1849 && expr_equiv_p (XEXP (x, 1), XEXP (y, 1)))
1850 || (expr_equiv_p (XEXP (x, 0), XEXP (y, 1))
1851 && expr_equiv_p (XEXP (x, 1), XEXP (y, 0))));
1853 case ASM_OPERANDS:
1854 /* We don't use the generic code below because we want to
1855 disregard filename and line numbers. */
1857 /* A volatile asm isn't equivalent to any other. */
1858 if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
1859 return 0;
1861 if (GET_MODE (x) != GET_MODE (y)
1862 || strcmp (ASM_OPERANDS_TEMPLATE (x), ASM_OPERANDS_TEMPLATE (y))
1863 || strcmp (ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
1864 ASM_OPERANDS_OUTPUT_CONSTRAINT (y))
1865 || ASM_OPERANDS_OUTPUT_IDX (x) != ASM_OPERANDS_OUTPUT_IDX (y)
1866 || ASM_OPERANDS_INPUT_LENGTH (x) != ASM_OPERANDS_INPUT_LENGTH (y))
1867 return 0;
1869 if (ASM_OPERANDS_INPUT_LENGTH (x))
1871 for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
1872 if (! expr_equiv_p (ASM_OPERANDS_INPUT (x, i),
1873 ASM_OPERANDS_INPUT (y, i))
1874 || strcmp (ASM_OPERANDS_INPUT_CONSTRAINT (x, i),
1875 ASM_OPERANDS_INPUT_CONSTRAINT (y, i)))
1876 return 0;
1879 return 1;
1881 default:
1882 break;
1885 /* Compare the elements. If any pair of corresponding elements
1886 fail to match, return 0 for the whole thing. */
1888 fmt = GET_RTX_FORMAT (code);
1889 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1891 switch (fmt[i])
1893 case 'e':
1894 if (! expr_equiv_p (XEXP (x, i), XEXP (y, i)))
1895 return 0;
1896 break;
1898 case 'E':
1899 if (XVECLEN (x, i) != XVECLEN (y, i))
1900 return 0;
1901 for (j = 0; j < XVECLEN (x, i); j++)
1902 if (! expr_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j)))
1903 return 0;
1904 break;
1906 case 's':
1907 if (strcmp (XSTR (x, i), XSTR (y, i)))
1908 return 0;
1909 break;
1911 case 'i':
1912 if (XINT (x, i) != XINT (y, i))
1913 return 0;
1914 break;
1916 case 'w':
1917 if (XWINT (x, i) != XWINT (y, i))
1918 return 0;
1919 break;
1921 case '0':
1922 break;
1924 default:
1925 abort ();
1929 return 1;
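/* Illustration (hypothetical RTL; register numbers invented):
   (plus:SI (reg 58) (const_int 4)) is considered equivalent to
   (plus:SI (const_int 4) (reg 58)) because PLUS is commutative and both
   operand orders are checked above, whereas (mult:SI (reg 58) (reg 59))
   and (mult:HI (reg 58) (reg 59)) are never equivalent because their
   modes differ.  */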
1932 /* Insert expression X in INSN in the hash TABLE.
1933 If it is already present, record it as the last occurrence in INSN's
1934 basic block.
1936 MODE is the mode of the value X is being stored into.
1937 It is only used if X is a CONST_INT.
1939 ANTIC_P is non-zero if X is an anticipatable expression.
1940 AVAIL_P is non-zero if X is an available expression. */
1942 static void
1943 insert_expr_in_table (x, mode, insn, antic_p, avail_p, table)
1944 rtx x;
1945 enum machine_mode mode;
1946 rtx insn;
1947 int antic_p, avail_p;
1948 struct hash_table *table;
1950 int found, do_not_record_p;
1951 unsigned int hash;
1952 struct expr *cur_expr, *last_expr = NULL;
1953 struct occr *antic_occr, *avail_occr;
1954 struct occr *last_occr = NULL;
1956 hash = hash_expr (x, mode, &do_not_record_p, table->size);
1958 /* Do not insert expression in table if it contains volatile operands,
1959 or if hash_expr determines the expression is something we don't want
1960 to or can't handle. */
1961 if (do_not_record_p)
1962 return;
1964 cur_expr = table->table[hash];
1965 found = 0;
1967 while (cur_expr && 0 == (found = expr_equiv_p (cur_expr->expr, x)))
1969 /* If the expression isn't found, save a pointer to the end of
1970 the list. */
1971 last_expr = cur_expr;
1972 cur_expr = cur_expr->next_same_hash;
1975 if (! found)
1977 cur_expr = (struct expr *) gcse_alloc (sizeof (struct expr));
1978 bytes_used += sizeof (struct expr);
1979 if (table->table[hash] == NULL)
1980 /* This is the first pattern that hashed to this index. */
1981 table->table[hash] = cur_expr;
1982 else
1983 /* Add EXPR to end of this hash chain. */
1984 last_expr->next_same_hash = cur_expr;
1986 /* Set the fields of the expr element. */
1987 cur_expr->expr = x;
1988 cur_expr->bitmap_index = table->n_elems++;
1989 cur_expr->next_same_hash = NULL;
1990 cur_expr->antic_occr = NULL;
1991 cur_expr->avail_occr = NULL;
1994 /* Now record the occurrence(s). */
1995 if (antic_p)
1997 antic_occr = cur_expr->antic_occr;
1999 /* Search for another occurrence in the same basic block. */
2000 while (antic_occr && BLOCK_NUM (antic_occr->insn) != BLOCK_NUM (insn))
2002 /* If an occurrence isn't found, save a pointer to the end of
2003 the list. */
2004 last_occr = antic_occr;
2005 antic_occr = antic_occr->next;
2008 if (antic_occr)
2009 /* Found another instance of the expression in the same basic block.
2010 Prefer the currently recorded one. We want the first one in the
2011 block and the block is scanned from start to end. */
2012 ; /* nothing to do */
2013 else
2015 /* First occurrence of this expression in this basic block. */
2016 antic_occr = (struct occr *) gcse_alloc (sizeof (struct occr));
2017 bytes_used += sizeof (struct occr);
2018 /* First occurrence of this expression in any block? */
2019 if (cur_expr->antic_occr == NULL)
2020 cur_expr->antic_occr = antic_occr;
2021 else
2022 last_occr->next = antic_occr;
2024 antic_occr->insn = insn;
2025 antic_occr->next = NULL;
2029 if (avail_p)
2031 avail_occr = cur_expr->avail_occr;
2033 /* Search for another occurrence in the same basic block. */
2034 while (avail_occr && BLOCK_NUM (avail_occr->insn) != BLOCK_NUM (insn))
2036 /* If an occurrence isn't found, save a pointer to the end of
2037 the list. */
2038 last_occr = avail_occr;
2039 avail_occr = avail_occr->next;
2042 if (avail_occr)
2043 /* Found another instance of the expression in the same basic block.
2044 Prefer this occurrence to the currently recorded one. We want
2045 the last one in the block and the block is scanned from start
2046 to end. */
2047 avail_occr->insn = insn;
2048 else
2050 /* First occurrence of this expression in this basic block. */
2051 avail_occr = (struct occr *) gcse_alloc (sizeof (struct occr));
2052 bytes_used += sizeof (struct occr);
2054 /* First occurrence of this expression in any block? */
2055 if (cur_expr->avail_occr == NULL)
2056 cur_expr->avail_occr = avail_occr;
2057 else
2058 last_occr->next = avail_occr;
2060 avail_occr->insn = insn;
2061 avail_occr->next = NULL;
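/* A sketch of the structure built above (reader's aid; field names are
   those of struct expr and struct occr used throughout this file):

     table->table[hash] --> exprA --next_same_hash--> exprB --> NULL
                              |
                              +-- antic_occr --> occr in bb N --> occr in bb M
                              +-- avail_occr --> occr in bb N --> ...

   At most one anticipatable and one available occurrence is kept per basic
   block: the first insn in the block for antic_occr, the last for
   avail_occr.  */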
2066 /* Insert pattern X in INSN in the hash table.
2067 X is a SET of a reg to either another reg or a constant.
2068 If it is already present, record it as the last occurrence in INSN's
2069 basic block. */
2071 static void
2072 insert_set_in_table (x, insn, table)
2073 rtx x;
2074 rtx insn;
2075 struct hash_table *table;
2077 int found;
2078 unsigned int hash;
2079 struct expr *cur_expr, *last_expr = NULL;
2080 struct occr *cur_occr, *last_occr = NULL;
2082 if (GET_CODE (x) != SET
2083 || GET_CODE (SET_DEST (x)) != REG)
2084 abort ();
2086 hash = hash_set (REGNO (SET_DEST (x)), table->size);
2088 cur_expr = table->table[hash];
2089 found = 0;
2091 while (cur_expr && 0 == (found = expr_equiv_p (cur_expr->expr, x)))
2093 /* If the expression isn't found, save a pointer to the end of
2094 the list. */
2095 last_expr = cur_expr;
2096 cur_expr = cur_expr->next_same_hash;
2099 if (! found)
2101 cur_expr = (struct expr *) gcse_alloc (sizeof (struct expr));
2102 bytes_used += sizeof (struct expr);
2103 if (table->table[hash] == NULL)
2104 /* This is the first pattern that hashed to this index. */
2105 table->table[hash] = cur_expr;
2106 else
2107 /* Add EXPR to end of this hash chain. */
2108 last_expr->next_same_hash = cur_expr;
2110 /* Set the fields of the expr element.
2111 We must copy X because it can be modified when copy propagation is
2112 performed on its operands. */
2113 cur_expr->expr = copy_rtx (x);
2114 cur_expr->bitmap_index = table->n_elems++;
2115 cur_expr->next_same_hash = NULL;
2116 cur_expr->antic_occr = NULL;
2117 cur_expr->avail_occr = NULL;
2120 /* Now record the occurrence. */
2121 cur_occr = cur_expr->avail_occr;
2123 /* Search for another occurrence in the same basic block. */
2124 while (cur_occr && BLOCK_NUM (cur_occr->insn) != BLOCK_NUM (insn))
2126 /* If an occurrence isn't found, save a pointer to the end of
2127 the list. */
2128 last_occr = cur_occr;
2129 cur_occr = cur_occr->next;
2132 if (cur_occr)
2133 /* Found another instance of the expression in the same basic block.
2134 Prefer this occurrence to the currently recorded one. We want the
2135 last one in the block and the block is scanned from start to end. */
2136 cur_occr->insn = insn;
2137 else
2139 /* First occurrence of this expression in this basic block. */
2140 cur_occr = (struct occr *) gcse_alloc (sizeof (struct occr));
2141 bytes_used += sizeof (struct occr);
2143 /* First occurrence of this expression in any block? */
2144 if (cur_expr->avail_occr == NULL)
2145 cur_expr->avail_occr = cur_occr;
2146 else
2147 last_occr->next = cur_occr;
2149 cur_occr->insn = insn;
2150 cur_occr->next = NULL;
2154 /* Scan pattern PAT of INSN and add an entry to the hash TABLE (set or
2155 expression one). */
2157 static void
2158 hash_scan_set (pat, insn, table)
2159 rtx pat, insn;
2160 struct hash_table *table;
2162 rtx src = SET_SRC (pat);
2163 rtx dest = SET_DEST (pat);
2164 rtx note;
2166 if (GET_CODE (src) == CALL)
2167 hash_scan_call (src, insn, table);
2169 else if (GET_CODE (dest) == REG)
2171 unsigned int regno = REGNO (dest);
2172 rtx tmp;
2174 /* If this is a single set and we are doing constant propagation,
2175 see if a REG_NOTE shows this equivalent to a constant. */
2176 if (table->set_p && (note = find_reg_equal_equiv_note (insn)) != 0
2177 && CONSTANT_P (XEXP (note, 0)))
2178 src = XEXP (note, 0), pat = gen_rtx_SET (VOIDmode, dest, src);
2180 /* Only record sets of pseudo-regs in the hash table. */
2181 if (! table->set_p
2182 && regno >= FIRST_PSEUDO_REGISTER
2183 /* Don't GCSE something if we can't do a reg/reg copy. */
2184 && can_copy_p [GET_MODE (dest)]
2185 /* GCSE commonly inserts instructions after the insn. We can't
2186 do that easily for EH_REGION notes, so disable GCSE on these
2187 for now. */
2188 && !find_reg_note (insn, REG_EH_REGION, NULL_RTX)
2189 /* Is SET_SRC something we want to gcse? */
2190 && want_to_gcse_p (src)
2191 /* Don't CSE a nop. */
2192 && ! set_noop_p (pat)
2193 /* Don't GCSE if it has an attached REG_EQUIV note.
2194 At this point only function parameters should have
2195 REG_EQUIV notes, and if the argument slot is used somewhere
2196 explicitly, it means the address of the parameter has been
2197 taken, so we should not extend the lifetime of the pseudo. */
2198 && ((note = find_reg_note (insn, REG_EQUIV, NULL_RTX)) == 0
2199 || GET_CODE (XEXP (note, 0)) != MEM))
2201 /* An expression is not anticipatable if its operands are
2202 modified before this insn or if this is not the only SET in
2203 this insn. */
2204 int antic_p = oprs_anticipatable_p (src, insn) && single_set (insn);
2205 /* An expression is not available if its operands are
2206 subsequently modified, including this insn. It's also not
2207 available if this is a branch, because we can't insert
2208 a set after the branch. */
2209 int avail_p = (oprs_available_p (src, insn)
2210 && ! JUMP_P (insn));
2212 insert_expr_in_table (src, GET_MODE (dest), insn, antic_p, avail_p, table);
2215 /* Record sets for constant/copy propagation. */
2216 else if (table->set_p
2217 && regno >= FIRST_PSEUDO_REGISTER
2218 && ((GET_CODE (src) == REG
2219 && REGNO (src) >= FIRST_PSEUDO_REGISTER
2220 && can_copy_p [GET_MODE (dest)]
2221 && REGNO (src) != regno)
2222 || CONSTANT_P (src))
2223 /* A copy is not available if its src or dest is subsequently
2224 modified. Here we want to search from INSN+1 on, but
2225 oprs_available_p searches from INSN on. */
2226 && (insn == BLOCK_END (BLOCK_NUM (insn))
2227 || ((tmp = next_nonnote_insn (insn)) != NULL_RTX
2228 && oprs_available_p (pat, tmp))))
2229 insert_set_in_table (pat, insn, table);
2233 static void
2234 hash_scan_clobber (x, insn, table)
2235 rtx x ATTRIBUTE_UNUSED, insn ATTRIBUTE_UNUSED;
2236 struct hash_table *table ATTRIBUTE_UNUSED;
2238 /* Currently nothing to do. */
2241 static void
2242 hash_scan_call (x, insn, table)
2243 rtx x ATTRIBUTE_UNUSED, insn ATTRIBUTE_UNUSED;
2244 struct hash_table *table ATTRIBUTE_UNUSED;
2246 /* Currently nothing to do. */
2249 /* Process INSN and add hash table entries as appropriate.
2251 Only available expressions that set a single pseudo-reg are recorded.
2253 Single sets in a PARALLEL could be handled, but it's an extra complication
2254 that isn't dealt with right now. The trick is handling the CLOBBERs that
2255 are also in the PARALLEL. Later.
2257 If TABLE->SET_P is non-zero, this is for the assignment hash table,
2258 otherwise it is for the expression hash table.
2259 If IN_LIBCALL_BLOCK is non-zero, we are in a libcall block, and should
2260 not record any expressions. */
2262 static void
2263 hash_scan_insn (insn, table, in_libcall_block)
2264 rtx insn;
2265 struct hash_table *table;
2266 int in_libcall_block;
2268 rtx pat = PATTERN (insn);
2269 int i;
2271 if (in_libcall_block)
2272 return;
2274 /* Pick out the sets of INSN and for other forms of instructions record
2275 what's been modified. */
2277 if (GET_CODE (pat) == SET)
2278 hash_scan_set (pat, insn, table);
2279 else if (GET_CODE (pat) == PARALLEL)
2280 for (i = 0; i < XVECLEN (pat, 0); i++)
2282 rtx x = XVECEXP (pat, 0, i);
2284 if (GET_CODE (x) == SET)
2285 hash_scan_set (x, insn, table);
2286 else if (GET_CODE (x) == CLOBBER)
2287 hash_scan_clobber (x, insn, table);
2288 else if (GET_CODE (x) == CALL)
2289 hash_scan_call (x, insn, table);
2292 else if (GET_CODE (pat) == CLOBBER)
2293 hash_scan_clobber (pat, insn, table);
2294 else if (GET_CODE (pat) == CALL)
2295 hash_scan_call (pat, insn, table);
2298 static void
2299 dump_hash_table (file, name, table)
2300 FILE *file;
2301 const char *name;
2302 struct hash_table *table;
2304 int i;
2305 /* Flattened out table, so it's printed in proper order. */
2306 struct expr **flat_table;
2307 unsigned int *hash_val;
2308 struct expr *expr;
2310 flat_table
2311 = (struct expr **) xcalloc (table->n_elems, sizeof (struct expr *));
2312 hash_val = (unsigned int *) xmalloc (table->n_elems * sizeof (unsigned int));
2314 for (i = 0; i < (int) table->size; i++)
2315 for (expr = table->table[i]; expr != NULL; expr = expr->next_same_hash)
2317 flat_table[expr->bitmap_index] = expr;
2318 hash_val[expr->bitmap_index] = i;
2321 fprintf (file, "%s hash table (%d buckets, %d entries)\n",
2322 name, table->size, table->n_elems);
2324 for (i = 0; i < (int) table->n_elems; i++)
2325 if (flat_table[i] != 0)
2327 expr = flat_table[i];
2328 fprintf (file, "Index %d (hash value %d)\n ",
2329 expr->bitmap_index, hash_val[i]);
2330 print_rtl (file, expr->expr);
2331 fprintf (file, "\n");
2334 fprintf (file, "\n");
2336 free (flat_table);
2337 free (hash_val);
2340 /* Record register first/last/block set information for REGNO in INSN.
2342 first_set records the first place in the block where the register
2343 is set and is used to compute "anticipatability".
2345 last_set records the last place in the block where the register
2346 is set and is used to compute "availability".
2348 last_bb records the block for which first_set and last_set are
2349 valid, as a quick test to invalidate them.
2351 reg_set_in_block records whether the register is set in the block
2352 and is used to compute "transparency". */
2354 static void
2355 record_last_reg_set_info (insn, regno)
2356 rtx insn;
2357 int regno;
2359 struct reg_avail_info *info = &reg_avail_info[regno];
2360 int cuid = INSN_CUID (insn);
2362 info->last_set = cuid;
2363 if (info->last_bb != current_bb)
2365 info->last_bb = current_bb;
2366 info->first_set = cuid;
2367 SET_BIT (reg_set_in_block[current_bb->index], regno);
2372 /* Record all of the canonicalized MEMs of record_last_mem_set_info's insn.
2373 Note we store a pair of elements in the list, so they have to be
2374 taken off pairwise. */
2376 static void
2377 canon_list_insert (dest, unused1, v_insn)
2378 rtx dest ATTRIBUTE_UNUSED;
2379 rtx unused1 ATTRIBUTE_UNUSED;
2380 void * v_insn;
2382 rtx dest_addr, insn;
2383 int bb;
2385 while (GET_CODE (dest) == SUBREG
2386 || GET_CODE (dest) == ZERO_EXTRACT
2387 || GET_CODE (dest) == SIGN_EXTRACT
2388 || GET_CODE (dest) == STRICT_LOW_PART)
2389 dest = XEXP (dest, 0);
2391 /* If DEST is not a MEM, then it will not conflict with a load. Note
2392 that function calls are assumed to clobber memory, but are handled
2393 elsewhere. */
2395 if (GET_CODE (dest) != MEM)
2396 return;
2398 dest_addr = get_addr (XEXP (dest, 0));
2399 dest_addr = canon_rtx (dest_addr);
2400 insn = (rtx) v_insn;
2401 bb = BLOCK_NUM (insn);
2403 canon_modify_mem_list[bb] =
2404 alloc_EXPR_LIST (VOIDmode, dest_addr, canon_modify_mem_list[bb]);
2405 canon_modify_mem_list[bb] =
2406 alloc_EXPR_LIST (VOIDmode, dest, canon_modify_mem_list[bb]);
2407 bitmap_set_bit (canon_modify_mem_list_set, bb);
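/* A sketch of the list layout built above (reader's aid, not from the
   original sources).  Each alloc_EXPR_LIST call pushes onto the front of
   the block's list, so after recording two stores the list reads

     dest2 -> dest_addr2 -> dest1 -> dest_addr1 -> ...

   and consumers such as compute_transp peel entries off two at a time,
   first the canonicalized MEM and then its canonicalized address.  */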
2410 /* Record memory modification information for INSN. We do not actually care
2411 about the memory location(s) that are set, or even how they are set (consider
2412 a CALL_INSN). We merely need to record which insns modify memory. */
2414 static void
2415 record_last_mem_set_info (insn)
2416 rtx insn;
2418 int bb = BLOCK_NUM (insn);
2420 /* load_killed_in_block_p will handle the case of calls clobbering
2421 everything. */
2422 modify_mem_list[bb] = alloc_INSN_LIST (insn, modify_mem_list[bb]);
2423 bitmap_set_bit (modify_mem_list_set, bb);
2425 if (GET_CODE (insn) == CALL_INSN)
2427 /* Note that traversals of this list (other than for freeing)
2428 will break after encountering a CALL_INSN. So, there's no
2429 need to insert a pair of items, as canon_list_insert does. */
2430 canon_modify_mem_list[bb] =
2431 alloc_INSN_LIST (insn, canon_modify_mem_list[bb]);
2432 bitmap_set_bit (canon_modify_mem_list_set, bb);
2434 else
2435 note_stores (PATTERN (insn), canon_list_insert, (void*) insn);
2438 /* Called from compute_hash_table via note_stores to handle one
2439 SET or CLOBBER in an insn. DATA is really the instruction in which
2440 the SET is taking place. */
2442 static void
2443 record_last_set_info (dest, setter, data)
2444 rtx dest, setter ATTRIBUTE_UNUSED;
2445 void *data;
2447 rtx last_set_insn = (rtx) data;
2449 if (GET_CODE (dest) == SUBREG)
2450 dest = SUBREG_REG (dest);
2452 if (GET_CODE (dest) == REG)
2453 record_last_reg_set_info (last_set_insn, REGNO (dest));
2454 else if (GET_CODE (dest) == MEM
2455 /* Ignore pushes, they clobber nothing. */
2456 && ! push_operand (dest, GET_MODE (dest)))
2457 record_last_mem_set_info (last_set_insn);
2460 /* Top level function to create an expression or assignment hash table.
2462 Expression entries are placed in the hash table if
2463 - they are of the form (set (pseudo-reg) src),
2464 - src is something we want to perform GCSE on,
2465 - none of the operands are subsequently modified in the block
2467 Assignment entries are placed in the hash table if
2468 - they are of the form (set (pseudo-reg) src),
2469 - src is something we want to perform const/copy propagation on,
2470 - none of the operands or target are subsequently modified in the block
2472 Currently src must be a pseudo-reg or a const_int.
2475 TABLE is the table to be computed. */
2477 static void
2478 compute_hash_table_work (table)
2479 struct hash_table *table;
2481 unsigned int i;
2483 /* While we compute the hash table we also compute a bit array of which
2484 registers are set in which blocks.
2485 ??? This isn't needed during const/copy propagation, but it's cheap to
2486 compute. Later. */
2487 sbitmap_vector_zero (reg_set_in_block, last_basic_block);
2489 /* Re-cache any INSN_LIST nodes we have allocated. */
2490 clear_modify_mem_tables ();
2491 /* Some working arrays used to track first and last set in each block. */
2492 reg_avail_info = (struct reg_avail_info*)
2493 gmalloc (max_gcse_regno * sizeof (struct reg_avail_info));
2495 for (i = 0; i < max_gcse_regno; ++i)
2496 reg_avail_info[i].last_bb = NULL;
2498 FOR_EACH_BB (current_bb)
2500 rtx insn;
2501 unsigned int regno;
2502 int in_libcall_block;
2504 /* First pass over the instructions records information used to
2505 determine when registers and memory are first and last set.
2506 ??? hard-reg reg_set_in_block computation
2507 could be moved to compute_sets since they currently don't change. */
2509 for (insn = current_bb->head;
2510 insn && insn != NEXT_INSN (current_bb->end);
2511 insn = NEXT_INSN (insn))
2513 if (! INSN_P (insn))
2514 continue;
2516 if (GET_CODE (insn) == CALL_INSN)
2518 bool clobbers_all = false;
2519 #ifdef NON_SAVING_SETJMP
2520 if (NON_SAVING_SETJMP
2521 && find_reg_note (insn, REG_SETJMP, NULL_RTX))
2522 clobbers_all = true;
2523 #endif
2525 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2526 if (clobbers_all
2527 || TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
2528 record_last_reg_set_info (insn, regno);
2530 mark_call (insn);
2533 note_stores (PATTERN (insn), record_last_set_info, insn);
2536 /* The next pass builds the hash table. */
2538 for (insn = current_bb->head, in_libcall_block = 0;
2539 insn && insn != NEXT_INSN (current_bb->end);
2540 insn = NEXT_INSN (insn))
2541 if (INSN_P (insn))
2543 if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
2544 in_libcall_block = 1;
2545 else if (table->set_p && find_reg_note (insn, REG_RETVAL, NULL_RTX))
2546 in_libcall_block = 0;
2547 hash_scan_insn (insn, table, in_libcall_block);
2548 if (!table->set_p && find_reg_note (insn, REG_RETVAL, NULL_RTX))
2549 in_libcall_block = 0;
2553 free (reg_avail_info);
2554 reg_avail_info = NULL;
2557 /* Allocate space for the set/expr hash TABLE.
2558 N_INSNS is the number of instructions in the function.
2559 It is used to determine the number of buckets to use.
2560 SET_P determines whether set or expression table will
2561 be created. */
2563 static void
2564 alloc_hash_table (n_insns, table, set_p)
2565 int n_insns;
2566 struct hash_table *table;
2567 int set_p;
2569 int n;
2571 table->size = n_insns / 4;
2572 if (table->size < 11)
2573 table->size = 11;
2575 /* Attempt to maintain efficient use of hash table.
2576 Making it an odd number is simplest for now.
2577 ??? Later take some measurements. */
2578 table->size |= 1;
2579 n = table->size * sizeof (struct expr *);
2580 table->table = (struct expr **) gmalloc (n);
2581 table->set_p = set_p;
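/* A worked example of the sizing above (illustrative numbers): a function
   of 1000 insns gets 1000 / 4 = 250 buckets, forced odd to 251, while a
   function of 30 insns would get 30 / 4 = 7 and is raised to the minimum
   of 11 buckets.  */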
2584 /* Free things allocated by alloc_hash_table. */
2586 static void
2587 free_hash_table (table)
2588 struct hash_table *table;
2590 free (table->table);
2593 /* Compute the hash TABLE for doing copy/const propagation or
2594 expression hash table. */
2596 static void
2597 compute_hash_table (table)
2598 struct hash_table *table;
2600 /* Initialize count of number of entries in hash table. */
2601 table->n_elems = 0;
2602 memset ((char *) table->table, 0,
2603 table->size * sizeof (struct expr *));
2605 compute_hash_table_work (table);
2608 /* Expression tracking support. */
2610 /* Lookup pattern PAT in the expression TABLE.
2611 The result is a pointer to the table entry, or NULL if not found. */
2613 static struct expr *
2614 lookup_expr (pat, table)
2615 rtx pat;
2616 struct hash_table *table;
2618 int do_not_record_p;
2619 unsigned int hash = hash_expr (pat, GET_MODE (pat), &do_not_record_p,
2620 table->size);
2621 struct expr *expr;
2623 if (do_not_record_p)
2624 return NULL;
2626 expr = table->table[hash];
2628 while (expr && ! expr_equiv_p (expr->expr, pat))
2629 expr = expr->next_same_hash;
2631 return expr;
2634 /* Lookup REGNO in the set TABLE. If PAT is non-NULL look for the entry that
2635 matches it, otherwise return the first entry for REGNO. The result is a
2636 pointer to the table entry, or NULL if not found. */
2638 static struct expr *
2639 lookup_set (regno, pat, table)
2640 unsigned int regno;
2641 rtx pat;
2642 struct hash_table *table;
2644 unsigned int hash = hash_set (regno, table->size);
2645 struct expr *expr;
2647 expr = table->table[hash];
2649 if (pat)
2651 while (expr && ! expr_equiv_p (expr->expr, pat))
2652 expr = expr->next_same_hash;
2654 else
2656 while (expr && REGNO (SET_DEST (expr->expr)) != regno)
2657 expr = expr->next_same_hash;
2660 return expr;
2663 /* Return the next entry for REGNO in list EXPR. */
2665 static struct expr *
2666 next_set (regno, expr)
2667 unsigned int regno;
2668 struct expr *expr;
2670 do
2671 expr = expr->next_same_hash;
2672 while (expr && REGNO (SET_DEST (expr->expr)) != regno);
2674 return expr;
2677 /* Like free_INSN_LIST_list or free_EXPR_LIST_list, except that the node
2678 types may be mixed. */
2680 static void
2681 free_insn_expr_list_list (listp)
2682 rtx *listp;
2684 rtx list, next;
2686 for (list = *listp; list ; list = next)
2688 next = XEXP (list, 1);
2689 if (GET_CODE (list) == EXPR_LIST)
2690 free_EXPR_LIST_node (list);
2691 else
2692 free_INSN_LIST_node (list);
2695 *listp = NULL;
2698 /* Clear canon_modify_mem_list and modify_mem_list tables. */
2699 static void
2700 clear_modify_mem_tables ()
2702 int i;
2704 EXECUTE_IF_SET_IN_BITMAP
2705 (modify_mem_list_set, 0, i, free_INSN_LIST_list (modify_mem_list + i));
2706 bitmap_clear (modify_mem_list_set);
2708 EXECUTE_IF_SET_IN_BITMAP
2709 (canon_modify_mem_list_set, 0, i,
2710 free_insn_expr_list_list (canon_modify_mem_list + i));
2711 bitmap_clear (canon_modify_mem_list_set);
2714 /* Release memory used by modify_mem_list_set and canon_modify_mem_list_set. */
2716 static void
2717 free_modify_mem_tables ()
2719 clear_modify_mem_tables ();
2720 free (modify_mem_list);
2721 free (canon_modify_mem_list);
2722 modify_mem_list = 0;
2723 canon_modify_mem_list = 0;
2726 /* Reset tables used to keep track of what's still available [since the
2727 start of the block]. */
2729 static void
2730 reset_opr_set_tables ()
2732 /* Maintain a bitmap of which regs have been set since beginning of
2733 the block. */
2734 CLEAR_REG_SET (reg_set_bitmap);
2736 /* Also keep a record of the last instruction to modify memory.
2737 For now this is very trivial, we only record whether any memory
2738 location has been modified. */
2739 clear_modify_mem_tables ();
2742 /* Return non-zero if the operands of X are not set before INSN in
2743 INSN's basic block. */
2745 static int
2746 oprs_not_set_p (x, insn)
2747 rtx x, insn;
2749 int i, j;
2750 enum rtx_code code;
2751 const char *fmt;
2753 if (x == 0)
2754 return 1;
2756 code = GET_CODE (x);
2757 switch (code)
2759 case PC:
2760 case CC0:
2761 case CONST:
2762 case CONST_INT:
2763 case CONST_DOUBLE:
2764 case CONST_VECTOR:
2765 case SYMBOL_REF:
2766 case LABEL_REF:
2767 case ADDR_VEC:
2768 case ADDR_DIFF_VEC:
2769 return 1;
2771 case MEM:
2772 if (load_killed_in_block_p (BLOCK_FOR_INSN (insn),
2773 INSN_CUID (insn), x, 0))
2774 return 0;
2775 else
2776 return oprs_not_set_p (XEXP (x, 0), insn);
2778 case REG:
2779 return ! REGNO_REG_SET_P (reg_set_bitmap, REGNO (x));
2781 default:
2782 break;
2785 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
2787 if (fmt[i] == 'e')
2789 /* If we are about to do the last recursive call
2790 needed at this level, change it into iteration.
2791 This function is called enough to be worth it. */
2792 if (i == 0)
2793 return oprs_not_set_p (XEXP (x, i), insn);
2795 if (! oprs_not_set_p (XEXP (x, i), insn))
2796 return 0;
2798 else if (fmt[i] == 'E')
2799 for (j = 0; j < XVECLEN (x, i); j++)
2800 if (! oprs_not_set_p (XVECEXP (x, i, j), insn))
2801 return 0;
2804 return 1;
2807 /* Mark things set by a CALL. */
2809 static void
2810 mark_call (insn)
2811 rtx insn;
2813 if (! CONST_OR_PURE_CALL_P (insn))
2814 record_last_mem_set_info (insn);
2817 /* Mark things set by a SET. */
2819 static void
2820 mark_set (pat, insn)
2821 rtx pat, insn;
2823 rtx dest = SET_DEST (pat);
2825 while (GET_CODE (dest) == SUBREG
2826 || GET_CODE (dest) == ZERO_EXTRACT
2827 || GET_CODE (dest) == SIGN_EXTRACT
2828 || GET_CODE (dest) == STRICT_LOW_PART)
2829 dest = XEXP (dest, 0);
2831 if (GET_CODE (dest) == REG)
2832 SET_REGNO_REG_SET (reg_set_bitmap, REGNO (dest));
2833 else if (GET_CODE (dest) == MEM)
2834 record_last_mem_set_info (insn);
2836 if (GET_CODE (SET_SRC (pat)) == CALL)
2837 mark_call (insn);
2840 /* Record things set by a CLOBBER. */
2842 static void
2843 mark_clobber (pat, insn)
2844 rtx pat, insn;
2846 rtx clob = XEXP (pat, 0);
2848 while (GET_CODE (clob) == SUBREG || GET_CODE (clob) == STRICT_LOW_PART)
2849 clob = XEXP (clob, 0);
2851 if (GET_CODE (clob) == REG)
2852 SET_REGNO_REG_SET (reg_set_bitmap, REGNO (clob));
2853 else
2854 record_last_mem_set_info (insn);
2857 /* Record things set by INSN.
2858 This data is used by oprs_not_set_p. */
2860 static void
2861 mark_oprs_set (insn)
2862 rtx insn;
2864 rtx pat = PATTERN (insn);
2865 int i;
2867 if (GET_CODE (pat) == SET)
2868 mark_set (pat, insn);
2869 else if (GET_CODE (pat) == PARALLEL)
2870 for (i = 0; i < XVECLEN (pat, 0); i++)
2872 rtx x = XVECEXP (pat, 0, i);
2874 if (GET_CODE (x) == SET)
2875 mark_set (x, insn);
2876 else if (GET_CODE (x) == CLOBBER)
2877 mark_clobber (x, insn);
2878 else if (GET_CODE (x) == CALL)
2879 mark_call (insn);
2882 else if (GET_CODE (pat) == CLOBBER)
2883 mark_clobber (pat, insn);
2884 else if (GET_CODE (pat) == CALL)
2885 mark_call (insn);
2889 /* Classic GCSE reaching definition support. */
2891 /* Allocate reaching def variables. */
2893 static void
2894 alloc_rd_mem (n_blocks, n_insns)
2895 int n_blocks, n_insns;
2897 rd_kill = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_insns);
2898 sbitmap_vector_zero (rd_kill, n_blocks);
2900 rd_gen = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_insns);
2901 sbitmap_vector_zero (rd_gen, n_blocks);
2903 reaching_defs = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_insns);
2904 sbitmap_vector_zero (reaching_defs, n_blocks);
2906 rd_out = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_insns);
2907 sbitmap_vector_zero (rd_out, n_blocks);
2910 /* Free reaching def variables. */
2912 static void
2913 free_rd_mem ()
2915 sbitmap_vector_free (rd_kill);
2916 sbitmap_vector_free (rd_gen);
2917 sbitmap_vector_free (reaching_defs);
2918 sbitmap_vector_free (rd_out);
2921 /* Add INSN to the kills of BB. REGNO, set in BB, is killed by INSN. */
2923 static void
2924 handle_rd_kill_set (insn, regno, bb)
2925 rtx insn;
2926 int regno;
2927 basic_block bb;
2929 struct reg_set *this_reg;
2931 for (this_reg = reg_set_table[regno]; this_reg; this_reg = this_reg->next)
2932 if (BLOCK_NUM (this_reg->insn) != BLOCK_NUM (insn))
2933 SET_BIT (rd_kill[bb->index], INSN_CUID (this_reg->insn));
2936 /* Compute the set of kill's for reaching definitions. */
2938 static void
2939 compute_kill_rd ()
2941 int cuid;
2942 unsigned int regno;
2943 int i;
2944 basic_block bb;
2946 /* For each block
2947 For each set bit in `gen' of the block (i.e. each insn which
2948 generates a definition in the block)
2949 Call the reg set by the insn corresponding to that bit regx
2950 Look at the linked list starting at reg_set_table[regx]
2951 For each setting of regx in the linked list, which is not in
2952 this block
2953 Set the bit in `kill' corresponding to that insn. */
2954 FOR_EACH_BB (bb)
2955 for (cuid = 0; cuid < max_cuid; cuid++)
2956 if (TEST_BIT (rd_gen[bb->index], cuid))
2958 rtx insn = CUID_INSN (cuid);
2959 rtx pat = PATTERN (insn);
2961 if (GET_CODE (insn) == CALL_INSN)
2963 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2964 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
2965 handle_rd_kill_set (insn, regno, bb);
2968 if (GET_CODE (pat) == PARALLEL)
2970 for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
2972 enum rtx_code code = GET_CODE (XVECEXP (pat, 0, i));
2974 if ((code == SET || code == CLOBBER)
2975 && GET_CODE (XEXP (XVECEXP (pat, 0, i), 0)) == REG)
2976 handle_rd_kill_set (insn,
2977 REGNO (XEXP (XVECEXP (pat, 0, i), 0)),
2978 bb);
2981 else if (GET_CODE (pat) == SET && GET_CODE (SET_DEST (pat)) == REG)
2982 /* Each setting of this register outside of this block
2983 must be marked in the set of kills in this block. */
2984 handle_rd_kill_set (insn, REGNO (SET_DEST (pat)), bb);
2988 /* Compute the reaching definitions as in
2989 Compilers Principles, Techniques, and Tools. Aho, Sethi, Ullman,
2990 Chapter 10. It is the same algorithm as used for computing available
2991 expressions but applied to the gens and kills of reaching definitions. */
2993 static void
2994 compute_rd ()
2996 int changed, passes;
2997 basic_block bb;
2999 FOR_EACH_BB (bb)
3000 sbitmap_copy (rd_out[bb->index] /*dst*/, rd_gen[bb->index] /*src*/);
3002 passes = 0;
3003 changed = 1;
3004 while (changed)
3006 changed = 0;
3007 FOR_EACH_BB (bb)
3009 sbitmap_union_of_preds (reaching_defs[bb->index], rd_out, bb->index);
3010 changed |= sbitmap_union_of_diff_cg (rd_out[bb->index], rd_gen[bb->index],
3011 reaching_defs[bb->index], rd_kill[bb->index]);
3013 passes++;
3016 if (gcse_file)
3017 fprintf (gcse_file, "reaching def computation: %d passes\n", passes);
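/* A sketch of the dataflow equations iterated above (standard reaching
   definitions, restated here as a reader's aid):

     reaching_defs[b] = union of rd_out[p] over all predecessors p of b
     rd_out[b]        = rd_gen[b] | (reaching_defs[b] & ~rd_kill[b])

   The loop repeats until no rd_out set changes.  */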
3020 /* Classic GCSE available expression support. */
3022 /* Allocate memory for available expression computation. */
3024 static void
3025 alloc_avail_expr_mem (n_blocks, n_exprs)
3026 int n_blocks, n_exprs;
3028 ae_kill = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_exprs);
3029 sbitmap_vector_zero (ae_kill, n_blocks);
3031 ae_gen = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_exprs);
3032 sbitmap_vector_zero (ae_gen, n_blocks);
3034 ae_in = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_exprs);
3035 sbitmap_vector_zero (ae_in, n_blocks);
3037 ae_out = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_exprs);
3038 sbitmap_vector_zero (ae_out, n_blocks);
3041 static void
3042 free_avail_expr_mem ()
3044 sbitmap_vector_free (ae_kill);
3045 sbitmap_vector_free (ae_gen);
3046 sbitmap_vector_free (ae_in);
3047 sbitmap_vector_free (ae_out);
3050 /* Compute the set of available expressions generated in each basic block. */
3052 static void
3053 compute_ae_gen (expr_hash_table)
3054 struct hash_table *expr_hash_table;
3056 unsigned int i;
3057 struct expr *expr;
3058 struct occr *occr;
3060 /* For each recorded occurrence of each expression, set ae_gen[bb][expr].
3061 This is all we have to do because an expression is not recorded if it
3062 is not available, and the only expressions we want to work with are the
3063 ones that are recorded. */
3064 for (i = 0; i < expr_hash_table->size; i++)
3065 for (expr = expr_hash_table->table[i]; expr != 0; expr = expr->next_same_hash)
3066 for (occr = expr->avail_occr; occr != 0; occr = occr->next)
3067 SET_BIT (ae_gen[BLOCK_NUM (occr->insn)], expr->bitmap_index);
3070 /* Return non-zero if expression X is killed in BB. */
3072 static int
3073 expr_killed_p (x, bb)
3074 rtx x;
3075 basic_block bb;
3077 int i, j;
3078 enum rtx_code code;
3079 const char *fmt;
3081 if (x == 0)
3082 return 1;
3084 code = GET_CODE (x);
3085 switch (code)
3087 case REG:
3088 return TEST_BIT (reg_set_in_block[bb->index], REGNO (x));
3090 case MEM:
3091 if (load_killed_in_block_p (bb, get_max_uid () + 1, x, 0))
3092 return 1;
3093 else
3094 return expr_killed_p (XEXP (x, 0), bb);
3096 case PC:
3097 case CC0: /*FIXME*/
3098 case CONST:
3099 case CONST_INT:
3100 case CONST_DOUBLE:
3101 case CONST_VECTOR:
3102 case SYMBOL_REF:
3103 case LABEL_REF:
3104 case ADDR_VEC:
3105 case ADDR_DIFF_VEC:
3106 return 0;
3108 default:
3109 break;
3112 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
3114 if (fmt[i] == 'e')
3116 /* If we are about to do the last recursive call
3117 needed at this level, change it into iteration.
3118 This function is called enough to be worth it. */
3119 if (i == 0)
3120 return expr_killed_p (XEXP (x, i), bb);
3121 else if (expr_killed_p (XEXP (x, i), bb))
3122 return 1;
3124 else if (fmt[i] == 'E')
3125 for (j = 0; j < XVECLEN (x, i); j++)
3126 if (expr_killed_p (XVECEXP (x, i, j), bb))
3127 return 1;
3130 return 0;
3133 /* Compute the set of available expressions killed in each basic block. */
3135 static void
3136 compute_ae_kill (ae_gen, ae_kill, expr_hash_table)
3137 sbitmap *ae_gen, *ae_kill;
3138 struct hash_table *expr_hash_table;
3140 basic_block bb;
3141 unsigned int i;
3142 struct expr *expr;
3144 FOR_EACH_BB (bb)
3145 for (i = 0; i < expr_hash_table->size; i++)
3146 for (expr = expr_hash_table->table[i]; expr; expr = expr->next_same_hash)
3148 /* Skip EXPR if generated in this block. */
3149 if (TEST_BIT (ae_gen[bb->index], expr->bitmap_index))
3150 continue;
3152 if (expr_killed_p (expr->expr, bb))
3153 SET_BIT (ae_kill[bb->index], expr->bitmap_index);
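/* With ae_gen and ae_kill in hand, compute_available (called from
   one_classic_gcse_pass below) solves the usual available-expression
   equations; a sketch, assuming the standard formulation:

     ae_in[b]  = intersection of ae_out[p] over all predecessors p of b
                 (empty for the entry block)
     ae_out[b] = ae_gen[b] | (ae_in[b] & ~ae_kill[b])  */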
3157 /* Actually perform the Classic GCSE optimizations. */
3159 /* Return non-zero if occurrence OCCR of expression EXPR reaches block BB.
3161 CHECK_SELF_LOOP is non-zero if we should consider a block reaching itself
3162 as a positive reach. We want to do this when there are two computations
3163 of the expression in the block.
3165 VISITED is a pointer to a working buffer for tracking which BB's have
3166 been visited. It is NULL for the top-level call.
3168 We treat reaching expressions that go through blocks containing the same
3169 reaching expression as "not reaching". E.g. if EXPR is generated in blocks
3170 2 and 3, INSN is in block 4, and 2->3->4, we treat the expression in block
3171 2 as not reaching. The intent is to improve the probability of finding
3172 only one reaching expression and to reduce register lifetimes by picking
3173 the closest such expression. */
3175 static int
3176 expr_reaches_here_p_work (occr, expr, bb, check_self_loop, visited)
3177 struct occr *occr;
3178 struct expr *expr;
3179 basic_block bb;
3180 int check_self_loop;
3181 char *visited;
3183 edge pred;
3185 for (pred = bb->pred; pred != NULL; pred = pred->pred_next)
3187 basic_block pred_bb = pred->src;
3189 if (visited[pred_bb->index])
3190 /* This predecessor has already been visited. Nothing to do. */
3192 else if (pred_bb == bb)
3194 /* BB loops on itself. */
3195 if (check_self_loop
3196 && TEST_BIT (ae_gen[pred_bb->index], expr->bitmap_index)
3197 && BLOCK_NUM (occr->insn) == pred_bb->index)
3198 return 1;
3200 visited[pred_bb->index] = 1;
3203 /* Ignore this predecessor if it kills the expression. */
3204 else if (TEST_BIT (ae_kill[pred_bb->index], expr->bitmap_index))
3205 visited[pred_bb->index] = 1;
3207 /* Does this predecessor generate this expression? */
3208 else if (TEST_BIT (ae_gen[pred_bb->index], expr->bitmap_index))
3210 /* Is this the occurrence we're looking for?
3211 Note that there's only one generating occurrence per block
3212 so we just need to check the block number. */
3213 if (BLOCK_NUM (occr->insn) == pred_bb->index)
3214 return 1;
3216 visited[pred_bb->index] = 1;
3219 /* Neither gen nor kill. */
3220 else
3222 visited[pred_bb->index] = 1;
3223 if (expr_reaches_here_p_work (occr, expr, pred_bb, check_self_loop,
3224 visited))
3226 return 1;
3230 /* All paths have been checked. */
3231 return 0;
3234 /* This wrapper for expr_reaches_here_p_work() is to ensure that any
3235 memory allocated for that function is returned. */
3237 static int
3238 expr_reaches_here_p (occr, expr, bb, check_self_loop)
3239 struct occr *occr;
3240 struct expr *expr;
3241 basic_block bb;
3242 int check_self_loop;
3244 int rval;
3245 char *visited = (char *) xcalloc (last_basic_block, 1);
3247 rval = expr_reaches_here_p_work (occr, expr, bb, check_self_loop, visited);
3249 free (visited);
3250 return rval;
3253 /* Return the instruction that computes EXPR that reaches INSN's basic block.
3254 If there is more than one such instruction, return NULL.
3256 Called only by handle_avail_expr. */
3258 static rtx
3259 computing_insn (expr, insn)
3260 struct expr *expr;
3261 rtx insn;
3263 basic_block bb = BLOCK_FOR_INSN (insn);
3265 if (expr->avail_occr->next == NULL)
3267 if (BLOCK_FOR_INSN (expr->avail_occr->insn) == bb)
3268 /* The available expression is actually itself
3269 (i.e. a loop in the flow graph) so do nothing. */
3270 return NULL;
3272 /* (FIXME) This is the case where we found a pattern that was created by
3273 a substitution that took place. */
3274 return expr->avail_occr->insn;
3276 else
3278 /* Pattern is computed more than once.
3279 Search backwards from this insn to see how many of these
3280 computations actually reach this insn. */
3281 struct occr *occr;
3282 rtx insn_computes_expr = NULL;
3283 int can_reach = 0;
3285 for (occr = expr->avail_occr; occr != NULL; occr = occr->next)
3287 if (BLOCK_FOR_INSN (occr->insn) == bb)
3289 /* The expression is generated in this block.
3290 The only time we care about this is when the expression
3291 is generated later in the block [and thus there's a loop].
3292 We let the normal cse pass handle the other cases. */
3293 if (INSN_CUID (insn) < INSN_CUID (occr->insn)
3294 && expr_reaches_here_p (occr, expr, bb, 1))
3296 can_reach++;
3297 if (can_reach > 1)
3298 return NULL;
3300 insn_computes_expr = occr->insn;
3303 else if (expr_reaches_here_p (occr, expr, bb, 0))
3305 can_reach++;
3306 if (can_reach > 1)
3307 return NULL;
3309 insn_computes_expr = occr->insn;
3313 if (insn_computes_expr == NULL)
3314 abort ();
3316 return insn_computes_expr;
3320 /* Return non-zero if the definition in DEF_INSN can reach INSN.
3321 Only called by can_disregard_other_sets. */
3323 static int
3324 def_reaches_here_p (insn, def_insn)
3325 rtx insn, def_insn;
3327 rtx reg;
3329 if (TEST_BIT (reaching_defs[BLOCK_NUM (insn)], INSN_CUID (def_insn)))
3330 return 1;
3332 if (BLOCK_NUM (insn) == BLOCK_NUM (def_insn))
3334 if (INSN_CUID (def_insn) < INSN_CUID (insn))
3336 if (GET_CODE (PATTERN (def_insn)) == PARALLEL)
3337 return 1;
3338 else if (GET_CODE (PATTERN (def_insn)) == CLOBBER)
3339 reg = XEXP (PATTERN (def_insn), 0);
3340 else if (GET_CODE (PATTERN (def_insn)) == SET)
3341 reg = SET_DEST (PATTERN (def_insn));
3342 else
3343 abort ();
3345 return ! reg_set_between_p (reg, NEXT_INSN (def_insn), insn);
3347 else
3348 return 0;
3351 return 0;
3354 /* Return non-zero if *ADDR_THIS_REG can only have one value at INSN. The
3355 value returned is the number of definitions that reach INSN. Returning a
3356 value of zero means that [maybe] more than one definition reaches INSN and
3357 the caller can't perform whatever optimization it is trying; i.e. it is
3358 always safe to return zero. */
3360 static int
3361 can_disregard_other_sets (addr_this_reg, insn, for_combine)
3362 struct reg_set **addr_this_reg;
3363 rtx insn;
3364 int for_combine;
3366 int number_of_reaching_defs = 0;
3367 struct reg_set *this_reg;
3369 for (this_reg = *addr_this_reg; this_reg != 0; this_reg = this_reg->next)
3370 if (def_reaches_here_p (insn, this_reg->insn))
3372 number_of_reaching_defs++;
3373 /* Ignore parallels for now. */
3374 if (GET_CODE (PATTERN (this_reg->insn)) == PARALLEL)
3375 return 0;
3377 if (!for_combine
3378 && (GET_CODE (PATTERN (this_reg->insn)) == CLOBBER
3379 || ! rtx_equal_p (SET_SRC (PATTERN (this_reg->insn)),
3380 SET_SRC (PATTERN (insn)))))
3381 /* A setting of the reg to a different value reaches INSN. */
3382 return 0;
3384 if (number_of_reaching_defs > 1)
3386 /* If, in this setting, the value the register is being set to is
3387 equal to the previous value the register was set to, and this
3388 setting reaches the insn we are trying to do the substitution
3389 on, then we are ok. */
3390 if (GET_CODE (PATTERN (this_reg->insn)) == CLOBBER)
3391 return 0;
3392 else if (! rtx_equal_p (SET_SRC (PATTERN (this_reg->insn)),
3393 SET_SRC (PATTERN (insn))))
3394 return 0;
3397 *addr_this_reg = this_reg;
3400 return number_of_reaching_defs;
3403 /* Expression computed by insn is available and the substitution is legal,
3404 so try to perform the substitution.
3406 The result is non-zero if any changes were made. */
3408 static int
3409 handle_avail_expr (insn, expr)
3410 rtx insn;
3411 struct expr *expr;
3413 rtx pat, insn_computes_expr, expr_set;
3414 rtx to;
3415 struct reg_set *this_reg;
3416 int found_setting, use_src;
3417 int changed = 0;
3419 /* We only handle the case where one computation of the expression
3420 reaches this instruction. */
3421 insn_computes_expr = computing_insn (expr, insn);
3422 if (insn_computes_expr == NULL)
3423 return 0;
3424 expr_set = single_set (insn_computes_expr);
3425 if (!expr_set)
3426 abort ();
3428 found_setting = 0;
3429 use_src = 0;
3431 /* At this point we know only one computation of EXPR outside of this
3432 block reaches this insn. Now try to find a register that the
3433 expression is computed into. */
3434 if (GET_CODE (SET_SRC (expr_set)) == REG)
3436 /* This is the case when the available expression that reaches
3437 here has already been handled as an available expression. */
3438 unsigned int regnum_for_replacing
3439 = REGNO (SET_SRC (expr_set));
3441 /* If the register was created by GCSE we can't use `reg_set_table',
3442 however we know it's set only once. */
3443 if (regnum_for_replacing >= max_gcse_regno
3444 /* If the register the expression is computed into is set only once,
3445 or only one set reaches this insn, we can use it. */
3446 || (((this_reg = reg_set_table[regnum_for_replacing]),
3447 this_reg->next == NULL)
3448 || can_disregard_other_sets (&this_reg, insn, 0)))
3450 use_src = 1;
3451 found_setting = 1;
3455 if (!found_setting)
3457 unsigned int regnum_for_replacing
3458 = REGNO (SET_DEST (expr_set));
3460 /* This shouldn't happen. */
3461 if (regnum_for_replacing >= max_gcse_regno)
3462 abort ();
3464 this_reg = reg_set_table[regnum_for_replacing];
3466 /* If the register the expression is computed into is set only once,
3467 or only one set reaches this insn, use it. */
3468 if (this_reg->next == NULL
3469 || can_disregard_other_sets (&this_reg, insn, 0))
3470 found_setting = 1;
3473 if (found_setting)
3475 pat = PATTERN (insn);
3476 if (use_src)
3477 to = SET_SRC (expr_set);
3478 else
3479 to = SET_DEST (expr_set);
3480 changed = validate_change (insn, &SET_SRC (pat), to, 0);
3482 /* We should be able to ignore the return code from validate_change but
3483 to play it safe we check. */
3484 if (changed)
3486 gcse_subst_count++;
3487 if (gcse_file != NULL)
3489 fprintf (gcse_file, "GCSE: Replacing the source in insn %d with",
3490 INSN_UID (insn));
3491 fprintf (gcse_file, " reg %d %s insn %d\n",
3492 REGNO (to), use_src ? "from" : "set in",
3493 INSN_UID (insn_computes_expr));
3498 /* The register that the expr is computed into is set more than once. */
3499 else if (1 /*expensive_op(this_pattrn->op) && do_expensive_gcse)*/)
3501 /* Insert an insn after insnx that copies the reg set in insnx
3502 into a new pseudo register; call this new register REGN.
3503 From insnb until the end of the basic block, or until REGB is set,
3504 replace all uses of REGB with REGN. */
3505 rtx new_insn;
3507 to = gen_reg_rtx (GET_MODE (SET_DEST (expr_set)));
3509 /* Generate the new insn. */
3510 /* ??? If the change fails, we return 0, even though we created
3511 an insn. I think this is ok. */
3512 new_insn
3513 = emit_insn_after (gen_rtx_SET (VOIDmode, to,
3514 SET_DEST (expr_set)),
3515 insn_computes_expr);
3517 /* Keep register set table up to date. */
3518 record_one_set (REGNO (to), new_insn);
3520 gcse_create_count++;
3521 if (gcse_file != NULL)
3523 fprintf (gcse_file, "GCSE: Creating insn %d to copy value of reg %d",
3524 INSN_UID (NEXT_INSN (insn_computes_expr)),
3525 REGNO (SET_SRC (PATTERN (NEXT_INSN (insn_computes_expr)))));
3526 fprintf (gcse_file, ", computed in insn %d,\n",
3527 INSN_UID (insn_computes_expr));
3528 fprintf (gcse_file, " into newly allocated reg %d\n",
3529 REGNO (to));
3532 pat = PATTERN (insn);
3534 /* Do register replacement for INSN. */
3535 changed = validate_change (insn, &SET_SRC (pat),
3536 SET_DEST (PATTERN
3537 (NEXT_INSN (insn_computes_expr))),
3538 0);
3540 /* We should be able to ignore the return code from validate_change but
3541 to play it safe we check. */
3542 if (changed)
3544 gcse_subst_count++;
3545 if (gcse_file != NULL)
3547 fprintf (gcse_file,
3548 "GCSE: Replacing the source in insn %d with reg %d ",
3549 INSN_UID (insn),
3550 REGNO (SET_DEST (PATTERN (NEXT_INSN
3551 (insn_computes_expr)))));
3552 fprintf (gcse_file, "set in insn %d\n",
3553 INSN_UID (insn_computes_expr));
3558 return changed;
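/* A hypothetical before/after sketch of the substitution performed above
   (insn and register numbers invented for illustration):

     bb 2:  insn 10  (set (reg 60) (plus:SI (reg 58) (const_int 4)))
     bb 4:  insn 20  (set (reg 63) (plus:SI (reg 58) (const_int 4)))

   If insn 10 is the only computation reaching insn 20 and reg 60 has only
   one reaching set, insn 20's source is replaced, giving
   (set (reg 63) (reg 60)).  Otherwise a new pseudo is created right after
   insn 10 to hold the value and insn 20 uses that pseudo instead.  */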
3561 /* Perform classic GCSE. This is called by one_classic_gcse_pass after all
3562 the dataflow analysis has been done.
3564 The result is non-zero if a change was made. */
3566 static int
3567 classic_gcse ()
3569 int changed;
3570 rtx insn;
3571 basic_block bb;
3573 /* Note we start at block 1. */
3575 if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
3576 return 0;
3578 changed = 0;
3579 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb->next_bb, EXIT_BLOCK_PTR, next_bb)
3581 /* Reset tables used to keep track of what's still valid [since the
3582 start of the block]. */
3583 reset_opr_set_tables ();
3585 for (insn = bb->head;
3586 insn != NULL && insn != NEXT_INSN (bb->end);
3587 insn = NEXT_INSN (insn))
3589 /* Is insn of form (set (pseudo-reg) ...)? */
3590 if (GET_CODE (insn) == INSN
3591 && GET_CODE (PATTERN (insn)) == SET
3592 && GET_CODE (SET_DEST (PATTERN (insn))) == REG
3593 && REGNO (SET_DEST (PATTERN (insn))) >= FIRST_PSEUDO_REGISTER)
3595 rtx pat = PATTERN (insn);
3596 rtx src = SET_SRC (pat);
3597 struct expr *expr;
3599 if (want_to_gcse_p (src)
3600 /* Is the expression recorded? */
3601 && ((expr = lookup_expr (src, &expr_hash_table)) != NULL)
3602 /* Is the expression available [at the start of the
3603 block]? */
3604 && TEST_BIT (ae_in[bb->index], expr->bitmap_index)
3605 /* Are the operands unchanged since the start of the
3606 block? */
3607 && oprs_not_set_p (src, insn))
3608 changed |= handle_avail_expr (insn, expr);
3611 /* Keep track of everything modified by this insn. */
3612 /* ??? Need to be careful w.r.t. mods done to INSN. */
3613 if (INSN_P (insn))
3614 mark_oprs_set (insn);
3618 return changed;
3621 /* Top level routine to perform one classic GCSE pass.
3623 Return non-zero if a change was made. */
3625 static int
3626 one_classic_gcse_pass (pass)
3627 int pass;
3629 int changed = 0;
3631 gcse_subst_count = 0;
3632 gcse_create_count = 0;
3634 alloc_hash_table (max_cuid, &expr_hash_table, 0);
3635 alloc_rd_mem (last_basic_block, max_cuid);
3636 compute_hash_table (&expr_hash_table);
3637 if (gcse_file)
3638 dump_hash_table (gcse_file, "Expression", &expr_hash_table);
3640 if (expr_hash_table.n_elems > 0)
3642 compute_kill_rd ();
3643 compute_rd ();
3644 alloc_avail_expr_mem (last_basic_block, expr_hash_table.n_elems);
3645 compute_ae_gen (&expr_hash_table);
3646 compute_ae_kill (ae_gen, ae_kill, &expr_hash_table);
3647 compute_available (ae_gen, ae_kill, ae_out, ae_in);
3648 changed = classic_gcse ();
3649 free_avail_expr_mem ();
3652 free_rd_mem ();
3653 free_hash_table (&expr_hash_table);
3655 if (gcse_file)
3657 fprintf (gcse_file, "\n");
3658 fprintf (gcse_file, "GCSE of %s, pass %d: %d bytes needed, %d substs,",
3659 current_function_name, pass, bytes_used, gcse_subst_count);
3660 fprintf (gcse_file, "%d insns created\n", gcse_create_count);
3663 return changed;
3666 /* Compute copy/constant propagation working variables. */
3668 /* Local properties of assignments. */
3669 static sbitmap *cprop_pavloc;
3670 static sbitmap *cprop_absaltered;
3672 /* Global properties of assignments (computed from the local properties). */
3673 static sbitmap *cprop_avin;
3674 static sbitmap *cprop_avout;
3676 /* Allocate vars used for copy/const propagation. N_BLOCKS is the number of
3677 basic blocks. N_SETS is the number of sets. */
3679 static void
3680 alloc_cprop_mem (n_blocks, n_sets)
3681 int n_blocks, n_sets;
3683 cprop_pavloc = sbitmap_vector_alloc (n_blocks, n_sets);
3684 cprop_absaltered = sbitmap_vector_alloc (n_blocks, n_sets);
3686 cprop_avin = sbitmap_vector_alloc (n_blocks, n_sets);
3687 cprop_avout = sbitmap_vector_alloc (n_blocks, n_sets);
3690 /* Free vars used by copy/const propagation. */
3692 static void
3693 free_cprop_mem ()
3695 sbitmap_vector_free (cprop_pavloc);
3696 sbitmap_vector_free (cprop_absaltered);
3697 sbitmap_vector_free (cprop_avin);
3698 sbitmap_vector_free (cprop_avout);
3701 /* For each block, compute whether X is transparent. X is either an
3702 expression or an assignment [though we don't care which, for this context
3703 an assignment is treated as an expression]. For each block where an
3704 element of X is modified, set (SET_P == 1) or reset (SET_P == 0) the INDX
3705 bit in BMAP. */
3707 static void
3708 compute_transp (x, indx, bmap, set_p)
3709 rtx x;
3710 int indx;
3711 sbitmap *bmap;
3712 int set_p;
3714 int i, j;
3715 basic_block bb;
3716 enum rtx_code code;
3717 reg_set *r;
3718 const char *fmt;
3720 /* repeat is used to turn tail-recursion into iteration since GCC
3721 can't do it when there's no return value. */
3722 repeat:
3724 if (x == 0)
3725 return;
3727 code = GET_CODE (x);
3728 switch (code)
3730 case REG:
3731 if (set_p)
3733 if (REGNO (x) < FIRST_PSEUDO_REGISTER)
3735 FOR_EACH_BB (bb)
3736 if (TEST_BIT (reg_set_in_block[bb->index], REGNO (x)))
3737 SET_BIT (bmap[bb->index], indx);
3739 else
3741 for (r = reg_set_table[REGNO (x)]; r != NULL; r = r->next)
3742 SET_BIT (bmap[BLOCK_NUM (r->insn)], indx);
3745 else
3747 if (REGNO (x) < FIRST_PSEUDO_REGISTER)
3749 FOR_EACH_BB (bb)
3750 if (TEST_BIT (reg_set_in_block[bb->index], REGNO (x)))
3751 RESET_BIT (bmap[bb->index], indx);
3753 else
3755 for (r = reg_set_table[REGNO (x)]; r != NULL; r = r->next)
3756 RESET_BIT (bmap[BLOCK_NUM (r->insn)], indx);
3760 return;
3762 case MEM:
3763 FOR_EACH_BB (bb)
3765 rtx list_entry = canon_modify_mem_list[bb->index];
3767 while (list_entry)
3769 rtx dest, dest_addr;
3771 if (GET_CODE (XEXP (list_entry, 0)) == CALL_INSN)
3773 if (set_p)
3774 SET_BIT (bmap[bb->index], indx);
3775 else
3776 RESET_BIT (bmap[bb->index], indx);
3777 break;
3779 /* LIST_ENTRY must be an INSN of some kind that sets memory.
3780 Examine each hunk of memory that is modified. */
3782 dest = XEXP (list_entry, 0);
3783 list_entry = XEXP (list_entry, 1);
3784 dest_addr = XEXP (list_entry, 0);
3786 if (canon_true_dependence (dest, GET_MODE (dest), dest_addr,
3787 x, rtx_addr_varies_p))
3789 if (set_p)
3790 SET_BIT (bmap[bb->index], indx);
3791 else
3792 RESET_BIT (bmap[bb->index], indx);
3793 break;
3795 list_entry = XEXP (list_entry, 1);
3799 x = XEXP (x, 0);
3800 goto repeat;
3802 case PC:
3803 case CC0: /*FIXME*/
3804 case CONST:
3805 case CONST_INT:
3806 case CONST_DOUBLE:
3807 case CONST_VECTOR:
3808 case SYMBOL_REF:
3809 case LABEL_REF:
3810 case ADDR_VEC:
3811 case ADDR_DIFF_VEC:
3812 return;
3814 default:
3815 break;
3818 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
3820 if (fmt[i] == 'e')
3822 /* If we are about to do the last recursive call
3823 needed at this level, change it into iteration.
3824 This function is called enough to be worth it. */
3825 if (i == 0)
3827 x = XEXP (x, i);
3828 goto repeat;
3831 compute_transp (XEXP (x, i), indx, bmap, set_p);
3833 else if (fmt[i] == 'E')
3834 for (j = 0; j < XVECLEN (x, i); j++)
3835 compute_transp (XVECEXP (x, i, j), indx, bmap, set_p);
3839 /* Top level routine to do the dataflow analysis needed by copy/const
3840 propagation. */
3842 static void
3843 compute_cprop_data ()
3845 compute_local_properties (cprop_absaltered, cprop_pavloc, NULL, &set_hash_table);
3846 compute_available (cprop_pavloc, cprop_absaltered,
3847 cprop_avout, cprop_avin);
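/* In outline, compute_available solves the usual forward "available
   expressions" dataflow problem:

     AVIN[b]  = intersection over predecessors p of AVOUT[p]
     AVOUT[b] = PAVLOC[b] | (AVIN[b] & ~ABSALTERED[b])

   iterated to a fixed point, with AVIN taken to be empty for the
   successors of the entry block.  */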
3850 /* Copy/constant propagation. */
3852 /* Maximum number of register uses in an insn that we handle. */
3853 #define MAX_USES 8
3855 /* Table of uses found in an insn.
3856 Allocated statically to avoid alloc/free complexity and overhead. */
3857 static struct reg_use reg_use_table[MAX_USES];
3859 /* Index into `reg_use_table' while building it. */
3860 static int reg_use_count;
3862 /* Set up a list of register numbers used in INSN. The found uses are stored
3863 in `reg_use_table'. `reg_use_count' is initialized to zero before entry,
3864 and contains the number of uses in the table upon exit.
3866 ??? If a register appears multiple times we will record it multiple times.
3867 This doesn't hurt anything but it will slow things down. */
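/* Note that this function has the callback signature expected by note_uses,
   which is how it is normally invoked; DATA is unused.  */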
3869 static void
3870 find_used_regs (xptr, data)
3871 rtx *xptr;
3872 void *data ATTRIBUTE_UNUSED;
3874 int i, j;
3875 enum rtx_code code;
3876 const char *fmt;
3877 rtx x = *xptr;
3879 /* repeat is used to turn tail-recursion into iteration since GCC
3880 can't do it when there's no return value. */
3881 repeat:
3882 if (x == 0)
3883 return;
3885 code = GET_CODE (x);
3886 if (REG_P (x))
3888 if (reg_use_count == MAX_USES)
3889 return;
3891 reg_use_table[reg_use_count].reg_rtx = x;
3892 reg_use_count++;
3895 /* Recursively scan the operands of this expression. */
3897 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
3899 if (fmt[i] == 'e')
3901 /* If we are about to do the last recursive call
3902 needed at this level, change it into iteration.
3903 This function is called enough to be worth it. */
3904 if (i == 0)
3906 x = XEXP (x, 0);
3907 goto repeat;
3910 find_used_regs (&XEXP (x, i), data);
3912 else if (fmt[i] == 'E')
3913 for (j = 0; j < XVECLEN (x, i); j++)
3914 find_used_regs (&XVECEXP (x, i, j), data);
3918 /* Try to replace all non-SET_DEST occurrences of FROM in INSN with TO.
3919 Returns non-zero if successful. */
3921 static int
3922 try_replace_reg (from, to, insn)
3923 rtx from, to, insn;
3925 rtx note = find_reg_equal_equiv_note (insn);
3926 rtx src = 0;
3927 int success = 0;
3928 rtx set = single_set (insn);
3930 validate_replace_src_group (from, to, insn);
3931 if (num_changes_pending () && apply_change_group ())
3932 success = 1;
3934 if (!success && set && reg_mentioned_p (from, SET_SRC (set)))
3936 /* If the above failed and this is a single set, try to simplify the source of
3937 the set given our substitution. We could perhaps try this for multiple
3938 SETs, but it probably won't buy us anything. */
3939 src = simplify_replace_rtx (SET_SRC (set), from, to);
3941 if (!rtx_equal_p (src, SET_SRC (set))
3942 && validate_change (insn, &SET_SRC (set), src, 0))
3943 success = 1;
3945 /* If we've failed to do replacement, have a single SET, and don't already
3946 have a note, add a REG_EQUAL note to not lose information. */
3947 if (!success && note == 0 && set != 0)
3948 note = set_unique_reg_note (insn, REG_EQUAL, copy_rtx (src));
3951 /* If there is already a NOTE, update the expression in it with our
3952 replacement. */
3953 else if (note != 0)
3954 XEXP (note, 0) = simplify_replace_rtx (XEXP (note, 0), from, to);
3956 /* REG_EQUAL may get simplified into a register.
3957 We don't allow that. Remove that note. This case ought
3958 not to happen, because earlier code ought to synthesize a
3959 reg-reg move, but be on the safe side. */
3960 if (note && REG_P (XEXP (note, 0)))
3961 remove_note (insn, note);
3963 return success;
3966 /* Find a set of register REGNO that is available on entry to INSN's block. Returns
3967 NULL if no such set is found. */
3969 static struct expr *
3970 find_avail_set (regno, insn)
3971 int regno;
3972 rtx insn;
3974 /* SET1 contains the last set found that can be returned to the caller for
3975 use in a substitution. */
3976 struct expr *set1 = 0;
3978 /* Loops are not possible here. To get a loop we would need two sets
3979 available at the start of the block containing INSN, i.e. we would
3980 need two sets like this available at the start of the block:
3982 (set (reg X) (reg Y))
3983 (set (reg Y) (reg X))
3985 This can not happen since the set of (reg Y) would have killed the
3986 set of (reg X) making it unavailable at the start of this block. */
3987 while (1)
3989 rtx src;
3990 struct expr *set = lookup_set (regno, NULL_RTX, &set_hash_table);
3992 /* Find a set that is available at the start of the block
3993 which contains INSN. */
3994 while (set)
3996 if (TEST_BIT (cprop_avin[BLOCK_NUM (insn)], set->bitmap_index))
3997 break;
3998 set = next_set (regno, set);
4001 /* If no available set was found we've reached the end of the
4002 (possibly empty) copy chain. */
4003 if (set == 0)
4004 break;
4006 if (GET_CODE (set->expr) != SET)
4007 abort ();
4009 src = SET_SRC (set->expr);
4011 /* We know the set is available.
4012 Now check that SRC is ANTLOC (i.e. none of the source operands
4013 have changed since the start of the block).
4015 If the source operand changed, we may still use it for the next
4016 iteration of this loop, but we may not use it for substitutions. */
4018 if (CONSTANT_P (src) || oprs_not_set_p (src, insn))
4019 set1 = set;
4021 /* If the source of the set is anything except a register, then
4022 we have reached the end of the copy chain. */
4023 if (GET_CODE (src) != REG)
4024 break;
4026 /* Follow the copy chain, i.e. start another iteration of the loop
4027 and see if we have an available copy into SRC. */
4028 regno = REGNO (src);
4031 /* SET1 holds the last set that was available and anticipatable at
4032 INSN. */
4033 return set1;
4036 /* Subroutine of cprop_insn that tries to propagate constants into
4037 JUMP_INSNS. JUMP must be a conditional jump. If SETCC is non-NULL
4038 it is the instruction that immediately precedes JUMP, and must be a
4039 single SET of a register. FROM is what we will try to replace,
4040 SRC is the constant we will try to substitute for it. Returns nonzero
4041 if a change was made. */
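/* For example, if SETCC is
     (set (reg X) (lt (reg Y) (const_int 10)))
   and JUMP branches on (reg X), then substituting (const_int 3) for
   (reg Y) lets the condition fold to a constant: the jump either
   becomes unconditional (when the folded SET_SRC is a LABEL_REF) or
   is deleted as a no-op (when it simplifies to just (pc)).  */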
4043 static int
4044 cprop_jump (bb, setcc, jump, from, src)
4045 basic_block bb;
4046 rtx setcc;
4047 rtx jump;
4048 rtx from;
4049 rtx src;
4051 rtx new, new_set;
4052 rtx set = pc_set (jump);
4054 /* First substitute in the INSN condition as the SET_SRC of the JUMP,
4055 then substitute the given values into this expanded JUMP. */
4056 if (setcc != NULL)
4058 rtx setcc_set = single_set (setcc);
4059 new_set = simplify_replace_rtx (SET_SRC (set),
4060 SET_DEST (setcc_set),
4061 SET_SRC (setcc_set));
4063 else
4064 new_set = SET_SRC (set);
4066 new = simplify_replace_rtx (new_set, from, src);
4068 /* If no simplification can be made, then try the next
4069 register. */
4070 if (rtx_equal_p (new, new_set))
4071 return 0;
4073 /* If this is now a no-op, delete it; otherwise it must be a valid insn. */
4074 if (new == pc_rtx)
4075 delete_insn (jump);
4076 else
4078 if (! validate_change (jump, &SET_SRC (set), new, 0))
4079 return 0;
4081 /* If this has turned into an unconditional jump,
4082 then put a barrier after it so that the unreachable
4083 code will be deleted. */
4084 if (GET_CODE (SET_SRC (set)) == LABEL_REF)
4085 emit_barrier_after (jump);
4088 #ifdef HAVE_cc0
4089 /* Delete the cc0 setter. */
4090 if (setcc != NULL && CC0_P (SET_DEST (single_set (setcc))))
4091 delete_insn (setcc);
4092 #endif
4094 run_jump_opt_after_gcse = 1;
4096 const_prop_count++;
4097 if (gcse_file != NULL)
4099 fprintf (gcse_file,
4100 "CONST-PROP: Replacing reg %d in jump_insn %d with constant ",
4101 REGNO (from), INSN_UID (jump));
4102 print_rtl (gcse_file, src);
4103 fprintf (gcse_file, "\n");
4105 purge_dead_edges (bb);
4107 return 1;
4110 static bool
4111 constprop_register (insn, from, to, alter_jumps)
4112 rtx insn;
4113 rtx from;
4114 rtx to;
4115 int alter_jumps;
4117 rtx sset;
4119 /* Check for reg or cc0 setting instructions followed by
4120 conditional branch instructions first. */
4121 if (alter_jumps
4122 && (sset = single_set (insn)) != NULL
4123 && any_condjump_p (NEXT_INSN (insn)) && onlyjump_p (NEXT_INSN (insn)))
4125 rtx dest = SET_DEST (sset);
4126 if ((REG_P (dest) || CC0_P (dest))
4127 && cprop_jump (BLOCK_FOR_INSN (insn), insn, NEXT_INSN (insn), from, to))
4128 return 1;
4131 /* Handle normal insns next. */
4132 if (GET_CODE (insn) == INSN
4133 && try_replace_reg (from, to, insn))
4134 return 1;
4136 /* Try to propagate a CONST_INT into a conditional jump.
4137 We're pretty specific about what we will handle in this
4138 code; we can extend this as necessary over time.
4140 Right now the insn in question must look like
4141 (set (pc) (if_then_else ...)) */
4142 else if (alter_jumps && any_condjump_p (insn) && onlyjump_p (insn))
4143 return cprop_jump (BLOCK_FOR_INSN (insn), NULL, insn, from, to);
4144 return 0;
4147 /* Perform constant and copy propagation on INSN.
4148 The result is non-zero if a change was made. */
4150 static int
4151 cprop_insn (insn, alter_jumps)
4152 rtx insn;
4153 int alter_jumps;
4155 struct reg_use *reg_used;
4156 int changed = 0;
4157 rtx note;
4159 if (!INSN_P (insn))
4160 return 0;
4162 reg_use_count = 0;
4163 note_uses (&PATTERN (insn), find_used_regs, NULL);
4165 note = find_reg_equal_equiv_note (insn);
4167 /* We may win even when propagating constants into notes. */
4168 if (note)
4169 find_used_regs (&XEXP (note, 0), NULL);
4171 for (reg_used = &reg_use_table[0]; reg_use_count > 0;
4172 reg_used++, reg_use_count--)
4174 unsigned int regno = REGNO (reg_used->reg_rtx);
4175 rtx pat, src;
4176 struct expr *set;
4178 /* Ignore registers created by GCSE.
4179 We do this because ... */
4180 if (regno >= max_gcse_regno)
4181 continue;
4183 /* If the register has already been set in this block, there's
4184 nothing we can do. */
4185 if (! oprs_not_set_p (reg_used->reg_rtx, insn))
4186 continue;
4188 /* Find an assignment that sets reg_used and is available
4189 at the start of the block. */
4190 set = find_avail_set (regno, insn);
4191 if (! set)
4192 continue;
4194 pat = set->expr;
4195 /* ??? We might be able to handle PARALLELs. Later. */
4196 if (GET_CODE (pat) != SET)
4197 abort ();
4199 src = SET_SRC (pat);
4201 /* Constant propagation. */
4202 if (CONSTANT_P (src))
4204 if (constprop_register (insn, reg_used->reg_rtx, src, alter_jumps))
4206 changed = 1;
4207 const_prop_count++;
4208 if (gcse_file != NULL)
4210 fprintf (gcse_file, "GLOBAL CONST-PROP: Replacing reg %d in ", regno);
4211 fprintf (gcse_file, "insn %d with constant ", INSN_UID (insn));
4212 print_rtl (gcse_file, src);
4213 fprintf (gcse_file, "\n");
4217 else if (GET_CODE (src) == REG
4218 && REGNO (src) >= FIRST_PSEUDO_REGISTER
4219 && REGNO (src) != regno)
4221 if (try_replace_reg (reg_used->reg_rtx, src, insn))
4223 changed = 1;
4224 copy_prop_count++;
4225 if (gcse_file != NULL)
4227 fprintf (gcse_file, "GLOBAL COPY-PROP: Replacing reg %d in insn %d",
4228 regno, INSN_UID (insn));
4229 fprintf (gcse_file, " with reg %d\n", REGNO (src));
4232 /* The original insn setting reg_used may or may not now be
4233 deletable. We leave the deletion to flow. */
4234 /* FIXME: If it turns out that the insn isn't deletable,
4235 then we may have unnecessarily extended register lifetimes
4236 and made things worse. */
4241 return changed;
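/* Subroutine of local_cprop_pass.  X is a register used in INSN.  Using the
   value equivalences recorded by cselib, look for either a constant or an
   earlier pseudo register holding the same value, and try to substitute it
   (via constprop_register for constants, try_replace_reg for registers).
   Returns true if a replacement was made.  */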
4244 static bool
4245 do_local_cprop (x, insn, alter_jumps)
4246 rtx x;
4247 rtx insn;
4248 int alter_jumps;
4250 rtx newreg = NULL, newcnst = NULL;
4252 /* Rule out USE instructions and ASM statements as we don't want to change the hard
4253 registers mentioned. */
4254 if (GET_CODE (x) == REG
4255 && (REGNO (x) >= FIRST_PSEUDO_REGISTER
4256 || (GET_CODE (PATTERN (insn)) != USE && asm_noperands (PATTERN (insn)) < 0)))
4258 cselib_val *val = cselib_lookup (x, GET_MODE (x), 0);
4259 struct elt_loc_list *l;
4261 if (!val)
4262 return false;
4263 for (l = val->locs; l; l = l->next)
4265 rtx this_rtx = l->loc;
4266 rtx note;
4268 if (CONSTANT_P (this_rtx))
4269 newcnst = this_rtx;
4270 if (REG_P (this_rtx) && REGNO (this_rtx) >= FIRST_PSEUDO_REGISTER
4271 /* Don't copy propagate if it has an attached REG_EQUIV note.
4272 At this point only function parameters should have
4273 REG_EQUIV notes, and if the argument slot is used somewhere
4274 explicitly, it means the address of the parameter has been taken,
4275 so we should not extend the lifetime of the pseudo. */
4276 && (!(note = find_reg_note (l->setting_insn, REG_EQUIV, NULL_RTX))
4277 || GET_CODE (XEXP (note, 0)) != MEM))
4278 newreg = this_rtx;
4280 if (newcnst && constprop_register (insn, x, newcnst, alter_jumps))
4282 if (gcse_file != NULL)
4284 fprintf (gcse_file, "LOCAL CONST-PROP: Replacing reg %d in ",
4285 REGNO (x));
4286 fprintf (gcse_file, "insn %d with constant ",
4287 INSN_UID (insn));
4288 print_rtl (gcse_file, newcnst);
4289 fprintf (gcse_file, "\n");
4291 const_prop_count++;
4292 return true;
4294 else if (newreg && newreg != x && try_replace_reg (x, newreg, insn))
4296 if (gcse_file != NULL)
4298 fprintf (gcse_file,
4299 "LOCAL COPY-PROP: Replacing reg %d in insn %d",
4300 REGNO (x), INSN_UID (insn));
4301 fprintf (gcse_file, " with reg %d\n", REGNO (newreg));
4303 copy_prop_count++;
4304 return true;
4307 return false;
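/* Do a pass of "local" constant/copy propagation over the whole function,
   driven by cselib rather than by the global hash tables: scan every insn,
   record the values it computes with cselib_process_insn, and try
   do_local_cprop on each register use found by note_uses (and in any
   REG_EQUAL/REG_EQUIV note).  */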
4310 static void
4311 local_cprop_pass (alter_jumps)
4312 int alter_jumps;
4314 rtx insn;
4315 struct reg_use *reg_used;
4317 cselib_init ();
4318 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4320 if (INSN_P (insn))
4322 rtx note = find_reg_equal_equiv_note (insn);
4326 reg_use_count = 0;
4327 note_uses (&PATTERN (insn), find_used_regs, NULL);
4328 if (note)
4329 find_used_regs (&XEXP (note, 0), NULL);
4331 for (reg_used = &reg_use_table[0]; reg_use_count > 0;
4332 reg_used++, reg_use_count--)
4333 if (do_local_cprop (reg_used->reg_rtx, insn, alter_jumps))
4334 break;
4336 while (reg_use_count);
4338 cselib_process_insn (insn);
4340 cselib_finish ();
4343 /* Forward propagate copies. This includes copies and constants. Return
4344 non-zero if a change was made. */
4346 static int
4347 cprop (alter_jumps)
4348 int alter_jumps;
4350 int changed;
4351 basic_block bb;
4352 rtx insn;
4354 /* Note we start at block 1. */
4355 if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
4357 if (gcse_file != NULL)
4358 fprintf (gcse_file, "\n");
4359 return 0;
4362 changed = 0;
4363 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb->next_bb, EXIT_BLOCK_PTR, next_bb)
4365 /* Reset tables used to keep track of what's still valid [since the
4366 start of the block]. */
4367 reset_opr_set_tables ();
4369 for (insn = bb->head;
4370 insn != NULL && insn != NEXT_INSN (bb->end);
4371 insn = NEXT_INSN (insn))
4372 if (INSN_P (insn))
4374 changed |= cprop_insn (insn, alter_jumps);
4376 /* Keep track of everything modified by this insn. */
4377 /* ??? Need to be careful w.r.t. mods done to INSN. Don't
4378 call mark_oprs_set if we turned the insn into a NOTE. */
4379 if (GET_CODE (insn) != NOTE)
4380 mark_oprs_set (insn);
4384 if (gcse_file != NULL)
4385 fprintf (gcse_file, "\n");
4387 return changed;
4390 /* Perform one copy/constant propagation pass.
4391 F is the first insn in the function.
4392 PASS is the pass count. */
4394 static int
4395 one_cprop_pass (pass, alter_jumps)
4396 int pass;
4397 int alter_jumps;
4399 int changed = 0;
4401 const_prop_count = 0;
4402 copy_prop_count = 0;
4404 local_cprop_pass (alter_jumps);
4406 alloc_hash_table (max_cuid, &set_hash_table, 1);
4407 compute_hash_table (&set_hash_table);
4408 if (gcse_file)
4409 dump_hash_table (gcse_file, "SET", &set_hash_table);
4410 if (set_hash_table.n_elems > 0)
4412 alloc_cprop_mem (last_basic_block, set_hash_table.n_elems);
4413 compute_cprop_data ();
4414 changed = cprop (alter_jumps);
4415 if (alter_jumps)
4416 changed |= bypass_conditional_jumps ();
4417 free_cprop_mem ();
4420 free_hash_table (&set_hash_table);
4422 if (gcse_file)
4424 fprintf (gcse_file, "CPROP of %s, pass %d: %d bytes needed, ",
4425 current_function_name, pass, bytes_used);
4426 fprintf (gcse_file, "%d const props, %d copy props\n\n",
4427 const_prop_count, copy_prop_count);
4430 return changed;
4433 /* Bypass conditional jumps. */
4435 /* Find a set of REGNO to a constant that is available at the end of basic
4436 block BB. Returns NULL if no such set is found. Based heavily upon
4437 find_avail_set. */
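/* Unlike find_avail_set, only sets from a constant source are interesting
   here, since the goal is to evaluate a jump condition at compile time;
   register-register copies are merely followed in the hope of reaching
   a constant.  */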
4439 static struct expr *
4440 find_bypass_set (regno, bb)
4441 int regno;
4442 int bb;
4444 struct expr *result = 0;
4446 for (;;)
4448 rtx src;
4449 struct expr *set = lookup_set (regno, NULL_RTX, &set_hash_table);
4451 while (set)
4453 if (TEST_BIT (cprop_avout[bb], set->bitmap_index))
4454 break;
4455 set = next_set (regno, set);
4458 if (set == 0)
4459 break;
4461 if (GET_CODE (set->expr) != SET)
4462 abort ();
4464 src = SET_SRC (set->expr);
4465 if (CONSTANT_P (src))
4466 result = set;
4468 if (GET_CODE (src) != REG)
4469 break;
4471 regno = REGNO (src);
4473 return result;
4477 /* Subroutine of bypass_conditional_jumps that attempts to bypass the given
4478 basic block BB which has more than one predecessor. If not NULL, SETCC
4479 is the first instruction of BB, which is immediately followed by JUMP_INSN
4480 JUMP. Otherwise, SETCC is NULL, and JUMP is the first insn of BB.
4481 Returns nonzero if a change was made. */
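/* For example, if one incoming edge is known (from the CPROP_AVOUT of its
   source block) to arrive with (reg X) equal to (const_int 0), and this
   block does nothing but test (reg X) against zero and branch, then that
   edge can be redirected straight to the branch's known destination,
   skipping the conditional jump altogether.  */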
4483 static int
4484 bypass_block (bb, setcc, jump)
4485 basic_block bb;
4486 rtx setcc, jump;
4488 rtx insn, note;
4489 edge e, enext;
4490 int i, change;
4492 insn = (setcc != NULL) ? setcc : jump;
4494 /* Determine set of register uses in INSN. */
4495 reg_use_count = 0;
4496 note_uses (&PATTERN (insn), find_used_regs, NULL);
4497 note = find_reg_equal_equiv_note (insn);
4498 if (note)
4499 find_used_regs (&XEXP (note, 0), NULL);
4501 change = 0;
4502 for (e = bb->pred; e; e = enext)
4504 enext = e->pred_next;
4505 for (i = 0; i < reg_use_count; i++)
4507 struct reg_use *reg_used = &reg_use_table[i];
4508 unsigned int regno = REGNO (reg_used->reg_rtx);
4509 basic_block dest, old_dest;
4510 struct expr *set;
4511 rtx src, new;
4513 if (regno >= max_gcse_regno)
4514 continue;
4516 set = find_bypass_set (regno, e->src->index);
4518 if (! set)
4519 continue;
4521 src = SET_SRC (pc_set (jump));
4523 if (setcc != NULL)
4524 src = simplify_replace_rtx (src,
4525 SET_DEST (PATTERN (setcc)),
4526 SET_SRC (PATTERN (setcc)));
4528 new = simplify_replace_rtx (src, reg_used->reg_rtx,
4529 SET_SRC (set->expr));
4531 if (new == pc_rtx)
4532 dest = FALLTHRU_EDGE (bb)->dest;
4533 else if (GET_CODE (new) == LABEL_REF)
4534 dest = BRANCH_EDGE (bb)->dest;
4535 else
4536 dest = NULL;
4538 /* Once basic block indices are stable, we should be able
4539 to use redirect_edge_and_branch_force instead. */
4540 old_dest = e->dest;
4541 if (dest != NULL && dest != old_dest
4542 && redirect_edge_and_branch (e, dest))
4544 /* Copy the register setter to the redirected edge.
4545 Don't copy CC0 setters, as CC0 is dead after jump. */
4546 if (setcc)
4548 rtx pat = PATTERN (setcc);
4549 if (!CC0_P (SET_DEST (pat)))
4550 insert_insn_on_edge (copy_insn (pat), e);
4553 if (gcse_file != NULL)
4555 fprintf (gcse_file, "JUMP-BYPASS: Proved reg %d in jump_insn %d equals constant ",
4556 regno, INSN_UID (jump));
4557 print_rtl (gcse_file, SET_SRC (set->expr));
4558 fprintf (gcse_file, "\nBypass edge from %d->%d to %d\n",
4559 e->src->index, old_dest->index, dest->index);
4561 change = 1;
4562 break;
4566 return change;
4569 /* Find basic blocks with more than one predecessor that only contain a
4570 single conditional jump. If the result of the comparison is known at
4571 compile-time from any incoming edge, redirect that edge to the
4572 appropriate target. Returns nonzero if a change was made. */
4574 static int
4575 bypass_conditional_jumps ()
4577 basic_block bb;
4578 int changed;
4579 rtx setcc;
4580 rtx insn;
4581 rtx dest;
4583 /* Note we start at block 1. */
4584 if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
4585 return 0;
4587 changed = 0;
4588 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb->next_bb,
4589 EXIT_BLOCK_PTR, next_bb)
4591 /* Check for more than one predecessor. */
4592 if (bb->pred && bb->pred->pred_next)
4594 setcc = NULL_RTX;
4595 for (insn = bb->head;
4596 insn != NULL && insn != NEXT_INSN (bb->end);
4597 insn = NEXT_INSN (insn))
4598 if (GET_CODE (insn) == INSN)
4600 if (setcc)
4601 break;
4602 if (GET_CODE (PATTERN (insn)) != SET)
4603 break;
4605 dest = SET_DEST (PATTERN (insn));
4606 if (REG_P (dest) || CC0_P (dest))
4607 setcc = insn;
4608 else
4609 break;
4611 else if (GET_CODE (insn) == JUMP_INSN)
4613 if (any_condjump_p (insn) && onlyjump_p (insn))
4614 changed |= bypass_block (bb, setcc, insn);
4615 break;
4617 else if (INSN_P (insn))
4618 break;
4622 /* If we bypassed any register setting insns, we inserted a
4623 copy on the redirected edge. These need to be committed. */
4624 if (changed)
4625 commit_edge_insertions ();
4627 return changed;
4630 /* Compute PRE+LCM working variables. */
4632 /* Local properties of expressions. */
4633 /* Nonzero for expressions that are transparent in the block. */
4634 static sbitmap *transp;
4636 /* Nonzero for expressions that are transparent at the end of the block.
4637 This is only zero for expressions killed by an abnormal critical edge
4638 created by a call. */
4639 static sbitmap *transpout;
4641 /* Nonzero for expressions that are computed (available) in the block. */
4642 static sbitmap *comp;
4644 /* Nonzero for expressions that are locally anticipatable in the block. */
4645 static sbitmap *antloc;
4647 /* Nonzero for expressions where this block is an optimal computation
4648 point. */
4649 static sbitmap *pre_optimal;
4651 /* Nonzero for expressions which are redundant in a particular block. */
4652 static sbitmap *pre_redundant;
4654 /* Nonzero for expressions which should be inserted on a specific edge. */
4655 static sbitmap *pre_insert_map;
4657 /* Nonzero for expressions which should be deleted in a specific block. */
4658 static sbitmap *pre_delete_map;
4660 /* Contains the edge_list returned by pre_edge_lcm. */
4661 static struct edge_list *edge_list;
4663 /* Redundant insns. */
4664 static sbitmap pre_redundant_insns;
4666 /* Allocate vars used for PRE analysis. */
4668 static void
4669 alloc_pre_mem (n_blocks, n_exprs)
4670 int n_blocks, n_exprs;
4672 transp = sbitmap_vector_alloc (n_blocks, n_exprs);
4673 comp = sbitmap_vector_alloc (n_blocks, n_exprs);
4674 antloc = sbitmap_vector_alloc (n_blocks, n_exprs);
4676 pre_optimal = NULL;
4677 pre_redundant = NULL;
4678 pre_insert_map = NULL;
4679 pre_delete_map = NULL;
4680 ae_in = NULL;
4681 ae_out = NULL;
4682 ae_kill = sbitmap_vector_alloc (n_blocks, n_exprs);
4684 /* pre_insert and pre_delete are allocated later. */
4687 /* Free vars used for PRE analysis. */
4689 static void
4690 free_pre_mem ()
4692 sbitmap_vector_free (transp);
4693 sbitmap_vector_free (comp);
4695 /* ANTLOC and AE_KILL are freed just after pre_lcm finishes. */
4697 if (pre_optimal)
4698 sbitmap_vector_free (pre_optimal);
4699 if (pre_redundant)
4700 sbitmap_vector_free (pre_redundant);
4701 if (pre_insert_map)
4702 sbitmap_vector_free (pre_insert_map);
4703 if (pre_delete_map)
4704 sbitmap_vector_free (pre_delete_map);
4705 if (ae_in)
4706 sbitmap_vector_free (ae_in);
4707 if (ae_out)
4708 sbitmap_vector_free (ae_out);
4710 transp = comp = NULL;
4711 pre_optimal = pre_redundant = pre_insert_map = pre_delete_map = NULL;
4712 ae_in = ae_out = NULL;
4715 /* Top level routine to do the dataflow analysis needed by PRE. */
4717 static void
4718 compute_pre_data ()
4720 sbitmap trapping_expr;
4721 basic_block bb;
4722 unsigned int ui;
4724 compute_local_properties (transp, comp, antloc, &expr_hash_table);
4725 sbitmap_vector_zero (ae_kill, last_basic_block);
4727 /* Collect expressions which might trap. */
4728 trapping_expr = sbitmap_alloc (expr_hash_table.n_elems);
4729 sbitmap_zero (trapping_expr);
4730 for (ui = 0; ui < expr_hash_table.size; ui++)
4732 struct expr *e;
4733 for (e = expr_hash_table.table[ui]; e != NULL; e = e->next_same_hash)
4734 if (may_trap_p (e->expr))
4735 SET_BIT (trapping_expr, e->bitmap_index);
4738 /* Compute ae_kill for each basic block using:
4740 ~(TRANSP | COMP)
4742 This is significantly faster than compute_ae_kill. */
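/* Clearing the COMP bits out of the kill set is safe because
   availability is computed as

     AVOUT = COMP | (AVIN & ~KILL)

   so any bit we fail to kill this way is OR'ed back in through COMP
   anyway.  */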
4744 FOR_EACH_BB (bb)
4746 edge e;
4748 /* If the current block is the destination of an abnormal edge, we
4749 kill all trapping expressions because we won't be able to properly
4750 place the instruction on the edge. So make them neither
4751 anticipatable nor transparent. This is fairly conservative. */
4752 for (e = bb->pred; e ; e = e->pred_next)
4753 if (e->flags & EDGE_ABNORMAL)
4755 sbitmap_difference (antloc[bb->index], antloc[bb->index], trapping_expr);
4756 sbitmap_difference (transp[bb->index], transp[bb->index], trapping_expr);
4757 break;
4760 sbitmap_a_or_b (ae_kill[bb->index], transp[bb->index], comp[bb->index]);
4761 sbitmap_not (ae_kill[bb->index], ae_kill[bb->index]);
4764 edge_list = pre_edge_lcm (gcse_file, expr_hash_table.n_elems, transp, comp, antloc,
4765 ae_kill, &pre_insert_map, &pre_delete_map);
4766 sbitmap_vector_free (antloc);
4767 antloc = NULL;
4768 sbitmap_vector_free (ae_kill);
4769 ae_kill = NULL;
4770 sbitmap_free (trapping_expr);
4773 /* PRE utilities */
4775 /* Return non-zero if an occurrence of expression EXPR in OCCR_BB would reach
4776 block BB.
4778 VISITED is a pointer to a working buffer for tracking which BB's have
4779 been visited. It is NULL for the top-level call.
4781 We treat reaching expressions that go through blocks containing the same
4782 reaching expression as "not reaching". E.g. if EXPR is generated in blocks
4783 2 and 3, INSN is in block 4, and 2->3->4, we treat the expression in block
4784 2 as not reaching. The intent is to improve the probability of finding
4785 only one reaching expression and to reduce register lifetimes by picking
4786 the closest such expression. */
4788 static int
4789 pre_expr_reaches_here_p_work (occr_bb, expr, bb, visited)
4790 basic_block occr_bb;
4791 struct expr *expr;
4792 basic_block bb;
4793 char *visited;
4795 edge pred;
4797 for (pred = bb->pred; pred != NULL; pred = pred->pred_next)
4799 basic_block pred_bb = pred->src;
4801 if (pred->src == ENTRY_BLOCK_PTR
4802 /* Has this predecessor already been visited? */
4803 || visited[pred_bb->index])
4804 ;/* Nothing to do. */
4806 /* Does this predecessor generate this expression? */
4807 else if (TEST_BIT (comp[pred_bb->index], expr->bitmap_index))
4809 /* Is this the occurrence we're looking for?
4810 Note that there's only one generating occurrence per block
4811 so we just need to check the block number. */
4812 if (occr_bb == pred_bb)
4813 return 1;
4815 visited[pred_bb->index] = 1;
4817 /* Ignore this predecessor if it kills the expression. */
4818 else if (! TEST_BIT (transp[pred_bb->index], expr->bitmap_index))
4819 visited[pred_bb->index] = 1;
4821 /* Neither gen nor kill. */
4822 else
4824 visited[pred_bb->index] = 1;
4825 if (pre_expr_reaches_here_p_work (occr_bb, expr, pred_bb, visited))
4826 return 1;
4830 /* All paths have been checked. */
4831 return 0;
4834 /* The wrapper for pre_expr_reaches_here_work that ensures that any
4835 memory allocated for that function is returned. */
4837 static int
4838 pre_expr_reaches_here_p (occr_bb, expr, bb)
4839 basic_block occr_bb;
4840 struct expr *expr;
4841 basic_block bb;
4843 int rval;
4844 char *visited = (char *) xcalloc (last_basic_block, 1);
4846 rval = pre_expr_reaches_here_p_work (occr_bb, expr, bb, visited);
4848 free (visited);
4849 return rval;
4853 /* Given an expr, generate RTL which we can insert at the end of a BB,
4854 or on an edge. Set the block number of any insns generated to
4855 the value of BB. */
4857 static rtx
4858 process_insert_insn (expr)
4859 struct expr *expr;
4861 rtx reg = expr->reaching_reg;
4862 rtx exp = copy_rtx (expr->expr);
4863 rtx pat;
4865 start_sequence ();
4867 /* If the expression is something that's an operand, like a constant,
4868 just copy it to a register. */
4869 if (general_operand (exp, GET_MODE (reg)))
4870 emit_move_insn (reg, exp);
4872 /* Otherwise, make a new insn to compute this expression and make sure the
4873 insn will be recognized (this also adds any needed CLOBBERs). Copy the
4874 expression to make sure we don't have any sharing issues. */
4875 else if (insn_invalid_p (emit_insn (gen_rtx_SET (VOIDmode, reg, exp))))
4876 abort ();
4878 pat = get_insns ();
4879 end_sequence ();
4881 return pat;
4884 /* Add EXPR to the end of basic block BB.
4886 This is used by both PRE and code hoisting.
4888 For PRE, we want to verify that the expr is either transparent
4889 or locally anticipatable in the target block. This check makes
4890 no sense for code hoisting. */
4892 static void
4893 insert_insn_end_bb (expr, bb, pre)
4894 struct expr *expr;
4895 basic_block bb;
4896 int pre;
4898 rtx insn = bb->end;
4899 rtx new_insn;
4900 rtx reg = expr->reaching_reg;
4901 int regno = REGNO (reg);
4902 rtx pat, pat_end;
4904 pat = process_insert_insn (expr);
4905 if (pat == NULL_RTX || ! INSN_P (pat))
4906 abort ();
4908 pat_end = pat;
4909 while (NEXT_INSN (pat_end) != NULL_RTX)
4910 pat_end = NEXT_INSN (pat_end);
4912 /* If the last insn is a jump, insert EXPR in front [taking care to
4913 handle cc0, etc. properly]. Similarly we need to take care of trapping
4914 instructions in the presence of non-call exceptions. */
4916 if (GET_CODE (insn) == JUMP_INSN
4917 || (GET_CODE (insn) == INSN
4918 && (bb->succ->succ_next || (bb->succ->flags & EDGE_ABNORMAL))))
4920 #ifdef HAVE_cc0
4921 rtx note;
4922 #endif
4923 /* It should always be the case that we can put these instructions
4924 anywhere in the basic block when performing PRE optimizations.
4925 Check this. */
4926 if (GET_CODE (insn) == INSN && pre
4927 && !TEST_BIT (antloc[bb->index], expr->bitmap_index)
4928 && !TEST_BIT (transp[bb->index], expr->bitmap_index))
4929 abort ();
4931 /* If this is a jump table, then we can't insert stuff here. Since
4932 we know the previous real insn must be the tablejump, we insert
4933 the new instruction just before the tablejump. */
4934 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
4935 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
4936 insn = prev_real_insn (insn);
4938 #ifdef HAVE_cc0
4939 /* FIXME: 'twould be nice to call prev_cc0_setter here but it aborts
4940 if cc0 isn't set. */
4941 note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
4942 if (note)
4943 insn = XEXP (note, 0);
4944 else
4946 rtx maybe_cc0_setter = prev_nonnote_insn (insn);
4947 if (maybe_cc0_setter
4948 && INSN_P (maybe_cc0_setter)
4949 && sets_cc0_p (PATTERN (maybe_cc0_setter)))
4950 insn = maybe_cc0_setter;
4952 #endif
4953 /* FIXME: What if something in cc0/jump uses value set in new insn? */
4954 new_insn = emit_insn_before (pat, insn);
4957 /* Likewise if the last insn is a call, as will happen in the presence
4958 of exception handling. */
4959 else if (GET_CODE (insn) == CALL_INSN
4960 && (bb->succ->succ_next || (bb->succ->flags & EDGE_ABNORMAL)))
4962 /* Keeping in mind SMALL_REGISTER_CLASSES and parameters in registers,
4963 we search backward and place the instructions before the first
4964 parameter is loaded. Do this for everyone for consistency and on the
4965 presumption that we'll get better code elsewhere as well.
4967 It should always be the case that we can put these instructions
4968 anywhere in the basic block when performing PRE optimizations.
4969 Check this. */
4971 if (pre
4972 && !TEST_BIT (antloc[bb->index], expr->bitmap_index)
4973 && !TEST_BIT (transp[bb->index], expr->bitmap_index))
4974 abort ();
4976 /* Since different machines initialize their parameter registers
4977 in different orders, assume nothing. Collect the set of all
4978 parameter registers. */
4979 insn = find_first_parameter_load (insn, bb->head);
4981 /* If we found all the parameter loads, then we want to insert
4982 before the first parameter load.
4984 If we did not find all the parameter loads, then we might have
4985 stopped on the head of the block, which could be a CODE_LABEL.
4986 If we inserted before the CODE_LABEL, then we would be putting
4987 the insn in the wrong basic block. In that case, put the insn
4988 after the CODE_LABEL. Also, respect NOTE_INSN_BASIC_BLOCK. */
4989 while (GET_CODE (insn) == CODE_LABEL
4990 || NOTE_INSN_BASIC_BLOCK_P (insn))
4991 insn = NEXT_INSN (insn);
4993 new_insn = emit_insn_before (pat, insn);
4995 else
4996 new_insn = emit_insn_after (pat, insn);
4998 while (1)
5000 if (INSN_P (pat))
5002 add_label_notes (PATTERN (pat), new_insn);
5003 note_stores (PATTERN (pat), record_set_info, pat);
5005 if (pat == pat_end)
5006 break;
5007 pat = NEXT_INSN (pat);
5010 gcse_create_count++;
5012 if (gcse_file)
5014 fprintf (gcse_file, "PRE/HOIST: end of bb %d, insn %d, ",
5015 bb->index, INSN_UID (new_insn));
5016 fprintf (gcse_file, "copying expression %d to reg %d\n",
5017 expr->bitmap_index, regno);
5021 /* Insert partially redundant expressions on edges in the CFG to make
5022 the expressions fully redundant. */
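/* PRE_INSERT_MAP has one bitmap per edge, with one bit per expression;
   a set bit means the expression must be computed on that edge.  The
   loop below walks each edge's bitmap one SBITMAP_ELT_TYPE word at a
   time, with INDX tracking the expression index of bit zero of the
   current word.  */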
5024 static int
5025 pre_edge_insert (edge_list, index_map)
5026 struct edge_list *edge_list;
5027 struct expr **index_map;
5029 int e, i, j, num_edges, set_size, did_insert = 0;
5030 sbitmap *inserted;
5032 /* Where PRE_INSERT_MAP is nonzero, we add the expression on that edge
5033 if it reaches any of the deleted expressions. */
5035 set_size = pre_insert_map[0]->size;
5036 num_edges = NUM_EDGES (edge_list);
5037 inserted = sbitmap_vector_alloc (num_edges, expr_hash_table.n_elems);
5038 sbitmap_vector_zero (inserted, num_edges);
5040 for (e = 0; e < num_edges; e++)
5042 int indx;
5043 basic_block bb = INDEX_EDGE_PRED_BB (edge_list, e);
5045 for (i = indx = 0; i < set_size; i++, indx += SBITMAP_ELT_BITS)
5047 SBITMAP_ELT_TYPE insert = pre_insert_map[e]->elms[i];
5049 for (j = indx; insert && j < (int) expr_hash_table.n_elems; j++, insert >>= 1)
5050 if ((insert & 1) != 0 && index_map[j]->reaching_reg != NULL_RTX)
5052 struct expr *expr = index_map[j];
5053 struct occr *occr;
5055 /* Now look at each deleted occurrence of this expression. */
5056 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
5058 if (! occr->deleted_p)
5059 continue;
5061 /* Insert this expression on this edge if it would
5062 reach the deleted occurrence in BB. */
5063 if (!TEST_BIT (inserted[e], j))
5065 rtx insn;
5066 edge eg = INDEX_EDGE (edge_list, e);
5068 /* We can't insert anything on an abnormal and
5069 critical edge, so we insert the insn at the end of
5070 the previous block. There are several alternatives
5071 detailed in Morgan's book, p. 277 (sec. 10.5), for
5072 handling this situation. This one is easiest for
5073 now. */
5075 if ((eg->flags & EDGE_ABNORMAL) == EDGE_ABNORMAL)
5076 insert_insn_end_bb (index_map[j], bb, 0);
5077 else
5079 insn = process_insert_insn (index_map[j]);
5080 insert_insn_on_edge (insn, eg);
5083 if (gcse_file)
5085 fprintf (gcse_file, "PRE/HOIST: edge (%d,%d), ",
5086 bb->index,
5087 INDEX_EDGE_SUCC_BB (edge_list, e)->index);
5088 fprintf (gcse_file, "copy expression %d\n",
5089 expr->bitmap_index);
5092 update_ld_motion_stores (expr);
5093 SET_BIT (inserted[e], j);
5094 did_insert = 1;
5095 gcse_create_count++;
5102 sbitmap_vector_free (inserted);
5103 return did_insert;
5106 /* Copy the result of INSN to REG. INDX is the expression number. */
5108 static void
5109 pre_insert_copy_insn (expr, insn)
5110 struct expr *expr;
5111 rtx insn;
5113 rtx reg = expr->reaching_reg;
5114 int regno = REGNO (reg);
5115 int indx = expr->bitmap_index;
5116 rtx set = single_set (insn);
5117 rtx new_insn;
5119 if (!set)
5120 abort ();
5122 new_insn = emit_insn_after (gen_move_insn (reg, SET_DEST (set)), insn);
5124 /* Keep register set table up to date. */
5125 record_one_set (regno, new_insn);
5127 gcse_create_count++;
5129 if (gcse_file)
5130 fprintf (gcse_file,
5131 "PRE: bb %d, insn %d, copy expression %d in insn %d to reg %d\n",
5132 BLOCK_NUM (insn), INSN_UID (new_insn), indx,
5133 INSN_UID (insn), regno);
5134 update_ld_motion_stores (expr);
5137 /* Copy available expressions that reach the redundant expression
5138 to `reaching_reg'. */
5140 static void
5141 pre_insert_copies ()
5143 unsigned int i;
5144 struct expr *expr;
5145 struct occr *occr;
5146 struct occr *avail;
5148 /* For each available expression in the table, copy the result to
5149 `reaching_reg' if the expression reaches a deleted one.
5151 ??? The current algorithm is rather brute force.
5152 Need to do some profiling. */
5154 for (i = 0; i < expr_hash_table.size; i++)
5155 for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash)
5157 /* If the basic block isn't reachable, PPOUT will be TRUE. However,
5158 we don't want to insert a copy here because the expression may not
5159 really be redundant. So only insert an insn if the expression was
5160 deleted. This test also avoids further processing if the
5161 expression wasn't deleted anywhere. */
5162 if (expr->reaching_reg == NULL)
5163 continue;
5165 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
5167 if (! occr->deleted_p)
5168 continue;
5170 for (avail = expr->avail_occr; avail != NULL; avail = avail->next)
5172 rtx insn = avail->insn;
5174 /* No need to handle this one if handled already. */
5175 if (avail->copied_p)
5176 continue;
5178 /* Don't handle this one if it's a redundant one. */
5179 if (TEST_BIT (pre_redundant_insns, INSN_CUID (insn)))
5180 continue;
5182 /* Or if the expression doesn't reach the deleted one. */
5183 if (! pre_expr_reaches_here_p (BLOCK_FOR_INSN (avail->insn),
5184 expr,
5185 BLOCK_FOR_INSN (occr->insn)))
5186 continue;
5188 /* Copy the result of avail to reaching_reg. */
5189 pre_insert_copy_insn (expr, insn);
5190 avail->copied_p = 1;
5196 /* Emit move from SRC to DEST noting the equivalence with expression computed
5197 in INSN. */
5198 static rtx
5199 gcse_emit_move_after (src, dest, insn)
5200 rtx src, dest, insn;
5202 rtx new;
5203 rtx set = single_set (insn), set2;
5204 rtx note;
5205 rtx eqv;
5207 /* This should never fail since we're creating a reg->reg copy
5208 we've verified to be valid. */
5210 new = emit_insn_after (gen_move_insn (dest, src), insn);
5212 /* Note the equivalence for local CSE pass. */
5213 set2 = single_set (new);
5214 if (!set2 || !rtx_equal_p (SET_DEST (set2), dest))
5215 return new;
5216 if ((note = find_reg_equal_equiv_note (insn)))
5217 eqv = XEXP (note, 0);
5218 else
5219 eqv = SET_SRC (set);
5221 set_unique_reg_note (new, REG_EQUAL, copy_insn_1 (eqv));
5223 return new;
5226 /* Delete redundant computations.
5227 Deletion is done by changing the insn to copy the `reaching_reg' of
5228 the expression into the result of the SET. It is left to later passes
5229 (cprop, cse2, flow, combine, regmove) to propagate the copy or eliminate it.
5231 Returns non-zero if a change is made. */
5233 static int
5234 pre_delete ()
5236 unsigned int i;
5237 int changed;
5238 struct expr *expr;
5239 struct occr *occr;
5241 changed = 0;
5242 for (i = 0; i < expr_hash_table.size; i++)
5243 for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash)
5245 int indx = expr->bitmap_index;
5247 /* We only need to search antic_occr since we require
5248 ANTLOC != 0. */
5250 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
5252 rtx insn = occr->insn;
5253 rtx set;
5254 basic_block bb = BLOCK_FOR_INSN (insn);
5256 if (TEST_BIT (pre_delete_map[bb->index], indx))
5258 set = single_set (insn);
5259 if (! set)
5260 abort ();
5262 /* Create a pseudo-reg to store the result of reaching
5263 expressions into. Get the mode for the new pseudo from
5264 the mode of the original destination pseudo. */
5265 if (expr->reaching_reg == NULL)
5266 expr->reaching_reg
5267 = gen_reg_rtx (GET_MODE (SET_DEST (set)));
5269 gcse_emit_move_after (expr->reaching_reg, SET_DEST (set), insn);
5270 delete_insn (insn);
5271 occr->deleted_p = 1;
5272 SET_BIT (pre_redundant_insns, INSN_CUID (insn));
5273 changed = 1;
5274 gcse_subst_count++;
5276 if (gcse_file)
5278 fprintf (gcse_file,
5279 "PRE: redundant insn %d (expression %d) in ",
5280 INSN_UID (insn), indx);
5281 fprintf (gcse_file, "bb %d, reaching reg is %d\n",
5282 bb->index, REGNO (expr->reaching_reg));
5288 return changed;
5291 /* Perform GCSE optimizations using PRE.
5292 This is called by one_pre_gcse_pass after all the dataflow analysis
5293 has been done.
5295 This is based on the original Morel-Renvoise paper, Fred Chow's thesis, and
5296 lazy code motion from Knoop, Ruthing and Steffen as described in Advanced
5297 Compiler Design and Implementation.
5299 ??? A new pseudo reg is created to hold the reaching expression. The nice
5300 thing about the classical approach is that it would try to use an existing
5301 reg. If the register can't be adequately optimized [i.e. we introduce
5302 reload problems], one could add a pass here to propagate the new register
5303 through the block.
5305 ??? We don't handle single sets in PARALLELs because we're [currently] not
5306 able to copy the rest of the parallel when we insert copies to create full
5307 redundancies from partial redundancies. However, there's no reason why we
5308 can't handle PARALLELs in the cases where there are no partial
5309 redundancies. */
5311 static int
5312 pre_gcse ()
5314 unsigned int i;
5315 int did_insert, changed;
5316 struct expr **index_map;
5317 struct expr *expr;
5319 /* Compute a mapping from expression number (`bitmap_index') to
5320 hash table entry. */
5322 index_map = (struct expr **) xcalloc (expr_hash_table.n_elems, sizeof (struct expr *));
5323 for (i = 0; i < expr_hash_table.size; i++)
5324 for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash)
5325 index_map[expr->bitmap_index] = expr;
5327 /* Reset bitmap used to track which insns are redundant. */
5328 pre_redundant_insns = sbitmap_alloc (max_cuid);
5329 sbitmap_zero (pre_redundant_insns);
5331 /* Delete the redundant insns first so that
5332 - we know what register to use for the new insns and for the other
5333 ones with reaching expressions
5334 - we know which insns are redundant when we go to create copies */
5336 changed = pre_delete ();
5338 did_insert = pre_edge_insert (edge_list, index_map);
5340 /* In other places with reaching expressions, copy the expression to the
5341 specially allocated pseudo-reg that reaches the redundant expr. */
5342 pre_insert_copies ();
5343 if (did_insert)
5345 commit_edge_insertions ();
5346 changed = 1;
5349 free (index_map);
5350 sbitmap_free (pre_redundant_insns);
5351 return changed;
5354 /* Top level routine to perform one PRE GCSE pass.
5356 Return non-zero if a change was made. */
5358 static int
5359 one_pre_gcse_pass (pass)
5360 int pass;
5362 int changed = 0;
5364 gcse_subst_count = 0;
5365 gcse_create_count = 0;
5367 alloc_hash_table (max_cuid, &expr_hash_table, 0);
5368 add_noreturn_fake_exit_edges ();
5369 if (flag_gcse_lm)
5370 compute_ld_motion_mems ();
5372 compute_hash_table (&expr_hash_table);
5373 trim_ld_motion_mems ();
5374 if (gcse_file)
5375 dump_hash_table (gcse_file, "Expression", &expr_hash_table);
5377 if (expr_hash_table.n_elems > 0)
5379 alloc_pre_mem (last_basic_block, expr_hash_table.n_elems);
5380 compute_pre_data ();
5381 changed |= pre_gcse ();
5382 free_edge_list (edge_list);
5383 free_pre_mem ();
5386 free_ldst_mems ();
5387 remove_fake_edges ();
5388 free_hash_table (&expr_hash_table);
5390 if (gcse_file)
5392 fprintf (gcse_file, "\nPRE GCSE of %s, pass %d: %d bytes needed, ",
5393 current_function_name, pass, bytes_used);
5394 fprintf (gcse_file, "%d substs, %d insns created\n",
5395 gcse_subst_count, gcse_create_count);
5398 return changed;
5401 /* If X contains any LABEL_REF's, add REG_LABEL notes for them to INSN.
5402 If notes are added to an insn which references a CODE_LABEL, the
5403 LABEL_NUSES count is incremented. We have to add REG_LABEL notes,
5404 because the following loop optimization pass requires them. */
5406 /* ??? This is very similar to the loop.c add_label_notes function. We
5407 could probably share code here. */
5409 /* ??? If there was a jump optimization pass after gcse and before loop,
5410 then we would not need to do this here, because jump would add the
5411 necessary REG_LABEL notes. */
5413 static void
5414 add_label_notes (x, insn)
5415 rtx x;
5416 rtx insn;
5418 enum rtx_code code = GET_CODE (x);
5419 int i, j;
5420 const char *fmt;
5422 if (code == LABEL_REF && !LABEL_REF_NONLOCAL_P (x))
5424 /* This code used to ignore labels that referred to dispatch tables to
5425 avoid flow generating (slightly) worse code.
5427 We no longer ignore such label references (see LABEL_REF handling in
5428 mark_jump_label for additional information). */
5430 REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_LABEL, XEXP (x, 0),
5431 REG_NOTES (insn));
5432 if (LABEL_P (XEXP (x, 0)))
5433 LABEL_NUSES (XEXP (x, 0))++;
5434 return;
5437 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
5439 if (fmt[i] == 'e')
5440 add_label_notes (XEXP (x, i), insn);
5441 else if (fmt[i] == 'E')
5442 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
5443 add_label_notes (XVECEXP (x, i, j), insn);
5447 /* Compute transparent outgoing information for each block.
5449 An expression is transparent to an edge unless it is killed by
5450 the edge itself. This can only happen with abnormal control flow,
5451 when the edge is traversed through a call. This happens with
5452 non-local labels and exceptions.
5454 This would not be necessary if we split the edge. While this is
5455 normally impossible for abnormal critical edges, with some effort
5456 it should be possible with exception handling, since we still have
5457 control over which handler should be invoked. But due to increased
5458 EH table sizes, this may not be worthwhile. */
5460 static void
5461 compute_transpout ()
5463 basic_block bb;
5464 unsigned int i;
5465 struct expr *expr;
5467 sbitmap_vector_ones (transpout, last_basic_block);
5469 FOR_EACH_BB (bb)
5471 /* Note that flow inserted a nop at the end of basic blocks that
5472 end in call instructions for reasons other than abnormal
5473 control flow. */
5474 if (GET_CODE (bb->end) != CALL_INSN)
5475 continue;
5477 for (i = 0; i < expr_hash_table.size; i++)
5478 for (expr = expr_hash_table.table[i]; expr ; expr = expr->next_same_hash)
5479 if (GET_CODE (expr->expr) == MEM)
5481 if (GET_CODE (XEXP (expr->expr, 0)) == SYMBOL_REF
5482 && CONSTANT_POOL_ADDRESS_P (XEXP (expr->expr, 0)))
5483 continue;
5485 /* ??? Optimally, we would use interprocedural alias
5486 analysis to determine if this mem is actually killed
5487 by this call. */
5488 RESET_BIT (transpout[bb->index], expr->bitmap_index);
5493 /* Removal of useless null pointer checks */
5495 /* Called via note_stores. X is set by SETTER. If X is a register we must
5496 invalidate nonnull_local and set nonnull_killed. DATA is really a
5497 `null_pointer_info *'.
5499 We ignore hard registers. */
5501 static void
5502 invalidate_nonnull_info (x, setter, data)
5503 rtx x;
5504 rtx setter ATTRIBUTE_UNUSED;
5505 void *data;
5507 unsigned int regno;
5508 struct null_pointer_info *npi = (struct null_pointer_info *) data;
5510 while (GET_CODE (x) == SUBREG)
5511 x = SUBREG_REG (x);
5513 /* Ignore anything that is not a register or is a hard register. */
5514 if (GET_CODE (x) != REG
5515 || REGNO (x) < npi->min_reg
5516 || REGNO (x) >= npi->max_reg)
5517 return;
5519 regno = REGNO (x) - npi->min_reg;
5521 RESET_BIT (npi->nonnull_local[npi->current_block->index], regno);
5522 SET_BIT (npi->nonnull_killed[npi->current_block->index], regno);
5525 /* Do null-pointer check elimination for the registers indicated in
5526 NPI. NONNULL_AVIN and NONNULL_AVOUT are pre-allocated sbitmaps;
5527 they are not our responsibility to free. */
5529 static int
5530 delete_null_pointer_checks_1 (block_reg, nonnull_avin,
5531 nonnull_avout, npi)
5532 unsigned int *block_reg;
5533 sbitmap *nonnull_avin;
5534 sbitmap *nonnull_avout;
5535 struct null_pointer_info *npi;
5537 basic_block bb, current_block;
5538 sbitmap *nonnull_local = npi->nonnull_local;
5539 sbitmap *nonnull_killed = npi->nonnull_killed;
5540 int something_changed = 0;
5542 /* Compute local properties, nonnull and killed. A register will have
5543 the nonnull property if at the end of the current block its value is
5544 known to be nonnull. The killed property indicates that somewhere in
5545 the block any information we had about the register is killed.
5547 Note that a register can have both properties in a single block. That
5548 indicates that it's killed, then later in the block a new value is
5549 computed. */
5550 sbitmap_vector_zero (nonnull_local, last_basic_block);
5551 sbitmap_vector_zero (nonnull_killed, last_basic_block);
5553 FOR_EACH_BB (current_block)
5555 rtx insn, stop_insn;
5557 /* Set the current block for invalidate_nonnull_info. */
5558 npi->current_block = current_block;
5560 /* Scan each insn in the basic block looking for memory references and
5561 register sets. */
5562 stop_insn = NEXT_INSN (current_block->end);
5563 for (insn = current_block->head;
5564 insn != stop_insn;
5565 insn = NEXT_INSN (insn))
5567 rtx set;
5568 rtx reg;
5570 /* Ignore anything that is not a normal insn. */
5571 if (! INSN_P (insn))
5572 continue;
5574 /* Basically ignore anything that is not a simple SET. We do have
5575 to make sure to invalidate nonnull_local and set nonnull_killed
5576 for such insns though. */
5577 set = single_set (insn);
5578 if (!set)
5580 note_stores (PATTERN (insn), invalidate_nonnull_info, npi);
5581 continue;
5584 /* See if we've got a usable memory load. We handle it first
5585 in case it uses its address register as a dest (which kills
5586 the nonnull property). */
5587 if (GET_CODE (SET_SRC (set)) == MEM
5588 && GET_CODE ((reg = XEXP (SET_SRC (set), 0))) == REG
5589 && REGNO (reg) >= npi->min_reg
5590 && REGNO (reg) < npi->max_reg)
5591 SET_BIT (nonnull_local[current_block->index],
5592 REGNO (reg) - npi->min_reg);
5594 /* Now invalidate stuff clobbered by this insn. */
5595 note_stores (PATTERN (insn), invalidate_nonnull_info, npi);
5597 /* And handle stores; we do these last since any sets in INSN can
5598 not kill the nonnull property if it is derived from a MEM
5599 appearing in a SET_DEST. */
5600 if (GET_CODE (SET_DEST (set)) == MEM
5601 && GET_CODE ((reg = XEXP (SET_DEST (set), 0))) == REG
5602 && REGNO (reg) >= npi->min_reg
5603 && REGNO (reg) < npi->max_reg)
5604 SET_BIT (nonnull_local[current_block->index],
5605 REGNO (reg) - npi->min_reg);
5609 /* Now compute global properties based on the local properties. This
5610 is a classic global availability algorithm. */
5611 compute_available (nonnull_local, nonnull_killed,
5612 nonnull_avout, nonnull_avin);
5614 /* Now look at each bb and see if it ends with a compare of a value
5615 against zero. */
5616 FOR_EACH_BB (bb)
5618 rtx last_insn = bb->end;
5619 rtx condition, earliest;
5620 int compare_and_branch;
5622 /* Since MIN_REG is always at least FIRST_PSEUDO_REGISTER, and
5623 since BLOCK_REG[BB] is zero if this block did not end with a
5624 comparison against zero, this condition works. */
5625 if (block_reg[bb->index] < npi->min_reg
5626 || block_reg[bb->index] >= npi->max_reg)
5627 continue;
5629 /* LAST_INSN is a conditional jump. Get its condition. */
5630 condition = get_condition (last_insn, &earliest);
5632 /* If we can't determine the condition then skip. */
5633 if (! condition)
5634 continue;
5636 /* Is the register known to have a nonzero value? */
5637 if (!TEST_BIT (nonnull_avout[bb->index], block_reg[bb->index] - npi->min_reg))
5638 continue;
5640 /* Try to compute whether the compare/branch at the end of the block is one or
5641 two instructions. */
5642 if (earliest == last_insn)
5643 compare_and_branch = 1;
5644 else if (earliest == prev_nonnote_insn (last_insn))
5645 compare_and_branch = 2;
5646 else
5647 continue;
5649 /* We know the register in this comparison is nonnull at exit from
5650 this block. We can optimize this comparison. */
5651 if (GET_CODE (condition) == NE)
5653 rtx new_jump;
5655 new_jump = emit_jump_insn_after (gen_jump (JUMP_LABEL (last_insn)),
5656 last_insn);
5657 JUMP_LABEL (new_jump) = JUMP_LABEL (last_insn);
5658 LABEL_NUSES (JUMP_LABEL (new_jump))++;
5659 emit_barrier_after (new_jump);
5662 something_changed = 1;
5663 delete_insn (last_insn);
5664 if (compare_and_branch == 2)
5665 delete_insn (earliest);
5666 purge_dead_edges (bb);
5668 /* Don't check this block again. (Note that BLOCK_END is
5669 invalid here; we deleted the last instruction in the
5670 block.) */
5671 block_reg[bb->index] = 0;
5674 return something_changed;
5677 /* Find EQ/NE comparisons against zero which can be (indirectly) evaluated
5678 at compile time.
5680 This is conceptually similar to global constant/copy propagation and
5681 classic global CSE (it even uses the same dataflow equations as cprop).
5683 If a register is used as memory address with the form (mem (reg)), then we
5684 know that REG can not be zero at that point in the program. Any instruction
5685 which sets REG "kills" this property.
5687 So, if every path leading to a conditional branch has an available memory
5688 reference of that form, then we know the register can not have the value
5689 zero at the conditional branch.
5691 So we merely need to compute the local properties and propagate that data
5692 around the cfg, then optimize where possible.
5694 We run this pass two times. Once before CSE, then again after CSE. This
5695 has proven to be the most profitable approach. It is rare for new
5696 optimization opportunities of this nature to appear after the first CSE
5697 pass.
5699 This could probably be integrated with global cprop with a little work. */
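/* For example, after an insn such as
     (set (reg X) (mem (reg Y)))
   we know (reg Y) was nonnull, since dereferencing a null pointer would
   have faulted.  If that knowledge is available on every path into a
   block that ends by testing (reg Y) against zero, the conditional
   branch can be resolved at compile time.  */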
5702 delete_null_pointer_checks (f)
5703 rtx f ATTRIBUTE_UNUSED;
5705 sbitmap *nonnull_avin, *nonnull_avout;
5706 unsigned int *block_reg;
5707 basic_block bb;
5708 int reg;
5709 int regs_per_pass;
5710 int max_reg;
5711 struct null_pointer_info npi;
5712 int something_changed = 0;
5714 /* If we have only a single block, then there's nothing to do. */
5715 if (n_basic_blocks <= 1)
5716 return 0;
5718 /* Trying to perform global optimizations on flow graphs which have
5719 a high connectivity will take a long time and is unlikely to be
5720 particularly useful.
5722 In normal circumstances a cfg should have about twice as many edges
5723 as blocks. But we do not want to punish small functions which have
5724 a couple of switch statements. So we require a relatively large number
5725 of basic blocks and the ratio of edges to blocks to be high. */
5726 if (n_basic_blocks > 1000 && n_edges / n_basic_blocks >= 20)
5727 return 0;
5729 /* We need four bitmaps, each with a bit for each register in each
5730 basic block. */
5731 max_reg = max_reg_num ();
5732 regs_per_pass = get_bitmap_width (4, last_basic_block, max_reg);
5734 /* Allocate bitmaps to hold local and global properties. */
5735 npi.nonnull_local = sbitmap_vector_alloc (last_basic_block, regs_per_pass);
5736 npi.nonnull_killed = sbitmap_vector_alloc (last_basic_block, regs_per_pass);
5737 nonnull_avin = sbitmap_vector_alloc (last_basic_block, regs_per_pass);
5738 nonnull_avout = sbitmap_vector_alloc (last_basic_block, regs_per_pass);
5740 /* Go through the basic blocks, seeing whether or not each block
5741 ends with a conditional branch whose condition is a comparison
5742 against zero. Record the register compared in BLOCK_REG. */
5743 block_reg = (unsigned int *) xcalloc (last_basic_block, sizeof (int));
5744 FOR_EACH_BB (bb)
5746 rtx last_insn = bb->end;
5747 rtx condition, earliest, reg;
5749 /* We only want conditional branches. */
5750 if (GET_CODE (last_insn) != JUMP_INSN
5751 || !any_condjump_p (last_insn)
5752 || !onlyjump_p (last_insn))
5753 continue;
5755 /* LAST_INSN is a conditional jump. Get its condition. */
5756 condition = get_condition (last_insn, &earliest);
5758 /* If we were unable to get the condition, or it is not an equality
5759 comparison against zero then there's nothing we can do. */
5760 if (!condition
5761 || (GET_CODE (condition) != NE && GET_CODE (condition) != EQ)
5762 || GET_CODE (XEXP (condition, 1)) != CONST_INT
5763 || (XEXP (condition, 1)
5764 != CONST0_RTX (GET_MODE (XEXP (condition, 0)))))
5765 continue;
5767 /* We must be checking a register against zero. */
5768 reg = XEXP (condition, 0);
5769 if (GET_CODE (reg) != REG)
5770 continue;
5772 block_reg[bb->index] = REGNO (reg);
5775 /* Go through the algorithm for each block of registers. */
5776 for (reg = FIRST_PSEUDO_REGISTER; reg < max_reg; reg += regs_per_pass)
5778 npi.min_reg = reg;
5779 npi.max_reg = MIN (reg + regs_per_pass, max_reg);
5780 something_changed |= delete_null_pointer_checks_1 (block_reg,
5781 nonnull_avin,
5782 nonnull_avout,
5783 &npi);
5786 /* Free the table of registers compared at the end of every block. */
5787 free (block_reg);
5789 /* Free bitmaps. */
5790 sbitmap_vector_free (npi.nonnull_local);
5791 sbitmap_vector_free (npi.nonnull_killed);
5792 sbitmap_vector_free (nonnull_avin);
5793 sbitmap_vector_free (nonnull_avout);
5795 return something_changed;
5798 /* Code Hoisting variables and subroutines. */
5800 /* Very busy expressions. */
5801 static sbitmap *hoist_vbein;
5802 static sbitmap *hoist_vbeout;
5804 /* Hoistable expressions. */
5805 static sbitmap *hoist_exprs;
5807 /* Dominator information. */
5808 dominance_info dominators;
5810 /* ??? We could compute post dominators and run this algorithm in
5811 reverse to perform tail merging; doing so would probably be
5812 more effective than the tail merging code in jump.c.
5814 It's unclear if tail merging could be run in parallel with
5815 code hoisting. It would be nice. */
5817 /* Allocate vars used for code hoisting analysis. */
5819 static void
5820 alloc_code_hoist_mem (n_blocks, n_exprs)
5821 int n_blocks, n_exprs;
5823 antloc = sbitmap_vector_alloc (n_blocks, n_exprs);
5824 transp = sbitmap_vector_alloc (n_blocks, n_exprs);
5825 comp = sbitmap_vector_alloc (n_blocks, n_exprs);
5827 hoist_vbein = sbitmap_vector_alloc (n_blocks, n_exprs);
5828 hoist_vbeout = sbitmap_vector_alloc (n_blocks, n_exprs);
5829 hoist_exprs = sbitmap_vector_alloc (n_blocks, n_exprs);
5830 transpout = sbitmap_vector_alloc (n_blocks, n_exprs);
5833 /* Free vars used for code hoisting analysis. */
5835 static void
5836 free_code_hoist_mem ()
5838 sbitmap_vector_free (antloc);
5839 sbitmap_vector_free (transp);
5840 sbitmap_vector_free (comp);
5842 sbitmap_vector_free (hoist_vbein);
5843 sbitmap_vector_free (hoist_vbeout);
5844 sbitmap_vector_free (hoist_exprs);
5845 sbitmap_vector_free (transpout);
5847 free_dominance_info (dominators);
5850 /* Compute the very busy expressions at entry/exit from each block.
5852 An expression is very busy if all paths from a given point
5853 compute the expression. */
5855 static void
5856 compute_code_hoist_vbeinout ()
5858 int changed, passes;
5859 basic_block bb;
5861 sbitmap_vector_zero (hoist_vbeout, last_basic_block);
5862 sbitmap_vector_zero (hoist_vbein, last_basic_block);
5864 passes = 0;
5865 changed = 1;
5867 while (changed)
5869 changed = 0;
5871 /* We scan the blocks in the reverse order to speed up
5872 the convergence. */
5873 FOR_EACH_BB_REVERSE (bb)
5875 changed |= sbitmap_a_or_b_and_c_cg (hoist_vbein[bb->index], antloc[bb->index],
5876 hoist_vbeout[bb->index], transp[bb->index]);
5877 if (bb->next_bb != EXIT_BLOCK_PTR)
5878 sbitmap_intersection_of_succs (hoist_vbeout[bb->index], hoist_vbein, bb->index);
5881 passes++;
5884 if (gcse_file)
5885 fprintf (gcse_file, "hoisting vbeinout computation: %d passes\n", passes);
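/* For illustration only (hypothetical fragment, not from a testcase):

	if (cond)
	  x = a * b;
	else
	  y = a * b;

   a * b is very busy at the branch because every path from that point
   computes it before A or B changes.  The fixpoint above solves,
   roughly, vbein[bb] = antloc[bb] | (transp[bb] & vbeout[bb]) and
   vbeout[bb] = intersection of vbein over the successors of bb.  */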
5888 /* Top level routine to do the dataflow analysis needed by code hoisting. */
5890 static void
5891 compute_code_hoist_data ()
5893 compute_local_properties (transp, comp, antloc, &expr_hash_table);
5894 compute_transpout ();
5895 compute_code_hoist_vbeinout ();
5896 dominators = calculate_dominance_info (CDI_DOMINATORS);
5897 if (gcse_file)
5898 fprintf (gcse_file, "\n");
5901 /* Determine if the expression identified by EXPR_INDEX would
5902 reach BB unimpaired if it were placed at the end of EXPR_BB.
5904 It's unclear exactly what Muchnick meant by "unimpaired". It seems
5905 to me that the expression must either be computed or transparent in
5906 *every* block in the path(s) from EXPR_BB to BB. Any other definition
5907 would allow the expression to be hoisted out of loops, even if
5908 the expression wasn't a loop invariant.
5910 Contrast this to reachability for PRE where an expression is
5911 considered reachable if *any* path reaches instead of *all*
5912 paths. */
5914 static int
5915 hoist_expr_reaches_here_p (expr_bb, expr_index, bb, visited)
5916 basic_block expr_bb;
5917 int expr_index;
5918 basic_block bb;
5919 char *visited;
5921 edge pred;
5922 int visited_allocated_locally = 0;
5925 if (visited == NULL)
5927 visited_allocated_locally = 1;
5928 visited = xcalloc (last_basic_block, 1);
5931 for (pred = bb->pred; pred != NULL; pred = pred->pred_next)
5933 basic_block pred_bb = pred->src;
5935 if (pred->src == ENTRY_BLOCK_PTR)
5936 break;
5937 else if (pred_bb == expr_bb)
5938 continue;
5939 else if (visited[pred_bb->index])
5940 continue;
5942 /* Does this predecessor generate this expression? */
5943 else if (TEST_BIT (comp[pred_bb->index], expr_index))
5944 break;
5945 else if (! TEST_BIT (transp[pred_bb->index], expr_index))
5946 break;
5948 /* Not killed. */
5949 else
5951 visited[pred_bb->index] = 1;
5952 if (! hoist_expr_reaches_here_p (expr_bb, expr_index,
5953 pred_bb, visited))
5954 break;
5957 if (visited_allocated_locally)
5958 free (visited);
5960 return (pred == NULL);
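/* Illustrative note: the all-paths requirement above is what keeps a
   non-invariant expression from being hoisted out of a loop.  If some
   predecessor path from EXPR_BB to BB clobbers an operand (so the
   expression is not transparent there) without recomputing it, the
   walk stops at that predecessor and the expression is not considered
   to reach BB unimpaired.  */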
5963 /* Actually perform code hoisting. */
5965 static void
5966 hoist_code ()
5968 basic_block bb, dominated;
5969 basic_block *domby;
5970 unsigned int domby_len;
5971 unsigned int i,j;
5972 struct expr **index_map;
5973 struct expr *expr;
5975 sbitmap_vector_zero (hoist_exprs, last_basic_block);
5977 /* Compute a mapping from expression number (`bitmap_index') to
5978 hash table entry. */
5980 index_map = (struct expr **) xcalloc (expr_hash_table.n_elems, sizeof (struct expr *));
5981 for (i = 0; i < expr_hash_table.size; i++)
5982 for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash)
5983 index_map[expr->bitmap_index] = expr;
5985 /* Walk over each basic block looking for potentially hoistable
5986 expressions; nothing gets hoisted from the entry block. */
5987 FOR_EACH_BB (bb)
5989 int found = 0;
5990 int insn_inserted_p;
5992 domby_len = get_dominated_by (dominators, bb, &domby);
5993 /* Examine each expression that is very busy at the exit of this
5994 block. These are the potentially hoistable expressions. */
5995 for (i = 0; i < hoist_vbeout[bb->index]->n_bits; i++)
5997 int hoistable = 0;
5999 if (TEST_BIT (hoist_vbeout[bb->index], i)
6000 && TEST_BIT (transpout[bb->index], i))
6002 /* We've found a potentially hoistable expression, now
6003 we look at every block BB dominates to see if it
6004 computes the expression. */
6005 for (j = 0; j < domby_len; j++)
6007 dominated = domby[j];
6008 /* Ignore self dominance. */
6009 if (bb == dominated)
6010 continue;
6011 /* We've found a dominated block, now see if it computes
6012 the busy expression and whether or not moving that
6013 expression to the "beginning" of that block is safe. */
6014 if (!TEST_BIT (antloc[dominated->index], i))
6015 continue;
6017 /* Note if the expression would reach the dominated block
6018 unimpaired if it were placed at the end of BB.
6020 Keep track of how many times this expression is hoistable
6021 from a dominated block into BB. */
6022 if (hoist_expr_reaches_here_p (bb, i, dominated, NULL))
6023 hoistable++;
6026 /* If we found more than one hoistable occurrence of this
6027 expression, then note it in the bitmap of expressions to
6028 hoist. It makes no sense to hoist things which are computed
6029 in only one BB, and doing so tends to pessimize register
6030 allocation. One could increase this value to try harder
6031 to avoid any possible code expansion due to register
6032 allocation issues; however experiments have shown that
6033 the vast majority of hoistable expressions are only movable
6034 from two successors, so raising this threshold is likely
6035 to nullify any benefit we get from code hoisting. */
6036 if (hoistable > 1)
6038 SET_BIT (hoist_exprs[bb->index], i);
6039 found = 1;
6043 /* If we found nothing to hoist, then quit now. */
6044 if (! found)
6046 free (domby);
6047 continue;
6050 /* Loop over all the hoistable expressions. */
6051 for (i = 0; i < hoist_exprs[bb->index]->n_bits; i++)
6053 /* We want to insert the expression into BB only once, so
6054 note when we've inserted it. */
6055 insn_inserted_p = 0;
6057 /* These tests should be the same as the tests above. */
6058 if (TEST_BIT (hoist_vbeout[bb->index], i))
6060 /* We've found a potentially hoistable expression, now
6061 we look at every block BB dominates to see if it
6062 computes the expression. */
6063 for (j = 0; j < domby_len; j++)
6065 dominated = domby[j];
6066 /* Ignore self dominance. */
6067 if (bb == dominated)
6068 continue;
6070 /* We've found a dominated block, now see if it computes
6071 the busy expression and whether or not moving that
6072 expression to the "beginning" of that block is safe. */
6073 if (!TEST_BIT (antloc[dominated->index], i))
6074 continue;
6076 /* The expression is computed in the dominated block and
6077 it would be safe to compute it at the start of the
6078 dominated block. Now we have to determine if the
6079 expression would reach the dominated block if it was
6080 placed at the end of BB. */
6081 if (hoist_expr_reaches_here_p (bb, i, dominated, NULL))
6083 struct expr *expr = index_map[i];
6084 struct occr *occr = expr->antic_occr;
6085 rtx insn;
6086 rtx set;
6088 /* Find the right occurrence of this expression. */
6089 while (occr && BLOCK_FOR_INSN (occr->insn) != dominated)
6090 occr = occr->next;
6092 /* Should never happen. */
6093 if (!occr)
6094 abort ();
6096 insn = occr->insn;
6098 set = single_set (insn);
6099 if (! set)
6100 abort ();
6102 /* Create a pseudo-reg to store the result of reaching
6103 expressions into. Get the mode for the new pseudo
6104 from the mode of the original destination pseudo. */
6105 if (expr->reaching_reg == NULL)
6106 expr->reaching_reg
6107 = gen_reg_rtx (GET_MODE (SET_DEST (set)));
6109 gcse_emit_move_after (expr->reaching_reg, SET_DEST (set), insn);
6110 delete_insn (insn);
6111 occr->deleted_p = 1;
6112 if (!insn_inserted_p)
6114 insert_insn_end_bb (index_map[i], bb, 0);
6115 insn_inserted_p = 1;
6121 free (domby);
6124 free (index_map);
6127 /* Top level routine to perform one code hoisting (aka unification) pass
6129 Return non-zero if a change was made. */
6131 static int
6132 one_code_hoisting_pass ()
6134 int changed = 0;
6136 alloc_hash_table (max_cuid, &expr_hash_table, 0);
6137 compute_hash_table (&expr_hash_table);
6138 if (gcse_file)
6139 dump_hash_table (gcse_file, "Code Hoisting Expressions", &expr_hash_table);
6141 if (expr_hash_table.n_elems > 0)
6143 alloc_code_hoist_mem (last_basic_block, expr_hash_table.n_elems);
6144 compute_code_hoist_data ();
6145 hoist_code ();
6146 free_code_hoist_mem ();
6149 free_hash_table (&expr_hash_table);
6151 return changed;
6154 /* Here we provide the things required to do store motion towards
6155 the exit. In order for this to be effective, gcse also needed to
6156 be taught how to move a load when it is killed only by a store to itself.
6158 int i;
6159 float a[10];
6161 void foo(float scale)
6163 for (i=0; i<10; i++)
6164 a[i] *= scale;
6167 'i' is both loaded and stored to in the loop. Normally, gcse cannot move
6168 the load out since it's live around the loop, and stored to at the bottom
6169 of the loop.
6171 The 'Load Motion' referred to and implemented in this file is
6172 an enhancement to gcse which, when using edge-based LCM, recognizes
6173 this situation and allows gcse to move the load out of the loop.
6175 Once gcse has hoisted the load, store motion can then push this
6176 load towards the exit, and we end up with no loads or stores of 'i'
6177 in the loop. */
6179 /* This will search the ldst list for a matching expression. If it
6180 doesn't find one, we create one and initialize it. */
6182 static struct ls_expr *
6183 ldst_entry (x)
6184 rtx x;
6186 struct ls_expr * ptr;
6188 for (ptr = first_ls_expr(); ptr != NULL; ptr = next_ls_expr (ptr))
6189 if (expr_equiv_p (ptr->pattern, x))
6190 break;
6192 if (!ptr)
6194 ptr = (struct ls_expr *) xmalloc (sizeof (struct ls_expr));
6196 ptr->next = pre_ldst_mems;
6197 ptr->expr = NULL;
6198 ptr->pattern = x;
6199 ptr->loads = NULL_RTX;
6200 ptr->stores = NULL_RTX;
6201 ptr->reaching_reg = NULL_RTX;
6202 ptr->invalid = 0;
6203 ptr->index = 0;
6204 ptr->hash_index = 0;
6205 pre_ldst_mems = ptr;
6208 return ptr;
6211 /* Free up an individual ldst entry. */
6213 static void
6214 free_ldst_entry (ptr)
6215 struct ls_expr * ptr;
6217 free_INSN_LIST_list (& ptr->loads);
6218 free_INSN_LIST_list (& ptr->stores);
6220 free (ptr);
6223 /* Free up all memory associated with the ldst list. */
6225 static void
6226 free_ldst_mems ()
6228 while (pre_ldst_mems)
6230 struct ls_expr * tmp = pre_ldst_mems;
6232 pre_ldst_mems = pre_ldst_mems->next;
6234 free_ldst_entry (tmp);
6237 pre_ldst_mems = NULL;
6240 /* Dump debugging info about the ldst list. */
6242 static void
6243 print_ldst_list (file)
6244 FILE * file;
6246 struct ls_expr * ptr;
6248 fprintf (file, "LDST list: \n");
6250 for (ptr = first_ls_expr(); ptr != NULL; ptr = next_ls_expr (ptr))
6252 fprintf (file, " Pattern (%3d): ", ptr->index);
6254 print_rtl (file, ptr->pattern);
6256 fprintf (file, "\n Loads : ");
6258 if (ptr->loads)
6259 print_rtl (file, ptr->loads);
6260 else
6261 fprintf (file, "(nil)");
6263 fprintf (file, "\n Stores : ");
6265 if (ptr->stores)
6266 print_rtl (file, ptr->stores);
6267 else
6268 fprintf (file, "(nil)");
6270 fprintf (file, "\n\n");
6273 fprintf (file, "\n");
6276 /* Return the ldst entry matching X if it is in the list and has not been invalidated, otherwise NULL. */
6278 static struct ls_expr *
6279 find_rtx_in_ldst (x)
6280 rtx x;
6282 struct ls_expr * ptr;
6284 for (ptr = pre_ldst_mems; ptr != NULL; ptr = ptr->next)
6285 if (expr_equiv_p (ptr->pattern, x) && ! ptr->invalid)
6286 return ptr;
6288 return NULL;
6291 /* Assign each element of the list of mems a monotonically increasing value. */
6293 static int
6294 enumerate_ldsts ()
6296 struct ls_expr * ptr;
6297 int n = 0;
6299 for (ptr = pre_ldst_mems; ptr != NULL; ptr = ptr->next)
6300 ptr->index = n++;
6302 return n;
6305 /* Return first item in the list. */
6307 static inline struct ls_expr *
6308 first_ls_expr ()
6310 return pre_ldst_mems;
6313 /* Return the next item in the list after the specified one. */
6315 static inline struct ls_expr *
6316 next_ls_expr (ptr)
6317 struct ls_expr * ptr;
6319 return ptr->next;
6322 /* Load Motion for loads which only kill themselves. */
6324 /* Return true if x is a simple MEM operation, with no registers or
6325 side effects. These are the types of loads we consider for the
6326 ld_motion list; otherwise we let the usual aliasing take care of it. */
6328 static int
6329 simple_mem (x)
6330 rtx x;
6332 if (GET_CODE (x) != MEM)
6333 return 0;
6335 if (MEM_VOLATILE_P (x))
6336 return 0;
6338 if (GET_MODE (x) == BLKmode)
6339 return 0;
6341 if (!rtx_varies_p (XEXP (x, 0), 0))
6342 return 1;
6344 return 0;
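/* Illustrative examples (assumptions, not an exhaustive list): a MEM
   such as (mem:SI (symbol_ref ("i"))) is accepted as "simple", while a
   volatile MEM, a BLKmode MEM, or a MEM whose address can vary, e.g.
   (mem:SI (reg 60)), is rejected and left to the usual aliasing code.  */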
6347 /* Make sure there isn't a buried reference in this pattern anywhere.
6348 If there is, invalidate the entry for it since we're not capable
6349 of fixing it up just yet. We have to be sure we know about ALL
6350 loads since the aliasing code will allow all entries in the
6351 ld_motion list to not-alias itself. If we miss a load, we will get
6352 the wrong value since gcse might common it and we won't know to
6353 fix it up. */
6355 static void
6356 invalidate_any_buried_refs (x)
6357 rtx x;
6359 const char * fmt;
6360 int i, j;
6361 struct ls_expr * ptr;
6363 /* Invalidate it in the list. */
6364 if (GET_CODE (x) == MEM && simple_mem (x))
6366 ptr = ldst_entry (x);
6367 ptr->invalid = 1;
6370 /* Recursively process the insn. */
6371 fmt = GET_RTX_FORMAT (GET_CODE (x));
6373 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
6375 if (fmt[i] == 'e')
6376 invalidate_any_buried_refs (XEXP (x, i));
6377 else if (fmt[i] == 'E')
6378 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
6379 invalidate_any_buried_refs (XVECEXP (x, i, j));
6383 /* Find all the 'simple' MEMs which are used in LOADs and STORES. Simple
6384 being defined as MEM loads and stores to symbols, with no
6385 side effects and no registers in the expression. If there are any
6386 uses/defs which don't match these criteria, the entry is invalidated and
6387 trimmed out later. */
6389 static void
6390 compute_ld_motion_mems ()
6392 struct ls_expr * ptr;
6393 basic_block bb;
6394 rtx insn;
6396 pre_ldst_mems = NULL;
6398 FOR_EACH_BB (bb)
6400 for (insn = bb->head;
6401 insn && insn != NEXT_INSN (bb->end);
6402 insn = NEXT_INSN (insn))
6404 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
6406 if (GET_CODE (PATTERN (insn)) == SET)
6408 rtx src = SET_SRC (PATTERN (insn));
6409 rtx dest = SET_DEST (PATTERN (insn));
6411 /* Check for a simple LOAD... */
6412 if (GET_CODE (src) == MEM && simple_mem (src))
6414 ptr = ldst_entry (src);
6415 if (GET_CODE (dest) == REG)
6416 ptr->loads = alloc_INSN_LIST (insn, ptr->loads);
6417 else
6418 ptr->invalid = 1;
6420 else
6422 /* Make sure there isn't a buried load somewhere. */
6423 invalidate_any_buried_refs (src);
6426 /* Check for stores. Don't worry about aliased ones, they
6427 will block any movement we might do later. We only care
6428 about this exact pattern since those are the only
6429 circumstance that we will ignore the aliasing info. */
6430 if (GET_CODE (dest) == MEM && simple_mem (dest))
6432 ptr = ldst_entry (dest);
6434 if (GET_CODE (src) != MEM
6435 && GET_CODE (src) != ASM_OPERANDS)
6436 ptr->stores = alloc_INSN_LIST (insn, ptr->stores);
6437 else
6438 ptr->invalid = 1;
6441 else
6442 invalidate_any_buried_refs (PATTERN (insn));
6448 /* Remove any references that have been either invalidated or are not in the
6449 expression list for pre gcse. */
6451 static void
6452 trim_ld_motion_mems ()
6454 struct ls_expr * last = NULL;
6455 struct ls_expr * ptr = first_ls_expr ();
6457 while (ptr != NULL)
6459 int del = ptr->invalid;
6460 struct expr * expr = NULL;
6462 /* Delete if entry has been made invalid. */
6463 if (!del)
6465 unsigned int i;
6467 del = 1;
6468 /* Delete if we cannot find this mem in the expression list. */
6469 for (i = 0; i < expr_hash_table.size && del; i++)
6471 for (expr = expr_hash_table.table[i];
6472 expr != NULL;
6473 expr = expr->next_same_hash)
6474 if (expr_equiv_p (expr->expr, ptr->pattern))
6476 del = 0;
6477 break;
6482 if (del)
6484 if (last != NULL)
6486 last->next = ptr->next;
6487 free_ldst_entry (ptr);
6488 ptr = last->next;
6490 else
6492 pre_ldst_mems = pre_ldst_mems->next;
6493 free_ldst_entry (ptr);
6494 ptr = pre_ldst_mems;
6497 else
6499 /* Set the expression field if we are keeping it. */
6500 last = ptr;
6501 ptr->expr = expr;
6502 ptr = ptr->next;
6506 /* Show the world what we've found. */
6507 if (gcse_file && pre_ldst_mems != NULL)
6508 print_ldst_list (gcse_file);
6511 /* This routine will take an expression which we are replacing with
6512 a reaching register, and update any stores that are needed if
6513 that expression is in the ld_motion list. Stores are updated by
6514 copying their SRC to the reaching register, and then storing
6515 the reaching register into the store location. This keeps the
6516 correct value in the reaching register for the loads. */
6518 static void
6519 update_ld_motion_stores (expr)
6520 struct expr * expr;
6522 struct ls_expr * mem_ptr;
6524 if ((mem_ptr = find_rtx_in_ldst (expr->expr)))
6526 /* We can try to find just the REACHED stores, but it shouldn't
6527 matter to set the reaching reg everywhere... some might be
6528 dead and should be eliminated later. */
6530 /* We replace SET mem = expr with
6531 SET reg = expr
6532 SET mem = reg , where reg is the
6533 reaching reg used in the load. */
6534 rtx list = mem_ptr->stores;
6536 for ( ; list != NULL_RTX; list = XEXP (list, 1))
6538 rtx insn = XEXP (list, 0);
6539 rtx pat = PATTERN (insn);
6540 rtx src = SET_SRC (pat);
6541 rtx reg = expr->reaching_reg;
6542 rtx copy, new;
6544 /* If we've already copied it, continue. */
6545 if (expr->reaching_reg == src)
6546 continue;
6548 if (gcse_file)
6550 fprintf (gcse_file, "PRE: store updated with reaching reg ");
6551 print_rtl (gcse_file, expr->reaching_reg);
6552 fprintf (gcse_file, ":\n ");
6553 print_inline_rtx (gcse_file, insn, 8);
6554 fprintf (gcse_file, "\n");
6557 copy = gen_move_insn ( reg, SET_SRC (pat));
6558 new = emit_insn_before (copy, insn);
6559 record_one_set (REGNO (reg), new);
6560 SET_SRC (pat) = reg;
6562 /* un-recognize this pattern since it's probably different now. */
6563 INSN_CODE (insn) = -1;
6564 gcse_create_count++;
6569 /* Store motion code. */
6571 /* This is used to communicate the target bitvector we want to use in the
6572 reg_set_info routine when called via the note_stores mechanism. */
6573 static sbitmap * regvec;
6575 /* Used in computing the reverse edge graph bit vectors. */
6576 static sbitmap * st_antloc;
6578 /* Global holding the number of store expressions we are dealing with. */
6579 static int num_stores;
6581 /* Check whether we need to mark a register as set. Called from note_stores. */
6583 static void
6584 reg_set_info (dest, setter, data)
6585 rtx dest, setter ATTRIBUTE_UNUSED;
6586 void * data ATTRIBUTE_UNUSED;
6588 if (GET_CODE (dest) == SUBREG)
6589 dest = SUBREG_REG (dest);
6591 if (GET_CODE (dest) == REG)
6592 SET_BIT (*regvec, REGNO (dest));
6595 /* Return non-zero if the register operands of expression X are killed
6596 anywhere in basic block BB. */
6598 static int
6599 store_ops_ok (x, bb)
6600 rtx x;
6601 basic_block bb;
6603 int i;
6604 enum rtx_code code;
6605 const char * fmt;
6607 /* Repeat is used to turn tail-recursion into iteration. */
6608 repeat:
6610 if (x == 0)
6611 return 1;
6613 code = GET_CODE (x);
6614 switch (code)
6616 case REG:
6617 /* If a reg has changed after us in this
6618 block, the operand has been killed. */
6619 return TEST_BIT (reg_set_in_block[bb->index], REGNO (x));
6621 case MEM:
6622 x = XEXP (x, 0);
6623 goto repeat;
6625 case PRE_DEC:
6626 case PRE_INC:
6627 case POST_DEC:
6628 case POST_INC:
6629 return 0;
6631 case PC:
6632 case CC0: /*FIXME*/
6633 case CONST:
6634 case CONST_INT:
6635 case CONST_DOUBLE:
6636 case CONST_VECTOR:
6637 case SYMBOL_REF:
6638 case LABEL_REF:
6639 case ADDR_VEC:
6640 case ADDR_DIFF_VEC:
6641 return 1;
6643 default:
6644 break;
6647 i = GET_RTX_LENGTH (code) - 1;
6648 fmt = GET_RTX_FORMAT (code);
6650 for (; i >= 0; i--)
6652 if (fmt[i] == 'e')
6654 rtx tem = XEXP (x, i);
6656 /* If we are about to do the last recursive call
6657 needed at this level, change it into iteration.
6658 This function is called enough to be worth it. */
6659 if (i == 0)
6661 x = tem;
6662 goto repeat;
6665 if (! store_ops_ok (tem, bb))
6666 return 0;
6668 else if (fmt[i] == 'E')
6670 int j;
6672 for (j = 0; j < XVECLEN (x, i); j++)
6674 if (! store_ops_ok (XVECEXP (x, i, j), bb))
6675 return 0;
6680 return 1;
6683 /* Determine whether INSN is a MEM store pattern that we will consider moving. */
6685 static void
6686 find_moveable_store (insn)
6687 rtx insn;
6689 struct ls_expr * ptr;
6690 rtx dest = PATTERN (insn);
6692 if (GET_CODE (dest) != SET
6693 || GET_CODE (SET_SRC (dest)) == ASM_OPERANDS)
6694 return;
6696 dest = SET_DEST (dest);
6698 if (GET_CODE (dest) != MEM || MEM_VOLATILE_P (dest)
6699 || GET_MODE (dest) == BLKmode)
6700 return;
6702 if (GET_CODE (XEXP (dest, 0)) != SYMBOL_REF)
6703 return;
6705 if (rtx_varies_p (XEXP (dest, 0), 0))
6706 return;
6708 ptr = ldst_entry (dest);
6709 ptr->stores = alloc_INSN_LIST (insn, ptr->stores);
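/* Illustrative example (assumed form, not from a testcase): a store
   such as

	(set (mem:SI (symbol_ref ("i"))) (reg:SI 60))

   passes the checks above (a SET whose destination is a scalar,
   non-volatile MEM addressed by a non-varying SYMBOL_REF) and is
   recorded on its ldst entry's store list; anything else is ignored
   here and handled by ordinary aliasing.  */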
6712 /* Build the table of store expressions we may want to move, and record
6713 which registers are set in each basic block. Return the number of store expressions found. */
6715 static int
6716 compute_store_table ()
6718 int ret;
6719 basic_block bb;
6720 unsigned regno;
6721 rtx insn, pat;
6723 max_gcse_regno = max_reg_num ();
6725 reg_set_in_block = (sbitmap *) sbitmap_vector_alloc (last_basic_block,
6726 max_gcse_regno);
6727 sbitmap_vector_zero (reg_set_in_block, last_basic_block);
6728 pre_ldst_mems = 0;
6730 /* Find all the stores we care about. */
6731 FOR_EACH_BB (bb)
6733 regvec = & (reg_set_in_block[bb->index]);
6734 for (insn = bb->end;
6735 insn && insn != PREV_INSN (bb->head);
6736 insn = PREV_INSN (insn))
6738 /* Ignore anything that is not a normal insn. */
6739 if (! INSN_P (insn))
6740 continue;
6742 if (GET_CODE (insn) == CALL_INSN)
6744 bool clobbers_all = false;
6745 #ifdef NON_SAVING_SETJMP
6746 if (NON_SAVING_SETJMP
6747 && find_reg_note (insn, REG_SETJMP, NULL_RTX))
6748 clobbers_all = true;
6749 #endif
6751 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
6752 if (clobbers_all
6753 || TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
6754 SET_BIT (reg_set_in_block[bb->index], regno);
6757 pat = PATTERN (insn);
6758 note_stores (pat, reg_set_info, NULL);
6760 /* Now that we've marked regs, look for stores. */
6761 if (GET_CODE (pat) == SET)
6762 find_moveable_store (insn);
6766 ret = enumerate_ldsts ();
6768 if (gcse_file)
6770 fprintf (gcse_file, "Store Motion Expressions.\n");
6771 print_ldst_list (gcse_file);
6774 return ret;
6777 /* Check to see if the load X is aliased with STORE_PATTERN. */
6779 static int
6780 load_kills_store (x, store_pattern)
6781 rtx x, store_pattern;
6783 if (true_dependence (x, GET_MODE (x), store_pattern, rtx_addr_varies_p))
6784 return 1;
6785 return 0;
6788 /* Go through the entire insn X, looking for any loads which might alias
6789 STORE_PATTERN. Return 1 if found. */
6791 static int
6792 find_loads (x, store_pattern)
6793 rtx x, store_pattern;
6795 const char * fmt;
6796 int i, j;
6797 int ret = 0;
6799 if (!x)
6800 return 0;
6802 if (GET_CODE (x) == SET)
6803 x = SET_SRC (x);
6805 if (GET_CODE (x) == MEM)
6807 if (load_kills_store (x, store_pattern))
6808 return 1;
6811 /* Recursively process the insn. */
6812 fmt = GET_RTX_FORMAT (GET_CODE (x));
6814 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0 && !ret; i--)
6816 if (fmt[i] == 'e')
6817 ret |= find_loads (XEXP (x, i), store_pattern);
6818 else if (fmt[i] == 'E')
6819 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
6820 ret |= find_loads (XVECEXP (x, i, j), store_pattern);
6822 return ret;
6825 /* Check if INSN kills the store pattern X (is aliased with it).
6826 Return 1 if it does. */
6828 static int
6829 store_killed_in_insn (x, insn)
6830 rtx x, insn;
6832 if (GET_RTX_CLASS (GET_CODE (insn)) != 'i')
6833 return 0;
6835 if (GET_CODE (insn) == CALL_INSN)
6837 /* A normal or pure call might read from pattern,
6838 but a const call will not. */
6839 return ! CONST_OR_PURE_CALL_P (insn) || pure_call_p (insn);
6842 if (GET_CODE (PATTERN (insn)) == SET)
6844 rtx pat = PATTERN (insn);
6845 /* Check for memory stores to aliased objects. */
6846 if (GET_CODE (SET_DEST (pat)) == MEM && !expr_equiv_p (SET_DEST (pat), x))
6847 /* Pretend it's a load and check for aliasing. */
6848 if (find_loads (SET_DEST (pat), x))
6849 return 1;
6850 return find_loads (SET_SRC (pat), x);
6852 else
6853 return find_loads (PATTERN (insn), x);
6856 /* Returns 1 if the expression X is loaded or clobbered on or after INSN
6857 within basic block BB. */
6859 static int
6860 store_killed_after (x, insn, bb)
6861 rtx x, insn;
6862 basic_block bb;
6864 rtx last = bb->end;
6866 if (insn == last)
6867 return 0;
6869 /* Check if the register operands of the store are OK in this block.
6870 Note that if registers are changed ANYWHERE in the block, we'll
6871 decide we can't move it, regardless of whether it changed above
6872 or below the store. This could be improved by checking the register
6873 operands while looking for aliasing in each insn. */
6874 if (!store_ops_ok (XEXP (x, 0), bb))
6875 return 1;
6877 for ( ; insn && insn != NEXT_INSN (last); insn = NEXT_INSN (insn))
6878 if (store_killed_in_insn (x, insn))
6879 return 1;
6881 return 0;
6884 /* Returns 1 if the expression X is loaded or clobbered on or before INSN
6885 within basic block BB. */
6886 static int
6887 store_killed_before (x, insn, bb)
6888 rtx x, insn;
6889 basic_block bb;
6891 rtx first = bb->head;
6893 if (insn == first)
6894 return store_killed_in_insn (x, insn);
6896 /* Check if the register operands of the store are OK in this block.
6897 Note that if registers are changed ANYWHERE in the block, we'll
6898 decide we can't move it, regardless of whether it changed above
6899 or below the store. This could be improved by checking the register
6900 operands while looking for aliasing in each insn. */
6901 if (!store_ops_ok (XEXP (x, 0), bb))
6902 return 1;
6904 for ( ; insn && insn != PREV_INSN (first); insn = PREV_INSN (insn))
6905 if (store_killed_in_insn (x, insn))
6906 return 1;
6908 return 0;
6911 #define ANTIC_STORE_LIST(x) ((x)->loads)
6912 #define AVAIL_STORE_LIST(x) ((x)->stores)
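/* These macros reuse the `loads' and `stores' fields of struct ls_expr
   for store motion: ANTIC_STORE_LIST collects store insns that are not
   killed earlier in their block (anticipatable at block entry), and
   AVAIL_STORE_LIST collects store insns that are still available at
   the end of their block, as computed in build_store_vectors.  */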
6914 /* Given the table of available store insns at the end of blocks,
6915 determine which ones are not killed by aliasing, and generate
6916 the appropriate vectors for gen and killed. */
6917 static void
6918 build_store_vectors ()
6920 basic_block bb, b;
6921 rtx insn, st;
6922 struct ls_expr * ptr;
6924 /* Build the gen_vector. This is any store in the table which is not killed
6925 by aliasing later in its block. */
6926 ae_gen = (sbitmap *) sbitmap_vector_alloc (last_basic_block, num_stores);
6927 sbitmap_vector_zero (ae_gen, last_basic_block);
6929 st_antloc = (sbitmap *) sbitmap_vector_alloc (last_basic_block, num_stores);
6930 sbitmap_vector_zero (st_antloc, last_basic_block);
6932 for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
6934 /* Put all the stores into either the antic list, or the avail list,
6935 or both. */
6936 rtx store_list = ptr->stores;
6937 ptr->stores = NULL_RTX;
6939 for (st = store_list; st != NULL; st = XEXP (st, 1))
6941 insn = XEXP (st, 0);
6942 bb = BLOCK_FOR_INSN (insn);
6944 if (!store_killed_after (ptr->pattern, insn, bb))
6946 /* If we've already seen an available expression in this block,
6947 we can delete the one we saw already (it occurs earlier in
6948 the block) and replace it with this one. We'll copy the
6949 old SRC expression to an unused register in case there
6950 are any side effects. */
6951 if (TEST_BIT (ae_gen[bb->index], ptr->index))
6953 /* Find previous store. */
6954 rtx st;
6955 for (st = AVAIL_STORE_LIST (ptr); st ; st = XEXP (st, 1))
6956 if (BLOCK_FOR_INSN (XEXP (st, 0)) == bb)
6957 break;
6958 if (st)
6960 rtx r = gen_reg_rtx (GET_MODE (ptr->pattern));
6961 if (gcse_file)
6962 fprintf (gcse_file, "Removing redundant store:\n");
6963 replace_store_insn (r, XEXP (st, 0), bb);
6964 XEXP (st, 0) = insn;
6965 continue;
6968 SET_BIT (ae_gen[bb->index], ptr->index);
6969 AVAIL_STORE_LIST (ptr) = alloc_INSN_LIST (insn,
6970 AVAIL_STORE_LIST (ptr));
6973 if (!store_killed_before (ptr->pattern, insn, bb))
6975 SET_BIT (st_antloc[BLOCK_NUM (insn)], ptr->index);
6976 ANTIC_STORE_LIST (ptr) = alloc_INSN_LIST (insn,
6977 ANTIC_STORE_LIST (ptr));
6981 /* Free the original list of store insns. */
6982 free_INSN_LIST_list (&store_list);
6985 ae_kill = (sbitmap *) sbitmap_vector_alloc (last_basic_block, num_stores);
6986 sbitmap_vector_zero (ae_kill, last_basic_block);
6988 transp = (sbitmap *) sbitmap_vector_alloc (last_basic_block, num_stores);
6989 sbitmap_vector_zero (transp, last_basic_block);
6991 for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
6992 FOR_EACH_BB (b)
6994 if (store_killed_after (ptr->pattern, b->head, b))
6996 /* The anticipatable expression is not killed if it's gen'd. */
6998 We leave this check out for now. If we have a code sequence
6999 in a block which looks like:
7000 ST MEMa = x
7001 L y = MEMa
7002 ST MEMa = z
7003 We should flag this as having an ANTIC expression, NOT
7004 transparent, NOT killed, and AVAIL.
7005 Unfortunately, since we haven't re-written all loads to
7006 use the reaching reg, we'll end up doing an incorrect
7007 Load in the middle here if we push the store down. It happens in
7008 gcc.c-torture/execute/960311-1.c with -O3
7009 If we always kill it in this case, we'll sometimes do
7010 unnecessary work, but it shouldn't actually hurt anything.
7011 if (!TEST_BIT (ae_gen[b], ptr->index)). */
7012 SET_BIT (ae_kill[b->index], ptr->index);
7014 else
7015 SET_BIT (transp[b->index], ptr->index);
7018 /* Any block with no exits calls some non-returning function, so
7019 we better mark the store killed here, or we might not store to
7020 it at all. If we knew it was abort, we wouldn't have to store,
7021 but we don't know that for sure. */
7022 if (gcse_file)
7024 fprintf (gcse_file, "ST_avail and ST_antic (shown under loads..)\n");
7025 print_ldst_list (gcse_file);
7026 dump_sbitmap_vector (gcse_file, "st_antloc", "", st_antloc, last_basic_block);
7027 dump_sbitmap_vector (gcse_file, "st_kill", "", ae_kill, last_basic_block);
7028 dump_sbitmap_vector (gcse_file, "Transpt", "", transp, last_basic_block);
7029 dump_sbitmap_vector (gcse_file, "st_avloc", "", ae_gen, last_basic_block);
7033 /* Insert an instruction at the beginning of a basic block, and update
7034 the BLOCK_HEAD if needed. */
7036 static void
7037 insert_insn_start_bb (insn, bb)
7038 rtx insn;
7039 basic_block bb;
7041 /* Insert at start of successor block. */
7042 rtx prev = PREV_INSN (bb->head);
7043 rtx before = bb->head;
7044 while (before != 0)
7046 if (GET_CODE (before) != CODE_LABEL
7047 && (GET_CODE (before) != NOTE
7048 || NOTE_LINE_NUMBER (before) != NOTE_INSN_BASIC_BLOCK))
7049 break;
7050 prev = before;
7051 if (prev == bb->end)
7052 break;
7053 before = NEXT_INSN (before);
7056 insn = emit_insn_after (insn, prev);
7058 if (gcse_file)
7060 fprintf (gcse_file, "STORE_MOTION insert store at start of BB %d:\n",
7061 bb->index);
7062 print_inline_rtx (gcse_file, insn, 6);
7063 fprintf (gcse_file, "\n");
7067 /* This routine will insert a store on an edge. EXPR is the ldst entry for
7068 the memory reference, and E is the edge to insert it on. Returns non-zero
7069 if an edge insertion was performed. */
7071 static int
7072 insert_store (expr, e)
7073 struct ls_expr * expr;
7074 edge e;
7076 rtx reg, insn;
7077 basic_block bb;
7078 edge tmp;
7080 /* We did all the deletes before this insert, so if we didn't delete a
7081 store, then we haven't set the reaching reg yet either. */
7082 if (expr->reaching_reg == NULL_RTX)
7083 return 0;
7085 reg = expr->reaching_reg;
7086 insn = gen_move_insn (expr->pattern, reg);
7088 /* If we are inserting this expression on ALL predecessor edges of a BB,
7089 insert it at the start of the BB, and reset the insert bits on the other
7090 edges so we don't try to insert it again. */
7091 bb = e->dest;
7092 for (tmp = e->dest->pred; tmp ; tmp = tmp->pred_next)
7094 int index = EDGE_INDEX (edge_list, tmp->src, tmp->dest);
7095 if (index == EDGE_INDEX_NO_EDGE)
7096 abort ();
7097 if (! TEST_BIT (pre_insert_map[index], expr->index))
7098 break;
7101 /* If tmp is NULL, we found an insertion on every edge, blank the
7102 insertion vector for these edges, and insert at the start of the BB. */
7103 if (!tmp && bb != EXIT_BLOCK_PTR)
7105 for (tmp = e->dest->pred; tmp ; tmp = tmp->pred_next)
7107 int index = EDGE_INDEX (edge_list, tmp->src, tmp->dest);
7108 RESET_BIT (pre_insert_map[index], expr->index);
7110 insert_insn_start_bb (insn, bb);
7111 return 0;
7114 /* We can't insert on this edge, so we'll insert at the head of the
7115 successor block. See Morgan, sec 10.5. */
7116 if ((e->flags & EDGE_ABNORMAL) == EDGE_ABNORMAL)
7118 insert_insn_start_bb (insn, bb);
7119 return 0;
7122 insert_insn_on_edge (insn, e);
7124 if (gcse_file)
7126 fprintf (gcse_file, "STORE_MOTION insert insn on edge (%d, %d):\n",
7127 e->src->index, e->dest->index);
7128 print_inline_rtx (gcse_file, insn, 6);
7129 fprintf (gcse_file, "\n");
7132 return 1;
7135 /* This routine will replace a store with a SET to a specified register. */
7137 static void
7138 replace_store_insn (reg, del, bb)
7139 rtx reg, del;
7140 basic_block bb;
7142 rtx insn;
7144 insn = gen_move_insn (reg, SET_SRC (PATTERN (del)));
7145 insn = emit_insn_after (insn, del);
7147 if (gcse_file)
7149 fprintf (gcse_file,
7150 "STORE_MOTION delete insn in BB %d:\n ", bb->index);
7151 print_inline_rtx (gcse_file, del, 6);
7152 fprintf (gcse_file, "\nSTORE_MOTION replaced with insn:\n ");
7153 print_inline_rtx (gcse_file, insn, 6);
7154 fprintf (gcse_file, "\n");
7157 delete_insn (del);
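/* Illustrative sketch (register numbers are made up): for a deleted
   store (set (mem X) (reg 60)) whose ldst entry has reaching register
   (reg 99), the insn emitted above is (set (reg 99) (reg 60)).  The
   actual store (set (mem X) (reg 99)) is emitted later by insert_store
   on the edges selected by the LCM insertion map.  */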
7161 /* Delete a store, but copy the value that would have been stored into
7162 the reaching_reg for later storing. */
7164 static void
7165 delete_store (expr, bb)
7166 struct ls_expr * expr;
7167 basic_block bb;
7169 rtx reg, i, del;
7171 if (expr->reaching_reg == NULL_RTX)
7172 expr->reaching_reg = gen_reg_rtx (GET_MODE (expr->pattern));
7175 /* If there is more than 1 store, the earlier ones will be dead,
7176 but it doesn't hurt to replace them here. */
7177 reg = expr->reaching_reg;
7179 for (i = AVAIL_STORE_LIST (expr); i; i = XEXP (i, 1))
7181 del = XEXP (i, 0);
7182 if (BLOCK_FOR_INSN (del) == bb)
7184 /* We know there is only one since we deleted redundant
7185 ones during the available computation. */
7186 replace_store_insn (reg, del, bb);
7187 break;
7192 /* Free memory used by store motion. */
7194 static void
7195 free_store_memory ()
7197 free_ldst_mems ();
7199 if (ae_gen)
7200 sbitmap_vector_free (ae_gen);
7201 if (ae_kill)
7202 sbitmap_vector_free (ae_kill);
7203 if (transp)
7204 sbitmap_vector_free (transp);
7205 if (st_antloc)
7206 sbitmap_vector_free (st_antloc);
7207 if (pre_insert_map)
7208 sbitmap_vector_free (pre_insert_map);
7209 if (pre_delete_map)
7210 sbitmap_vector_free (pre_delete_map);
7211 if (reg_set_in_block)
7212 sbitmap_vector_free (reg_set_in_block);
7214 ae_gen = ae_kill = transp = st_antloc = NULL;
7215 pre_insert_map = pre_delete_map = reg_set_in_block = NULL;
7218 /* Perform store motion. Much like gcse, except we move expressions the
7219 other way by looking at the flowgraph in reverse. */
7221 static void
7222 store_motion ()
7224 basic_block bb;
7225 int x;
7226 struct ls_expr * ptr;
7227 int update_flow = 0;
7229 if (gcse_file)
7231 fprintf (gcse_file, "before store motion\n");
7232 print_rtl (gcse_file, get_insns ());
7236 init_alias_analysis ();
7238 /* Find all the stores that are live to the end of their block. */
7239 num_stores = compute_store_table ();
7240 if (num_stores == 0)
7242 sbitmap_vector_free (reg_set_in_block);
7243 end_alias_analysis ();
7244 return;
7247 /* Now compute what's actually available to move. */
7248 add_noreturn_fake_exit_edges ();
7249 build_store_vectors ();
7251 edge_list = pre_edge_rev_lcm (gcse_file, num_stores, transp, ae_gen,
7252 st_antloc, ae_kill, &pre_insert_map,
7253 &pre_delete_map);
7255 /* Now we want to insert the new stores which are going to be needed. */
7256 for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
7258 FOR_EACH_BB (bb)
7259 if (TEST_BIT (pre_delete_map[bb->index], ptr->index))
7260 delete_store (ptr, bb);
7262 for (x = 0; x < NUM_EDGES (edge_list); x++)
7263 if (TEST_BIT (pre_insert_map[x], ptr->index))
7264 update_flow |= insert_store (ptr, INDEX_EDGE (edge_list, x));
7267 if (update_flow)
7268 commit_edge_insertions ();
7270 free_store_memory ();
7271 free_edge_list (edge_list);
7272 remove_fake_edges ();
7273 end_alias_analysis ();
7276 #include "gt-gcse.h"