/* Global common subexpression elimination/Partial redundancy elimination
   and global constant/copy propagation for GNU compiler.
   Copyright (C) 1997, 1998, 1999, 2000, 2001, 2002, 2003
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

/* TODO
   - reordering of memory allocation and freeing to be more space efficient
   - do rough calc of how many regs are needed in each block, and a rough
     calc of how many regs are available in each class and use that to
     throttle back the code in cases where RTX_COST is minimal.
   - a store to the same address as a load does not kill the load if the
     source of the store is also the destination of the load.  Handling this
     allows more load motion, particularly out of loops.
   - ability to realloc sbitmap vectors would allow one initial computation
     of reg_set_in_block with only subsequent additions, rather than
     recomputing it for each pass.  */

/* References searched while implementing this.

   Compilers: Principles, Techniques, and Tools
   Aho, Sethi, Ullman
   Addison-Wesley, 1988

   Global Optimization by Suppression of Partial Redundancies
   E. Morel, C. Renvoise
   Communications of the ACM, Vol. 22, Num. 2, Feb. 1979

   A Portable Machine-Independent Global Optimizer - Design and Measurements
   Frederick Chow
   Stanford Ph.D. thesis, Dec. 1983

   A Fast Algorithm for Code Movement Optimization
   D.M. Dhamdhere
   SIGPLAN Notices, Vol. 23, Num. 10, Oct. 1988

   A Solution to a Problem with Morel and Renvoise's
   Global Optimization by Suppression of Partial Redundancies
   K-H Drechsler, M.P. Stadel
   ACM TOPLAS, Vol. 10, Num. 4, Oct. 1988

   Practical Adaptation of the Global Optimization
   Algorithm of Morel and Renvoise
   D.M. Dhamdhere
   ACM TOPLAS, Vol. 13, Num. 2, Apr. 1991

   Efficiently Computing Static Single Assignment Form and the Control
   Dependence Graph
   R. Cytron, J. Ferrante, B.K. Rosen, M.N. Wegman, and F.K. Zadeck
   ACM TOPLAS, Vol. 13, Num. 4, Oct. 1991

   Lazy Code Motion
   J. Knoop, O. Ruthing, B. Steffen
   ACM SIGPLAN Notices Vol. 27, Num. 7, Jul. 1992, '92 Conference on PLDI

   What's In a Region?  Or Computing Control Dependence Regions in Near-Linear
   Time for Reducible Flow Control
   Thomas Ball
   ACM Letters on Programming Languages and Systems,
   Vol. 2, Num. 1-4, Mar-Dec 1993

   An Efficient Representation for Sparse Sets
   Preston Briggs, Linda Torczon
   ACM Letters on Programming Languages and Systems,
   Vol. 2, Num. 1-4, Mar-Dec 1993

   A Variation of Knoop, Ruthing, and Steffen's Lazy Code Motion
   K-H Drechsler, M.P. Stadel
   ACM SIGPLAN Notices, Vol. 28, Num. 5, May 1993

   Partial Dead Code Elimination
   J. Knoop, O. Ruthing, B. Steffen
   ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994

   Effective Partial Redundancy Elimination
   P. Briggs, K.D. Cooper
   ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994

   The Program Structure Tree: Computing Control Regions in Linear Time
   R. Johnson, D. Pearson, K. Pingali
   ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994

   Optimal Code Motion: Theory and Practice
   J. Knoop, O. Ruthing, B. Steffen
   ACM TOPLAS, Vol. 16, Num. 4, Jul. 1994

   The Power of Assignment Motion
   J. Knoop, O. Ruthing, B. Steffen
   ACM SIGPLAN Notices Vol. 30, Num. 6, Jun. 1995, '95 Conference on PLDI

   Global Code Motion / Global Value Numbering
   C. Click
   ACM SIGPLAN Notices Vol. 30, Num. 6, Jun. 1995, '95 Conference on PLDI

   Value Driven Redundancy Elimination
   L.T. Simpson
   Rice University Ph.D. thesis, Apr. 1996

   Value Numbering
   L.T. Simpson
   Massively Scalar Compiler Project, Rice University, Sep. 1996

   High Performance Compilers for Parallel Computing
   Michael Wolfe
   Addison-Wesley, 1996

   Advanced Compiler Design and Implementation
   Steven Muchnick
   Morgan Kaufmann, 1997

   Building an Optimizing Compiler
   Robert Morgan
   Digital Press, 1998

   People wishing to speed up the code here should read:
     Elimination Algorithms for Data Flow Analysis
     B.G. Ryder, M.C. Paull
     ACM Computing Surveys, Vol. 18, Num. 3, Sep. 1986

     How to Analyze Large Programs Efficiently and Informatively
     D.M. Dhamdhere, B.K. Rosen, F.K. Zadeck
     ACM SIGPLAN Notices Vol. 27, Num. 7, Jul. 1992, '92 Conference on PLDI

   People wishing to do something different can find various possibilities
   in the above papers and elsewhere.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "toplev.h"

#include "rtl.h"
#include "tm_p.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "flags.h"
#include "real.h"
#include "insn-config.h"
#include "recog.h"
#include "basic-block.h"
#include "output.h"
#include "function.h"
#include "expr.h"
#include "except.h"
#include "ggc.h"
#include "params.h"
#include "cselib.h"

#include "obstack.h"

/* Propagate flow information through back edges and thus enable PRE's
   moving loop invariant calculations out of loops.

   Originally this tended to create worse overall code, but several
   improvements during the development of PRE seem to have made following
   back edges generally a win.

   Note much of the loop invariant code motion done here would normally
   be done by loop.c, which has more heuristics for when to move invariants
   out of loops.  At some point we might need to move some of those
   heuristics into gcse.c.  */

/* We support GCSE via Partial Redundancy Elimination.  PRE optimizations
   are a superset of those done by GCSE.

   We perform the following steps:

   1) Compute basic block information.

   2) Compute table of places where registers are set.

   3) Perform copy/constant propagation.

   4) Perform global cse.

   5) Perform another pass of copy/constant propagation.

   Two passes of copy/constant propagation are done because the first one
   enables more GCSE and the second one helps to clean up the copies that
   GCSE creates.  This is needed more for PRE than for Classic because Classic
   GCSE will try to use an existing register containing the common
   subexpression rather than create a new one.  This is harder to do for PRE
   because of the code motion (which Classic GCSE doesn't do).
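
   As a made-up illustration of why the first pass helps: if a block
   contains "r1 = r2; r3 = r1 + r4", copy propagation rewrites the add
   as "r3 = r2 + r4", which can then match other occurrences of
   "r2 + r4" elsewhere in the function that GCSE would otherwise miss.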

   Expressions we are interested in GCSE-ing are of the form
   (set (pseudo-reg) (expression)).
   Function want_to_gcse_p says what these are.

   PRE handles moving invariant expressions out of loops (by treating them as
   partially redundant).

   Eventually it would be nice to replace cse.c/gcse.c with SSA (static single
   assignment) based GVN (global value numbering).  L. T. Simpson's paper
   (Rice University) on value numbering is a useful reference for this.

   **********************

   We used to support multiple passes but there are diminishing returns in
   doing so.  The first pass usually makes 90% of the changes that are doable.
   A second pass can make a few more changes made possible by the first pass.
   Experiments show any further passes don't make enough changes to justify
   the expense.

   A study of spec92 using an unlimited number of passes:
   [1 pass] = 1208 substitutions, [2] = 577, [3] = 202, [4] = 192, [5] = 83,
   [6] = 34, [7] = 17, [8] = 9, [9] = 4, [10] = 4, [11] = 2,
   [12] = 2, [13] = 1, [15] = 1, [16] = 2, [41] = 1

   It was found doing copy propagation between each pass enables further
   substitutions.

   PRE is quite expensive in complicated functions because the DFA can take
   a while to converge.  Hence we only perform one pass.  The parameter
   max-gcse-passes can be modified if one wants to experiment.

   **********************

   The steps for PRE are:

   1) Build the hash table of expressions we wish to GCSE (expr_hash_table).

   2) Perform the data flow analysis for PRE.

   3) Delete the redundant instructions.

   4) Insert the required copies [if any] that make the partially
      redundant instructions fully redundant.

   5) For other reaching expressions, insert an instruction to copy the value
      to a newly created pseudo that will reach the redundant instruction.

   The deletion is done first so that when we do insertions we
   know which pseudo reg to use.
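
   A hypothetical sketch of the transformation (register numbers made up):

     before PRE                        after PRE
     bb1: if (...) goto bb3;           bb1: if (...) goto bb3;
     bb2: r5 = r1 + r2;                bb2: r9 = r1 + r2; r5 = r9;
     bb3: ...                          bb3: r9 = r1 + r2;   [inserted copy]
     bb4: r6 = r1 + r2;                bb4: r6 = r9;        [redundancy deleted]

   The computation in bb4 was only partially redundant (available via bb2
   but not via bb3); inserting it in bb3 makes it fully redundant, and the
   new pseudo r9 carries the value to the deleted occurrence.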

   Various papers have argued that PRE DFA is expensive (O(n^2)) and others
   argue it is not.  The number of iterations for the algorithm to converge
   is typically 2-4 so I don't view it as that expensive (relatively speaking).

   PRE GCSE depends heavily on the second CSE pass to clean up the copies
   we create.  To make an expression reach the place where it's redundant,
   the result of the expression is copied to a new register, and the redundant
   expression is deleted by replacing it with this new register.  Classic GCSE
   doesn't have this problem as much as it computes the reaching defs of
   each register in each block and thus can try to use an existing register.

   **********************

   A fair bit of simplicity is gained by creating small functions for simple
   tasks, even when the function is only called in one place.  This may
   measurably slow things down [or may not] by creating more function call
   overhead than is necessary.  The source is laid out so that it's trivial
   to make the affected functions inline, so that one can measure what
   speedup, if any, can be achieved; maybe later, when things settle, things
   can be rearranged.

   Help stamp out big monolithic functions!  */

/* GCSE global vars.  */

/* -dG dump file.  */
static FILE *gcse_file;

/* Note whether or not we should run jump optimization after gcse.  We
   want to do this for two cases.

    * If we changed any jumps via cprop.

    * If we added any labels via edge splitting.  */

static int run_jump_opt_after_gcse;

/* Bitmaps are normally not included in debugging dumps.
   However it's useful to be able to print them from GDB.
   We could create special functions for this, but it's simpler to
   just allow passing stderr to the dump_foo fns.  Since stderr can
   be a macro, we store a copy here.  */
static FILE *debug_stderr;

/* An obstack for our working variables.  */
static struct obstack gcse_obstack;

/* Nonzero for each mode that supports (set (reg) (reg)).
   This is trivially true for integer and floating point values.
   It may or may not be true for condition codes.  */
static char can_copy_p[(int) NUM_MACHINE_MODES];

/* Nonzero if can_copy_p has been initialized.  */
static int can_copy_init_p;

struct reg_use {rtx reg_rtx; };

/* Hash table of expressions.  */

struct expr
{
  /* The expression (SET_SRC for expressions, PATTERN for assignments).  */
  rtx expr;
  /* Index in the available expression bitmaps.  */
  int bitmap_index;
  /* Next entry with the same hash.  */
  struct expr *next_same_hash;
  /* List of anticipatable occurrences in basic blocks in the function.
     An "anticipatable occurrence" is one that is the first occurrence in the
     basic block, the operands are not modified in the basic block prior
     to the occurrence and the output is not used between the start of
     the block and the occurrence.  */
  struct occr *antic_occr;
  /* List of available occurrences in basic blocks in the function.
     An "available occurrence" is one that is the last occurrence in the
     basic block and the operands are not modified by following statements in
     the basic block [including this insn].  */
  struct occr *avail_occr;
  /* Non-null if the computation is PRE redundant.
     The value is the newly created pseudo-reg to record a copy of the
     expression in all the places that reach the redundant copy.  */
  rtx reaching_reg;
};

/* Occurrence of an expression.
   There is one per basic block.  If a pattern appears more than once the
   last appearance is used [or first for anticipatable expressions].  */

struct occr
{
  /* Next occurrence of this expression.  */
  struct occr *next;
  /* The insn that computes the expression.  */
  rtx insn;
  /* Nonzero if this [anticipatable] occurrence has been deleted.  */
  char deleted_p;
  /* Nonzero if this [available] occurrence has been copied to
     reaching_reg.  */
  /* ??? This is mutually exclusive with deleted_p, so they could share
     the same byte.  */
  char copied_p;
};

/* Expression and copy propagation hash tables.
   Each hash table is an array of buckets.
   ??? It is known that if it were an array of entries, structure elements
   `next_same_hash' and `bitmap_index' wouldn't be necessary.  However, it is
   not clear whether in the final analysis a sufficient amount of memory would
   be saved as the size of the available expression bitmaps would be larger
   [one could build a mapping table without holes afterwards though].
   Someday I'll perform the computation and figure it out.  */

struct hash_table
{
  /* The table itself.
     This is an array of `expr_hash_table_size' elements.  */
  struct expr **table;

  /* Size of the hash table, in elements.  */
  unsigned int size;

  /* Number of hash table elements.  */
  unsigned int n_elems;

  /* Nonzero if this is the set (copy propagation) hash table,
     zero if it is the expression hash table.  */
  int set_p;
};

/* Expression hash table.  */
static struct hash_table expr_hash_table;

/* Copy propagation hash table.  */
static struct hash_table set_hash_table;

/* Mapping of uids to cuids.
   Only real insns get cuids.  */
static int *uid_cuid;

/* Highest UID in UID_CUID.  */
static int max_uid;

/* Get the cuid of an insn.  */
#ifdef ENABLE_CHECKING
#define INSN_CUID(INSN) (INSN_UID (INSN) > max_uid ? (abort (), 0) : uid_cuid[INSN_UID (INSN)])
#else
#define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])
#endif

/* Number of cuids.  */
static int max_cuid;

/* Mapping of cuids to insns.  */
static rtx *cuid_insn;

/* Get insn from cuid.  */
#define CUID_INSN(CUID) (cuid_insn[CUID])
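
/* A hypothetical example of the two mappings, with made-up numbers:
   if the insn stream has UIDs 7, 12, 3 where UID 12 is a NOTE (not a
   real insn), alloc_gcse_mem assigns uid_cuid[7] = 0, uid_cuid[12] = 1
   (the cuid the next real insn will get, so ordering comparisons still
   work), and uid_cuid[3] = 1; cuid_insn maps 0 and 1 back to the two
   real insns.  */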

/* Maximum register number in function prior to doing gcse + 1.
   Registers created during this pass have regno >= max_gcse_regno.
   This is named with "gcse" to not collide with global of same name.  */
static unsigned int max_gcse_regno;

/* Table of registers that are modified.

   For each register, each element is a list of places where the pseudo-reg
   is set.

   For simplicity, GCSE is done on sets of pseudo-regs only.  PRE GCSE only
   requires knowledge of which blocks kill which regs [and thus could use
   a bitmap instead of the lists `reg_set_table' uses].

   `reg_set_table' could be turned into an array of bitmaps (num-bbs x
   num-regs) [however perhaps it may be useful to keep the data as is].  One
   advantage of recording things this way is that `reg_set_table' is fairly
   sparse with respect to pseudo regs but for hard regs could be fairly dense
   [relatively speaking].  And recording sets of pseudo-regs in lists speeds
   up functions like compute_transp since in the case of pseudo-regs we only
   need to iterate over the number of times a pseudo-reg is set, not over the
   number of basic blocks [clearly there is a bit of a slow down in the cases
   where a pseudo is set more than once in a block, however it is believed
   that the net effect is to speed things up].  This isn't done for hard-regs
   because recording call-clobbered hard-regs in `reg_set_table' at each
   function call can consume a fair bit of memory, and iterating over
   hard-regs stored this way in compute_transp will be more expensive.  */

typedef struct reg_set
{
  /* The next setting of this register.  */
  struct reg_set *next;
  /* The insn where it was set.  */
  rtx insn;
} reg_set;

static reg_set **reg_set_table;

/* Size of `reg_set_table'.
   The table starts out at max_gcse_regno + slop, and is enlarged as
   necessary.  */
static int reg_set_table_size;

/* Amount to grow `reg_set_table' by when it's full.  */
#define REG_SET_TABLE_SLOP 100

/* This is a list of expressions which are MEMs and will be used by load
   or store motion.
   Load motion tracks MEMs which aren't killed by
   anything except itself (i.e., loads and stores to a single location).
   We can then allow movement of these MEM refs with a little special
   allowance (all stores copy the same value to the reaching reg used
   for the loads).  This means all values used to store into memory must have
   no side effects so we can re-issue the setter value.
   Store Motion uses this structure as an expression table to track stores
   which look interesting, and might be moveable towards the exit block.  */

struct ls_expr
{
  struct expr *expr;        /* Gcse expression reference for LM.  */
  rtx pattern;              /* Pattern of this mem.  */
  rtx loads;                /* INSN list of loads seen.  */
  rtx stores;               /* INSN list of stores seen.  */
  struct ls_expr *next;     /* Next in the list.  */
  int invalid;              /* Invalid for some reason.  */
  int index;                /* If it maps to a bitmap index.  */
  int hash_index;           /* Index when in a hash table.  */
  rtx reaching_reg;         /* Register to use when re-writing.  */
};

/* Array of implicit set patterns indexed by basic block index.  */
static rtx *implicit_sets;

/* Head of the list of load/store memory refs.  */
static struct ls_expr *pre_ldst_mems = NULL;

/* Bitmap containing one bit for each register in the program.
   Used when performing GCSE to track which registers have been set since
   the start of the basic block.  */
static regset reg_set_bitmap;

/* For each block, a bitmap of registers set in the block.
   This is used by expr_killed_p and compute_transp.
   It is computed during hash table computation and not by compute_sets
   as it includes registers added since the last pass (or between cprop and
   gcse) and it's currently not easy to realloc sbitmap vectors.  */
static sbitmap *reg_set_in_block;

/* Array, indexed by basic block number, of lists of insns which modify
   memory within the block.  */
static rtx *modify_mem_list;
bitmap modify_mem_list_set;

/* This array parallels modify_mem_list, but is kept canonicalized.  */
static rtx *canon_modify_mem_list;
bitmap canon_modify_mem_list_set;

/* Various variables for statistics gathering.  */

/* Memory used in a pass.
   This isn't intended to be absolutely precise.  Its intent is only
   to keep an eye on memory usage.  */
static int bytes_used;

/* GCSE substitutions made.  */
static int gcse_subst_count;
/* Number of copy instructions created.  */
static int gcse_create_count;
/* Number of constants propagated.  */
static int const_prop_count;
/* Number of copies propagated.  */
static int copy_prop_count;

/* These variables are used by classic GCSE.
   Normally they'd be defined a bit later, but `rd_gen' needs to
   be declared sooner.  */

/* Each block has a bitmap of each type.
   The length of each block's bitmap is:

       max_cuid  - for reaching definitions
       n_exprs   - for available expressions

   Thus we view the bitmaps as 2 dimensional arrays, i.e.,
   rd_kill[block_num][cuid_num]
   ae_kill[block_num][expr_num]  */

/* For reaching defs.  */
static sbitmap *rd_kill, *rd_gen, *reaching_defs, *rd_out;

/* For available exprs.  */
static sbitmap *ae_kill, *ae_gen, *ae_in, *ae_out;

/* Objects of this type are passed around by the null-pointer check
   removal routines.  */
struct null_pointer_info
{
  /* The basic block being processed.  */
  basic_block current_block;
  /* The first register to be handled in this pass.  */
  unsigned int min_reg;
  /* One greater than the last register to be handled in this pass.  */
  unsigned int max_reg;
  sbitmap *nonnull_local;
  sbitmap *nonnull_killed;
};

static void compute_can_copy PARAMS ((void));
static char *gmalloc PARAMS ((unsigned int));
static char *grealloc PARAMS ((char *, unsigned int));
static char *gcse_alloc PARAMS ((unsigned long));
static void alloc_gcse_mem PARAMS ((rtx));
static void free_gcse_mem PARAMS ((void));
static void alloc_reg_set_mem PARAMS ((int));
static void free_reg_set_mem PARAMS ((void));
static int get_bitmap_width PARAMS ((int, int, int));
static void record_one_set PARAMS ((int, rtx));
static void record_set_info PARAMS ((rtx, rtx, void *));
static void compute_sets PARAMS ((rtx));
static void hash_scan_insn PARAMS ((rtx, struct hash_table *, int));
static void hash_scan_set PARAMS ((rtx, rtx, struct hash_table *));
static void hash_scan_clobber PARAMS ((rtx, rtx, struct hash_table *));
static void hash_scan_call PARAMS ((rtx, rtx, struct hash_table *));
static int want_to_gcse_p PARAMS ((rtx));
static int oprs_unchanged_p PARAMS ((rtx, rtx, int));
static int oprs_anticipatable_p PARAMS ((rtx, rtx));
static int oprs_available_p PARAMS ((rtx, rtx));
static void insert_expr_in_table PARAMS ((rtx, enum machine_mode, rtx,
                                          int, int, struct hash_table *));
static void insert_set_in_table PARAMS ((rtx, rtx, struct hash_table *));
static unsigned int hash_expr PARAMS ((rtx, enum machine_mode, int *, int));
static unsigned int hash_expr_1 PARAMS ((rtx, enum machine_mode, int *));
static unsigned int hash_string_1 PARAMS ((const char *));
static unsigned int hash_set PARAMS ((int, int));
static int expr_equiv_p PARAMS ((rtx, rtx));
static void record_last_reg_set_info PARAMS ((rtx, int));
static void record_last_mem_set_info PARAMS ((rtx));
static void record_last_set_info PARAMS ((rtx, rtx, void *));
static void compute_hash_table PARAMS ((struct hash_table *));
static void alloc_hash_table PARAMS ((int, struct hash_table *, int));
static void free_hash_table PARAMS ((struct hash_table *));
static void compute_hash_table_work PARAMS ((struct hash_table *));
static void dump_hash_table PARAMS ((FILE *, const char *,
                                     struct hash_table *));
static struct expr *lookup_expr PARAMS ((rtx, struct hash_table *));
static struct expr *lookup_set PARAMS ((unsigned int, struct hash_table *));
static struct expr *next_set PARAMS ((unsigned int, struct expr *));
static void reset_opr_set_tables PARAMS ((void));
static int oprs_not_set_p PARAMS ((rtx, rtx));
static void mark_call PARAMS ((rtx));
static void mark_set PARAMS ((rtx, rtx));
static void mark_clobber PARAMS ((rtx, rtx));
static void mark_oprs_set PARAMS ((rtx));
static void alloc_cprop_mem PARAMS ((int, int));
static void free_cprop_mem PARAMS ((void));
static void compute_transp PARAMS ((rtx, int, sbitmap *, int));
static void compute_transpout PARAMS ((void));
static void compute_local_properties PARAMS ((sbitmap *, sbitmap *, sbitmap *,
                                              struct hash_table *));
static void compute_cprop_data PARAMS ((void));
static void find_used_regs PARAMS ((rtx *, void *));
static int try_replace_reg PARAMS ((rtx, rtx, rtx));
static struct expr *find_avail_set PARAMS ((int, rtx));
static int cprop_jump PARAMS ((basic_block, rtx, rtx, rtx, rtx));
static void mems_conflict_for_gcse_p PARAMS ((rtx, rtx, void *));
static int load_killed_in_block_p PARAMS ((basic_block, int, rtx, int));
static void canon_list_insert PARAMS ((rtx, rtx, void *));
static int cprop_insn PARAMS ((rtx, int));
static int cprop PARAMS ((int));
static rtx fis_get_condition PARAMS ((rtx));
static void find_implicit_sets PARAMS ((void));
static int one_cprop_pass PARAMS ((int, int, int));
static bool constprop_register PARAMS ((rtx, rtx, rtx, int));
static struct expr *find_bypass_set PARAMS ((int, int));
static int bypass_block PARAMS ((basic_block, rtx, rtx));
static int bypass_conditional_jumps PARAMS ((void));
static void alloc_pre_mem PARAMS ((int, int));
static void free_pre_mem PARAMS ((void));
static void compute_pre_data PARAMS ((void));
static int pre_expr_reaches_here_p PARAMS ((basic_block, struct expr *,
                                            basic_block));
static void insert_insn_end_bb PARAMS ((struct expr *, basic_block, int));
static void pre_insert_copy_insn PARAMS ((struct expr *, rtx));
static void pre_insert_copies PARAMS ((void));
static int pre_delete PARAMS ((void));
static int pre_gcse PARAMS ((void));
static int one_pre_gcse_pass PARAMS ((int));
static void add_label_notes PARAMS ((rtx, rtx));
static void alloc_code_hoist_mem PARAMS ((int, int));
static void free_code_hoist_mem PARAMS ((void));
static void compute_code_hoist_vbeinout PARAMS ((void));
static void compute_code_hoist_data PARAMS ((void));
static int hoist_expr_reaches_here_p PARAMS ((basic_block, int, basic_block,
                                              char *));
static void hoist_code PARAMS ((void));
static int one_code_hoisting_pass PARAMS ((void));
static void alloc_rd_mem PARAMS ((int, int));
static void free_rd_mem PARAMS ((void));
static void handle_rd_kill_set PARAMS ((rtx, int, basic_block));
static void compute_kill_rd PARAMS ((void));
static void compute_rd PARAMS ((void));
static void alloc_avail_expr_mem PARAMS ((int, int));
static void free_avail_expr_mem PARAMS ((void));
static void compute_ae_gen PARAMS ((struct hash_table *));
static int expr_killed_p PARAMS ((rtx, basic_block));
static void compute_ae_kill PARAMS ((sbitmap *, sbitmap *, struct hash_table *));
static int expr_reaches_here_p PARAMS ((struct occr *, struct expr *,
                                        basic_block, int));
static rtx computing_insn PARAMS ((struct expr *, rtx));
static int def_reaches_here_p PARAMS ((rtx, rtx));
static int can_disregard_other_sets PARAMS ((struct reg_set **, rtx, int));
static int handle_avail_expr PARAMS ((rtx, struct expr *));
static int classic_gcse PARAMS ((void));
static int one_classic_gcse_pass PARAMS ((int));
static void invalidate_nonnull_info PARAMS ((rtx, rtx, void *));
static int delete_null_pointer_checks_1 PARAMS ((unsigned int *,
                                                 sbitmap *, sbitmap *,
                                                 struct null_pointer_info *));
static rtx process_insert_insn PARAMS ((struct expr *));
static int pre_edge_insert PARAMS ((struct edge_list *, struct expr **));
static int expr_reaches_here_p_work PARAMS ((struct occr *, struct expr *,
                                             basic_block, int, char *));
static int pre_expr_reaches_here_p_work PARAMS ((basic_block, struct expr *,
                                                 basic_block, char *));
static struct ls_expr *ldst_entry PARAMS ((rtx));
static void free_ldst_entry PARAMS ((struct ls_expr *));
static void free_ldst_mems PARAMS ((void));
static void print_ldst_list PARAMS ((FILE *));
static struct ls_expr *find_rtx_in_ldst PARAMS ((rtx));
static int enumerate_ldsts PARAMS ((void));
static inline struct ls_expr *first_ls_expr PARAMS ((void));
static inline struct ls_expr *next_ls_expr PARAMS ((struct ls_expr *));
static int simple_mem PARAMS ((rtx));
static void invalidate_any_buried_refs PARAMS ((rtx));
static void compute_ld_motion_mems PARAMS ((void));
static void trim_ld_motion_mems PARAMS ((void));
static void update_ld_motion_stores PARAMS ((struct expr *));
static void reg_set_info PARAMS ((rtx, rtx, void *));
static int store_ops_ok PARAMS ((rtx, basic_block));
static void find_moveable_store PARAMS ((rtx));
static int compute_store_table PARAMS ((void));
static int load_kills_store PARAMS ((rtx, rtx));
static int find_loads PARAMS ((rtx, rtx));
static int store_killed_in_insn PARAMS ((rtx, rtx));
static int store_killed_after PARAMS ((rtx, rtx, basic_block));
static int store_killed_before PARAMS ((rtx, rtx, basic_block));
static void build_store_vectors PARAMS ((void));
static void insert_insn_start_bb PARAMS ((rtx, basic_block));
static int insert_store PARAMS ((struct ls_expr *, edge));
static void replace_store_insn PARAMS ((rtx, rtx, basic_block));
static void delete_store PARAMS ((struct ls_expr *, basic_block));
static void free_store_memory PARAMS ((void));
static void store_motion PARAMS ((void));
static void free_insn_expr_list_list PARAMS ((rtx *));
static void clear_modify_mem_tables PARAMS ((void));
static void free_modify_mem_tables PARAMS ((void));
static rtx gcse_emit_move_after PARAMS ((rtx, rtx, rtx));
static void local_cprop_find_used_regs PARAMS ((rtx *, void *));
static bool do_local_cprop PARAMS ((rtx, rtx, int, rtx *));
static bool adjust_libcall_notes PARAMS ((rtx, rtx, rtx, rtx *));
static void local_cprop_pass PARAMS ((int));

/* Entry point for global common subexpression elimination.
   F is the first instruction in the function.  */

int
gcse_main (f, file)
     rtx f;
     FILE *file;
{
  int changed, pass;
  /* Bytes used at start of pass.  */
  int initial_bytes_used;
  /* Maximum number of bytes used by a pass.  */
  int max_pass_bytes;
  /* Point to release obstack data from for each pass.  */
  char *gcse_obstack_bottom;

  /* We do not construct an accurate cfg in functions which call
     setjmp, so just punt to be safe.  */
  if (current_function_calls_setjmp)
    return 0;

  /* Assume that we do not need to run jump optimizations after gcse.  */
  run_jump_opt_after_gcse = 0;

  /* For calling dump_foo fns from gdb.  */
  debug_stderr = stderr;
  gcse_file = file;

  /* Identify the basic block information for this function, including
     successors and predecessors.  */
  max_gcse_regno = max_reg_num ();

  if (file)
    dump_flow_info (file);

  /* Return if there's nothing to do.  */
  if (n_basic_blocks <= 1)
    return 0;

  /* Trying to perform global optimizations on flow graphs which have
     a high connectivity will take a long time and is unlikely to be
     particularly useful.

     In normal circumstances a cfg should have about twice as many edges
     as blocks.  But we do not want to punish small functions which have
     a couple switch statements.  So we require a relatively large number
     of basic blocks and the ratio of edges to blocks to be high.  */
  if (n_basic_blocks > 1000 && n_edges / n_basic_blocks >= 20)
    {
      if (warn_disabled_optimization)
        warning ("GCSE disabled: %d > 1000 basic blocks and %d >= 20 edges/basic block",
                 n_basic_blocks, n_edges / n_basic_blocks);
      return 0;
    }

  /* If allocating memory for the cprop bitmap would take up too much
     storage it's better just to disable the optimization.  */
  if ((n_basic_blocks
       * SBITMAP_SET_SIZE (max_gcse_regno)
       * sizeof (SBITMAP_ELT_TYPE)) > MAX_GCSE_MEMORY)
    {
      if (warn_disabled_optimization)
        warning ("GCSE disabled: %d basic blocks and %d registers",
                 n_basic_blocks, max_gcse_regno);

      return 0;
    }

  /* See what modes support reg/reg copy operations.  */
  if (! can_copy_init_p)
    {
      compute_can_copy ();
      can_copy_init_p = 1;
    }

  gcc_obstack_init (&gcse_obstack);
  bytes_used = 0;

  /* We need alias.  */
  init_alias_analysis ();
  /* Record where pseudo-registers are set.  This data is kept accurate
     during each pass.  ??? We could also record hard-reg information here
     [since it's unchanging], however it is currently done during hash table
     computation.

     It may be tempting to compute MEM set information here too, but MEM sets
     will be subject to code motion one day and thus we need to compute
     information about memory sets when we build the hash tables.  */

  alloc_reg_set_mem (max_gcse_regno);
  compute_sets (f);

  pass = 0;
  initial_bytes_used = bytes_used;
  max_pass_bytes = 0;
  gcse_obstack_bottom = gcse_alloc (1);
  changed = 1;
  while (changed && pass < MAX_GCSE_PASSES)
    {
      changed = 0;
      if (file)
        fprintf (file, "GCSE pass %d\n\n", pass + 1);

      /* Initialize bytes_used to the space for the pred/succ lists,
         and the reg_set_table data.  */
      bytes_used = initial_bytes_used;

      /* Each pass may create new registers, so recalculate each time.  */
      max_gcse_regno = max_reg_num ();

      alloc_gcse_mem (f);

      /* Don't allow constant propagation to modify jumps
         during this pass.  */
      changed = one_cprop_pass (pass + 1, 0, 0);

      if (optimize_size)
        changed |= one_classic_gcse_pass (pass + 1);
      else
        {
          changed |= one_pre_gcse_pass (pass + 1);
          /* We may have just created new basic blocks.  Release and
             recompute various things which are sized on the number of
             basic blocks.  */
          if (changed)
            {
              free_modify_mem_tables ();
              modify_mem_list
                = (rtx *) gmalloc (last_basic_block * sizeof (rtx));
              canon_modify_mem_list
                = (rtx *) gmalloc (last_basic_block * sizeof (rtx));
              memset ((char *) modify_mem_list, 0, last_basic_block * sizeof (rtx));
              memset ((char *) canon_modify_mem_list, 0, last_basic_block * sizeof (rtx));
            }
          free_reg_set_mem ();
          alloc_reg_set_mem (max_reg_num ());
          compute_sets (f);
          run_jump_opt_after_gcse = 1;
        }

      if (max_pass_bytes < bytes_used)
        max_pass_bytes = bytes_used;

      /* Free up memory, then reallocate for code hoisting.  We can
         not re-use the existing allocated memory because the tables
         will not have info for the insns or registers created by
         partial redundancy elimination.  */
      free_gcse_mem ();

      /* It does not make sense to run code hoisting unless we are optimizing
         for code size -- it rarely makes programs faster, and can make
         them bigger if we did partial redundancy elimination (when optimizing
         for space, we use a classic gcse algorithm instead of partial
         redundancy algorithms).  */
      if (optimize_size)
        {
          max_gcse_regno = max_reg_num ();
          alloc_gcse_mem (f);
          changed |= one_code_hoisting_pass ();
          free_gcse_mem ();

          if (max_pass_bytes < bytes_used)
            max_pass_bytes = bytes_used;
        }

      if (file)
        {
          fprintf (file, "\n");
          fflush (file);
        }

      obstack_free (&gcse_obstack, gcse_obstack_bottom);
      pass++;
    }

  /* Do one last pass of copy propagation, including cprop into
     conditional jumps.  */

  max_gcse_regno = max_reg_num ();
  alloc_gcse_mem (f);
  /* This time, go ahead and allow cprop to alter jumps.  */
  one_cprop_pass (pass + 1, 1, 0);
  free_gcse_mem ();

  if (file)
    {
      fprintf (file, "GCSE of %s: %d basic blocks, ",
               current_function_name, n_basic_blocks);
      fprintf (file, "%d pass%s, %d bytes\n\n",
               pass, pass > 1 ? "es" : "", max_pass_bytes);
    }

  obstack_free (&gcse_obstack, NULL);
  free_reg_set_mem ();
  /* We are finished with alias.  */
  end_alias_analysis ();
  allocate_reg_info (max_reg_num (), FALSE, FALSE);

  /* Store motion disabled until it is fixed.  */
  if (0 && !optimize_size && flag_gcse_sm)
    store_motion ();

  return run_jump_opt_after_gcse;
}

/* Misc. utilities.  */

/* Compute which modes support reg/reg copy operations.  */

static void
compute_can_copy ()
{
  int i;
#ifndef AVOID_CCMODE_COPIES
  rtx reg, insn;
#endif
  memset (can_copy_p, 0, NUM_MACHINE_MODES);

  start_sequence ();
  for (i = 0; i < NUM_MACHINE_MODES; i++)
    if (GET_MODE_CLASS (i) == MODE_CC)
      {
#ifdef AVOID_CCMODE_COPIES
        can_copy_p[i] = 0;
#else
        reg = gen_rtx_REG ((enum machine_mode) i, LAST_VIRTUAL_REGISTER + 1);
        insn = emit_insn (gen_rtx_SET (VOIDmode, reg, reg));
        if (recog (PATTERN (insn), insn, NULL) >= 0)
          can_copy_p[i] = 1;
#endif
      }
    else
      can_copy_p[i] = 1;

  end_sequence ();
}

/* Cover function to xmalloc to record bytes allocated.  */

static char *
gmalloc (size)
     unsigned int size;
{
  bytes_used += size;
  return xmalloc (size);
}

/* Cover function to xrealloc.
   We don't record the additional size since we don't know it.
   It won't affect memory usage stats much anyway.  */

static char *
grealloc (ptr, size)
     char *ptr;
     unsigned int size;
{
  return xrealloc (ptr, size);
}

/* Cover function to obstack_alloc.  */

static char *
gcse_alloc (size)
     unsigned long size;
{
  bytes_used += size;
  return (char *) obstack_alloc (&gcse_obstack, size);
}

/* Allocate memory for the cuid mapping array,
   and reg/memory set tracking tables.

   This is called at the start of each pass.  */

static void
alloc_gcse_mem (f)
     rtx f;
{
  int i, n;
  rtx insn;

  /* Find the largest UID and create a mapping from UIDs to CUIDs.
     CUIDs are like UIDs except they increase monotonically, have no gaps,
     and only apply to real insns.  */

  max_uid = get_max_uid ();
  n = (max_uid + 1) * sizeof (int);
  uid_cuid = (int *) gmalloc (n);
  memset ((char *) uid_cuid, 0, n);
  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    {
      if (INSN_P (insn))
        uid_cuid[INSN_UID (insn)] = i++;
      else
        uid_cuid[INSN_UID (insn)] = i;
    }

  /* Create a table mapping cuids to insns.  */

  max_cuid = i;
  n = (max_cuid + 1) * sizeof (rtx);
  cuid_insn = (rtx *) gmalloc (n);
  memset ((char *) cuid_insn, 0, n);
  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      CUID_INSN (i++) = insn;

  /* Allocate vars to track sets of regs.  */
  reg_set_bitmap = BITMAP_XMALLOC ();

  /* Allocate vars to track sets of regs, memory per block.  */
  reg_set_in_block = (sbitmap *) sbitmap_vector_alloc (last_basic_block,
                                                       max_gcse_regno);
  /* Allocate array to keep a list of insns which modify memory in each
     basic block.  */
  modify_mem_list = (rtx *) gmalloc (last_basic_block * sizeof (rtx));
  canon_modify_mem_list = (rtx *) gmalloc (last_basic_block * sizeof (rtx));
  memset ((char *) modify_mem_list, 0, last_basic_block * sizeof (rtx));
  memset ((char *) canon_modify_mem_list, 0, last_basic_block * sizeof (rtx));
  modify_mem_list_set = BITMAP_XMALLOC ();
  canon_modify_mem_list_set = BITMAP_XMALLOC ();
}

/* Free memory allocated by alloc_gcse_mem.  */

static void
free_gcse_mem ()
{
  free (uid_cuid);
  free (cuid_insn);

  BITMAP_XFREE (reg_set_bitmap);

  sbitmap_vector_free (reg_set_in_block);
  free_modify_mem_tables ();
  BITMAP_XFREE (modify_mem_list_set);
  BITMAP_XFREE (canon_modify_mem_list_set);
}

/* Many of the global optimization algorithms work by solving dataflow
   equations for various expressions.  Initially, some local value is
   computed for each expression in each block.  Then, the values across the
   various blocks are combined (by following flow graph edges) to arrive at
   global values.  Conceptually, each set of equations is independent.  We
   may therefore solve all the equations in parallel, solve them one at a
   time, or pick any intermediate approach.

   When you're going to need N two-dimensional bitmaps, each X (say, the
   number of blocks) by Y (say, the number of expressions), call this
   function.  It's not important what X and Y represent; only that Y
   correspond to the things that can be done in parallel.  This function will
   return an appropriate chunking factor C; you should solve C sets of
   equations in parallel.  By going through this function, we can easily
   trade space against time; by solving fewer equations in parallel we use
   less space.  */

static int
get_bitmap_width (n, x, y)
     int n;
     int x;
     int y;
{
  /* It's not really worth figuring out *exactly* how much memory will
     be used by a particular choice.  The important thing is to get
     something approximately right.  */
  size_t max_bitmap_memory = 10 * 1024 * 1024;

  /* The number of bytes we'd use for a single column of minimum
     width.  */
  size_t column_size = n * x * sizeof (SBITMAP_ELT_TYPE);

  /* Often, it's reasonable just to solve all the equations in
     parallel.  */
  if (column_size * SBITMAP_SET_SIZE (y) <= max_bitmap_memory)
    return y;

  /* Otherwise, pick the largest width we can, without going over the
     limit.  */
  return SBITMAP_ELT_BITS * ((max_bitmap_memory + column_size - 1)
                             / column_size);
}
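
/* A made-up worked example, following the code's rough arithmetic and
   assuming an 8-byte, 64-bit SBITMAP_ELT_TYPE (so SBITMAP_SET_SIZE (y)
   is about y/64): with n = 2 bitmaps, x = 500 blocks, and y = 20000
   expressions, column_size is 2 * 500 * 8 = 8000 and the estimate is
   8000 * 313, about 2.5MB, under the 10MB cap, so all 20000 equations
   are solved in parallel.  With y = 200000 the estimate would be about
   25MB, so the function instead returns 64 * ((10485760 + 7999) / 8000)
   = 83904, and callers solve that many equations per chunk.  */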

/* Compute the local properties of each recorded expression.

   Local properties are those that are defined by the block, irrespective of
   other blocks.

   An expression is transparent in a block if its operands are not modified
   in the block.

   An expression is computed (locally available) in a block if it is computed
   at least once and the expression would contain the same value if the
   computation was moved to the end of the block.

   An expression is locally anticipatable in a block if it is computed at
   least once and the expression would contain the same value if the
   computation was moved to the beginning of the block.

   We call this routine for cprop, pre and code hoisting.  They all compute
   basically the same information and thus can easily share this code.

   TRANSP, COMP, and ANTLOC are destination sbitmaps for recording local
   properties.  If NULL, then it is not necessary to compute or record that
   particular property.

   TABLE controls which hash table to look at.  If it is the set hash table,
   TRANSP is additionally computed as ~TRANSP, since this is really cprop's
   ABSALTERED.  */

static void
compute_local_properties (transp, comp, antloc, table)
     sbitmap *transp;
     sbitmap *comp;
     sbitmap *antloc;
     struct hash_table *table;
{
  unsigned int i;

  /* Initialize any bitmaps that were passed in.  */
  if (transp)
    {
      if (table->set_p)
        sbitmap_vector_zero (transp, last_basic_block);
      else
        sbitmap_vector_ones (transp, last_basic_block);
    }

  if (comp)
    sbitmap_vector_zero (comp, last_basic_block);
  if (antloc)
    sbitmap_vector_zero (antloc, last_basic_block);

  for (i = 0; i < table->size; i++)
    {
      struct expr *expr;

      for (expr = table->table[i]; expr != NULL; expr = expr->next_same_hash)
        {
          int indx = expr->bitmap_index;
          struct occr *occr;

          /* The expression is transparent in this block if it is not killed.
             We start by assuming all are transparent [none are killed], and
             then reset the bits for those that are.  */
          if (transp)
            compute_transp (expr->expr, indx, transp, table->set_p);

          /* The occurrences recorded in antic_occr are exactly those that
             we want to set to nonzero in ANTLOC.  */
          if (antloc)
            for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
              {
                SET_BIT (antloc[BLOCK_NUM (occr->insn)], indx);

                /* While we're scanning the table, this is a good place to
                   initialize this.  */
                occr->deleted_p = 0;
              }

          /* The occurrences recorded in avail_occr are exactly those that
             we want to set to nonzero in COMP.  */
          if (comp)
            for (occr = expr->avail_occr; occr != NULL; occr = occr->next)
              {
                SET_BIT (comp[BLOCK_NUM (occr->insn)], indx);

                /* While we're scanning the table, this is a good place to
                   initialize this.  */
                occr->copied_p = 0;
              }

          /* While we're scanning the table, this is a good place to
             initialize this.  */
          expr->reaching_reg = 0;
        }
    }
}
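
/* A small made-up example of the three properties: if a block's only
   occurrence of "r1 + r2" is "r5 = r1 + r2" and neither r1 nor r2 is
   set anywhere in the block, the expression's bit is set in that
   block's TRANSP (nothing kills it), in ANTLOC (first occurrence,
   operands unmodified before it), and in COMP (last occurrence,
   operands unmodified after it).  A later "r1 = ..." in the same block
   would clear TRANSP and COMP but leave ANTLOC set.  */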

/* Register set information.

   `reg_set_table' records where each register is set or otherwise
   modified.  */

static struct obstack reg_set_obstack;

static void
alloc_reg_set_mem (n_regs)
     int n_regs;
{
  unsigned int n;

  reg_set_table_size = n_regs + REG_SET_TABLE_SLOP;
  n = reg_set_table_size * sizeof (struct reg_set *);
  reg_set_table = (struct reg_set **) gmalloc (n);
  memset ((char *) reg_set_table, 0, n);

  gcc_obstack_init (&reg_set_obstack);
}

static void
free_reg_set_mem ()
{
  free (reg_set_table);
  obstack_free (&reg_set_obstack, NULL);
}

/* Record REGNO in the reg_set table.  */

static void
record_one_set (regno, insn)
     int regno;
     rtx insn;
{
  /* Allocate a new reg_set element and link it onto the list.  */
  struct reg_set *new_reg_info;

  /* If the table isn't big enough, enlarge it.  */
  if (regno >= reg_set_table_size)
    {
      int new_size = regno + REG_SET_TABLE_SLOP;

      reg_set_table
        = (struct reg_set **) grealloc ((char *) reg_set_table,
                                        new_size * sizeof (struct reg_set *));
      memset ((char *) (reg_set_table + reg_set_table_size), 0,
              (new_size - reg_set_table_size) * sizeof (struct reg_set *));
      reg_set_table_size = new_size;
    }

  new_reg_info = (struct reg_set *) obstack_alloc (&reg_set_obstack,
                                                   sizeof (struct reg_set));
  bytes_used += sizeof (struct reg_set);
  new_reg_info->insn = insn;
  new_reg_info->next = reg_set_table[regno];
  reg_set_table[regno] = new_reg_info;
}

/* Called from compute_sets via note_stores to handle one SET or CLOBBER in
   an insn.  The DATA is really the instruction in which the SET is
   occurring.  */

static void
record_set_info (dest, setter, data)
     rtx dest, setter ATTRIBUTE_UNUSED;
     void *data;
{
  rtx record_set_insn = (rtx) data;

  if (GET_CODE (dest) == REG && REGNO (dest) >= FIRST_PSEUDO_REGISTER)
    record_one_set (REGNO (dest), record_set_insn);
}

/* Scan the function and record each set of each pseudo-register.

   This is called once, at the start of the gcse pass.  See the comments for
   `reg_set_table' for further documentation.  */

static void
compute_sets (f)
     rtx f;
{
  rtx insn;

  for (insn = f; insn != 0; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      note_stores (PATTERN (insn), record_set_info, insn);
}

/* Hash table support.  */

struct reg_avail_info
{
  basic_block last_bb;
  int first_set;
  int last_set;
};

static struct reg_avail_info *reg_avail_info;
static basic_block current_bb;

/* See whether X, the source of a set, is something we want to consider for
   GCSE.  */

static GTY(()) rtx test_insn;
static int
want_to_gcse_p (x)
     rtx x;
{
  int num_clobbers = 0;
  int icode;

  switch (GET_CODE (x))
    {
    case REG:
    case SUBREG:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CALL:
    case CONSTANT_P_RTX:
      return 0;

    default:
      break;
    }

  /* If this is a valid operand, we are OK.  If it's VOIDmode, we aren't.  */
  if (general_operand (x, GET_MODE (x)))
    return 1;
  else if (GET_MODE (x) == VOIDmode)
    return 0;

  /* Otherwise, check if we can make a valid insn from it.  First initialize
     our test insn if we haven't already.  */
  if (test_insn == 0)
    {
      test_insn
        = make_insn_raw (gen_rtx_SET (VOIDmode,
                                      gen_rtx_REG (word_mode,
                                                   FIRST_PSEUDO_REGISTER * 2),
                                      const0_rtx));
      NEXT_INSN (test_insn) = PREV_INSN (test_insn) = 0;
    }

  /* Now make an insn like the one we would make when GCSE'ing and see if
     valid.  */
  PUT_MODE (SET_DEST (PATTERN (test_insn)), GET_MODE (x));
  SET_SRC (PATTERN (test_insn)) = x;
  return ((icode = recog (PATTERN (test_insn), test_insn, &num_clobbers)) >= 0
          && (num_clobbers == 0 || ! added_clobbers_hard_reg_p (icode)));
}
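
/* A hedged illustration of the filter above: a source like
   (plus:SI (reg:SI 101) (reg:SI 102)) is accepted if the target can
   recognize a (set (reg) (plus ...)) insn, checked with the scratch
   test_insn, while a bare REG, SUBREG or constant is rejected up
   front, since copying a register or constant is the business of
   copy/constant propagation rather than expression GCSE.  */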

/* Return nonzero if the operands of expression X are unchanged from the
   start of INSN's basic block up to but not including INSN (if AVAIL_P == 0),
   or from INSN to the end of INSN's basic block (if AVAIL_P != 0).  */

static int
oprs_unchanged_p (x, insn, avail_p)
     rtx x, insn;
     int avail_p;
{
  int i, j;
  enum rtx_code code;
  const char *fmt;

  if (x == 0)
    return 1;

  code = GET_CODE (x);
  switch (code)
    {
    case REG:
      {
        struct reg_avail_info *info = &reg_avail_info[REGNO (x)];

        if (info->last_bb != current_bb)
          return 1;
        if (avail_p)
          return info->last_set < INSN_CUID (insn);
        else
          return info->first_set >= INSN_CUID (insn);
      }

    case MEM:
      if (load_killed_in_block_p (current_bb, INSN_CUID (insn),
                                  x, avail_p))
        return 0;
      else
        return oprs_unchanged_p (XEXP (x, 0), insn, avail_p);

    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
    case PRE_MODIFY:
    case POST_MODIFY:
      return 0;

    case PC:
    case CC0: /*FIXME*/
    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return 1;

    default:
      break;
    }

  for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          /* If we are about to do the last recursive call needed at this
             level, change it into iteration.  This function is called enough
             to be worth it.  */
          if (i == 0)
            return oprs_unchanged_p (XEXP (x, i), insn, avail_p);

          else if (! oprs_unchanged_p (XEXP (x, i), insn, avail_p))
            return 0;
        }
      else if (fmt[i] == 'E')
        for (j = 0; j < XVECLEN (x, i); j++)
          if (! oprs_unchanged_p (XVECEXP (x, i, j), insn, avail_p))
            return 0;
    }

  return 1;
}

/* Used for communication between mems_conflict_for_gcse_p and
   load_killed_in_block_p.  Nonzero if mems_conflict_for_gcse_p finds a
   conflict between two memory references.  */
static int gcse_mems_conflict_p;

/* Used for communication between mems_conflict_for_gcse_p and
   load_killed_in_block_p.  The memory reference for a load instruction;
   mems_conflict_for_gcse_p checks whether a memory store conflicts with
   this memory load.  */
static rtx gcse_mem_operand;

/* DEST is the output of an instruction.  If it is a memory reference, and
   possibly conflicts with the load found in gcse_mem_operand, then set
   gcse_mems_conflict_p to a nonzero value.  */

static void
mems_conflict_for_gcse_p (dest, setter, data)
     rtx dest, setter ATTRIBUTE_UNUSED;
     void *data ATTRIBUTE_UNUSED;
{
  while (GET_CODE (dest) == SUBREG
         || GET_CODE (dest) == ZERO_EXTRACT
         || GET_CODE (dest) == SIGN_EXTRACT
         || GET_CODE (dest) == STRICT_LOW_PART)
    dest = XEXP (dest, 0);

  /* If DEST is not a MEM, then it will not conflict with the load.  Note
     that function calls are assumed to clobber memory, but are handled
     elsewhere.  */
  if (GET_CODE (dest) != MEM)
    return;

  /* If we are setting a MEM in our list of specially recognized MEMs,
     don't mark as killed this time.  */

  if (dest == gcse_mem_operand && pre_ldst_mems != NULL)
    {
      if (!find_rtx_in_ldst (dest))
        gcse_mems_conflict_p = 1;
      return;
    }

  if (true_dependence (dest, GET_MODE (dest), gcse_mem_operand,
                       rtx_addr_varies_p))
    gcse_mems_conflict_p = 1;
}

/* Return nonzero if the expression in X (a memory reference) is killed
   in block BB before or after the insn with the CUID in UID_LIMIT.
   AVAIL_P is nonzero for kills after UID_LIMIT, and zero for kills
   before UID_LIMIT.

   To check the entire block, set UID_LIMIT to max_uid + 1 and
   AVAIL_P to 0.  */

static int
load_killed_in_block_p (bb, uid_limit, x, avail_p)
     basic_block bb;
     int uid_limit;
     rtx x;
     int avail_p;
{
  rtx list_entry = modify_mem_list[bb->index];
  while (list_entry)
    {
      rtx setter;
      /* Ignore entries in the list that do not apply.  */
      if ((avail_p
           && INSN_CUID (XEXP (list_entry, 0)) < uid_limit)
          || (! avail_p
              && INSN_CUID (XEXP (list_entry, 0)) > uid_limit))
        {
          list_entry = XEXP (list_entry, 1);
          continue;
        }

      setter = XEXP (list_entry, 0);

      /* If SETTER is a call everything is clobbered.  Note that calls
         to pure functions are never put on the list, so we need not
         worry about them.  */
      if (GET_CODE (setter) == CALL_INSN)
        return 1;

      /* SETTER must be an INSN of some kind that sets memory.  Call
         note_stores to examine each hunk of memory that is modified.

         The note_stores interface is pretty limited, so we have to
         communicate via global variables.  Yuk.  */
      gcse_mem_operand = x;
      gcse_mems_conflict_p = 0;
      note_stores (PATTERN (setter), mems_conflict_for_gcse_p, NULL);
      if (gcse_mems_conflict_p)
        return 1;
      list_entry = XEXP (list_entry, 1);
    }
  return 0;
}

/* Return nonzero if the operands of expression X are unchanged from
   the start of INSN's basic block up to but not including INSN.  */

static int
oprs_anticipatable_p (x, insn)
     rtx x, insn;
{
  return oprs_unchanged_p (x, insn, 0);
}

/* Return nonzero if the operands of expression X are unchanged from
   INSN to the end of INSN's basic block.  */

static int
oprs_available_p (x, insn)
     rtx x, insn;
{
  return oprs_unchanged_p (x, insn, 1);
}

/* Hash expression X.

   MODE is only used if X is a CONST_INT.  DO_NOT_RECORD_P is an output
   argument, set nonzero if a volatile operand is found or if the
   expression contains something we don't want to insert in the table.

   ??? One might want to merge this with canon_hash.  Later.  */

static unsigned int
hash_expr (x, mode, do_not_record_p, hash_table_size)
     rtx x;
     enum machine_mode mode;
     int *do_not_record_p;
     int hash_table_size;
{
  unsigned int hash;

  *do_not_record_p = 0;

  hash = hash_expr_1 (x, mode, do_not_record_p);
  return hash % hash_table_size;
}

/* Hash a string.  Just add its bytes up.  */

static inline unsigned
hash_string_1 (ps)
     const char *ps;
{
  unsigned hash = 0;
  const unsigned char *p = (const unsigned char *) ps;

  if (p)
    while (*p)
      hash += *p++;

  return hash;
}

1589 /* Subroutine of hash_expr to do the actual work. */
1591 static unsigned int
1592 hash_expr_1 (x, mode, do_not_record_p)
1593 rtx x;
1594 enum machine_mode mode;
1595 int *do_not_record_p;
1597 int i, j;
1598 unsigned hash = 0;
1599 enum rtx_code code;
1600 const char *fmt;
1602 /* Used to turn recursion into iteration. We can't rely on GCC's
1603 tail-recursion elimination since we need to keep accumulating values
1604 in HASH. */
1606 if (x == 0)
1607 return hash;
1609 repeat:
1610 code = GET_CODE (x);
1611 switch (code)
1613 case REG:
1614 hash += ((unsigned int) REG << 7) + REGNO (x);
1615 return hash;
1617 case CONST_INT:
1618 hash += (((unsigned int) CONST_INT << 7) + (unsigned int) mode
1619 + (unsigned int) INTVAL (x));
1620 return hash;
1622 case CONST_DOUBLE:
1623 /* This is like the general case, except that it only counts
1624 the integers representing the constant. */
1625 hash += (unsigned int) code + (unsigned int) GET_MODE (x);
1626 if (GET_MODE (x) != VOIDmode)
1627 for (i = 2; i < GET_RTX_LENGTH (CONST_DOUBLE); i++)
1628 hash += (unsigned int) XWINT (x, i);
1629 else
1630 hash += ((unsigned int) CONST_DOUBLE_LOW (x)
1631 + (unsigned int) CONST_DOUBLE_HIGH (x));
1632 return hash;
1634 case CONST_VECTOR:
1636 int units;
1637 rtx elt;
1639 units = CONST_VECTOR_NUNITS (x);
1641 for (i = 0; i < units; ++i)
1643 elt = CONST_VECTOR_ELT (x, i);
1644 hash += hash_expr_1 (elt, GET_MODE (elt), do_not_record_p);
1647 return hash;
1650 /* Assume there is only one rtx object for any given label. */
1651 case LABEL_REF:
1652 /* We don't hash on the address of the CODE_LABEL to avoid bootstrap
1653 differences and differences between each stage's debugging dumps. */
1654 hash += (((unsigned int) LABEL_REF << 7)
1655 + CODE_LABEL_NUMBER (XEXP (x, 0)));
1656 return hash;
1658 case SYMBOL_REF:
1660 /* Don't hash on the symbol's address to avoid bootstrap differences.
1661 Different hash values may cause expressions to be recorded in
1662 different orders and thus different registers to be used in the
1663 final assembler. This also avoids differences in the dump files
1664 between various stages. */
1665 unsigned int h = 0;
1666 const unsigned char *p = (const unsigned char *) XSTR (x, 0);
1668 while (*p)
1669 h += (h << 7) + *p++; /* ??? revisit */
1671 hash += ((unsigned int) SYMBOL_REF << 7) + h;
1672 return hash;
1675 case MEM:
1676 if (MEM_VOLATILE_P (x))
1678 *do_not_record_p = 1;
1679 return 0;
1682 hash += (unsigned int) MEM;
1683 /* We used to hash the alias set as well, but that is not good, since
1684 the alias set may differ between -fprofile-arcs and -fbranch-probabilities
1685 compilations, causing the profiles to fail to match. */
1686 x = XEXP (x, 0);
1687 goto repeat;
1689 case PRE_DEC:
1690 case PRE_INC:
1691 case POST_DEC:
1692 case POST_INC:
1693 case PC:
1694 case CC0:
1695 case CALL:
1696 case UNSPEC_VOLATILE:
1697 *do_not_record_p = 1;
1698 return 0;
1700 case ASM_OPERANDS:
1701 if (MEM_VOLATILE_P (x))
1703 *do_not_record_p = 1;
1704 return 0;
1706 else
1708 /* We don't want to take the filename and line into account. */
1709 hash += (unsigned) code + (unsigned) GET_MODE (x)
1710 + hash_string_1 (ASM_OPERANDS_TEMPLATE (x))
1711 + hash_string_1 (ASM_OPERANDS_OUTPUT_CONSTRAINT (x))
1712 + (unsigned) ASM_OPERANDS_OUTPUT_IDX (x);
1714 if (ASM_OPERANDS_INPUT_LENGTH (x))
1716 for (i = 1; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
1718 hash += (hash_expr_1 (ASM_OPERANDS_INPUT (x, i),
1719 GET_MODE (ASM_OPERANDS_INPUT (x, i)),
1720 do_not_record_p)
1721 + hash_string_1 (ASM_OPERANDS_INPUT_CONSTRAINT
1722 (x, i)));
1725 hash += hash_string_1 (ASM_OPERANDS_INPUT_CONSTRAINT (x, 0));
1726 x = ASM_OPERANDS_INPUT (x, 0);
1727 mode = GET_MODE (x);
1728 goto repeat;
1730 return hash;
1733 default:
1734 break;
1737 hash += (unsigned) code + (unsigned) GET_MODE (x);
1738 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
1740 if (fmt[i] == 'e')
1742 /* If we are about to do the last recursive call
1743 needed at this level, change it into iteration.
1744 This function is called enough to be worth it. */
1745 if (i == 0)
1747 x = XEXP (x, i);
1748 goto repeat;
1751 hash += hash_expr_1 (XEXP (x, i), 0, do_not_record_p);
1752 if (*do_not_record_p)
1753 return 0;
1756 else if (fmt[i] == 'E')
1757 for (j = 0; j < XVECLEN (x, i); j++)
1759 hash += hash_expr_1 (XVECEXP (x, i, j), 0, do_not_record_p);
1760 if (*do_not_record_p)
1761 return 0;
1764 else if (fmt[i] == 's')
1765 hash += hash_string_1 (XSTR (x, i));
1766 else if (fmt[i] == 'i')
1767 hash += (unsigned int) XINT (x, i);
1768 else
1769 abort ();
1772 return hash;
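/* The `repeat' label above is a hand-rolled tail call: every operand but
   the last recurses normally, while the last overwrites X and jumps back,
   so stack depth grows only with non-tail operands.  A standalone sketch
   of the same pattern on a hypothetical binary tree type:  */
#if 0
struct toy_node { unsigned val; struct toy_node *left, *right; };

static unsigned
toy_tree_hash (const struct toy_node *n)
{
  unsigned hash = 0;

 repeat:
  if (n == 0)
    return hash;
  hash += n->val;
  hash += toy_tree_hash (n->left);	/* Non-tail operand: recurse.  */
  n = n->right;				/* Last operand: iterate.  */
  goto repeat;
}
#endif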
1775 /* Hash a set of register REGNO.
1777 Sets are hashed on the register that is set. This simplifies the PRE copy
1778 propagation code.
1780 ??? May need to make things more elaborate. Later, as necessary. */
1782 static unsigned int
1783 hash_set (regno, hash_table_size)
1784 int regno;
1785 int hash_table_size;
1787 unsigned int hash;
1789 hash = regno;
1790 return hash % hash_table_size;
1793 /* Return nonzero if exp1 is equivalent to exp2.
1794 ??? Borrowed from cse.c. Might want to remerge with cse.c. Later. */
1796 static int
1797 expr_equiv_p (x, y)
1798 rtx x, y;
1800 int i, j;
1801 enum rtx_code code;
1802 const char *fmt;
1804 if (x == y)
1805 return 1;
1807 if (x == 0 || y == 0)
1808 return x == y;
1810 code = GET_CODE (x);
1811 if (code != GET_CODE (y))
1812 return 0;
1814 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
1815 if (GET_MODE (x) != GET_MODE (y))
1816 return 0;
1818 switch (code)
1820 case PC:
1821 case CC0:
1822 return x == y;
1824 case CONST_INT:
1825 return INTVAL (x) == INTVAL (y);
1827 case LABEL_REF:
1828 return XEXP (x, 0) == XEXP (y, 0);
1830 case SYMBOL_REF:
1831 return XSTR (x, 0) == XSTR (y, 0);
1833 case REG:
1834 return REGNO (x) == REGNO (y);
1836 case MEM:
1837 /* Can't merge two expressions in different alias sets, since we can
1838 decide that the expression is transparent in a block when it isn't,
1839 because it was set via the other alias set. */
1840 if (MEM_ALIAS_SET (x) != MEM_ALIAS_SET (y))
1841 return 0;
1842 break;
1844 /* For commutative operations, check both orders. */
1845 case PLUS:
1846 case MULT:
1847 case AND:
1848 case IOR:
1849 case XOR:
1850 case NE:
1851 case EQ:
1852 return ((expr_equiv_p (XEXP (x, 0), XEXP (y, 0))
1853 && expr_equiv_p (XEXP (x, 1), XEXP (y, 1)))
1854 || (expr_equiv_p (XEXP (x, 0), XEXP (y, 1))
1855 && expr_equiv_p (XEXP (x, 1), XEXP (y, 0))));
1857 case ASM_OPERANDS:
1858 /* We don't use the generic code below because we want to
1859 disregard filename and line numbers. */
1861 /* A volatile asm isn't equivalent to any other. */
1862 if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
1863 return 0;
1865 if (GET_MODE (x) != GET_MODE (y)
1866 || strcmp (ASM_OPERANDS_TEMPLATE (x), ASM_OPERANDS_TEMPLATE (y))
1867 || strcmp (ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
1868 ASM_OPERANDS_OUTPUT_CONSTRAINT (y))
1869 || ASM_OPERANDS_OUTPUT_IDX (x) != ASM_OPERANDS_OUTPUT_IDX (y)
1870 || ASM_OPERANDS_INPUT_LENGTH (x) != ASM_OPERANDS_INPUT_LENGTH (y))
1871 return 0;
1873 if (ASM_OPERANDS_INPUT_LENGTH (x))
1875 for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
1876 if (! expr_equiv_p (ASM_OPERANDS_INPUT (x, i),
1877 ASM_OPERANDS_INPUT (y, i))
1878 || strcmp (ASM_OPERANDS_INPUT_CONSTRAINT (x, i),
1879 ASM_OPERANDS_INPUT_CONSTRAINT (y, i)))
1880 return 0;
1883 return 1;
1885 default:
1886 break;
1889 /* Compare the elements. If any pair of corresponding elements
1890 fail to match, return 0 for the whole thing. */
1892 fmt = GET_RTX_FORMAT (code);
1893 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1895 switch (fmt[i])
1897 case 'e':
1898 if (! expr_equiv_p (XEXP (x, i), XEXP (y, i)))
1899 return 0;
1900 break;
1902 case 'E':
1903 if (XVECLEN (x, i) != XVECLEN (y, i))
1904 return 0;
1905 for (j = 0; j < XVECLEN (x, i); j++)
1906 if (! expr_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j)))
1907 return 0;
1908 break;
1910 case 's':
1911 if (strcmp (XSTR (x, i), XSTR (y, i)))
1912 return 0;
1913 break;
1915 case 'i':
1916 if (XINT (x, i) != XINT (y, i))
1917 return 0;
1918 break;
1920 case 'w':
1921 if (XWINT (x, i) != XWINT (y, i))
1922 return 0;
1923 break;
1925 case '0':
1926 break;
1928 default:
1929 abort ();
1933 return 1;
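/* A standalone sketch (hypothetical toy type, not used by the pass) of
   the commutative case above: for PLUS-like operators, equivalence must
   accept the operands in either order, so both pairings are tried.  */
#if 0
enum toy_op { TOY_REG, TOY_PLUS };

struct toy_expr
{
  enum toy_op op;
  int regno;			/* Valid when op == TOY_REG.  */
  struct toy_expr *op0, *op1;	/* Valid when op == TOY_PLUS.  */
};

static int
toy_equiv_p (const struct toy_expr *x, const struct toy_expr *y)
{
  if (x == y)
    return 1;
  if (x == 0 || y == 0 || x->op != y->op)
    return 0;
  if (x->op == TOY_REG)
    return x->regno == y->regno;

  /* PLUS is commutative: (a+b) must match (b+a).  */
  return ((toy_equiv_p (x->op0, y->op0) && toy_equiv_p (x->op1, y->op1))
	  || (toy_equiv_p (x->op0, y->op1) && toy_equiv_p (x->op1, y->op0)));
}
#endif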
1936 /* Insert expression X in INSN in the hash TABLE.
1937 If it is already present, record it as the last occurrence in INSN's
1938 basic block.
1940 MODE is the mode of the value X is being stored into.
1941 It is only used if X is a CONST_INT.
1943 ANTIC_P is nonzero if X is an anticipatable expression.
1944 AVAIL_P is nonzero if X is an available expression. */
1946 static void
1947 insert_expr_in_table (x, mode, insn, antic_p, avail_p, table)
1948 rtx x;
1949 enum machine_mode mode;
1950 rtx insn;
1951 int antic_p, avail_p;
1952 struct hash_table *table;
1954 int found, do_not_record_p;
1955 unsigned int hash;
1956 struct expr *cur_expr, *last_expr = NULL;
1957 struct occr *antic_occr, *avail_occr;
1958 struct occr *last_occr = NULL;
1960 hash = hash_expr (x, mode, &do_not_record_p, table->size);
1962 /* Do not insert the expression in the table if it contains volatile
1963 operands, or if hash_expr determines the expression is something we
1964 don't want to or can't handle. */
1965 if (do_not_record_p)
1966 return;
1968 cur_expr = table->table[hash];
1969 found = 0;
1971 while (cur_expr && 0 == (found = expr_equiv_p (cur_expr->expr, x)))
1973 /* If the expression isn't found, save a pointer to the end of
1974 the list. */
1975 last_expr = cur_expr;
1976 cur_expr = cur_expr->next_same_hash;
1979 if (! found)
1981 cur_expr = (struct expr *) gcse_alloc (sizeof (struct expr));
1982 bytes_used += sizeof (struct expr);
1983 if (table->table[hash] == NULL)
1984 /* This is the first pattern that hashed to this index. */
1985 table->table[hash] = cur_expr;
1986 else
1987 /* Add EXPR to end of this hash chain. */
1988 last_expr->next_same_hash = cur_expr;
1990 /* Set the fields of the expr element. */
1991 cur_expr->expr = x;
1992 cur_expr->bitmap_index = table->n_elems++;
1993 cur_expr->next_same_hash = NULL;
1994 cur_expr->antic_occr = NULL;
1995 cur_expr->avail_occr = NULL;
1998 /* Now record the occurrence(s). */
1999 if (antic_p)
2001 antic_occr = cur_expr->antic_occr;
2003 /* Search for another occurrence in the same basic block. */
2004 while (antic_occr && BLOCK_NUM (antic_occr->insn) != BLOCK_NUM (insn))
2006 /* If an occurrence isn't found, save a pointer to the end of
2007 the list. */
2008 last_occr = antic_occr;
2009 antic_occr = antic_occr->next;
2012 if (antic_occr)
2013 /* Found another instance of the expression in the same basic block.
2014 Prefer the currently recorded one. We want the first one in the
2015 block and the block is scanned from start to end. */
2016 ; /* nothing to do */
2017 else
2019 /* First occurrence of this expression in this basic block. */
2020 antic_occr = (struct occr *) gcse_alloc (sizeof (struct occr));
2021 bytes_used += sizeof (struct occr);
2022 /* First occurrence of this expression in any block? */
2023 if (cur_expr->antic_occr == NULL)
2024 cur_expr->antic_occr = antic_occr;
2025 else
2026 last_occr->next = antic_occr;
2028 antic_occr->insn = insn;
2029 antic_occr->next = NULL;
2033 if (avail_p)
2035 avail_occr = cur_expr->avail_occr;
2037 /* Search for another occurrence in the same basic block. */
2038 while (avail_occr && BLOCK_NUM (avail_occr->insn) != BLOCK_NUM (insn))
2040 /* If an occurrence isn't found, save a pointer to the end of
2041 the list. */
2042 last_occr = avail_occr;
2043 avail_occr = avail_occr->next;
2046 if (avail_occr)
2047 /* Found another instance of the expression in the same basic block.
2048 Prefer this occurrence to the currently recorded one. We want
2049 the last one in the block and the block is scanned from start
2050 to end. */
2051 avail_occr->insn = insn;
2052 else
2054 /* First occurrence of this expression in this basic block. */
2055 avail_occr = (struct occr *) gcse_alloc (sizeof (struct occr));
2056 bytes_used += sizeof (struct occr);
2058 /* First occurrence of this expression in any block? */
2059 if (cur_expr->avail_occr == NULL)
2060 cur_expr->avail_occr = avail_occr;
2061 else
2062 last_occr->next = avail_occr;
2064 avail_occr->insn = insn;
2065 avail_occr->next = NULL;
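/* The insert loop above follows a common chained-hash idiom: scan the
   bucket with the equivalence predicate while remembering the chain tail,
   so a miss appends in O(1) without re-walking the chain.  A standalone
   sketch with a hypothetical integer-keyed entry type:  */
#if 0
struct toy_entry { int key; struct toy_entry *next_same_hash; };

static struct toy_entry *
toy_insert (struct toy_entry **buckets, unsigned nbuckets, int key)
{
  unsigned hash = (unsigned) key % nbuckets;
  struct toy_entry *cur = buckets[hash], *last = 0;

  while (cur && cur->key != key)
    {
      last = cur;
      cur = cur->next_same_hash;
    }

  if (! cur)
    {
      cur = (struct toy_entry *) xmalloc (sizeof (struct toy_entry));
      cur->key = key;
      cur->next_same_hash = 0;
      if (last)
	last->next_same_hash = cur;	/* Append at the saved tail.  */
      else
	buckets[hash] = cur;		/* First entry in this bucket.  */
    }

  return cur;
}
#endif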
2070 /* Insert pattern X in INSN in the hash table.
2071 X is a SET of a reg to either another reg or a constant.
2072 If it is already present, record it as the last occurrence in INSN's
2073 basic block. */
2075 static void
2076 insert_set_in_table (x, insn, table)
2077 rtx x;
2078 rtx insn;
2079 struct hash_table *table;
2081 int found;
2082 unsigned int hash;
2083 struct expr *cur_expr, *last_expr = NULL;
2084 struct occr *cur_occr, *last_occr = NULL;
2086 if (GET_CODE (x) != SET
2087 || GET_CODE (SET_DEST (x)) != REG)
2088 abort ();
2090 hash = hash_set (REGNO (SET_DEST (x)), table->size);
2092 cur_expr = table->table[hash];
2093 found = 0;
2095 while (cur_expr && 0 == (found = expr_equiv_p (cur_expr->expr, x)))
2097 /* If the expression isn't found, save a pointer to the end of
2098 the list. */
2099 last_expr = cur_expr;
2100 cur_expr = cur_expr->next_same_hash;
2103 if (! found)
2105 cur_expr = (struct expr *) gcse_alloc (sizeof (struct expr));
2106 bytes_used += sizeof (struct expr);
2107 if (table->table[hash] == NULL)
2108 /* This is the first pattern that hashed to this index. */
2109 table->table[hash] = cur_expr;
2110 else
2111 /* Add EXPR to end of this hash chain. */
2112 last_expr->next_same_hash = cur_expr;
2114 /* Set the fields of the expr element.
2115 We must copy X because it can be modified when copy propagation is
2116 performed on its operands. */
2117 cur_expr->expr = copy_rtx (x);
2118 cur_expr->bitmap_index = table->n_elems++;
2119 cur_expr->next_same_hash = NULL;
2120 cur_expr->antic_occr = NULL;
2121 cur_expr->avail_occr = NULL;
2124 /* Now record the occurrence. */
2125 cur_occr = cur_expr->avail_occr;
2127 /* Search for another occurrence in the same basic block. */
2128 while (cur_occr && BLOCK_NUM (cur_occr->insn) != BLOCK_NUM (insn))
2130 /* If an occurrence isn't found, save a pointer to the end of
2131 the list. */
2132 last_occr = cur_occr;
2133 cur_occr = cur_occr->next;
2136 if (cur_occr)
2137 /* Found another instance of the expression in the same basic block.
2138 Prefer this occurrence to the currently recorded one. We want the
2139 last one in the block and the block is scanned from start to end. */
2140 cur_occr->insn = insn;
2141 else
2143 /* First occurrence of this expression in this basic block. */
2144 cur_occr = (struct occr *) gcse_alloc (sizeof (struct occr));
2145 bytes_used += sizeof (struct occr);
2147 /* First occurrence of this expression in any block? */
2148 if (cur_expr->avail_occr == NULL)
2149 cur_expr->avail_occr = cur_occr;
2150 else
2151 last_occr->next = cur_occr;
2153 cur_occr->insn = insn;
2154 cur_occr->next = NULL;
2158 /* Scan pattern PAT of INSN and add an entry to the hash TABLE (set or
2159 expression one). */
2161 static void
2162 hash_scan_set (pat, insn, table)
2163 rtx pat, insn;
2164 struct hash_table *table;
2166 rtx src = SET_SRC (pat);
2167 rtx dest = SET_DEST (pat);
2168 rtx note;
2170 if (GET_CODE (src) == CALL)
2171 hash_scan_call (src, insn, table);
2173 else if (GET_CODE (dest) == REG)
2175 unsigned int regno = REGNO (dest);
2176 rtx tmp;
2178 /* If this is a single set and we are doing constant propagation,
2179 see if a REG_NOTE shows this equivalent to a constant. */
2180 if (table->set_p && (note = find_reg_equal_equiv_note (insn)) != 0
2181 && CONSTANT_P (XEXP (note, 0)))
2182 src = XEXP (note, 0), pat = gen_rtx_SET (VOIDmode, dest, src);
2184 /* Only record sets of pseudo-regs in the hash table. */
2185 if (! table->set_p
2186 && regno >= FIRST_PSEUDO_REGISTER
2187 /* Don't GCSE something if we can't do a reg/reg copy. */
2188 && can_copy_p [GET_MODE (dest)]
2189 /* GCSE commonly inserts instructions after the insn. We can't
2190 do that easily for EH_REGION notes, so disable GCSE on these
2191 for now. */
2192 && !find_reg_note (insn, REG_EH_REGION, NULL_RTX)
2193 /* Is SET_SRC something we want to gcse? */
2194 && want_to_gcse_p (src)
2195 /* Don't CSE a nop. */
2196 && ! set_noop_p (pat)
2197 /* Don't GCSE if it has an attached REG_EQUIV note.
2198 At this point only function parameters should have
2199 REG_EQUIV notes, and if the argument slot is used somewhere
2200 explicitly, it means the address of the parameter has been
2201 taken, so we should not extend the lifetime of the pseudo. */
2202 && ((note = find_reg_note (insn, REG_EQUIV, NULL_RTX)) == 0
2203 || GET_CODE (XEXP (note, 0)) != MEM))
2205 /* An expression is not anticipatable if its operands are
2206 modified before this insn or if this is not the only SET in
2207 this insn. */
2208 int antic_p = oprs_anticipatable_p (src, insn) && single_set (insn);
2209 /* An expression is not available if its operands are
2210 subsequently modified, including this insn. It's also not
2211 available if this is a branch, because we can't insert
2212 a set after the branch. */
2213 int avail_p = (oprs_available_p (src, insn)
2214 && ! JUMP_P (insn));
2216 insert_expr_in_table (src, GET_MODE (dest), insn, antic_p, avail_p, table);
2219 /* Record sets for constant/copy propagation. */
2220 else if (table->set_p
2221 && regno >= FIRST_PSEUDO_REGISTER
2222 && ((GET_CODE (src) == REG
2223 && REGNO (src) >= FIRST_PSEUDO_REGISTER
2224 && can_copy_p [GET_MODE (dest)]
2225 && REGNO (src) != regno)
2226 || (CONSTANT_P (src)
2227 && GET_CODE (src) != CONSTANT_P_RTX))
2228 /* A copy is not available if its src or dest is subsequently
2229 modified. Here we want to search from INSN+1 on, but
2230 oprs_available_p searches from INSN on. */
2231 && (insn == BLOCK_END (BLOCK_NUM (insn))
2232 || ((tmp = next_nonnote_insn (insn)) != NULL_RTX
2233 && oprs_available_p (pat, tmp))))
2234 insert_set_in_table (pat, insn, table);
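/* Worked example for the ANTIC_P/AVAIL_P computation above.  In a block

	r5 = r2 + r3	<- r2+r3 recorded at this insn
	r2 = r7

   the expression r2+r3 is anticipatable at the recording point (its
   operands are unchanged from the block start up to the insn) but not
   available at the block end (r2 is modified afterwards).  Swapping the
   two insns yields the reverse: available but not anticipatable.  */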
2238 static void
2239 hash_scan_clobber (x, insn, table)
2240 rtx x ATTRIBUTE_UNUSED, insn ATTRIBUTE_UNUSED;
2241 struct hash_table *table ATTRIBUTE_UNUSED;
2243 /* Currently nothing to do. */
2246 static void
2247 hash_scan_call (x, insn, table)
2248 rtx x ATTRIBUTE_UNUSED, insn ATTRIBUTE_UNUSED;
2249 struct hash_table *table ATTRIBUTE_UNUSED;
2251 /* Currently nothing to do. */
2254 /* Process INSN and add hash table entries as appropriate.
2256 Only available expressions that set a single pseudo-reg are recorded.
2258 Single sets in a PARALLEL could be handled, but it's an extra complication
2259 that isn't dealt with right now. The trick is handling the CLOBBERs that
2260 are also in the PARALLEL. Later.
2262 If SET_P is nonzero, this is for the assignment hash table,
2263 otherwise it is for the expression hash table.
2264 If IN_LIBCALL_BLOCK is nonzero, we are in a libcall block and should
2265 not record any expressions. */
2267 static void
2268 hash_scan_insn (insn, table, in_libcall_block)
2269 rtx insn;
2270 struct hash_table *table;
2271 int in_libcall_block;
2273 rtx pat = PATTERN (insn);
2274 int i;
2276 if (in_libcall_block)
2277 return;
2279 /* Pick out the sets of INSN and for other forms of instructions record
2280 what's been modified. */
2282 if (GET_CODE (pat) == SET)
2283 hash_scan_set (pat, insn, table);
2284 else if (GET_CODE (pat) == PARALLEL)
2285 for (i = 0; i < XVECLEN (pat, 0); i++)
2287 rtx x = XVECEXP (pat, 0, i);
2289 if (GET_CODE (x) == SET)
2290 hash_scan_set (x, insn, table);
2291 else if (GET_CODE (x) == CLOBBER)
2292 hash_scan_clobber (x, insn, table);
2293 else if (GET_CODE (x) == CALL)
2294 hash_scan_call (x, insn, table);
2297 else if (GET_CODE (pat) == CLOBBER)
2298 hash_scan_clobber (pat, insn, table);
2299 else if (GET_CODE (pat) == CALL)
2300 hash_scan_call (pat, insn, table);
2303 static void
2304 dump_hash_table (file, name, table)
2305 FILE *file;
2306 const char *name;
2307 struct hash_table *table;
2309 int i;
2310 /* Flattened out table, so it's printed in proper order. */
2311 struct expr **flat_table;
2312 unsigned int *hash_val;
2313 struct expr *expr;
2315 flat_table
2316 = (struct expr **) xcalloc (table->n_elems, sizeof (struct expr *));
2317 hash_val = (unsigned int *) xmalloc (table->n_elems * sizeof (unsigned int));
2319 for (i = 0; i < (int) table->size; i++)
2320 for (expr = table->table[i]; expr != NULL; expr = expr->next_same_hash)
2322 flat_table[expr->bitmap_index] = expr;
2323 hash_val[expr->bitmap_index] = i;
2326 fprintf (file, "%s hash table (%d buckets, %d entries)\n",
2327 name, table->size, table->n_elems);
2329 for (i = 0; i < (int) table->n_elems; i++)
2330 if (flat_table[i] != 0)
2332 expr = flat_table[i];
2333 fprintf (file, "Index %d (hash value %d)\n ",
2334 expr->bitmap_index, hash_val[i]);
2335 print_rtl (file, expr->expr);
2336 fprintf (file, "\n");
2339 fprintf (file, "\n");
2341 free (flat_table);
2342 free (hash_val);
2345 /* Record register first/last/block set information for REGNO in INSN.
2347 first_set records the first place in the block where the register
2348 is set and is used to compute "anticipatability".
2350 last_set records the last place in the block where the register
2351 is set and is used to compute "availability".
2353 last_bb records the block for which first_set and last_set are
2354 valid, as a quick test to invalidate them.
2356 reg_set_in_block records whether the register is set in the block
2357 and is used to compute "transparency". */
2359 static void
2360 record_last_reg_set_info (insn, regno)
2361 rtx insn;
2362 int regno;
2364 struct reg_avail_info *info = &reg_avail_info[regno];
2365 int cuid = INSN_CUID (insn);
2367 info->last_set = cuid;
2368 if (info->last_bb != current_bb)
2370 info->last_bb = current_bb;
2371 info->first_set = cuid;
2372 SET_BIT (reg_set_in_block[current_bb->index], regno);
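/* The last_bb field above is an epoch stamp: rather than clearing every
   register's first_set/last_set when entering a new block, a stale record
   is detected by comparing its stamp against the current block and is
   reinitialized lazily.  The trick in standalone form (hypothetical
   names):  */
#if 0
struct toy_slot { int stamp, first, last; };
static int toy_epoch;		/* Bumped once per block.  */

static void
toy_record (struct toy_slot *slot, int when)
{
  slot->last = when;		/* Always track the latest event.  */
  if (slot->stamp != toy_epoch)
    {
      slot->stamp = toy_epoch;	/* Stale entry: reset lazily.  */
      slot->first = when;
    }
}
#endif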
2377 /* Record all of the canonicalized MEMs of record_last_mem_set_info's insn.
2378 Note we store a pair of elements in the list, so they have to be
2379 taken off pairwise. */
2381 static void
2382 canon_list_insert (dest, unused1, v_insn)
2383 rtx dest ATTRIBUTE_UNUSED;
2384 rtx unused1 ATTRIBUTE_UNUSED;
2385 void * v_insn;
2387 rtx dest_addr, insn;
2388 int bb;
2390 while (GET_CODE (dest) == SUBREG
2391 || GET_CODE (dest) == ZERO_EXTRACT
2392 || GET_CODE (dest) == SIGN_EXTRACT
2393 || GET_CODE (dest) == STRICT_LOW_PART)
2394 dest = XEXP (dest, 0);
2396 /* If DEST is not a MEM, then it will not conflict with a load. Note
2397 that function calls are assumed to clobber memory, but are handled
2398 elsewhere. */
2400 if (GET_CODE (dest) != MEM)
2401 return;
2403 dest_addr = get_addr (XEXP (dest, 0));
2404 dest_addr = canon_rtx (dest_addr);
2405 insn = (rtx) v_insn;
2406 bb = BLOCK_NUM (insn);
2408 canon_modify_mem_list[bb] =
2409 alloc_EXPR_LIST (VOIDmode, dest_addr, canon_modify_mem_list[bb]);
2410 canon_modify_mem_list[bb] =
2411 alloc_EXPR_LIST (VOIDmode, dest, canon_modify_mem_list[bb]);
2412 bitmap_set_bit (canon_modify_mem_list_set, bb);
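/* Since the two pushes above leave DEST at the front with DEST_ADDR right
   behind it, a consumer must pop the list pairwise; a CALL_INSN node
   stands alone and ends the walk (see compute_transp).  Sketch of the
   matching traversal (hypothetical standalone function):  */
#if 0
static void
toy_walk_canon_list (rtx list_entry)
{
  while (list_entry)
    {
      rtx dest, dest_addr;

      if (GET_CODE (XEXP (list_entry, 0)) == CALL_INSN)
	break;			/* A call clobbers all of memory.  */

      dest = XEXP (list_entry, 0);
      list_entry = XEXP (list_entry, 1);
      dest_addr = XEXP (list_entry, 0);
      /* ... test DEST/DEST_ADDR, e.g. with canon_true_dependence ...  */
      list_entry = XEXP (list_entry, 1);
    }
}
#endif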
2415 /* Record memory modification information for INSN. We do not actually care
2416 about the memory location(s) that are set, or even how they are set (consider
2417 a CALL_INSN). We merely need to record which insns modify memory. */
2419 static void
2420 record_last_mem_set_info (insn)
2421 rtx insn;
2423 int bb = BLOCK_NUM (insn);
2425 /* load_killed_in_block_p will handle the case of calls clobbering
2426 everything. */
2427 modify_mem_list[bb] = alloc_INSN_LIST (insn, modify_mem_list[bb]);
2428 bitmap_set_bit (modify_mem_list_set, bb);
2430 if (GET_CODE (insn) == CALL_INSN)
2432 /* Note that traversals of this list (other than for freeing)
2433 will break after encountering a CALL_INSN. So, there's no
2434 need to insert a pair of items, as canon_list_insert does. */
2435 canon_modify_mem_list[bb] =
2436 alloc_INSN_LIST (insn, canon_modify_mem_list[bb]);
2437 bitmap_set_bit (canon_modify_mem_list_set, bb);
2439 else
2440 note_stores (PATTERN (insn), canon_list_insert, (void*) insn);
2443 /* Called from compute_hash_table via note_stores to handle one
2444 SET or CLOBBER in an insn. DATA is really the instruction in which
2445 the SET is taking place. */
2447 static void
2448 record_last_set_info (dest, setter, data)
2449 rtx dest, setter ATTRIBUTE_UNUSED;
2450 void *data;
2452 rtx last_set_insn = (rtx) data;
2454 if (GET_CODE (dest) == SUBREG)
2455 dest = SUBREG_REG (dest);
2457 if (GET_CODE (dest) == REG)
2458 record_last_reg_set_info (last_set_insn, REGNO (dest));
2459 else if (GET_CODE (dest) == MEM
2460 /* Ignore pushes, they clobber nothing. */
2461 && ! push_operand (dest, GET_MODE (dest)))
2462 record_last_mem_set_info (last_set_insn);
2465 /* Top level function to create an expression or assignment hash table.
2467 Expression entries are placed in the hash table if
2468 - they are of the form (set (pseudo-reg) src),
2469 - src is something we want to perform GCSE on,
2470 - none of the operands are subsequently modified in the block
2472 Assignment entries are placed in the hash table if
2473 - they are of the form (set (pseudo-reg) src),
2474 - src is something we want to perform const/copy propagation on,
2475 - none of the operands or target are subsequently modified in the block
2477 Currently src must be a pseudo-reg or a const_int.
2479 TABLE is the table computed. */
2481 static void
2482 compute_hash_table_work (table)
2483 struct hash_table *table;
2485 unsigned int i;
2487 /* While we compute the hash table we also compute a bit array of which
2488 registers are set in which blocks.
2489 ??? This isn't needed during const/copy propagation, but it's cheap to
2490 compute. Later. */
2491 sbitmap_vector_zero (reg_set_in_block, last_basic_block);
2493 /* Re-cache any INSN_LIST nodes we have allocated. */
2494 clear_modify_mem_tables ();
2495 /* Some working arrays used to track first and last set in each block. */
2496 reg_avail_info = (struct reg_avail_info*)
2497 gmalloc (max_gcse_regno * sizeof (struct reg_avail_info));
2499 for (i = 0; i < max_gcse_regno; ++i)
2500 reg_avail_info[i].last_bb = NULL;
2502 FOR_EACH_BB (current_bb)
2504 rtx insn;
2505 unsigned int regno;
2506 int in_libcall_block;
2508 /* First pass over the instructions records information used to
2509 determine when registers and memory are first and last set.
2510 ??? hard-reg reg_set_in_block computation
2511 could be moved to compute_sets since they currently don't change. */
2513 for (insn = current_bb->head;
2514 insn && insn != NEXT_INSN (current_bb->end);
2515 insn = NEXT_INSN (insn))
2517 if (! INSN_P (insn))
2518 continue;
2520 if (GET_CODE (insn) == CALL_INSN)
2522 bool clobbers_all = false;
2523 #ifdef NON_SAVING_SETJMP
2524 if (NON_SAVING_SETJMP
2525 && find_reg_note (insn, REG_SETJMP, NULL_RTX))
2526 clobbers_all = true;
2527 #endif
2529 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2530 if (clobbers_all
2531 || TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
2532 record_last_reg_set_info (insn, regno);
2534 mark_call (insn);
2537 note_stores (PATTERN (insn), record_last_set_info, insn);
2540 /* Insert implicit sets in the hash table. */
2541 if (table->set_p
2542 && implicit_sets[current_bb->index] != NULL_RTX)
2543 hash_scan_set (implicit_sets[current_bb->index],
2544 current_bb->head, table);
2546 /* The next pass builds the hash table. */
2548 for (insn = current_bb->head, in_libcall_block = 0;
2549 insn && insn != NEXT_INSN (current_bb->end);
2550 insn = NEXT_INSN (insn))
2551 if (INSN_P (insn))
2553 if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
2554 in_libcall_block = 1;
2555 else if (table->set_p && find_reg_note (insn, REG_RETVAL, NULL_RTX))
2556 in_libcall_block = 0;
2557 hash_scan_insn (insn, table, in_libcall_block);
2558 if (!table->set_p && find_reg_note (insn, REG_RETVAL, NULL_RTX))
2559 in_libcall_block = 0;
2563 free (reg_avail_info);
2564 reg_avail_info = NULL;
2567 /* Allocate space for the set/expr hash TABLE.
2568 N_INSNS is the number of instructions in the function.
2569 It is used to determine the number of buckets to use.
2570 SET_P determines whether a set or an expression table will
2571 be created. */
2573 static void
2574 alloc_hash_table (n_insns, table, set_p)
2575 int n_insns;
2576 struct hash_table *table;
2577 int set_p;
2579 int n;
2581 table->size = n_insns / 4;
2582 if (table->size < 11)
2583 table->size = 11;
2585 /* Attempt to maintain efficient use of hash table.
2586 Making it an odd number is simplest for now.
2587 ??? Later take some measurements. */
2588 table->size |= 1;
2589 n = table->size * sizeof (struct expr *);
2590 table->table = (struct expr **) gmalloc (n);
2591 table->set_p = set_p;
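/* The sizing heuristic above, isolated: roughly one bucket per four
   insns, floored at 11 and forced odd so bucket indices spread even when
   hash values share small even factors.  Standalone form:  */
#if 0
static int
toy_table_size (int n_insns)
{
  int size = n_insns / 4;

  if (size < 11)
    size = 11;
  return size | 1;		/* An odd bucket count.  */
}
#endif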
2594 /* Free things allocated by alloc_hash_table. */
2596 static void
2597 free_hash_table (table)
2598 struct hash_table *table;
2600 free (table->table);
2603 /* Compute the hash TABLE, either the set table for copy/const
2604 propagation or the expression hash table. */
2606 static void
2607 compute_hash_table (table)
2608 struct hash_table *table;
2610 /* Initialize count of number of entries in hash table. */
2611 table->n_elems = 0;
2612 memset ((char *) table->table, 0,
2613 table->size * sizeof (struct expr *));
2615 compute_hash_table_work (table);
2618 /* Expression tracking support. */
2620 /* Lookup pattern PAT in the expression TABLE.
2621 The result is a pointer to the table entry, or NULL if not found. */
2623 static struct expr *
2624 lookup_expr (pat, table)
2625 rtx pat;
2626 struct hash_table *table;
2628 int do_not_record_p;
2629 unsigned int hash = hash_expr (pat, GET_MODE (pat), &do_not_record_p,
2630 table->size);
2631 struct expr *expr;
2633 if (do_not_record_p)
2634 return NULL;
2636 expr = table->table[hash];
2638 while (expr && ! expr_equiv_p (expr->expr, pat))
2639 expr = expr->next_same_hash;
2641 return expr;
2644 /* Lookup REGNO in the set TABLE. The result is a pointer to the
2645 table entry, or NULL if not found. */
2647 static struct expr *
2648 lookup_set (regno, table)
2649 unsigned int regno;
2650 struct hash_table *table;
2652 unsigned int hash = hash_set (regno, table->size);
2653 struct expr *expr;
2655 expr = table->table[hash];
2657 while (expr && REGNO (SET_DEST (expr->expr)) != regno)
2658 expr = expr->next_same_hash;
2660 return expr;
2663 /* Return the next entry for REGNO in list EXPR. */
2665 static struct expr *
2666 next_set (regno, expr)
2667 unsigned int regno;
2668 struct expr *expr;
2671 expr = expr->next_same_hash;
2672 while (expr && REGNO (SET_DEST (expr->expr)) != regno);
2674 return expr;
2677 /* Like free_INSN_LIST_list or free_EXPR_LIST_list, except that the node
2678 types may be mixed. */
2680 static void
2681 free_insn_expr_list_list (listp)
2682 rtx *listp;
2684 rtx list, next;
2686 for (list = *listp; list ; list = next)
2688 next = XEXP (list, 1);
2689 if (GET_CODE (list) == EXPR_LIST)
2690 free_EXPR_LIST_node (list);
2691 else
2692 free_INSN_LIST_node (list);
2695 *listp = NULL;
2698 /* Clear canon_modify_mem_list and modify_mem_list tables. */
2699 static void
2700 clear_modify_mem_tables ()
2702 int i;
2704 EXECUTE_IF_SET_IN_BITMAP
2705 (modify_mem_list_set, 0, i, free_INSN_LIST_list (modify_mem_list + i));
2706 bitmap_clear (modify_mem_list_set);
2708 EXECUTE_IF_SET_IN_BITMAP
2709 (canon_modify_mem_list_set, 0, i,
2710 free_insn_expr_list_list (canon_modify_mem_list + i));
2711 bitmap_clear (canon_modify_mem_list_set);
2714 /* Release memory used by modify_mem_list_set and canon_modify_mem_list_set. */
2716 static void
2717 free_modify_mem_tables ()
2719 clear_modify_mem_tables ();
2720 free (modify_mem_list);
2721 free (canon_modify_mem_list);
2722 modify_mem_list = 0;
2723 canon_modify_mem_list = 0;
2726 /* Reset tables used to keep track of what's still available [since the
2727 start of the block]. */
2729 static void
2730 reset_opr_set_tables ()
2732 /* Maintain a bitmap of which regs have been set since beginning of
2733 the block. */
2734 CLEAR_REG_SET (reg_set_bitmap);
2736 /* Also keep a record of the last instruction to modify memory.
2737 For now this is very trivial; we only record whether any memory
2738 location has been modified. */
2739 clear_modify_mem_tables ();
2742 /* Return nonzero if the operands of X are not set before INSN in
2743 INSN's basic block. */
2745 static int
2746 oprs_not_set_p (x, insn)
2747 rtx x, insn;
2749 int i, j;
2750 enum rtx_code code;
2751 const char *fmt;
2753 if (x == 0)
2754 return 1;
2756 code = GET_CODE (x);
2757 switch (code)
2759 case PC:
2760 case CC0:
2761 case CONST:
2762 case CONST_INT:
2763 case CONST_DOUBLE:
2764 case CONST_VECTOR:
2765 case SYMBOL_REF:
2766 case LABEL_REF:
2767 case ADDR_VEC:
2768 case ADDR_DIFF_VEC:
2769 return 1;
2771 case MEM:
2772 if (load_killed_in_block_p (BLOCK_FOR_INSN (insn),
2773 INSN_CUID (insn), x, 0))
2774 return 0;
2775 else
2776 return oprs_not_set_p (XEXP (x, 0), insn);
2778 case REG:
2779 return ! REGNO_REG_SET_P (reg_set_bitmap, REGNO (x));
2781 default:
2782 break;
2785 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
2787 if (fmt[i] == 'e')
2789 /* If we are about to do the last recursive call
2790 needed at this level, change it into iteration.
2791 This function is called enough to be worth it. */
2792 if (i == 0)
2793 return oprs_not_set_p (XEXP (x, i), insn);
2795 if (! oprs_not_set_p (XEXP (x, i), insn))
2796 return 0;
2798 else if (fmt[i] == 'E')
2799 for (j = 0; j < XVECLEN (x, i); j++)
2800 if (! oprs_not_set_p (XVECEXP (x, i, j), insn))
2801 return 0;
2804 return 1;
2807 /* Mark things set by a CALL. */
2809 static void
2810 mark_call (insn)
2811 rtx insn;
2813 if (! CONST_OR_PURE_CALL_P (insn))
2814 record_last_mem_set_info (insn);
2817 /* Mark things set by a SET. */
2819 static void
2820 mark_set (pat, insn)
2821 rtx pat, insn;
2823 rtx dest = SET_DEST (pat);
2825 while (GET_CODE (dest) == SUBREG
2826 || GET_CODE (dest) == ZERO_EXTRACT
2827 || GET_CODE (dest) == SIGN_EXTRACT
2828 || GET_CODE (dest) == STRICT_LOW_PART)
2829 dest = XEXP (dest, 0);
2831 if (GET_CODE (dest) == REG)
2832 SET_REGNO_REG_SET (reg_set_bitmap, REGNO (dest));
2833 else if (GET_CODE (dest) == MEM)
2834 record_last_mem_set_info (insn);
2836 if (GET_CODE (SET_SRC (pat)) == CALL)
2837 mark_call (insn);
2840 /* Record things set by a CLOBBER. */
2842 static void
2843 mark_clobber (pat, insn)
2844 rtx pat, insn;
2846 rtx clob = XEXP (pat, 0);
2848 while (GET_CODE (clob) == SUBREG || GET_CODE (clob) == STRICT_LOW_PART)
2849 clob = XEXP (clob, 0);
2851 if (GET_CODE (clob) == REG)
2852 SET_REGNO_REG_SET (reg_set_bitmap, REGNO (clob));
2853 else
2854 record_last_mem_set_info (insn);
2857 /* Record things set by INSN.
2858 This data is used by oprs_not_set_p. */
2860 static void
2861 mark_oprs_set (insn)
2862 rtx insn;
2864 rtx pat = PATTERN (insn);
2865 int i;
2867 if (GET_CODE (pat) == SET)
2868 mark_set (pat, insn);
2869 else if (GET_CODE (pat) == PARALLEL)
2870 for (i = 0; i < XVECLEN (pat, 0); i++)
2872 rtx x = XVECEXP (pat, 0, i);
2874 if (GET_CODE (x) == SET)
2875 mark_set (x, insn);
2876 else if (GET_CODE (x) == CLOBBER)
2877 mark_clobber (x, insn);
2878 else if (GET_CODE (x) == CALL)
2879 mark_call (insn);
2882 else if (GET_CODE (pat) == CLOBBER)
2883 mark_clobber (pat, insn);
2884 else if (GET_CODE (pat) == CALL)
2885 mark_call (insn);
2889 /* Classic GCSE reaching definition support. */
2891 /* Allocate reaching def variables. */
2893 static void
2894 alloc_rd_mem (n_blocks, n_insns)
2895 int n_blocks, n_insns;
2897 rd_kill = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_insns);
2898 sbitmap_vector_zero (rd_kill, n_blocks);
2900 rd_gen = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_insns);
2901 sbitmap_vector_zero (rd_gen, n_blocks);
2903 reaching_defs = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_insns);
2904 sbitmap_vector_zero (reaching_defs, n_blocks);
2906 rd_out = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_insns);
2907 sbitmap_vector_zero (rd_out, n_blocks);
2910 /* Free reaching def variables. */
2912 static void
2913 free_rd_mem ()
2915 sbitmap_vector_free (rd_kill);
2916 sbitmap_vector_free (rd_gen);
2917 sbitmap_vector_free (reaching_defs);
2918 sbitmap_vector_free (rd_out);
2921 /* Add INSN to the kills of BB. REGNO, set in BB, is killed by INSN. */
2923 static void
2924 handle_rd_kill_set (insn, regno, bb)
2925 rtx insn;
2926 int regno;
2927 basic_block bb;
2929 struct reg_set *this_reg;
2931 for (this_reg = reg_set_table[regno]; this_reg; this_reg = this_reg ->next)
2932 if (BLOCK_NUM (this_reg->insn) != BLOCK_NUM (insn))
2933 SET_BIT (rd_kill[bb->index], INSN_CUID (this_reg->insn));
2936 /* Compute the set of kills for reaching definitions. */
2938 static void
2939 compute_kill_rd ()
2941 int cuid;
2942 unsigned int regno;
2943 int i;
2944 basic_block bb;
2946 /* For each block
2947 For each set bit in `gen' of the block (i.e. each insn which
2948 generates a definition in the block)
2949 Call the reg set by the insn corresponding to that bit regx
2950 Look at the linked list starting at reg_set_table[regx]
2951 For each setting of regx in the linked list, which is not in
2952 this block
2953 Set the bit in `kill' corresponding to that insn. */
2954 FOR_EACH_BB (bb)
2955 for (cuid = 0; cuid < max_cuid; cuid++)
2956 if (TEST_BIT (rd_gen[bb->index], cuid))
2958 rtx insn = CUID_INSN (cuid);
2959 rtx pat = PATTERN (insn);
2961 if (GET_CODE (insn) == CALL_INSN)
2963 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2964 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
2965 handle_rd_kill_set (insn, regno, bb);
2968 if (GET_CODE (pat) == PARALLEL)
2970 for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
2972 enum rtx_code code = GET_CODE (XVECEXP (pat, 0, i));
2974 if ((code == SET || code == CLOBBER)
2975 && GET_CODE (XEXP (XVECEXP (pat, 0, i), 0)) == REG)
2976 handle_rd_kill_set (insn,
2977 REGNO (XEXP (XVECEXP (pat, 0, i), 0)),
2978 bb);
2981 else if (GET_CODE (pat) == SET && GET_CODE (SET_DEST (pat)) == REG)
2982 /* Each setting of this register outside of this block
2983 must be marked in the set of kills in this block. */
2984 handle_rd_kill_set (insn, REGNO (SET_DEST (pat)), bb);
2988 /* Compute the reaching definitions as in
2989 Compilers Principles, Techniques, and Tools. Aho, Sethi, Ullman,
2990 Chapter 10. It is the same algorithm as used for computing available
2991 expressions but applied to the gens and kills of reaching definitions. */
2993 static void
2994 compute_rd ()
2996 int changed, passes;
2997 basic_block bb;
2999 FOR_EACH_BB (bb)
3000 sbitmap_copy (rd_out[bb->index] /*dst*/, rd_gen[bb->index] /*src*/);
3002 passes = 0;
3003 changed = 1;
3004 while (changed)
3006 changed = 0;
3007 FOR_EACH_BB (bb)
3009 sbitmap_union_of_preds (reaching_defs[bb->index], rd_out, bb->index);
3010 changed |= sbitmap_union_of_diff_cg (rd_out[bb->index], rd_gen[bb->index],
3011 reaching_defs[bb->index], rd_kill[bb->index]);
3013 passes++;
3016 if (gcse_file)
3017 fprintf (gcse_file, "reaching def computation: %d passes\n", passes);
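/* The iteration above is the textbook forward "may" dataflow solver.
   Reduced to one-word bitsets and at most two predecessors per block
   (a hypothetical simplification), the same fixpoint loop reads:

     IN(b)  = union over predecessors p of OUT(p)
     OUT(b) = GEN(b) | (IN(b) & ~KILL(b))   until no OUT changes.  */
#if 0
static void
toy_reaching_defs (int nblocks, const int *pred0, const int *pred1,
		   const unsigned *gen, const unsigned *kill,
		   unsigned *in, unsigned *out)
{
  int b, changed = 1;

  for (b = 0; b < nblocks; b++)
    out[b] = gen[b];

  while (changed)
    {
      changed = 0;
      for (b = 0; b < nblocks; b++)
	{
	  unsigned new_in = 0, new_out;

	  if (pred0[b] >= 0)
	    new_in |= out[pred0[b]];
	  if (pred1[b] >= 0)
	    new_in |= out[pred1[b]];
	  new_out = gen[b] | (new_in & ~kill[b]);
	  if (new_out != out[b])
	    changed = 1;
	  in[b] = new_in;
	  out[b] = new_out;
	}
    }
}
#endif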
3020 /* Classic GCSE available expression support. */
3022 /* Allocate memory for available expression computation. */
3024 static void
3025 alloc_avail_expr_mem (n_blocks, n_exprs)
3026 int n_blocks, n_exprs;
3028 ae_kill = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_exprs);
3029 sbitmap_vector_zero (ae_kill, n_blocks);
3031 ae_gen = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_exprs);
3032 sbitmap_vector_zero (ae_gen, n_blocks);
3034 ae_in = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_exprs);
3035 sbitmap_vector_zero (ae_in, n_blocks);
3037 ae_out = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_exprs);
3038 sbitmap_vector_zero (ae_out, n_blocks);
3041 static void
3042 free_avail_expr_mem ()
3044 sbitmap_vector_free (ae_kill);
3045 sbitmap_vector_free (ae_gen);
3046 sbitmap_vector_free (ae_in);
3047 sbitmap_vector_free (ae_out);
3050 /* Compute the set of available expressions generated in each basic block. */
3052 static void
3053 compute_ae_gen (expr_hash_table)
3054 struct hash_table *expr_hash_table;
3056 unsigned int i;
3057 struct expr *expr;
3058 struct occr *occr;
3060 /* For each recorded occurrence of each expression, set ae_gen[bb][expr].
3061 This is all we have to do because an expression is not recorded if it
3062 is not available, and the only expressions we want to work with are the
3063 ones that are recorded. */
3064 for (i = 0; i < expr_hash_table->size; i++)
3065 for (expr = expr_hash_table->table[i]; expr != 0; expr = expr->next_same_hash)
3066 for (occr = expr->avail_occr; occr != 0; occr = occr->next)
3067 SET_BIT (ae_gen[BLOCK_NUM (occr->insn)], expr->bitmap_index);
3070 /* Return nonzero if expression X is killed in BB. */
3072 static int
3073 expr_killed_p (x, bb)
3074 rtx x;
3075 basic_block bb;
3077 int i, j;
3078 enum rtx_code code;
3079 const char *fmt;
3081 if (x == 0)
3082 return 1;
3084 code = GET_CODE (x);
3085 switch (code)
3087 case REG:
3088 return TEST_BIT (reg_set_in_block[bb->index], REGNO (x));
3090 case MEM:
3091 if (load_killed_in_block_p (bb, get_max_uid () + 1, x, 0))
3092 return 1;
3093 else
3094 return expr_killed_p (XEXP (x, 0), bb);
3096 case PC:
3097 case CC0: /*FIXME*/
3098 case CONST:
3099 case CONST_INT:
3100 case CONST_DOUBLE:
3101 case CONST_VECTOR:
3102 case SYMBOL_REF:
3103 case LABEL_REF:
3104 case ADDR_VEC:
3105 case ADDR_DIFF_VEC:
3106 return 0;
3108 default:
3109 break;
3112 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
3114 if (fmt[i] == 'e')
3116 /* If we are about to do the last recursive call
3117 needed at this level, change it into iteration.
3118 This function is called enough to be worth it. */
3119 if (i == 0)
3120 return expr_killed_p (XEXP (x, i), bb);
3121 else if (expr_killed_p (XEXP (x, i), bb))
3122 return 1;
3124 else if (fmt[i] == 'E')
3125 for (j = 0; j < XVECLEN (x, i); j++)
3126 if (expr_killed_p (XVECEXP (x, i, j), bb))
3127 return 1;
3130 return 0;
3133 /* Compute the set of available expressions killed in each basic block. */
3135 static void
3136 compute_ae_kill (ae_gen, ae_kill, expr_hash_table)
3137 sbitmap *ae_gen, *ae_kill;
3138 struct hash_table *expr_hash_table;
3140 basic_block bb;
3141 unsigned int i;
3142 struct expr *expr;
3144 FOR_EACH_BB (bb)
3145 for (i = 0; i < expr_hash_table->size; i++)
3146 for (expr = expr_hash_table->table[i]; expr; expr = expr->next_same_hash)
3148 /* Skip EXPR if generated in this block. */
3149 if (TEST_BIT (ae_gen[bb->index], expr->bitmap_index))
3150 continue;
3152 if (expr_killed_p (expr->expr, bb))
3153 SET_BIT (ae_kill[bb->index], expr->bitmap_index);
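/* For reference, the equations compute_available then solves from these
   gen/kill sets use intersection rather than union, since an expression
   is available only if it reaches the block along every path:

     AE_IN(b)  = intersection over predecessors p of AE_OUT(p)
		 (empty at the entry block)
     AE_OUT(b) = AE_GEN(b) | (AE_IN(b) & ~AE_KILL(b))  */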
3157 /* Actually perform the Classic GCSE optimizations. */
3159 /* Return nonzero if occurrence OCCR of expression EXPR reaches block BB.
3161 CHECK_SELF_LOOP is nonzero if we should consider a block reaching itself
3162 as a positive reach. We want to do this when there are two computations
3163 of the expression in the block.
3165 VISITED is a pointer to a working buffer for tracking which BB's have
3166 been visited. It is NULL for the top-level call.
3168 We treat reaching expressions that go through blocks containing the same
3169 reaching expression as "not reaching". E.g. if EXPR is generated in blocks
3170 2 and 3, INSN is in block 4, and 2->3->4, we treat the expression in block
3171 2 as not reaching. The intent is to improve the probability of finding
3172 only one reaching expression and to reduce register lifetimes by picking
3173 the closest such expression. */
3175 static int
3176 expr_reaches_here_p_work (occr, expr, bb, check_self_loop, visited)
3177 struct occr *occr;
3178 struct expr *expr;
3179 basic_block bb;
3180 int check_self_loop;
3181 char *visited;
3183 edge pred;
3185 for (pred = bb->pred; pred != NULL; pred = pred->pred_next)
3187 basic_block pred_bb = pred->src;
3189 if (visited[pred_bb->index])
3190 /* This predecessor has already been visited. Nothing to do. */
3192 else if (pred_bb == bb)
3194 /* BB loops on itself. */
3195 if (check_self_loop
3196 && TEST_BIT (ae_gen[pred_bb->index], expr->bitmap_index)
3197 && BLOCK_NUM (occr->insn) == pred_bb->index)
3198 return 1;
3200 visited[pred_bb->index] = 1;
3203 /* Ignore this predecessor if it kills the expression. */
3204 else if (TEST_BIT (ae_kill[pred_bb->index], expr->bitmap_index))
3205 visited[pred_bb->index] = 1;
3207 /* Does this predecessor generate this expression? */
3208 else if (TEST_BIT (ae_gen[pred_bb->index], expr->bitmap_index))
3210 /* Is this the occurrence we're looking for?
3211 Note that there's only one generating occurrence per block
3212 so we just need to check the block number. */
3213 if (BLOCK_NUM (occr->insn) == pred_bb->index)
3214 return 1;
3216 visited[pred_bb->index] = 1;
3219 /* Neither gen nor kill. */
3220 else
3222 visited[pred_bb->index] = 1;
3223 if (expr_reaches_here_p_work (occr, expr, pred_bb, check_self_loop,
3224 visited))
3226 return 1;
3230 /* All paths have been checked. */
3231 return 0;
3234 /* This wrapper for expr_reaches_here_p_work() is to ensure that any
3235 memory allocated for that function is returned. */
3237 static int
3238 expr_reaches_here_p (occr, expr, bb, check_self_loop)
3239 struct occr *occr;
3240 struct expr *expr;
3241 basic_block bb;
3242 int check_self_loop;
3244 int rval;
3245 char *visited = (char *) xcalloc (last_basic_block, 1);
3247 rval = expr_reaches_here_p_work (occr, expr, bb, check_self_loop, visited);
3249 free (visited);
3250 return rval;
3253 /* Return the instruction that computes EXPR that reaches INSN's basic block.
3254 If there is more than one such instruction, return NULL.
3256 Called only by handle_avail_expr. */
3258 static rtx
3259 computing_insn (expr, insn)
3260 struct expr *expr;
3261 rtx insn;
3263 basic_block bb = BLOCK_FOR_INSN (insn);
3265 if (expr->avail_occr->next == NULL)
3267 if (BLOCK_FOR_INSN (expr->avail_occr->insn) == bb)
3268 /* The available expression is actually itself
3269 (i.e. a loop in the flow graph) so do nothing. */
3270 return NULL;
3272 /* (FIXME) This handles the case where we found a pattern that
3273 was created by a substitution that took place. */
3274 return expr->avail_occr->insn;
3276 else
3278 /* Pattern is computed more than once.
3279 Search backwards from this insn to see how many of these
3280 computations actually reach this insn. */
3281 struct occr *occr;
3282 rtx insn_computes_expr = NULL;
3283 int can_reach = 0;
3285 for (occr = expr->avail_occr; occr != NULL; occr = occr->next)
3287 if (BLOCK_FOR_INSN (occr->insn) == bb)
3289 /* The expression is generated in this block.
3290 The only time we care about this is when the expression
3291 is generated later in the block [and thus there's a loop].
3292 We let the normal cse pass handle the other cases. */
3293 if (INSN_CUID (insn) < INSN_CUID (occr->insn)
3294 && expr_reaches_here_p (occr, expr, bb, 1))
3296 can_reach++;
3297 if (can_reach > 1)
3298 return NULL;
3300 insn_computes_expr = occr->insn;
3303 else if (expr_reaches_here_p (occr, expr, bb, 0))
3305 can_reach++;
3306 if (can_reach > 1)
3307 return NULL;
3309 insn_computes_expr = occr->insn;
3313 if (insn_computes_expr == NULL)
3314 abort ();
3316 return insn_computes_expr;
3320 /* Return nonzero if the definition in DEF_INSN can reach INSN.
3321 Only called by can_disregard_other_sets. */
3323 static int
3324 def_reaches_here_p (insn, def_insn)
3325 rtx insn, def_insn;
3327 rtx reg;
3329 if (TEST_BIT (reaching_defs[BLOCK_NUM (insn)], INSN_CUID (def_insn)))
3330 return 1;
3332 if (BLOCK_NUM (insn) == BLOCK_NUM (def_insn))
3334 if (INSN_CUID (def_insn) < INSN_CUID (insn))
3336 if (GET_CODE (PATTERN (def_insn)) == PARALLEL)
3337 return 1;
3338 else if (GET_CODE (PATTERN (def_insn)) == CLOBBER)
3339 reg = XEXP (PATTERN (def_insn), 0);
3340 else if (GET_CODE (PATTERN (def_insn)) == SET)
3341 reg = SET_DEST (PATTERN (def_insn));
3342 else
3343 abort ();
3345 return ! reg_set_between_p (reg, NEXT_INSN (def_insn), insn);
3347 else
3348 return 0;
3351 return 0;
3354 /* Return nonzero if *ADDR_THIS_REG can only have one value at INSN. The
3355 value returned is the number of definitions that reach INSN. Returning a
3356 value of zero means that [maybe] more than one definition reaches INSN and
3357 the caller can't perform whatever optimization it is trying to do; i.e. it is
3358 always safe to return zero. */
3360 static int
3361 can_disregard_other_sets (addr_this_reg, insn, for_combine)
3362 struct reg_set **addr_this_reg;
3363 rtx insn;
3364 int for_combine;
3366 int number_of_reaching_defs = 0;
3367 struct reg_set *this_reg;
3369 for (this_reg = *addr_this_reg; this_reg != 0; this_reg = this_reg->next)
3370 if (def_reaches_here_p (insn, this_reg->insn))
3372 number_of_reaching_defs++;
3373 /* Ignore parallels for now. */
3374 if (GET_CODE (PATTERN (this_reg->insn)) == PARALLEL)
3375 return 0;
3377 if (!for_combine
3378 && (GET_CODE (PATTERN (this_reg->insn)) == CLOBBER
3379 || ! rtx_equal_p (SET_SRC (PATTERN (this_reg->insn)),
3380 SET_SRC (PATTERN (insn)))))
3381 /* A setting of the reg to a different value reaches INSN. */
3382 return 0;
3384 if (number_of_reaching_defs > 1)
3386 /* If this setting assigns the register the same value that the
3387 previous setting did, and this setting reaches the insn we are
3388 trying to do the substitution on, then we are ok. */
3390 if (GET_CODE (PATTERN (this_reg->insn)) == CLOBBER)
3391 return 0;
3392 else if (! rtx_equal_p (SET_SRC (PATTERN (this_reg->insn)),
3393 SET_SRC (PATTERN (insn))))
3394 return 0;
3397 *addr_this_reg = this_reg;
3400 return number_of_reaching_defs;
3403 /* Expression computed by insn is available and the substitution is legal,
3404 so try to perform the substitution.
3406 The result is nonzero if any changes were made. */
3408 static int
3409 handle_avail_expr (insn, expr)
3410 rtx insn;
3411 struct expr *expr;
3413 rtx pat, insn_computes_expr, expr_set;
3414 rtx to;
3415 struct reg_set *this_reg;
3416 int found_setting, use_src;
3417 int changed = 0;
3419 /* We only handle the case where one computation of the expression
3420 reaches this instruction. */
3421 insn_computes_expr = computing_insn (expr, insn);
3422 if (insn_computes_expr == NULL)
3423 return 0;
3424 expr_set = single_set (insn_computes_expr);
3425 if (!expr_set)
3426 abort ();
3428 found_setting = 0;
3429 use_src = 0;
3431 /* At this point we know only one computation of EXPR outside of this
3432 block reaches this insn. Now try to find a register that the
3433 expression is computed into. */
3434 if (GET_CODE (SET_SRC (expr_set)) == REG)
3436 /* This is the case when the available expression that reaches
3437 here has already been handled as an available expression. */
3438 unsigned int regnum_for_replacing
3439 = REGNO (SET_SRC (expr_set));
3441 /* If the register was created by GCSE we can't use `reg_set_table',
3442 however we know it's set only once. */
3443 if (regnum_for_replacing >= max_gcse_regno
3444 /* If the register the expression is computed into is set only once,
3445 or only one set reaches this insn, we can use it. */
3446 || (((this_reg = reg_set_table[regnum_for_replacing]),
3447 this_reg->next == NULL)
3448 || can_disregard_other_sets (&this_reg, insn, 0)))
3450 use_src = 1;
3451 found_setting = 1;
3455 if (!found_setting)
3457 unsigned int regnum_for_replacing
3458 = REGNO (SET_DEST (expr_set));
3460 /* This shouldn't happen. */
3461 if (regnum_for_replacing >= max_gcse_regno)
3462 abort ();
3464 this_reg = reg_set_table[regnum_for_replacing];
3466 /* If the register the expression is computed into is set only once,
3467 or only one set reaches this insn, use it. */
3468 if (this_reg->next == NULL
3469 || can_disregard_other_sets (&this_reg, insn, 0))
3470 found_setting = 1;
3473 if (found_setting)
3475 pat = PATTERN (insn);
3476 if (use_src)
3477 to = SET_SRC (expr_set);
3478 else
3479 to = SET_DEST (expr_set);
3480 changed = validate_change (insn, &SET_SRC (pat), to, 0);
3482 /* We should be able to ignore the return code from validate_change but
3483 to play it safe we check. */
3484 if (changed)
3486 gcse_subst_count++;
3487 if (gcse_file != NULL)
3489 fprintf (gcse_file, "GCSE: Replacing the source in insn %d with",
3490 INSN_UID (insn));
3491 fprintf (gcse_file, " reg %d %s insn %d\n",
3492 REGNO (to), use_src ? "from" : "set in",
3493 INSN_UID (insn_computes_expr));
3498 /* The register that the expr is computed into is set more than once. */
3499 else if (1 /*expensive_op(this_pattrn->op) && do_expensive_gcse)*/)
3501 /* Insert an insn after INSN_COMPUTES_EXPR that copies the reg
3502 set there (call it REGB) into a new pseudo register REGN.
3503 From that point until the end of the basic block, or until REGB
3504 is set again, replace all uses of REGB with REGN. */
3505 rtx new_insn;
3507 to = gen_reg_rtx (GET_MODE (SET_DEST (expr_set)));
3509 /* Generate the new insn. */
3510 /* ??? If the change fails, we return 0, even though we created
3511 an insn. I think this is ok. */
3512 new_insn
3513 = emit_insn_after (gen_rtx_SET (VOIDmode, to,
3514 SET_DEST (expr_set)),
3515 insn_computes_expr);
3517 /* Keep register set table up to date. */
3518 record_one_set (REGNO (to), new_insn);
3520 gcse_create_count++;
3521 if (gcse_file != NULL)
3523 fprintf (gcse_file, "GCSE: Creating insn %d to copy value of reg %d",
3524 INSN_UID (NEXT_INSN (insn_computes_expr)),
3525 REGNO (SET_SRC (PATTERN (NEXT_INSN (insn_computes_expr)))));
3526 fprintf (gcse_file, ", computed in insn %d,\n",
3527 INSN_UID (insn_computes_expr));
3528 fprintf (gcse_file, " into newly allocated reg %d\n",
3529 REGNO (to));
3532 pat = PATTERN (insn);
3534 /* Do register replacement for INSN. */
3535 changed = validate_change (insn, &SET_SRC (pat),
3536 SET_DEST (PATTERN
3537 (NEXT_INSN (insn_computes_expr))),
3540 /* We should be able to ignore the return code from validate_change but
3541 to play it safe we check. */
3542 if (changed)
3544 gcse_subst_count++;
3545 if (gcse_file != NULL)
3547 fprintf (gcse_file,
3548 "GCSE: Replacing the source in insn %d with reg %d ",
3549 INSN_UID (insn),
3550 REGNO (SET_DEST (PATTERN (NEXT_INSN
3551 (insn_computes_expr)))));
3552 fprintf (gcse_file, "set in insn %d\n",
3553 INSN_UID (insn_computes_expr));
3558 return changed;
3561 /* Perform classic GCSE. This is called by one_classic_gcse_pass after all
3562 the dataflow analysis has been done.
3564 The result is nonzero if a change was made. */
3566 static int
3567 classic_gcse ()
3569 int changed;
3570 rtx insn;
3571 basic_block bb;
3573 /* Note we start at block 1. */
3575 if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
3576 return 0;
3578 changed = 0;
3579 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb->next_bb, EXIT_BLOCK_PTR, next_bb)
3581 /* Reset tables used to keep track of what's still valid [since the
3582 start of the block]. */
3583 reset_opr_set_tables ();
3585 for (insn = bb->head;
3586 insn != NULL && insn != NEXT_INSN (bb->end);
3587 insn = NEXT_INSN (insn))
3589 /* Is insn of form (set (pseudo-reg) ...)? */
3590 if (GET_CODE (insn) == INSN
3591 && GET_CODE (PATTERN (insn)) == SET
3592 && GET_CODE (SET_DEST (PATTERN (insn))) == REG
3593 && REGNO (SET_DEST (PATTERN (insn))) >= FIRST_PSEUDO_REGISTER)
3595 rtx pat = PATTERN (insn);
3596 rtx src = SET_SRC (pat);
3597 struct expr *expr;
3599 if (want_to_gcse_p (src)
3600 /* Is the expression recorded? */
3601 && ((expr = lookup_expr (src, &expr_hash_table)) != NULL)
3602 /* Is the expression available [at the start of the
3603 block]? */
3604 && TEST_BIT (ae_in[bb->index], expr->bitmap_index)
3605 /* Are the operands unchanged since the start of the
3606 block? */
3607 && oprs_not_set_p (src, insn))
3608 changed |= handle_avail_expr (insn, expr);
3611 /* Keep track of everything modified by this insn. */
3612 /* ??? Need to be careful w.r.t. mods done to INSN. */
3613 if (INSN_P (insn))
3614 mark_oprs_set (insn);
3618 return changed;
3621 /* Top level routine to perform one classic GCSE pass.
3623 Return nonzero if a change was made. */
3625 static int
3626 one_classic_gcse_pass (pass)
3627 int pass;
3629 int changed = 0;
3631 gcse_subst_count = 0;
3632 gcse_create_count = 0;
3634 alloc_hash_table (max_cuid, &expr_hash_table, 0);
3635 alloc_rd_mem (last_basic_block, max_cuid);
3636 compute_hash_table (&expr_hash_table);
3637 if (gcse_file)
3638 dump_hash_table (gcse_file, "Expression", &expr_hash_table);
3640 if (expr_hash_table.n_elems > 0)
3642 compute_kill_rd ();
3643 compute_rd ();
3644 alloc_avail_expr_mem (last_basic_block, expr_hash_table.n_elems);
3645 compute_ae_gen (&expr_hash_table);
3646 compute_ae_kill (ae_gen, ae_kill, &expr_hash_table);
3647 compute_available (ae_gen, ae_kill, ae_out, ae_in);
3648 changed = classic_gcse ();
3649 free_avail_expr_mem ();
3652 free_rd_mem ();
3653 free_hash_table (&expr_hash_table);
3655 if (gcse_file)
3657 fprintf (gcse_file, "\n");
3658 fprintf (gcse_file, "GCSE of %s, pass %d: %d bytes needed, %d substs,",
3659 current_function_name, pass, bytes_used, gcse_subst_count);
3660 fprintf (gcse_file, "%d insns created\n", gcse_create_count);
3663 return changed;
3666 /* Compute copy/constant propagation working variables. */
3668 /* Local properties of assignments. */
3669 static sbitmap *cprop_pavloc;
3670 static sbitmap *cprop_absaltered;
3672 /* Global properties of assignments (computed from the local properties). */
3673 static sbitmap *cprop_avin;
3674 static sbitmap *cprop_avout;
3676 /* Allocate vars used for copy/const propagation. N_BLOCKS is the number of
3677 basic blocks. N_SETS is the number of sets. */
3679 static void
3680 alloc_cprop_mem (n_blocks, n_sets)
3681 int n_blocks, n_sets;
3683 cprop_pavloc = sbitmap_vector_alloc (n_blocks, n_sets);
3684 cprop_absaltered = sbitmap_vector_alloc (n_blocks, n_sets);
3686 cprop_avin = sbitmap_vector_alloc (n_blocks, n_sets);
3687 cprop_avout = sbitmap_vector_alloc (n_blocks, n_sets);
3690 /* Free vars used by copy/const propagation. */
3692 static void
3693 free_cprop_mem ()
3695 sbitmap_vector_free (cprop_pavloc);
3696 sbitmap_vector_free (cprop_absaltered);
3697 sbitmap_vector_free (cprop_avin);
3698 sbitmap_vector_free (cprop_avout);
/* For each block, compute whether X is transparent.  X is either an
   expression or an assignment [though we don't care which, for this context
   an assignment is treated as an expression].  For each block where an
   element of X is modified, set (SET_P == 1) or reset (SET_P == 0) the INDX
   bit in BMAP.  */

static void
compute_transp (x, indx, bmap, set_p)
     rtx x;
     int indx;
     sbitmap *bmap;
     int set_p;
{
  int i, j;
  basic_block bb;
  enum rtx_code code;
  reg_set *r;
  const char *fmt;

  /* repeat is used to turn tail-recursion into iteration since GCC
     can't do it when there's no return value.  */
 repeat:

  if (x == 0)
    return;

  code = GET_CODE (x);
  switch (code)
    {
    case REG:
      if (set_p)
	{
	  if (REGNO (x) < FIRST_PSEUDO_REGISTER)
	    {
	      FOR_EACH_BB (bb)
		if (TEST_BIT (reg_set_in_block[bb->index], REGNO (x)))
		  SET_BIT (bmap[bb->index], indx);
	    }
	  else
	    {
	      for (r = reg_set_table[REGNO (x)]; r != NULL; r = r->next)
		SET_BIT (bmap[BLOCK_NUM (r->insn)], indx);
	    }
	}
      else
	{
	  if (REGNO (x) < FIRST_PSEUDO_REGISTER)
	    {
	      FOR_EACH_BB (bb)
		if (TEST_BIT (reg_set_in_block[bb->index], REGNO (x)))
		  RESET_BIT (bmap[bb->index], indx);
	    }
	  else
	    {
	      for (r = reg_set_table[REGNO (x)]; r != NULL; r = r->next)
		RESET_BIT (bmap[BLOCK_NUM (r->insn)], indx);
	    }
	}

      return;

    case MEM:
      FOR_EACH_BB (bb)
	{
	  rtx list_entry = canon_modify_mem_list[bb->index];

	  while (list_entry)
	    {
	      rtx dest, dest_addr;

	      if (GET_CODE (XEXP (list_entry, 0)) == CALL_INSN)
		{
		  if (set_p)
		    SET_BIT (bmap[bb->index], indx);
		  else
		    RESET_BIT (bmap[bb->index], indx);
		  break;
		}

	      /* LIST_ENTRY must be an INSN of some kind that sets memory.
		 Examine each hunk of memory that is modified.  */

	      dest = XEXP (list_entry, 0);
	      list_entry = XEXP (list_entry, 1);
	      dest_addr = XEXP (list_entry, 0);

	      if (canon_true_dependence (dest, GET_MODE (dest), dest_addr,
					 x, rtx_addr_varies_p))
		{
		  if (set_p)
		    SET_BIT (bmap[bb->index], indx);
		  else
		    RESET_BIT (bmap[bb->index], indx);
		  break;
		}
	      list_entry = XEXP (list_entry, 1);
	    }
	}

      x = XEXP (x, 0);
      goto repeat;

    case PC:
    case CC0: /*FIXME*/
    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return;

    default:
      break;
    }

  for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  /* If we are about to do the last recursive call
	     needed at this level, change it into iteration.
	     This function is called enough to be worth it.  */
	  if (i == 0)
	    {
	      x = XEXP (x, i);
	      goto repeat;
	    }

	  compute_transp (XEXP (x, i), indx, bmap, set_p);
	}
      else if (fmt[i] == 'E')
	for (j = 0; j < XVECLEN (x, i); j++)
	  compute_transp (XVECEXP (x, i, j), indx, bmap, set_p);
    }
}
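/* Worked example (editor's sketch; the register and index numbers are
   invented, and the pass contains no such code): suppose expression
   number INDX = 3 is (plus (reg 105) (reg 106)).  compute_transp
   recurses into both REG operands; with SET_P == 1, every block that
   contains a set of reg 105 or reg 106 (found via reg_set_table, or
   reg_set_in_block for hard regs) gets bit 3 set in BMAP, marking the
   expression as *not* transparent there.  The PRE caller instead
   passes SET_P == 0 with BMAP initialized to all ones, so the
   surviving one-bits mark the blocks where the expression is
   transparent.  */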
/* Top level routine to do the dataflow analysis needed by copy/const
   propagation.  */

static void
compute_cprop_data ()
{
  compute_local_properties (cprop_absaltered, cprop_pavloc, NULL,
			    &set_hash_table);
  compute_available (cprop_pavloc, cprop_absaltered,
		     cprop_avout, cprop_avin);
}
/* Copy/constant propagation.  */

/* Maximum number of register uses in an insn that we handle.  */
#define MAX_USES 8

/* Table of uses found in an insn.
   Allocated statically to avoid alloc/free complexity and overhead.  */
static struct reg_use reg_use_table[MAX_USES];

/* Index into `reg_use_table' while building it.  */
static int reg_use_count;

/* Set up a list of register numbers used in INSN.  The found uses are stored
   in `reg_use_table'.  `reg_use_count' is initialized to zero before entry,
   and contains the number of uses in the table upon exit.

   ??? If a register appears multiple times we will record it multiple times.
   This doesn't hurt anything but it will slow things down.  */
static void
find_used_regs (xptr, data)
     rtx *xptr;
     void *data ATTRIBUTE_UNUSED;
{
  int i, j;
  enum rtx_code code;
  const char *fmt;
  rtx x = *xptr;

  /* repeat is used to turn tail-recursion into iteration since GCC
     can't do it when there's no return value.  */
 repeat:
  if (x == 0)
    return;

  code = GET_CODE (x);
  if (REG_P (x))
    {
      if (reg_use_count == MAX_USES)
	return;

      reg_use_table[reg_use_count].reg_rtx = x;
      reg_use_count++;
    }

  /* Recursively scan the operands of this expression.  */

  for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  /* If we are about to do the last recursive call
	     needed at this level, change it into iteration.
	     This function is called enough to be worth it.  */
	  if (i == 0)
	    {
	      x = XEXP (x, 0);
	      goto repeat;
	    }

	  find_used_regs (&XEXP (x, i), data);
	}
      else if (fmt[i] == 'E')
	for (j = 0; j < XVECLEN (x, i); j++)
	  find_used_regs (&XVECEXP (x, i, j), data);
    }
}
/* Try to replace all non-SET_DEST occurrences of FROM in INSN with TO.
   Returns nonzero if successful.  */

static int
try_replace_reg (from, to, insn)
     rtx from, to, insn;
{
  rtx note = find_reg_equal_equiv_note (insn);
  rtx src = 0;
  int success = 0;
  rtx set = single_set (insn);

  validate_replace_src_group (from, to, insn);
  if (num_changes_pending () && apply_change_group ())
    success = 1;

  if (!success && set && reg_mentioned_p (from, SET_SRC (set)))
    {
      /* If the above failed and this is a single set, try to simplify the
	 source of the set given our substitution.  We could perhaps try
	 this for multiple SETs, but it probably won't buy us anything.  */
      src = simplify_replace_rtx (SET_SRC (set), from, to);

      if (!rtx_equal_p (src, SET_SRC (set))
	  && validate_change (insn, &SET_SRC (set), src, 0))
	success = 1;
    }

  /* If we've failed to do replacement, have a single SET, and don't already
     have a note, add a REG_EQUAL note so as not to lose information.  */
  if (!success && note == 0 && set != 0)
    note = set_unique_reg_note (insn, REG_EQUAL, copy_rtx (src));

  /* If there is already a NOTE, update the expression in it with our
     replacement.  */
  else if (note != 0)
    XEXP (note, 0) = simplify_replace_rtx (XEXP (note, 0), from, to);

  /* A REG_EQUAL note may get simplified into a plain register.  We don't
     allow that, so remove such a note.  This case ought not to arise,
     because earlier code ought to have synthesized a reg-reg move, but
     be on the safe side.  */
  if (note && REG_P (XEXP (note, 0)))
    remove_note (insn, note);

  return success;
}
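/* Usage sketch for try_replace_reg (hypothetical operands): replacing
   (reg 110) with (const_int 4) in

       (set (reg 111) (mult (reg 110) (reg 112)))

   first goes through validate_replace_src_group; if the target happens
   to have no mult-by-immediate pattern that fails, simplify_replace_rtx
   then yields (mult (const_int 4) (reg 112)), and if that is still not
   a valid SET_SRC the constant is at least recorded in a REG_EQUAL note
   so the information survives for later passes.  */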
/* Find a set of register REGNO that is available on entry to INSN's block.
   Returns NULL if no such set is found.  */

static struct expr *
find_avail_set (regno, insn)
     int regno;
     rtx insn;
{
  /* SET1 contains the last set found that can be returned to the caller for
     use in a substitution.  */
  struct expr *set1 = 0;

  /* Loops are not possible here.  To get a loop we would need two sets
     available at the start of the block containing INSN.  I.e. we would
     need two sets like this available at the start of the block:

       (set (reg X) (reg Y))
       (set (reg Y) (reg X))

     This cannot happen since the set of (reg Y) would have killed the
     set of (reg X) making it unavailable at the start of this block.  */
  while (1)
    {
      rtx src;
      struct expr *set = lookup_set (regno, &set_hash_table);

      /* Find a set that is available at the start of the block
	 which contains INSN.  */
      while (set)
	{
	  if (TEST_BIT (cprop_avin[BLOCK_NUM (insn)], set->bitmap_index))
	    break;
	  set = next_set (regno, set);
	}

      /* If no available set was found we've reached the end of the
	 (possibly empty) copy chain.  */
      if (set == 0)
	break;

      if (GET_CODE (set->expr) != SET)
	abort ();

      src = SET_SRC (set->expr);

      /* We know the set is available.
	 Now check that SRC is ANTLOC (i.e. none of the source operands
	 have changed since the start of the block).

	 If the source operand changed, we may still use it for the next
	 iteration of this loop, but we may not use it for substitutions.  */

      if (CONSTANT_P (src) || oprs_not_set_p (src, insn))
	set1 = set;

      /* If the source of the set is anything except a register, then
	 we have reached the end of the copy chain.  */
      if (GET_CODE (src) != REG)
	break;

      /* Follow the copy chain, i.e. start another iteration of the loop
	 and see if we have an available copy into SRC.  */
      regno = REGNO (src);
    }

  /* SET1 holds the last set that was available and anticipatable at
     INSN.  */
  return set1;
}
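/* Example of the copy chain walk (register numbers invented): with

       (set (reg 120) (reg 121))
       (set (reg 121) (const_int 0))

   both available on entry to INSN's block, a query for regno 120
   first finds the reg 121 copy, then iterates with REGNO = 121 and
   finds the constant set.  SET1 ends up as the last set whose source
   was also unchanged within the block, so the caller may substitute
   (const_int 0) directly for uses of reg 120.  */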
/* Subroutine of cprop_insn that tries to propagate constants into
   JUMP_INSNS.  JUMP must be a conditional jump.  If SETCC is non-NULL
   it is the instruction that immediately precedes JUMP, and must be a
   single SET of a register.  FROM is what we will try to replace,
   SRC is the constant we will try to substitute for it.  Returns nonzero
   if a change was made.  */

static int
cprop_jump (bb, setcc, jump, from, src)
     basic_block bb;
     rtx setcc;
     rtx jump;
     rtx from;
     rtx src;
{
  rtx new, new_set;
  rtx set = pc_set (jump);

  /* First substitute the SETCC condition into the SET_SRC of the JUMP,
     then substitute the given value into this expanded JUMP.  */
  if (setcc != NULL
      && !modified_between_p (from, setcc, jump)
      && !modified_between_p (src, setcc, jump))
    {
      rtx setcc_set = single_set (setcc);
      new_set = simplify_replace_rtx (SET_SRC (set),
				      SET_DEST (setcc_set),
				      SET_SRC (setcc_set));
    }
  else
    new_set = set;

  new = simplify_replace_rtx (new_set, from, src);

  /* If no simplification can be made, then try the next
     register.  */
  if (rtx_equal_p (new, new_set) || rtx_equal_p (new, SET_SRC (set)))
    return 0;

  /* If this is now a no-op delete it, otherwise this must be a valid insn.  */
  if (new == pc_rtx)
    delete_insn (jump);
  else
    {
      /* Ensure the value computed inside the jump insn is equivalent
	 to the one computed by setcc.  */
      if (setcc
	  && modified_in_p (new, setcc))
	return 0;
      if (! validate_change (jump, &SET_SRC (set), new, 0))
	return 0;

      /* If this has turned into an unconditional jump,
	 then put a barrier after it so that the unreachable
	 code will be deleted.  */
      if (GET_CODE (SET_SRC (set)) == LABEL_REF)
	emit_barrier_after (jump);
    }

#ifdef HAVE_cc0
  /* Delete the cc0 setter.  */
  if (setcc != NULL && CC0_P (SET_DEST (single_set (setcc))))
    delete_insn (setcc);
#endif

  run_jump_opt_after_gcse = 1;

  const_prop_count++;
  if (gcse_file != NULL)
    {
      fprintf (gcse_file,
	       "CONST-PROP: Replacing reg %d in jump_insn %d with constant ",
	       REGNO (from), INSN_UID (jump));
      print_rtl (gcse_file, src);
      fprintf (gcse_file, "\n");
    }
  purge_dead_edges (bb);

  return 1;
}
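/* Sketch of the two interesting outcomes (illustrative RTL only, with
   invented numbers): propagating (const_int 0) for reg 130 into

       (set (pc) (if_then_else (ne (reg 130) (const_int 0))
                               (label_ref 23) (pc)))

   folds the SET_SRC to (pc), so the jump is deleted as a no-op.  Had
   the comparison been EQ, the source would fold to (label_ref 23),
   the branch becomes unconditional, and the barrier emitted above
   lets the now unreachable code be removed.  */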
static bool
constprop_register (insn, from, to, alter_jumps)
     rtx insn;
     rtx from;
     rtx to;
     int alter_jumps;
{
  rtx sset;

  /* Check for reg or cc0 setting instructions followed by
     conditional branch instructions first.  */
  if (alter_jumps
      && (sset = single_set (insn)) != NULL
      && NEXT_INSN (insn)
      && any_condjump_p (NEXT_INSN (insn)) && onlyjump_p (NEXT_INSN (insn)))
    {
      rtx dest = SET_DEST (sset);
      if ((REG_P (dest) || CC0_P (dest))
	  && cprop_jump (BLOCK_FOR_INSN (insn), insn, NEXT_INSN (insn), from, to))
	return 1;
    }

  /* Handle normal insns next.  */
  if (GET_CODE (insn) == INSN
      && try_replace_reg (from, to, insn))
    return 1;

  /* Try to propagate a CONST_INT into a conditional jump.
     We're pretty specific about what we will handle in this
     code; we can extend this as necessary over time.

     Right now the insn in question must look like
     (set (pc) (if_then_else ...))  */
  else if (alter_jumps && any_condjump_p (insn) && onlyjump_p (insn))
    return cprop_jump (BLOCK_FOR_INSN (insn), NULL, insn, from, to);
  return 0;
}
/* Perform constant and copy propagation on INSN.
   The result is nonzero if a change was made.  */

static int
cprop_insn (insn, alter_jumps)
     rtx insn;
     int alter_jumps;
{
  struct reg_use *reg_used;
  int changed = 0;
  rtx note;

  if (!INSN_P (insn))
    return 0;

  reg_use_count = 0;
  note_uses (&PATTERN (insn), find_used_regs, NULL);

  note = find_reg_equal_equiv_note (insn);

  /* We may win even when propagating constants into notes.  */
  if (note)
    find_used_regs (&XEXP (note, 0), NULL);

  for (reg_used = &reg_use_table[0]; reg_use_count > 0;
       reg_used++, reg_use_count--)
    {
      unsigned int regno = REGNO (reg_used->reg_rtx);
      rtx pat, src;
      struct expr *set;

      /* Ignore registers created by GCSE.
	 We do this because ...  */
      if (regno >= max_gcse_regno)
	continue;

      /* If the register has already been set in this block, there's
	 nothing we can do.  */
      if (! oprs_not_set_p (reg_used->reg_rtx, insn))
	continue;

      /* Find an assignment that sets reg_used and is available
	 at the start of the block.  */
      set = find_avail_set (regno, insn);
      if (! set)
	continue;

      pat = set->expr;
      /* ??? We might be able to handle PARALLELs.  Later.  */
      if (GET_CODE (pat) != SET)
	abort ();

      src = SET_SRC (pat);

      /* Constant propagation.  */
      if (CONSTANT_P (src))
	{
	  if (constprop_register (insn, reg_used->reg_rtx, src, alter_jumps))
	    {
	      changed = 1;
	      const_prop_count++;
	      if (gcse_file != NULL)
		{
		  fprintf (gcse_file, "GLOBAL CONST-PROP: Replacing reg %d in ", regno);
		  fprintf (gcse_file, "insn %d with constant ", INSN_UID (insn));
		  print_rtl (gcse_file, src);
		  fprintf (gcse_file, "\n");
		}
	    }
	}
      else if (GET_CODE (src) == REG
	       && REGNO (src) >= FIRST_PSEUDO_REGISTER
	       && REGNO (src) != regno)
	{
	  if (try_replace_reg (reg_used->reg_rtx, src, insn))
	    {
	      changed = 1;
	      copy_prop_count++;
	      if (gcse_file != NULL)
		{
		  fprintf (gcse_file, "GLOBAL COPY-PROP: Replacing reg %d in insn %d",
			   regno, INSN_UID (insn));
		  fprintf (gcse_file, " with reg %d\n", REGNO (src));
		}

	      /* The original insn setting reg_used may or may not now be
		 deletable.  We leave the deletion to flow.  */
	      /* FIXME: If it turns out that the insn isn't deletable,
		 then we may have unnecessarily extended register lifetimes
		 and made things worse.  */
	    }
	}
    }

  return changed;
}
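/* Net effect on a typical insn (register numbers invented): if

       (set (reg 140) (const_int 8))

   is available on entry to the block and reg 140 is unmodified up to

       (set (reg 141) (plus (reg 140) (reg 142)))

   then cprop_insn rewrites the addition to use (const_int 8), bumps
   const_prop_count, and leaves the possibly dead setter of reg 140
   for flow to delete.  */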
/* Like find_used_regs, but avoid recording uses that appear in
   input-output contexts such as zero_extract or pre_dec.  This
   restricts the cases we consider to those for which local cprop
   can legitimately make replacements.  */

static void
local_cprop_find_used_regs (xptr, data)
     rtx *xptr;
     void *data;
{
  rtx x = *xptr;

  if (x == 0)
    return;

  switch (GET_CODE (x))
    {
    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
    case STRICT_LOW_PART:
      return;

    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
    case PRE_MODIFY:
    case POST_MODIFY:
      /* Can only legitimately appear this early in the context of
	 stack pushes for function arguments, but handle all of the
	 codes nonetheless.  */
      return;

    case SUBREG:
      /* Setting a subreg of a register larger than word_mode leaves
	 the non-written words unchanged.  */
      if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) > BITS_PER_WORD)
	return;
      break;

    default:
      break;
    }

  find_used_regs (xptr, data);
}
/* LIBCALL_SP is a zero-terminated array of insns at the end of a libcall;
   their REG_EQUAL notes need updating.  */

static bool
do_local_cprop (x, insn, alter_jumps, libcall_sp)
     rtx x;
     rtx insn;
     int alter_jumps;
     rtx *libcall_sp;
{
  rtx newreg = NULL, newcnst = NULL;

  /* Rule out USE instructions and ASM statements as we don't want to
     change the hard registers mentioned.  */
  if (GET_CODE (x) == REG
      && (REGNO (x) >= FIRST_PSEUDO_REGISTER
	  || (GET_CODE (PATTERN (insn)) != USE
	      && asm_noperands (PATTERN (insn)) < 0)))
    {
      cselib_val *val = cselib_lookup (x, GET_MODE (x), 0);
      struct elt_loc_list *l;

      if (!val)
	return false;
      for (l = val->locs; l; l = l->next)
	{
	  rtx this_rtx = l->loc;
	  rtx note;

	  if (l->in_libcall)
	    continue;

	  if (CONSTANT_P (this_rtx)
	      && GET_CODE (this_rtx) != CONSTANT_P_RTX)
	    newcnst = this_rtx;
	  if (REG_P (this_rtx) && REGNO (this_rtx) >= FIRST_PSEUDO_REGISTER
	      /* Don't copy propagate if it has an attached REG_EQUIV note.
		 At this point only function parameters should have
		 REG_EQUIV notes, and if the argument slot is used somewhere
		 explicitly, it means the address of the parameter has been
		 taken, so we should not extend the lifetime of the
		 pseudo.  */
	      && (!(note = find_reg_note (l->setting_insn, REG_EQUIV, NULL_RTX))
		  || GET_CODE (XEXP (note, 0)) != MEM))
	    newreg = this_rtx;
	}
      if (newcnst && constprop_register (insn, x, newcnst, alter_jumps))
	{
	  /* If we find a case where we can't fix the retval REG_EQUAL notes
	     to match the new register, we either have to abandon this
	     replacement or fix delete_trivially_dead_insns to preserve the
	     setting insn, or make it delete the REG_EQUAL note, and fix up
	     all passes that require the REG_EQUAL note there.  */
	  if (!adjust_libcall_notes (x, newcnst, insn, libcall_sp))
	    abort ();
	  if (gcse_file != NULL)
	    {
	      fprintf (gcse_file, "LOCAL CONST-PROP: Replacing reg %d in ",
		       REGNO (x));
	      fprintf (gcse_file, "insn %d with constant ",
		       INSN_UID (insn));
	      print_rtl (gcse_file, newcnst);
	      fprintf (gcse_file, "\n");
	    }
	  const_prop_count++;
	  return true;
	}
      else if (newreg && newreg != x && try_replace_reg (x, newreg, insn))
	{
	  adjust_libcall_notes (x, newreg, insn, libcall_sp);
	  if (gcse_file != NULL)
	    {
	      fprintf (gcse_file,
		       "LOCAL COPY-PROP: Replacing reg %d in insn %d",
		       REGNO (x), INSN_UID (insn));
	      fprintf (gcse_file, " with reg %d\n", REGNO (newreg));
	    }
	  copy_prop_count++;
	  return true;
	}
    }
  return false;
}
/* LIBCALL_SP is a zero-terminated array of insns at the end of a libcall;
   their REG_EQUAL notes need updating to reflect that OLDREG has been
   replaced with NEWVAL in INSN.  Return true if all substitutions could
   be made.  */

static bool
adjust_libcall_notes (oldreg, newval, insn, libcall_sp)
     rtx oldreg, newval, insn, *libcall_sp;
{
  rtx end;

  while ((end = *libcall_sp++))
    {
      rtx note = find_reg_equal_equiv_note (end);

      if (! note)
	continue;

      if (REG_P (newval))
	{
	  if (reg_set_between_p (newval, PREV_INSN (insn), end))
	    {
	      do
		{
		  note = find_reg_equal_equiv_note (end);
		  if (! note)
		    continue;
		  if (reg_mentioned_p (newval, XEXP (note, 0)))
		    return false;
		}
	      while ((end = *libcall_sp++));
	      return true;
	    }
	}
      XEXP (note, 0) = replace_rtx (XEXP (note, 0), oldreg, newval);
      insn = end;
    }
  return true;
}
#define MAX_NESTED_LIBCALLS 9

static void
local_cprop_pass (alter_jumps)
     int alter_jumps;
{
  rtx insn;
  struct reg_use *reg_used;
  rtx libcall_stack[MAX_NESTED_LIBCALLS + 1], *libcall_sp;
  bool changed = false;

  cselib_init ();
  libcall_sp = &libcall_stack[MAX_NESTED_LIBCALLS];
  *libcall_sp = 0;
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      if (INSN_P (insn))
	{
	  rtx note = find_reg_note (insn, REG_LIBCALL, NULL_RTX);

	  if (note)
	    {
	      if (libcall_sp == libcall_stack)
		abort ();
	      *--libcall_sp = XEXP (note, 0);
	    }
	  note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
	  if (note)
	    libcall_sp++;
	  note = find_reg_equal_equiv_note (insn);
	  do
	    {
	      reg_use_count = 0;
	      note_uses (&PATTERN (insn), local_cprop_find_used_regs, NULL);
	      if (note)
		local_cprop_find_used_regs (&XEXP (note, 0), NULL);

	      for (reg_used = &reg_use_table[0]; reg_use_count > 0;
		   reg_used++, reg_use_count--)
		if (do_local_cprop (reg_used->reg_rtx, insn, alter_jumps,
				    libcall_sp))
		  {
		    changed = true;
		    break;
		  }
	    }
	  while (reg_use_count);
	}
      cselib_process_insn (insn);
    }
  cselib_finish ();
  /* Global analysis may get into infinite loops for unreachable blocks.  */
  if (changed && alter_jumps)
    {
      delete_unreachable_blocks ();
      free_reg_set_mem ();
      alloc_reg_set_mem (max_reg_num ());
      compute_sets (get_insns ());
    }
}
/* Forward propagate copies.  This includes copies and constants.  Return
   nonzero if a change was made.  */

static int
cprop (alter_jumps)
     int alter_jumps;
{
  int changed;
  basic_block bb;
  rtx insn;

  /* Note we start at block 1.  */
  if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
    {
      if (gcse_file != NULL)
	fprintf (gcse_file, "\n");
      return 0;
    }

  changed = 0;
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb->next_bb, EXIT_BLOCK_PTR, next_bb)
    {
      /* Reset tables used to keep track of what's still valid [since the
	 start of the block].  */
      reset_opr_set_tables ();

      for (insn = bb->head;
	   insn != NULL && insn != NEXT_INSN (bb->end);
	   insn = NEXT_INSN (insn))
	if (INSN_P (insn))
	  {
	    changed |= cprop_insn (insn, alter_jumps);

	    /* Keep track of everything modified by this insn.  */
	    /* ??? Need to be careful w.r.t. mods done to INSN.  Don't
	       call mark_oprs_set if we turned the insn into a NOTE.  */
	    if (GET_CODE (insn) != NOTE)
	      mark_oprs_set (insn);
	  }
    }

  if (gcse_file != NULL)
    fprintf (gcse_file, "\n");

  return changed;
}
/* Similar to get_condition, only the resulting condition must be
   valid at JUMP, instead of at EARLIEST.

   This differs from noce_get_condition in ifcvt.c in that we prefer not to
   settle for the condition variable in the jump instruction being integral.
   We prefer to be able to record the value of a user variable, rather than
   the value of a temporary used in a condition.  This could be solved by
   recording the value of *every* register scanned by canonicalize_condition,
   but this would require some code reorganization.  */

static rtx
fis_get_condition (jump)
     rtx jump;
{
  rtx cond, set, tmp, insn, earliest;
  bool reverse;

  if (! any_condjump_p (jump))
    return NULL_RTX;

  set = pc_set (jump);
  cond = XEXP (SET_SRC (set), 0);

  /* If this branches to JUMP_LABEL when the condition is false,
     reverse the condition.  */
  reverse = (GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
	     && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (jump));

  /* Use canonicalize_condition to do the dirty work of manipulating
     MODE_CC values and COMPARE rtx codes.  */
  tmp = canonicalize_condition (jump, cond, reverse, &earliest, NULL_RTX);
  if (!tmp)
    return NULL_RTX;

  /* Verify that the given condition is valid at JUMP by virtue of not
     having been modified since EARLIEST.  */
  for (insn = earliest; insn != jump; insn = NEXT_INSN (insn))
    if (INSN_P (insn) && modified_in_p (tmp, insn))
      break;
  if (insn == jump)
    return tmp;

  /* The condition was modified.  See if we can get a partial result
     that doesn't follow all the reversals.  Perhaps combine can fold
     them together later.  */
  tmp = XEXP (tmp, 0);
  if (!REG_P (tmp) || GET_MODE_CLASS (GET_MODE (tmp)) != MODE_INT)
    return NULL_RTX;
  tmp = canonicalize_condition (jump, cond, reverse, &earliest, tmp);
  if (!tmp)
    return NULL_RTX;

  /* For sanity's sake, re-validate the new result.  */
  for (insn = earliest; insn != jump; insn = NEXT_INSN (insn))
    if (INSN_P (insn) && modified_in_p (tmp, insn))
      return NULL_RTX;

  return tmp;
}
/* Find the implicit sets of a function.  An "implicit set" is a constraint
   on the value of a variable, implied by a conditional jump.  For example,
   following "if (x == 2)", the then branch may be optimized as though the
   conditional performed an "explicit set", in this example, "x = 2".  This
   function records the set patterns that are implicit at the start of each
   basic block.  */

static void
find_implicit_sets ()
{
  basic_block bb, dest;
  unsigned int count;
  rtx cond, new;

  count = 0;
  FOR_EACH_BB (bb)
    /* Check for more than one successor.  */
    if (bb->succ && bb->succ->succ_next)
      {
	cond = fis_get_condition (bb->end);

	if (cond
	    && (GET_CODE (cond) == EQ || GET_CODE (cond) == NE)
	    && GET_CODE (XEXP (cond, 0)) == REG
	    && REGNO (XEXP (cond, 0)) >= FIRST_PSEUDO_REGISTER
	    && CONSTANT_P (XEXP (cond, 1)))
	  {
	    dest = GET_CODE (cond) == EQ ? BRANCH_EDGE (bb)->dest
					 : FALLTHRU_EDGE (bb)->dest;

	    if (dest && ! dest->pred->pred_next
		&& dest != EXIT_BLOCK_PTR)
	      {
		new = gen_rtx_SET (VOIDmode, XEXP (cond, 0),
				   XEXP (cond, 1));
		implicit_sets[dest->index] = new;
		if (gcse_file)
		  {
		    fprintf (gcse_file, "Implicit set of reg %d in ",
			     REGNO (XEXP (cond, 0)));
		    fprintf (gcse_file, "basic block %d\n", dest->index);
		  }
		count++;
	      }
	  }
      }

  if (gcse_file)
    fprintf (gcse_file, "Found %d implicit sets\n", count);
}
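/* Example (editor's sketch, register number invented): for RTL
   corresponding to "if (x == 2)", i.e.

       (set (pc) (if_then_else (eq (reg 150) (const_int 2))
                               (label_ref L) (pc)))

   the branch target block, provided it has no other predecessor,
   gets implicit_sets[dest->index] = (set (reg 150) (const_int 2)).
   The SET hash table construction then treats this like a real
   assignment at the head of that block.  */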
/* Perform one copy/constant propagation pass.
   PASS is the pass count.  If CPROP_JUMPS is true, perform constant
   propagation into conditional jumps.  If BYPASS_JUMPS is true,
   perform conditional jump bypassing optimizations.  */

static int
one_cprop_pass (pass, cprop_jumps, bypass_jumps)
     int pass;
     int cprop_jumps;
     int bypass_jumps;
{
  int changed = 0;

  const_prop_count = 0;
  copy_prop_count = 0;

  local_cprop_pass (cprop_jumps);

  /* Determine implicit sets.  */
  implicit_sets = (rtx *) xcalloc (last_basic_block, sizeof (rtx));
  find_implicit_sets ();

  alloc_hash_table (max_cuid, &set_hash_table, 1);
  compute_hash_table (&set_hash_table);

  /* Free implicit_sets before peak usage.  */
  free (implicit_sets);
  implicit_sets = NULL;

  if (gcse_file)
    dump_hash_table (gcse_file, "SET", &set_hash_table);
  if (set_hash_table.n_elems > 0)
    {
      alloc_cprop_mem (last_basic_block, set_hash_table.n_elems);
      compute_cprop_data ();
      changed = cprop (cprop_jumps);
      if (bypass_jumps)
	changed |= bypass_conditional_jumps ();
      free_cprop_mem ();
    }

  free_hash_table (&set_hash_table);

  if (gcse_file)
    {
      fprintf (gcse_file, "CPROP of %s, pass %d: %d bytes needed, ",
	       current_function_name, pass, bytes_used);
      fprintf (gcse_file, "%d const props, %d copy props\n\n",
	       const_prop_count, copy_prop_count);
    }

  /* Global analysis may get into infinite loops for unreachable blocks.  */
  if (changed && cprop_jumps)
    delete_unreachable_blocks ();

  return changed;
}
/* Bypass conditional jumps.  */

/* The value of last_basic_block at the beginning of the jump_bypass
   pass.  The use of redirect_edge_and_branch_force may introduce new
   basic blocks, but the data flow analysis is only valid for basic
   block indices less than bypass_last_basic_block.  */

static int bypass_last_basic_block;

/* Find a set of REGNO to a constant that is available at the end of basic
   block BB.  Returns NULL if no such set is found.  Based heavily upon
   find_avail_set.  */

static struct expr *
find_bypass_set (regno, bb)
     int regno;
     int bb;
{
  struct expr *result = 0;

  for (;;)
    {
      rtx src;
      struct expr *set = lookup_set (regno, &set_hash_table);

      while (set)
	{
	  if (TEST_BIT (cprop_avout[bb], set->bitmap_index))
	    break;
	  set = next_set (regno, set);
	}

      if (set == 0)
	break;

      if (GET_CODE (set->expr) != SET)
	abort ();

      src = SET_SRC (set->expr);
      if (CONSTANT_P (src))
	result = set;

      if (GET_CODE (src) != REG)
	break;

      regno = REGNO (src);
    }
  return result;
}
/* Subroutine of bypass_conditional_jumps that attempts to bypass the given
   basic block BB which has more than one predecessor.  If not NULL, SETCC
   is the first instruction of BB, which is immediately followed by JUMP_INSN
   JUMP.  Otherwise, SETCC is NULL, and JUMP is the first insn of BB.
   Returns nonzero if a change was made.  */

static int
bypass_block (bb, setcc, jump)
     basic_block bb;
     rtx setcc, jump;
{
  rtx insn, note;
  edge e, enext;
  int i, change;

  insn = (setcc != NULL) ? setcc : jump;

  /* Determine set of register uses in INSN.  */
  reg_use_count = 0;
  note_uses (&PATTERN (insn), find_used_regs, NULL);
  note = find_reg_equal_equiv_note (insn);
  if (note)
    find_used_regs (&XEXP (note, 0), NULL);

  change = 0;
  for (e = bb->pred; e; e = enext)
    {
      enext = e->pred_next;
      if (e->flags & EDGE_COMPLEX)
	continue;

      /* We can't redirect edges from new basic blocks.  */
      if (e->src->index >= bypass_last_basic_block)
	continue;

      for (i = 0; i < reg_use_count; i++)
	{
	  struct reg_use *reg_used = &reg_use_table[i];
	  unsigned int regno = REGNO (reg_used->reg_rtx);
	  basic_block dest, old_dest;
	  struct expr *set;
	  rtx src, new;

	  if (regno >= max_gcse_regno)
	    continue;

	  set = find_bypass_set (regno, e->src->index);

	  if (! set)
	    continue;

	  src = SET_SRC (pc_set (jump));

	  if (setcc != NULL)
	    src = simplify_replace_rtx (src,
					SET_DEST (PATTERN (setcc)),
					SET_SRC (PATTERN (setcc)));

	  new = simplify_replace_rtx (src, reg_used->reg_rtx,
				      SET_SRC (set->expr));

	  if (new == pc_rtx)
	    dest = FALLTHRU_EDGE (bb)->dest;
	  else if (GET_CODE (new) == LABEL_REF)
	    dest = BLOCK_FOR_INSN (XEXP (new, 0));
	  else
	    dest = NULL;

	  old_dest = e->dest;
	  if (dest != NULL
	      && dest != old_dest
	      && dest != EXIT_BLOCK_PTR)
	    {
	      redirect_edge_and_branch_force (e, dest);

	      /* Copy the register setter to the redirected edge.
		 Don't copy CC0 setters, as CC0 is dead after jump.  */
	      if (setcc)
		{
		  rtx pat = PATTERN (setcc);
		  if (!CC0_P (SET_DEST (pat)))
		    insert_insn_on_edge (copy_insn (pat), e);
		}

	      if (gcse_file != NULL)
		{
		  fprintf (gcse_file, "JUMP-BYPASS: Proved reg %d in jump_insn %d equals constant ",
			   regno, INSN_UID (jump));
		  print_rtl (gcse_file, SET_SRC (set->expr));
		  fprintf (gcse_file, "\nBypass edge from %d->%d to %d\n",
			   e->src->index, old_dest->index, dest->index);
		}
	      change = 1;
	      break;
	    }
	}
    }
  return change;
}
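/* Bypass example (block, label and register numbers invented): if
   edge 3->5 arrives with (set (reg 160) (const_int 0)) available at
   the end of block 3, and block 5 is just

       (set (reg 161) (eq (reg 160) (const_int 0)))
       (set (pc) (if_then_else (ne (reg 161) (const_int 0))
                               (label_ref L7) (pc)))

   then substituting first the setcc and then the constant folds the
   condition to true, NEW becomes (label_ref L7), and edge 3->5 is
   redirected straight to L7's block, with the reg 161 setter copied
   onto the redirected edge.  */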
/* Find basic blocks with more than one predecessor that only contain a
   single conditional jump.  If the result of the comparison is known at
   compile-time from any incoming edge, redirect that edge to the
   appropriate target.  Returns nonzero if a change was made.

   This function is now mis-named, because we also handle indirect jumps.  */

static int
bypass_conditional_jumps ()
{
  basic_block bb;
  int changed;
  rtx setcc;
  rtx insn;
  rtx dest;

  /* Note we start at block 1.  */
  if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
    return 0;

  bypass_last_basic_block = last_basic_block;

  changed = 0;
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb->next_bb,
		  EXIT_BLOCK_PTR, next_bb)
    {
      /* Check for more than one predecessor.  */
      if (bb->pred && bb->pred->pred_next)
	{
	  setcc = NULL_RTX;
	  for (insn = bb->head;
	       insn != NULL && insn != NEXT_INSN (bb->end);
	       insn = NEXT_INSN (insn))
	    if (GET_CODE (insn) == INSN)
	      {
		if (setcc)
		  break;
		if (GET_CODE (PATTERN (insn)) != SET)
		  break;

		dest = SET_DEST (PATTERN (insn));
		if (REG_P (dest) || CC0_P (dest))
		  setcc = insn;
		else
		  break;
	      }
	    else if (GET_CODE (insn) == JUMP_INSN)
	      {
		if ((any_condjump_p (insn) || computed_jump_p (insn))
		    && onlyjump_p (insn))
		  changed |= bypass_block (bb, setcc, insn);
		break;
	      }
	    else if (INSN_P (insn))
	      break;
	}
    }

  /* If we bypassed any register setting insns, we inserted a
     copy on the redirected edge.  These need to be committed.  */
  if (changed)
    commit_edge_insertions ();

  return changed;
}
/* Compute PRE+LCM working variables.  */

/* Local properties of expressions.  */
/* Nonzero for expressions that are transparent in the block.  */
static sbitmap *transp;

/* Nonzero for expressions that are transparent at the end of the block.
   This is only zero for expressions killed by abnormal critical edges
   created by calls.  */
static sbitmap *transpout;

/* Nonzero for expressions that are computed (available) in the block.  */
static sbitmap *comp;

/* Nonzero for expressions that are locally anticipatable in the block.  */
static sbitmap *antloc;

/* Nonzero for expressions where this block is an optimal computation
   point.  */
static sbitmap *pre_optimal;

/* Nonzero for expressions which are redundant in a particular block.  */
static sbitmap *pre_redundant;

/* Nonzero for expressions which should be inserted on a specific edge.  */
static sbitmap *pre_insert_map;

/* Nonzero for expressions which should be deleted in a specific block.  */
static sbitmap *pre_delete_map;

/* Contains the edge_list returned by pre_edge_lcm.  */
static struct edge_list *edge_list;

/* Redundant insns.  */
static sbitmap pre_redundant_insns;
/* Allocate vars used for PRE analysis.  */

static void
alloc_pre_mem (n_blocks, n_exprs)
     int n_blocks, n_exprs;
{
  transp = sbitmap_vector_alloc (n_blocks, n_exprs);
  comp = sbitmap_vector_alloc (n_blocks, n_exprs);
  antloc = sbitmap_vector_alloc (n_blocks, n_exprs);

  pre_optimal = NULL;
  pre_redundant = NULL;
  pre_insert_map = NULL;
  pre_delete_map = NULL;
  ae_in = NULL;
  ae_out = NULL;
  ae_kill = sbitmap_vector_alloc (n_blocks, n_exprs);

  /* pre_insert and pre_delete are allocated later.  */
}
4971 /* Free vars used for PRE analysis. */
4973 static void
4974 free_pre_mem ()
4976 sbitmap_vector_free (transp);
4977 sbitmap_vector_free (comp);
4979 /* ANTLOC and AE_KILL are freed just after pre_lcm finishes. */
4981 if (pre_optimal)
4982 sbitmap_vector_free (pre_optimal);
4983 if (pre_redundant)
4984 sbitmap_vector_free (pre_redundant);
4985 if (pre_insert_map)
4986 sbitmap_vector_free (pre_insert_map);
4987 if (pre_delete_map)
4988 sbitmap_vector_free (pre_delete_map);
4989 if (ae_in)
4990 sbitmap_vector_free (ae_in);
4991 if (ae_out)
4992 sbitmap_vector_free (ae_out);
4994 transp = comp = NULL;
4995 pre_optimal = pre_redundant = pre_insert_map = pre_delete_map = NULL;
4996 ae_in = ae_out = NULL;
/* Top level routine to do the dataflow analysis needed by PRE.  */

static void
compute_pre_data ()
{
  sbitmap trapping_expr;
  basic_block bb;
  unsigned int ui;

  compute_local_properties (transp, comp, antloc, &expr_hash_table);
  sbitmap_vector_zero (ae_kill, last_basic_block);

  /* Collect expressions which might trap.  */
  trapping_expr = sbitmap_alloc (expr_hash_table.n_elems);
  sbitmap_zero (trapping_expr);
  for (ui = 0; ui < expr_hash_table.size; ui++)
    {
      struct expr *e;
      for (e = expr_hash_table.table[ui]; e != NULL; e = e->next_same_hash)
	if (may_trap_p (e->expr))
	  SET_BIT (trapping_expr, e->bitmap_index);
    }

  /* Compute ae_kill for each basic block using:

     ~(TRANSP | COMP)

     This is significantly faster than compute_ae_kill.  */

  FOR_EACH_BB (bb)
    {
      edge e;

      /* If the current block is the destination of an abnormal edge, we
	 kill all trapping expressions because we won't be able to properly
	 place the instruction on the edge.  So make them neither
	 anticipatable nor transparent.  This is fairly conservative.  */
      for (e = bb->pred; e ; e = e->pred_next)
	if (e->flags & EDGE_ABNORMAL)
	  {
	    sbitmap_difference (antloc[bb->index], antloc[bb->index], trapping_expr);
	    sbitmap_difference (transp[bb->index], transp[bb->index], trapping_expr);
	    break;
	  }

      sbitmap_a_or_b (ae_kill[bb->index], transp[bb->index], comp[bb->index]);
      sbitmap_not (ae_kill[bb->index], ae_kill[bb->index]);
    }

  edge_list = pre_edge_lcm (gcse_file, expr_hash_table.n_elems, transp, comp, antloc,
			    ae_kill, &pre_insert_map, &pre_delete_map);
  sbitmap_vector_free (antloc);
  antloc = NULL;
  sbitmap_vector_free (ae_kill);
  ae_kill = NULL;
  sbitmap_free (trapping_expr);
}
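/* The ~(TRANSP | COMP) computation in bit terms (illustrative): for a
   given expression and block, TRANSP = 1 means no operand of the
   expression is modified in the block, and COMP = 1 means the block
   computes the expression and the value is still valid at the block
   end.  The expression is killed exactly when it is neither, e.g.
   TRANSP = 0, COMP = 0 gives AE_KILL = ~(0 | 0) = 1, while any block
   that is transparent or recomputes the value does not kill it.  */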
/* PRE utilities */

/* Return nonzero if an occurrence of expression EXPR in OCCR_BB would reach
   block BB.

   VISITED is a pointer to a working buffer for tracking which BB's have
   been visited.  It is NULL for the top-level call.

   We treat reaching expressions that go through blocks containing the same
   reaching expression as "not reaching".  E.g. if EXPR is generated in blocks
   2 and 3, INSN is in block 4, and 2->3->4, we treat the expression in block
   2 as not reaching.  The intent is to improve the probability of finding
   only one reaching expression and to reduce register lifetimes by picking
   the closest such expression.  */

static int
pre_expr_reaches_here_p_work (occr_bb, expr, bb, visited)
     basic_block occr_bb;
     struct expr *expr;
     basic_block bb;
     char *visited;
{
  edge pred;

  for (pred = bb->pred; pred != NULL; pred = pred->pred_next)
    {
      basic_block pred_bb = pred->src;

      if (pred->src == ENTRY_BLOCK_PTR
	  /* Has this predecessor already been visited?  */
	  || visited[pred_bb->index])
	;/* Nothing to do.  */

      /* Does this predecessor generate this expression?  */
      else if (TEST_BIT (comp[pred_bb->index], expr->bitmap_index))
	{
	  /* Is this the occurrence we're looking for?
	     Note that there's only one generating occurrence per block
	     so we just need to check the block number.  */
	  if (occr_bb == pred_bb)
	    return 1;

	  visited[pred_bb->index] = 1;
	}
      /* Ignore this predecessor if it kills the expression.  */
      else if (! TEST_BIT (transp[pred_bb->index], expr->bitmap_index))
	visited[pred_bb->index] = 1;

      /* Neither gen nor kill.  */
      else
	{
	  visited[pred_bb->index] = 1;
	  if (pre_expr_reaches_here_p_work (occr_bb, expr, pred_bb, visited))
	    return 1;
	}
    }

  /* All paths have been checked.  */
  return 0;
}
/* The wrapper for pre_expr_reaches_here_p_work that ensures that any
   memory allocated for that function is returned.  */

static int
pre_expr_reaches_here_p (occr_bb, expr, bb)
     basic_block occr_bb;
     struct expr *expr;
     basic_block bb;
{
  int rval;
  char *visited = (char *) xcalloc (last_basic_block, 1);

  rval = pre_expr_reaches_here_p_work (occr_bb, expr, bb, visited);

  free (visited);
  return rval;
}
/* Given an expr, generate RTL which we can insert at the end of a BB,
   or on an edge.  Set the block number of any insns generated to
   the value of BB.  */

static rtx
process_insert_insn (expr)
     struct expr *expr;
{
  rtx reg = expr->reaching_reg;
  rtx exp = copy_rtx (expr->expr);
  rtx pat;

  start_sequence ();

  /* If the expression is something that's an operand, like a constant,
     just copy it to a register.  */
  if (general_operand (exp, GET_MODE (reg)))
    emit_move_insn (reg, exp);

  /* Otherwise, make a new insn to compute this expression and make sure the
     insn will be recognized (this also adds any needed CLOBBERs).  Copy the
     expression to make sure we don't have any sharing issues.  */
  else if (insn_invalid_p (emit_insn (gen_rtx_SET (VOIDmode, reg, exp))))
    abort ();

  pat = get_insns ();
  end_sequence ();

  return pat;
}
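/* Two sketches of what process_insert_insn produces (operands
   invented): for EXPR->expr = (const_int 42) the sequence is a simple
   move

       (set (reg 170) (const_int 42))

   while for a non-operand such as (plus (reg 171) (reg 172)) it is

       (set (reg 170) (plus (reg 171) (reg 172)))

   plus whatever CLOBBERs recognition added; the insn_invalid_p call
   above is what enforces that the emitted insn matches a real
   pattern.  */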
/* Add EXPR to the end of basic block BB.

   This is used by both PRE and code hoisting.

   For PRE, we want to verify that the expr is either transparent
   or locally anticipatable in the target block.  This check makes
   no sense for code hoisting.  */

static void
insert_insn_end_bb (expr, bb, pre)
     struct expr *expr;
     basic_block bb;
     int pre;
{
  rtx insn = bb->end;
  rtx new_insn;
  rtx reg = expr->reaching_reg;
  int regno = REGNO (reg);
  rtx pat, pat_end;

  pat = process_insert_insn (expr);
  if (pat == NULL_RTX || ! INSN_P (pat))
    abort ();

  pat_end = pat;
  while (NEXT_INSN (pat_end) != NULL_RTX)
    pat_end = NEXT_INSN (pat_end);

  /* If the last insn is a jump, insert EXPR in front [taking care to
     handle cc0, etc. properly].  Similarly we need to take care of
     trapping instructions in the presence of non-call exceptions.  */

  if (GET_CODE (insn) == JUMP_INSN
      || (GET_CODE (insn) == INSN
	  && (bb->succ->succ_next || (bb->succ->flags & EDGE_ABNORMAL))))
    {
#ifdef HAVE_cc0
      rtx note;
#endif
      /* It should always be the case that we can put these instructions
	 anywhere in the basic block when performing PRE optimizations.
	 Check this.  */
      if (GET_CODE (insn) == INSN && pre
	  && !TEST_BIT (antloc[bb->index], expr->bitmap_index)
	  && !TEST_BIT (transp[bb->index], expr->bitmap_index))
	abort ();

      /* If this is a jump table, then we can't insert stuff here.  Since
	 we know the previous real insn must be the tablejump, we insert
	 the new instruction just before the tablejump.  */
      if (GET_CODE (PATTERN (insn)) == ADDR_VEC
	  || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
	insn = prev_real_insn (insn);

#ifdef HAVE_cc0
      /* FIXME: 'twould be nice to call prev_cc0_setter here but it aborts
	 if cc0 isn't set.  */
      note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
      if (note)
	insn = XEXP (note, 0);
      else
	{
	  rtx maybe_cc0_setter = prev_nonnote_insn (insn);
	  if (maybe_cc0_setter
	      && INSN_P (maybe_cc0_setter)
	      && sets_cc0_p (PATTERN (maybe_cc0_setter)))
	    insn = maybe_cc0_setter;
	}
#endif
      /* FIXME: What if something in cc0/jump uses value set in new insn?  */
      new_insn = emit_insn_before (pat, insn);
    }

  /* Likewise if the last insn is a call, as will happen in the presence
     of exception handling.  */
  else if (GET_CODE (insn) == CALL_INSN
	   && (bb->succ->succ_next || (bb->succ->flags & EDGE_ABNORMAL)))
    {
      /* Keeping in mind SMALL_REGISTER_CLASSES and parameters in registers,
	 we search backward and place the instructions before the first
	 parameter is loaded.  Do this for everyone for consistency and a
	 presumption that we'll get better code elsewhere as well.

	 It should always be the case that we can put these instructions
	 anywhere in the basic block when performing PRE optimizations.
	 Check this.  */

      if (pre
	  && !TEST_BIT (antloc[bb->index], expr->bitmap_index)
	  && !TEST_BIT (transp[bb->index], expr->bitmap_index))
	abort ();

      /* Since different machines initialize their parameter registers
	 in different orders, assume nothing.  Collect the set of all
	 parameter registers.  */
      insn = find_first_parameter_load (insn, bb->head);

      /* If we found all the parameter loads, then we want to insert
	 before the first parameter load.

	 If we did not find all the parameter loads, then we might have
	 stopped on the head of the block, which could be a CODE_LABEL.
	 If we inserted before the CODE_LABEL, then we would be putting
	 the insn in the wrong basic block.  In that case, put the insn
	 after the CODE_LABEL.  Also, respect NOTE_INSN_BASIC_BLOCK.  */
      while (GET_CODE (insn) == CODE_LABEL
	     || NOTE_INSN_BASIC_BLOCK_P (insn))
	insn = NEXT_INSN (insn);

      new_insn = emit_insn_before (pat, insn);
    }
  else
    new_insn = emit_insn_after (pat, insn);

  while (1)
    {
      if (INSN_P (pat))
	{
	  add_label_notes (PATTERN (pat), new_insn);
	  note_stores (PATTERN (pat), record_set_info, pat);
	}
      if (pat == pat_end)
	break;
      pat = NEXT_INSN (pat);
    }

  gcse_create_count++;

  if (gcse_file)
    {
      fprintf (gcse_file, "PRE/HOIST: end of bb %d, insn %d, ",
	       bb->index, INSN_UID (new_insn));
      fprintf (gcse_file, "copying expression %d to reg %d\n",
	       expr->bitmap_index, regno);
    }
}
/* Insert partially redundant expressions on edges in the CFG to make
   the expressions fully redundant.  */

static int
pre_edge_insert (edge_list, index_map)
     struct edge_list *edge_list;
     struct expr **index_map;
{
  int e, i, j, num_edges, set_size, did_insert = 0;
  sbitmap *inserted;

  /* Where PRE_INSERT_MAP is nonzero, we add the expression on that edge
     if it reaches any of the deleted expressions.  */

  set_size = pre_insert_map[0]->size;
  num_edges = NUM_EDGES (edge_list);
  inserted = sbitmap_vector_alloc (num_edges, expr_hash_table.n_elems);
  sbitmap_vector_zero (inserted, num_edges);

  for (e = 0; e < num_edges; e++)
    {
      int indx;
      basic_block bb = INDEX_EDGE_PRED_BB (edge_list, e);

      for (i = indx = 0; i < set_size; i++, indx += SBITMAP_ELT_BITS)
	{
	  SBITMAP_ELT_TYPE insert = pre_insert_map[e]->elms[i];

	  for (j = indx; insert && j < (int) expr_hash_table.n_elems; j++, insert >>= 1)
	    if ((insert & 1) != 0 && index_map[j]->reaching_reg != NULL_RTX)
	      {
		struct expr *expr = index_map[j];
		struct occr *occr;

		/* Now look at each deleted occurrence of this expression.  */
		for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
		  {
		    if (! occr->deleted_p)
		      continue;

		    /* Insert this expression on this edge if it would
		       reach the deleted occurrence in BB.  */
		    if (!TEST_BIT (inserted[e], j))
		      {
			rtx insn;
			edge eg = INDEX_EDGE (edge_list, e);

			/* We can't insert anything on an abnormal and
			   critical edge, so we insert the insn at the end of
			   the previous block.  There are several alternatives
			   detailed in Morgan's book P277 (sec 10.5) for
			   handling this situation.  This one is easiest for
			   now.  */

			if ((eg->flags & EDGE_ABNORMAL) == EDGE_ABNORMAL)
			  insert_insn_end_bb (index_map[j], bb, 0);
			else
			  {
			    insn = process_insert_insn (index_map[j]);
			    insert_insn_on_edge (insn, eg);
			  }

			if (gcse_file)
			  {
			    fprintf (gcse_file, "PRE/HOIST: edge (%d,%d), ",
				     bb->index,
				     INDEX_EDGE_SUCC_BB (edge_list, e)->index);
			    fprintf (gcse_file, "copy expression %d\n",
				     expr->bitmap_index);
			  }

			update_ld_motion_stores (expr);
			SET_BIT (inserted[e], j);
			did_insert = 1;
			gcse_create_count++;
		      }
		  }
	      }
	}
    }

  sbitmap_vector_free (inserted);
  return did_insert;
}
/* Copy the result of INSN to REG.  INDX is the expression number.  */

static void
pre_insert_copy_insn (expr, insn)
     struct expr *expr;
     rtx insn;
{
  rtx reg = expr->reaching_reg;
  int regno = REGNO (reg);
  int indx = expr->bitmap_index;
  rtx set = single_set (insn);
  rtx new_insn;

  if (!set)
    abort ();

  new_insn = emit_insn_after (gen_move_insn (reg, SET_DEST (set)), insn);

  /* Keep register set table up to date.  */
  record_one_set (regno, new_insn);

  gcse_create_count++;

  if (gcse_file)
    fprintf (gcse_file,
	     "PRE: bb %d, insn %d, copy expression %d in insn %d to reg %d\n",
	     BLOCK_NUM (insn), INSN_UID (new_insn), indx,
	     INSN_UID (insn), regno);

  update_ld_motion_stores (expr);
}
/* Copy available expressions that reach the redundant expression
   to `reaching_reg'.  */

static void
pre_insert_copies ()
{
  unsigned int i;
  struct expr *expr;
  struct occr *occr;
  struct occr *avail;

  /* For each available expression in the table, copy the result to
     `reaching_reg' if the expression reaches a deleted one.

     ??? The current algorithm is rather brute force.
     Need to do some profiling.  */

  for (i = 0; i < expr_hash_table.size; i++)
    for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash)
      {
	/* If the basic block isn't reachable, PPOUT will be TRUE.  However,
	   we don't want to insert a copy here because the expression may not
	   really be redundant.  So only insert an insn if the expression was
	   deleted.  This test also avoids further processing if the
	   expression wasn't deleted anywhere.  */
	if (expr->reaching_reg == NULL)
	  continue;

	for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
	  {
	    if (! occr->deleted_p)
	      continue;

	    for (avail = expr->avail_occr; avail != NULL; avail = avail->next)
	      {
		rtx insn = avail->insn;

		/* No need to handle this one if handled already.  */
		if (avail->copied_p)
		  continue;

		/* Don't handle this one if it's a redundant one.  */
		if (TEST_BIT (pre_redundant_insns, INSN_CUID (insn)))
		  continue;

		/* Or if the expression doesn't reach the deleted one.  */
		if (! pre_expr_reaches_here_p (BLOCK_FOR_INSN (avail->insn),
					       expr,
					       BLOCK_FOR_INSN (occr->insn)))
		  continue;

		/* Copy the result of avail to reaching_reg.  */
		pre_insert_copy_insn (expr, insn);
		avail->copied_p = 1;
	      }
	  }
      }
}
/* Emit a move from SRC to DEST noting the equivalence with the expression
   computed in INSN.  */

static rtx
gcse_emit_move_after (src, dest, insn)
     rtx src, dest, insn;
{
  rtx new;
  rtx set = single_set (insn), set2;
  rtx note;
  rtx eqv;

  /* This should never fail since we're creating a reg->reg copy
     we've verified to be valid.  */

  new = emit_insn_after (gen_move_insn (dest, src), insn);

  /* Note the equivalence for local CSE pass.  */
  set2 = single_set (new);
  if (!set2 || !rtx_equal_p (SET_DEST (set2), dest))
    return new;
  if ((note = find_reg_equal_equiv_note (insn)))
    eqv = XEXP (note, 0);
  else
    eqv = SET_SRC (set);

  set_unique_reg_note (new, REG_EQUAL, copy_insn_1 (eqv));

  return new;
}
/* Delete redundant computations.
   Deletion is done by changing the insn to copy the `reaching_reg' of
   the expression into the result of the SET.  It is left to later passes
   (cprop, cse2, flow, combine, regmove) to propagate the copy or eliminate it.

   Returns nonzero if a change is made.  */

static int
pre_delete ()
{
  unsigned int i;
  int changed;
  struct expr *expr;
  struct occr *occr;

  changed = 0;
  for (i = 0; i < expr_hash_table.size; i++)
    for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash)
      {
	int indx = expr->bitmap_index;

	/* We only need to search antic_occr since we require
	   ANTLOC != 0.  */

	for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
	  {
	    rtx insn = occr->insn;
	    rtx set;
	    basic_block bb = BLOCK_FOR_INSN (insn);

	    if (TEST_BIT (pre_delete_map[bb->index], indx))
	      {
		set = single_set (insn);
		if (! set)
		  abort ();

		/* Create a pseudo-reg to store the result of reaching
		   expressions into.  Get the mode for the new pseudo from
		   the mode of the original destination pseudo.  */
		if (expr->reaching_reg == NULL)
		  expr->reaching_reg
		    = gen_reg_rtx (GET_MODE (SET_DEST (set)));

		gcse_emit_move_after (expr->reaching_reg, SET_DEST (set), insn);
		delete_insn (insn);
		occr->deleted_p = 1;
		SET_BIT (pre_redundant_insns, INSN_CUID (insn));
		changed = 1;
		gcse_subst_count++;

		if (gcse_file)
		  {
		    fprintf (gcse_file,
			     "PRE: redundant insn %d (expression %d) in ",
			     INSN_UID (insn), indx);
		    fprintf (gcse_file, "bb %d, reaching reg is %d\n",
			     bb->index, REGNO (expr->reaching_reg));
		  }
	      }
	  }
      }

  return changed;
}
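/* Net effect on one deleted occurrence (register numbers invented):
   a redundant

       (set (reg 180) (plus (reg 181) (reg 182)))

   in a block whose PRE_DELETE_MAP bit is set becomes

       (set (reg 180) (reg 183))

   where reg 183 is EXPR->reaching_reg.  The occurrences that must
   still compute the expression copy their result into reg 183 via
   pre_insert_copies, and pre_edge_insert adds computations on edges
   where none would otherwise reach.  */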
/* Perform GCSE optimizations using PRE.
   This is called by one_pre_gcse_pass after all the dataflow analysis
   has been done.

   This is based on the original Morel-Renvoise paper, Fred Chow's thesis, and
   lazy code motion from Knoop, Ruthing and Steffen as described in Advanced
   Compiler Design and Implementation.

   ??? A new pseudo reg is created to hold the reaching expression.  The nice
   thing about the classical approach is that it would try to use an existing
   reg.  If the register can't be adequately optimized [i.e. we introduce
   reload problems], one could add a pass here to propagate the new register
   through the block.

   ??? We don't handle single sets in PARALLELs because we're [currently] not
   able to copy the rest of the parallel when we insert copies to create full
   redundancies from partial redundancies.  However, there's no reason why we
   can't handle PARALLELs in the cases where there are no partial
   redundancies.  */

static int
pre_gcse ()
{
  unsigned int i;
  int did_insert, changed;
  struct expr **index_map;
  struct expr *expr;

  /* Compute a mapping from expression number (`bitmap_index') to
     hash table entry.  */

  index_map = (struct expr **) xcalloc (expr_hash_table.n_elems, sizeof (struct expr *));
  for (i = 0; i < expr_hash_table.size; i++)
    for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash)
      index_map[expr->bitmap_index] = expr;

  /* Reset bitmap used to track which insns are redundant.  */
  pre_redundant_insns = sbitmap_alloc (max_cuid);
  sbitmap_zero (pre_redundant_insns);

  /* Delete the redundant insns first so that
     - we know what register to use for the new insns and for the other
       ones with reaching expressions
     - we know which insns are redundant when we go to create copies  */

  changed = pre_delete ();

  did_insert = pre_edge_insert (edge_list, index_map);

  /* In other places with reaching expressions, copy the expression to the
     specially allocated pseudo-reg that reaches the redundant expr.  */
  pre_insert_copies ();
  if (did_insert)
    {
      commit_edge_insertions ();
      changed = 1;
    }

  free (index_map);
  sbitmap_free (pre_redundant_insns);
  return changed;
}
/* Top level routine to perform one PRE GCSE pass.

   Return nonzero if a change was made.  */

static int
one_pre_gcse_pass (pass)
     int pass;
{
  int changed = 0;

  gcse_subst_count = 0;
  gcse_create_count = 0;

  alloc_hash_table (max_cuid, &expr_hash_table, 0);
  add_noreturn_fake_exit_edges ();
  if (flag_gcse_lm)
    compute_ld_motion_mems ();

  compute_hash_table (&expr_hash_table);
  trim_ld_motion_mems ();
  if (gcse_file)
    dump_hash_table (gcse_file, "Expression", &expr_hash_table);

  if (expr_hash_table.n_elems > 0)
    {
      alloc_pre_mem (last_basic_block, expr_hash_table.n_elems);
      compute_pre_data ();
      changed |= pre_gcse ();
      free_edge_list (edge_list);
      free_pre_mem ();
    }

  free_ldst_mems ();
  remove_fake_edges ();
  free_hash_table (&expr_hash_table);

  if (gcse_file)
    {
      fprintf (gcse_file, "\nPRE GCSE of %s, pass %d: %d bytes needed, ",
	       current_function_name, pass, bytes_used);
      fprintf (gcse_file, "%d substs, %d insns created\n",
	       gcse_subst_count, gcse_create_count);
    }

  return changed;
}
/* If X contains any LABEL_REF's, add REG_LABEL notes for them to INSN.
   If notes are added to an insn which references a CODE_LABEL, the
   LABEL_NUSES count is incremented.  We have to add REG_LABEL notes,
   because the following loop optimization pass requires them.  */

/* ??? This is very similar to the loop.c add_label_notes function.  We
   could probably share code here.  */

/* ??? If there was a jump optimization pass after gcse and before loop,
   then we would not need to do this here, because jump would add the
   necessary REG_LABEL notes.  */

static void
add_label_notes (x, insn)
     rtx x;
     rtx insn;
{
  enum rtx_code code = GET_CODE (x);
  int i, j;
  const char *fmt;

  if (code == LABEL_REF && !LABEL_REF_NONLOCAL_P (x))
    {
      /* This code used to ignore labels that referred to dispatch tables to
	 avoid flow generating (slightly) worse code.

	 We no longer ignore such label references (see LABEL_REF handling in
	 mark_jump_label for additional information).  */
      REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_LABEL, XEXP (x, 0),
					    REG_NOTES (insn));
      if (LABEL_P (XEXP (x, 0)))
	LABEL_NUSES (XEXP (x, 0))++;
      return;
    }

  for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
    {
      if (fmt[i] == 'e')
	add_label_notes (XEXP (x, i), insn);
      else if (fmt[i] == 'E')
	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	  add_label_notes (XVECEXP (x, i, j), insn);
    }
}
/* Compute transparent outgoing information for each block.

   An expression is transparent to an edge unless it is killed by
   the edge itself.  This can only happen with abnormal control flow,
   when the edge is traversed through a call.  This happens with
   non-local labels and exceptions.

   This would not be necessary if we split the edge.  While this is
   normally impossible for abnormal critical edges, with some effort
   it should be possible with exception handling, since we still have
   control over which handler should be invoked.  But due to increased
   EH table sizes, this may not be worthwhile.  */

static void
compute_transpout ()
{
  basic_block bb;
  unsigned int i;
  struct expr *expr;

  sbitmap_vector_ones (transpout, last_basic_block);

  FOR_EACH_BB (bb)
    {
      /* Note that flow inserted a nop at the end of basic blocks that
	 end in call instructions for reasons other than abnormal
	 control flow.  */
      if (GET_CODE (bb->end) != CALL_INSN)
	continue;

      for (i = 0; i < expr_hash_table.size; i++)
	for (expr = expr_hash_table.table[i]; expr ; expr = expr->next_same_hash)
	  if (GET_CODE (expr->expr) == MEM)
	    {
	      if (GET_CODE (XEXP (expr->expr, 0)) == SYMBOL_REF
		  && CONSTANT_POOL_ADDRESS_P (XEXP (expr->expr, 0)))
		continue;

	      /* ??? Optimally, we would use interprocedural alias
		 analysis to determine if this mem is actually killed
		 by this call.  */
	      RESET_BIT (transpout[bb->index], expr->bitmap_index);
	    }
    }
}
5777 /* Removal of useless null pointer checks.  */
5779 /* Called via note_stores. X is set by SETTER. If X is a register we must
5780 invalidate nonnull_local and set nonnull_killed. DATA is really a
5781 `null_pointer_info *'.
5783 We ignore hard registers. */
5785 static void
5786 invalidate_nonnull_info (x, setter, data)
5787 rtx x;
5788 rtx setter ATTRIBUTE_UNUSED;
5789 void *data;
5791 unsigned int regno;
5792 struct null_pointer_info *npi = (struct null_pointer_info *) data;
5794 while (GET_CODE (x) == SUBREG)
5795 x = SUBREG_REG (x);
5797 /* Ignore anything that is not a register or is a hard register. */
5798 if (GET_CODE (x) != REG
5799 || REGNO (x) < npi->min_reg
5800 || REGNO (x) >= npi->max_reg)
5801 return;
5803 regno = REGNO (x) - npi->min_reg;
5805 RESET_BIT (npi->nonnull_local[npi->current_block->index], regno);
5806 SET_BIT (npi->nonnull_killed[npi->current_block->index], regno);
5809 /* Do null-pointer check elimination for the registers indicated in
5810 NPI. NONNULL_AVIN and NONNULL_AVOUT are pre-allocated sbitmaps;
5811 they are not our responsibility to free. */
5813 static int
5814 delete_null_pointer_checks_1 (block_reg, nonnull_avin,
5815 nonnull_avout, npi)
5816 unsigned int *block_reg;
5817 sbitmap *nonnull_avin;
5818 sbitmap *nonnull_avout;
5819 struct null_pointer_info *npi;
5821 basic_block bb, current_block;
5822 sbitmap *nonnull_local = npi->nonnull_local;
5823 sbitmap *nonnull_killed = npi->nonnull_killed;
5824 int something_changed = 0;
5826 /* Compute local properties, nonnull and killed. A register will have
5827 the nonnull property if at the end of the current block its value is
5828 known to be nonnull. The killed property indicates that somewhere in
5829 the block any information we had about the register is killed.
5831 Note that a register can have both properties in a single block. That
5832 indicates that it's killed, then later in the block a new value is
5833 computed. */
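/* A hedged single-block example (P and Q are illustrative pseudos):

       p = q;          /* kills nonnull info for P   -> nonnull_killed */
       x = *p;         /* (mem (reg P)) dereference  -> nonnull_local  */

   Both bits end up set for P: the old information is killed, but at
   the end of the block P is known nonnull again.  */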
5834 sbitmap_vector_zero (nonnull_local, last_basic_block);
5835 sbitmap_vector_zero (nonnull_killed, last_basic_block);
5837 FOR_EACH_BB (current_block)
5839 rtx insn, stop_insn;
5841 /* Set the current block for invalidate_nonnull_info. */
5842 npi->current_block = current_block;
5844 /* Scan each insn in the basic block looking for memory references and
5845 register sets. */
5846 stop_insn = NEXT_INSN (current_block->end);
5847 for (insn = current_block->head;
5848 insn != stop_insn;
5849 insn = NEXT_INSN (insn))
5851 rtx set;
5852 rtx reg;
5854 /* Ignore anything that is not a normal insn. */
5855 if (! INSN_P (insn))
5856 continue;
5858 /* Basically ignore anything that is not a simple SET. We do have
5859 to make sure to invalidate nonnull_local and set nonnull_killed
5860 for such insns though. */
5861 set = single_set (insn);
5862 if (!set)
5864 note_stores (PATTERN (insn), invalidate_nonnull_info, npi);
5865 continue;
5868 /* See if we've got a usable memory load. We handle it first
5869 in case it uses its address register as a dest (which kills
5870 the nonnull property). */
5871 if (GET_CODE (SET_SRC (set)) == MEM
5872 && GET_CODE ((reg = XEXP (SET_SRC (set), 0))) == REG
5873 && REGNO (reg) >= npi->min_reg
5874 && REGNO (reg) < npi->max_reg)
5875 SET_BIT (nonnull_local[current_block->index],
5876 REGNO (reg) - npi->min_reg);
5878 /* Now invalidate stuff clobbered by this insn. */
5879 note_stores (PATTERN (insn), invalidate_nonnull_info, npi);
5881 /* And handle stores; we do these last since any sets in INSN
5882 cannot kill the nonnull property if it is derived from a MEM
5883 appearing in a SET_DEST. */
5884 if (GET_CODE (SET_DEST (set)) == MEM
5885 && GET_CODE ((reg = XEXP (SET_DEST (set), 0))) == REG
5886 && REGNO (reg) >= npi->min_reg
5887 && REGNO (reg) < npi->max_reg)
5888 SET_BIT (nonnull_local[current_block->index],
5889 REGNO (reg) - npi->min_reg);
5893 /* Now compute global properties based on the local properties. This
5894 is a classic global availability algorithm. */
5895 compute_available (nonnull_local, nonnull_killed,
5896 nonnull_avout, nonnull_avin);
5898 /* Now look at each bb and see if it ends with a compare of a value
5899 against zero. */
5900 FOR_EACH_BB (bb)
5902 rtx last_insn = bb->end;
5903 rtx condition, earliest;
5904 int compare_and_branch;
5906 /* Since MIN_REG is always at least FIRST_PSEUDO_REGISTER, and
5907 since BLOCK_REG[BB] is zero if this block did not end with a
5908 comparison against zero, this condition works. */
5909 if (block_reg[bb->index] < npi->min_reg
5910 || block_reg[bb->index] >= npi->max_reg)
5911 continue;
5913 /* LAST_INSN is a conditional jump. Get its condition. */
5914 condition = get_condition (last_insn, &earliest);
5916 /* If we can't determine the condition then skip. */
5917 if (! condition)
5918 continue;
5920 /* Is the register known to have a nonzero value? */
5921 if (!TEST_BIT (nonnull_avout[bb->index], block_reg[bb->index] - npi->min_reg))
5922 continue;
5924 /* Try to compute whether the compare/branch at the end of the block is one or
5925 two instructions. */
5926 if (earliest == last_insn)
5927 compare_and_branch = 1;
5928 else if (earliest == prev_nonnote_insn (last_insn))
5929 compare_and_branch = 2;
5930 else
5931 continue;
5933 /* We know the register in this comparison is nonnull at exit from
5934 this block. We can optimize this comparison. */
5935 if (GET_CODE (condition) == NE)
5937 rtx new_jump;
5939 new_jump = emit_jump_insn_after (gen_jump (JUMP_LABEL (last_insn)),
5940 last_insn);
5941 JUMP_LABEL (new_jump) = JUMP_LABEL (last_insn);
5942 LABEL_NUSES (JUMP_LABEL (new_jump))++;
5943 emit_barrier_after (new_jump);
5946 something_changed = 1;
5947 delete_insn (last_insn);
5948 if (compare_and_branch == 2)
5949 delete_insn (earliest);
5950 purge_dead_edges (bb);
5952 /* Don't check this block again. (Note that BLOCK_END is
5953 invalid here; we deleted the last instruction in the
5954 block.) */
5955 block_reg[bb->index] = 0;
5958 return something_changed;
5961 /* Find EQ/NE comparisons against zero which can be (indirectly) evaluated
5962 at compile time.
5964 This is conceptually similar to global constant/copy propagation and
5965 classic global CSE (it even uses the same dataflow equations as cprop).
5967 If a register is used as a memory address with the form (mem (reg)), then we
5968 know that REG cannot be zero at that point in the program. Any instruction
5969 which sets REG "kills" this property.
5971 So, if every path leading to a conditional branch has an available memory
5972 reference of that form, then we know the register cannot have the value
5973 zero at the conditional branch.
5975 So we merely need to compute the local properties and propagate that data
5976 around the cfg, then optimize where possible.
5978 We run this pass two times. Once before CSE, then again after CSE. This
5979 has proven to be the most profitable approach. It is rare for new
5980 optimization opportunities of this nature to appear after the first CSE
5981 pass.
5983 This could probably be integrated with global cprop with a little work. */
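/* A hedged C-level sketch of the transformation (f and p are
   illustrative names):

       int f (int *p)
       {
         int x = *p;     /* (mem (reg P)): P known nonnull below  */
         if (p == 0)     /* EQ comparison against zero            */
           abort ();     /*   ... can never be taken              */
         return x;
       }

   Every path to the branch contains a memory reference through P and
   nothing re-sets P, so the comparison is deleted; an NE branch, being
   always taken, is instead replaced by an unconditional jump.  */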
5985 int
5986 delete_null_pointer_checks (f)
5987 rtx f ATTRIBUTE_UNUSED;
5989 sbitmap *nonnull_avin, *nonnull_avout;
5990 unsigned int *block_reg;
5991 basic_block bb;
5992 int reg;
5993 int regs_per_pass;
5994 int max_reg;
5995 struct null_pointer_info npi;
5996 int something_changed = 0;
5998 /* If we have only a single block, then there's nothing to do. */
5999 if (n_basic_blocks <= 1)
6000 return 0;
6002 /* Trying to perform global optimizations on flow graphs which have
6003 a high connectivity will take a long time and is unlikely to be
6004 particularly useful.
6006 In normal circumstances a cfg should have about twice as many edges
6007 as blocks. But we do not want to punish small functions which have
6008 a couple switch statements. So we require a relatively large number
6009 of basic blocks and the ratio of edges to blocks to be high. */
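/* For example (hedged, round numbers only): 1200 blocks with 30000
   edges (25 edges per block) would be skipped, while the same 1200
   blocks with 5000 edges (about 4 edges per block) would still be
   processed.  */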
6010 if (n_basic_blocks > 1000 && n_edges / n_basic_blocks >= 20)
6011 return 0;
6013 /* We need four bitmaps, each with a bit for each register in each
6014 basic block. */
6015 max_reg = max_reg_num ();
6016 regs_per_pass = get_bitmap_width (4, last_basic_block, max_reg);
6018 /* Allocate bitmaps to hold local and global properties. */
6019 npi.nonnull_local = sbitmap_vector_alloc (last_basic_block, regs_per_pass);
6020 npi.nonnull_killed = sbitmap_vector_alloc (last_basic_block, regs_per_pass);
6021 nonnull_avin = sbitmap_vector_alloc (last_basic_block, regs_per_pass);
6022 nonnull_avout = sbitmap_vector_alloc (last_basic_block, regs_per_pass);
6024 /* Go through the basic blocks, seeing whether or not each block
6025 ends with a conditional branch whose condition is a comparison
6026 against zero. Record the register compared in BLOCK_REG. */
6027 block_reg = (unsigned int *) xcalloc (last_basic_block, sizeof (int));
6028 FOR_EACH_BB (bb)
6030 rtx last_insn = bb->end;
6031 rtx condition, earliest, reg;
6033 /* We only want conditional branches. */
6034 if (GET_CODE (last_insn) != JUMP_INSN
6035 || !any_condjump_p (last_insn)
6036 || !onlyjump_p (last_insn))
6037 continue;
6039 /* LAST_INSN is a conditional jump. Get its condition. */
6040 condition = get_condition (last_insn, &earliest);
6042 /* If we were unable to get the condition, or it is not an equality
6043 comparison against zero then there's nothing we can do. */
6044 if (!condition
6045 || (GET_CODE (condition) != NE && GET_CODE (condition) != EQ)
6046 || GET_CODE (XEXP (condition, 1)) != CONST_INT
6047 || (XEXP (condition, 1)
6048 != CONST0_RTX (GET_MODE (XEXP (condition, 0)))))
6049 continue;
6051 /* We must be checking a register against zero. */
6052 reg = XEXP (condition, 0);
6053 if (GET_CODE (reg) != REG)
6054 continue;
6056 block_reg[bb->index] = REGNO (reg);
6059 /* Go through the algorithm for each block of registers. */
6060 for (reg = FIRST_PSEUDO_REGISTER; reg < max_reg; reg += regs_per_pass)
6062 npi.min_reg = reg;
6063 npi.max_reg = MIN (reg + regs_per_pass, max_reg);
6064 something_changed |= delete_null_pointer_checks_1 (block_reg,
6065 nonnull_avin,
6066 nonnull_avout,
6067 &npi);
6070 /* Free the table of registers compared at the end of every block. */
6071 free (block_reg);
6073 /* Free bitmaps. */
6074 sbitmap_vector_free (npi.nonnull_local);
6075 sbitmap_vector_free (npi.nonnull_killed);
6076 sbitmap_vector_free (nonnull_avin);
6077 sbitmap_vector_free (nonnull_avout);
6079 return something_changed;
6082 /* Code Hoisting variables and subroutines. */
6084 /* Very busy expressions. */
6085 static sbitmap *hoist_vbein;
6086 static sbitmap *hoist_vbeout;
6088 /* Hoistable expressions. */
6089 static sbitmap *hoist_exprs;
6091 /* Dominator bitmaps. */
6092 dominance_info dominators;
6094 /* ??? We could compute post dominators and run this algorithm in
6095 reverse to perform tail merging; doing so would probably be
6096 more effective than the tail merging code in jump.c.
6098 It's unclear if tail merging could be run in parallel with
6099 code hoisting. It would be nice. */
6101 /* Allocate vars used for code hoisting analysis. */
6103 static void
6104 alloc_code_hoist_mem (n_blocks, n_exprs)
6105 int n_blocks, n_exprs;
6107 antloc = sbitmap_vector_alloc (n_blocks, n_exprs);
6108 transp = sbitmap_vector_alloc (n_blocks, n_exprs);
6109 comp = sbitmap_vector_alloc (n_blocks, n_exprs);
6111 hoist_vbein = sbitmap_vector_alloc (n_blocks, n_exprs);
6112 hoist_vbeout = sbitmap_vector_alloc (n_blocks, n_exprs);
6113 hoist_exprs = sbitmap_vector_alloc (n_blocks, n_exprs);
6114 transpout = sbitmap_vector_alloc (n_blocks, n_exprs);
6117 /* Free vars used for code hoisting analysis. */
6119 static void
6120 free_code_hoist_mem ()
6122 sbitmap_vector_free (antloc);
6123 sbitmap_vector_free (transp);
6124 sbitmap_vector_free (comp);
6126 sbitmap_vector_free (hoist_vbein);
6127 sbitmap_vector_free (hoist_vbeout);
6128 sbitmap_vector_free (hoist_exprs);
6129 sbitmap_vector_free (transpout);
6131 free_dominance_info (dominators);
6134 /* Compute the very busy expressions at entry/exit from each block.
6136 An expression is very busy if all paths from a given point
6137 compute the expression. */
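/* A hedged illustration: in

       if (c)
         u = a + b;
       else
         v = a + b;

   `a + b' is very busy at the branch point, since every outgoing path
   computes it. The fixpoint below computes, per block,

       vbein  = antloc | (vbeout & transp)
       vbeout = intersection of vbein over all successors

   which is the backward analogue of the availability computation.  */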
6139 static void
6140 compute_code_hoist_vbeinout ()
6142 int changed, passes;
6143 basic_block bb;
6145 sbitmap_vector_zero (hoist_vbeout, last_basic_block);
6146 sbitmap_vector_zero (hoist_vbein, last_basic_block);
6148 passes = 0;
6149 changed = 1;
6151 while (changed)
6153 changed = 0;
6155 /* We scan the blocks in the reverse order to speed up
6156 the convergence. */
6157 FOR_EACH_BB_REVERSE (bb)
6159 changed |= sbitmap_a_or_b_and_c_cg (hoist_vbein[bb->index], antloc[bb->index],
6160 hoist_vbeout[bb->index], transp[bb->index]);
6161 if (bb->next_bb != EXIT_BLOCK_PTR)
6162 sbitmap_intersection_of_succs (hoist_vbeout[bb->index], hoist_vbein, bb->index);
6165 passes++;
6168 if (gcse_file)
6169 fprintf (gcse_file, "hoisting vbeinout computation: %d passes\n", passes);
6172 /* Top level routine to do the dataflow analysis needed by code hoisting. */
6174 static void
6175 compute_code_hoist_data ()
6177 compute_local_properties (transp, comp, antloc, &expr_hash_table);
6178 compute_transpout ();
6179 compute_code_hoist_vbeinout ();
6180 dominators = calculate_dominance_info (CDI_DOMINATORS);
6181 if (gcse_file)
6182 fprintf (gcse_file, "\n");
6185 /* Determine if the expression identified by EXPR_INDEX would
6186 reach BB unimpaired if it was placed at the end of EXPR_BB.
6188 It's unclear exactly what Muchnick meant by "unimpaired". It seems
6189 to me that the expression must either be computed or transparent in
6190 *every* block in the path(s) from EXPR_BB to BB. Any other definition
6191 would allow the expression to be hoisted out of loops, even if
6192 the expression wasn't a loop invariant.
6194 Contrast this to reachability for PRE where an expression is
6195 considered reachable if *any* path reaches instead of *all*
6196 paths. */
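/* A hedged example of the all-paths requirement:

       BB1 ----> BB2 ----> BB4    (BB4 computes a + b)
        \                 /
         `-----> BB3 ----'        (BB3 clobbers a)

   `a + b' placed at the end of BB1 does not reach BB4 unimpaired,
   because the path through BB3 kills it, even though the path through
   BB2 does not. PRE's any-path notion would have accepted this.  */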
6198 static int
6199 hoist_expr_reaches_here_p (expr_bb, expr_index, bb, visited)
6200 basic_block expr_bb;
6201 int expr_index;
6202 basic_block bb;
6203 char *visited;
6205 edge pred;
6206 int visited_allocated_locally = 0;
6209 if (visited == NULL)
6211 visited_allocated_locally = 1;
6212 visited = xcalloc (last_basic_block, 1);
6215 for (pred = bb->pred; pred != NULL; pred = pred->pred_next)
6217 basic_block pred_bb = pred->src;
6219 if (pred->src == ENTRY_BLOCK_PTR)
6220 break;
6221 else if (pred_bb == expr_bb)
6222 continue;
6223 else if (visited[pred_bb->index])
6224 continue;
6226 /* Does this predecessor generate this expression? */
6227 else if (TEST_BIT (comp[pred_bb->index], expr_index))
6228 break;
6229 else if (! TEST_BIT (transp[pred_bb->index], expr_index))
6230 break;
6232 /* Not killed. */
6233 else
6235 visited[pred_bb->index] = 1;
6236 if (! hoist_expr_reaches_here_p (expr_bb, expr_index,
6237 pred_bb, visited))
6238 break;
6241 if (visited_allocated_locally)
6242 free (visited);
6244 return (pred == NULL);
6247 /* Actually perform code hoisting. */
6249 static void
6250 hoist_code ()
6252 basic_block bb, dominated;
6253 basic_block *domby;
6254 unsigned int domby_len;
6255 unsigned int i,j;
6256 struct expr **index_map;
6257 struct expr *expr;
6259 sbitmap_vector_zero (hoist_exprs, last_basic_block);
6261 /* Compute a mapping from expression number (`bitmap_index') to
6262 hash table entry. */
6264 index_map = (struct expr **) xcalloc (expr_hash_table.n_elems, sizeof (struct expr *));
6265 for (i = 0; i < expr_hash_table.size; i++)
6266 for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash)
6267 index_map[expr->bitmap_index] = expr;
6269 /* Walk over each basic block looking for potentially hoistable
6270 expressions, nothing gets hoisted from the entry block. */
6271 FOR_EACH_BB (bb)
6273 int found = 0;
6274 int insn_inserted_p;
6276 domby_len = get_dominated_by (dominators, bb, &domby);
6277 /* Examine each expression that is very busy at the exit of this
6278 block. These are the potentially hoistable expressions. */
6279 for (i = 0; i < hoist_vbeout[bb->index]->n_bits; i++)
6281 int hoistable = 0;
6283 if (TEST_BIT (hoist_vbeout[bb->index], i)
6284 && TEST_BIT (transpout[bb->index], i))
6286 /* We've found a potentially hoistable expression, now
6287 we look at every block BB dominates to see if it
6288 computes the expression. */
6289 for (j = 0; j < domby_len; j++)
6291 dominated = domby[j];
6292 /* Ignore self dominance. */
6293 if (bb == dominated)
6294 continue;
6295 /* We've found a dominated block, now see if it computes
6296 the busy expression and whether or not moving that
6297 expression to the "beginning" of that block is safe. */
6298 if (!TEST_BIT (antloc[dominated->index], i))
6299 continue;
6301 /* Note if the expression would reach the dominated block
6302 unimpaired if it was placed at the end of BB.
6304 Keep track of how many times this expression is hoistable
6305 from a dominated block into BB. */
6306 if (hoist_expr_reaches_here_p (bb, i, dominated, NULL))
6307 hoistable++;
6310 /* If we found more than one hoistable occurrence of this
6311 expression, then note it in the bitmap of expressions to
6312 hoist. It makes no sense to hoist things which are computed
6313 in only one BB, and doing so tends to pessimize register
6314 allocation. One could increase this value to try harder
6315 to avoid any possible code expansion due to register
6316 allocation issues; however experiments have shown that
6317 the vast majority of hoistable expressions are only movable
6318 from two successors, so raising this threshold is likely
6319 to nullify any benefit we get from code hoisting. */
6320 if (hoistable > 1)
6322 SET_BIT (hoist_exprs[bb->index], i);
6323 found = 1;
6327 /* If we found nothing to hoist, then quit now. */
6328 if (! found)
6330 free (domby);
6331 continue;
6334 /* Loop over all the hoistable expressions. */
6335 for (i = 0; i < hoist_exprs[bb->index]->n_bits; i++)
6337 /* We want to insert the expression into BB only once, so
6338 note when we've inserted it. */
6339 insn_inserted_p = 0;
6341 /* These tests should be the same as the tests above. */
6342 if (TEST_BIT (hoist_vbeout[bb->index], i))
6344 /* We've found a potentially hoistable expression, now
6345 we look at every block BB dominates to see if it
6346 computes the expression. */
6347 for (j = 0; j < domby_len; j++)
6349 dominated = domby[j];
6350 /* Ignore self dominance. */
6351 if (bb == dominated)
6352 continue;
6354 /* We've found a dominated block, now see if it computes
6355 the busy expression and whether or not moving that
6356 expression to the "beginning" of that block is safe. */
6357 if (!TEST_BIT (antloc[dominated->index], i))
6358 continue;
6360 /* The expression is computed in the dominated block and
6361 it would be safe to compute it at the start of the
6362 dominated block. Now we have to determine if the
6363 expression would reach the dominated block if it was
6364 placed at the end of BB. */
6365 if (hoist_expr_reaches_here_p (bb, i, dominated, NULL))
6367 struct expr *expr = index_map[i];
6368 struct occr *occr = expr->antic_occr;
6369 rtx insn;
6370 rtx set;
6372 /* Find the right occurrence of this expression. */
6373 while (occr && BLOCK_FOR_INSN (occr->insn) != dominated)
6374 occr = occr->next;
6376 /* Should never happen. */
6377 if (!occr)
6378 abort ();
6380 insn = occr->insn;
6382 set = single_set (insn);
6383 if (! set)
6384 abort ();
6386 /* Create a pseudo-reg to store the result of reaching
6387 expressions into. Get the mode for the new pseudo
6388 from the mode of the original destination pseudo. */
6389 if (expr->reaching_reg == NULL)
6390 expr->reaching_reg
6391 = gen_reg_rtx (GET_MODE (SET_DEST (set)));
6393 gcse_emit_move_after (expr->reaching_reg, SET_DEST (set), insn);
6394 delete_insn (insn);
6395 occr->deleted_p = 1;
6396 if (!insn_inserted_p)
6398 insert_insn_end_bb (index_map[i], bb, 0);
6399 insn_inserted_p = 1;
6405 free (domby);
6408 free (index_map);
6411 /* Top level routine to perform one code hoisting (aka unification) pass
6413 Return nonzero if a change was made. */
6415 static int
6416 one_code_hoisting_pass ()
6418 int changed = 0;
6420 alloc_hash_table (max_cuid, &expr_hash_table, 0);
6421 compute_hash_table (&expr_hash_table);
6422 if (gcse_file)
6423 dump_hash_table (gcse_file, "Code Hosting Expressions", &expr_hash_table);
6425 if (expr_hash_table.n_elems > 0)
6427 alloc_code_hoist_mem (last_basic_block, expr_hash_table.n_elems);
6428 compute_code_hoist_data ();
6429 hoist_code ();
6430 free_code_hoist_mem ();
6433 free_hash_table (&expr_hash_table);
6435 return changed;
6438 /* Here we provide the things required to do store motion towards
6439 the exit. In order for this to be effective, gcse also needed to
6440 be taught how to move a load when it is killed only by a store to itself.
6442 int i;
6443 float a[10];
6445 void foo(float scale)
6447 for (i=0; i<10; i++)
6448 a[i] *= scale;
6451 'i' is both loaded and stored to in the loop. Normally, gcse cannot move
6452 the load out since it's live around the loop, and stored at the bottom
6453 of the loop.
6455 The 'Load Motion' referred to and implemented in this file is
6456 an enhancement to gcse which when using edge based lcm, recognizes
6457 this situation and allows gcse to move the load out of the loop.
6459 Once gcse has hoisted the load, store motion can then push this
6460 load towards the exit, and we end up with no loads or stores of 'i'
6461 in the loop. */
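/* A hedged sketch of the combined effect on the loop above: `i' is
   kept in a pseudo for the duration of the loop, conceptually

       reg = i;                    /* load moved out of the loop      */
       for (; reg < 10; reg++)
         a[reg] *= scale;
       i = reg;                    /* store pushed towards the exit   */

   leaving no loads or stores of `i' inside the loop body.  */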
6463 /* This will search the ldst list for a matching expression. If it
6464 doesn't find one, we create one and initialize it. */
6466 static struct ls_expr *
6467 ldst_entry (x)
6468 rtx x;
6470 struct ls_expr * ptr;
6472 for (ptr = first_ls_expr(); ptr != NULL; ptr = next_ls_expr (ptr))
6473 if (expr_equiv_p (ptr->pattern, x))
6474 break;
6476 if (!ptr)
6478 ptr = (struct ls_expr *) xmalloc (sizeof (struct ls_expr));
6480 ptr->next = pre_ldst_mems;
6481 ptr->expr = NULL;
6482 ptr->pattern = x;
6483 ptr->loads = NULL_RTX;
6484 ptr->stores = NULL_RTX;
6485 ptr->reaching_reg = NULL_RTX;
6486 ptr->invalid = 0;
6487 ptr->index = 0;
6488 ptr->hash_index = 0;
6489 pre_ldst_mems = ptr;
6492 return ptr;
6495 /* Free up an individual ldst entry. */
6497 static void
6498 free_ldst_entry (ptr)
6499 struct ls_expr * ptr;
6501 free_INSN_LIST_list (& ptr->loads);
6502 free_INSN_LIST_list (& ptr->stores);
6504 free (ptr);
6507 /* Free up all memory associated with the ldst list. */
6509 static void
6510 free_ldst_mems ()
6512 while (pre_ldst_mems)
6514 struct ls_expr * tmp = pre_ldst_mems;
6516 pre_ldst_mems = pre_ldst_mems->next;
6518 free_ldst_entry (tmp);
6521 pre_ldst_mems = NULL;
6524 /* Dump debugging info about the ldst list. */
6526 static void
6527 print_ldst_list (file)
6528 FILE * file;
6530 struct ls_expr * ptr;
6532 fprintf (file, "LDST list: \n");
6534 for (ptr = first_ls_expr(); ptr != NULL; ptr = next_ls_expr (ptr))
6536 fprintf (file, " Pattern (%3d): ", ptr->index);
6538 print_rtl (file, ptr->pattern);
6540 fprintf (file, "\n Loads : ");
6542 if (ptr->loads)
6543 print_rtl (file, ptr->loads);
6544 else
6545 fprintf (file, "(nil)");
6547 fprintf (file, "\n Stores : ");
6549 if (ptr->stores)
6550 print_rtl (file, ptr->stores);
6551 else
6552 fprintf (file, "(nil)");
6554 fprintf (file, "\n\n");
6557 fprintf (file, "\n");
6560 /* Return X's entry in the list of ldst only expressions, or NULL if there is no valid one. */
6562 static struct ls_expr *
6563 find_rtx_in_ldst (x)
6564 rtx x;
6566 struct ls_expr * ptr;
6568 for (ptr = pre_ldst_mems; ptr != NULL; ptr = ptr->next)
6569 if (expr_equiv_p (ptr->pattern, x) && ! ptr->invalid)
6570 return ptr;
6572 return NULL;
6575 /* Assign each element of the list of mems a monotonically increasing value. */
6577 static int
6578 enumerate_ldsts ()
6580 struct ls_expr * ptr;
6581 int n = 0;
6583 for (ptr = pre_ldst_mems; ptr != NULL; ptr = ptr->next)
6584 ptr->index = n++;
6586 return n;
6589 /* Return first item in the list. */
6591 static inline struct ls_expr *
6592 first_ls_expr ()
6594 return pre_ldst_mems;
6597 /* Return the next item in the list after the specified one. */
6599 static inline struct ls_expr *
6600 next_ls_expr (ptr)
6601 struct ls_expr * ptr;
6603 return ptr->next;
6606 /* Load Motion for loads which only kill themselves. */
6608 /* Return true if x is a simple MEM operation, with no registers or
6609 side effects. These are the types of loads we consider for the
6610 ld_motion list, otherwise we let the usual aliasing take care of it. */
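/* Hedged examples of the classification: a fixed-address scalar load
   such as

       (mem:SI (symbol_ref "i"))

   is "simple", while a volatile MEM, a BLKmode MEM, or a MEM whose
   address can vary, e.g. (mem:SI (plus (reg 100) (const_int 4))),
   is not and is left to the normal aliasing machinery.  */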
6612 static int
6613 simple_mem (x)
6614 rtx x;
6616 if (GET_CODE (x) != MEM)
6617 return 0;
6619 if (MEM_VOLATILE_P (x))
6620 return 0;
6622 if (GET_MODE (x) == BLKmode)
6623 return 0;
6625 if (!rtx_varies_p (XEXP (x, 0), 0))
6626 return 1;
6628 return 0;
6631 /* Make sure there isn't a buried reference in this pattern anywhere.
6632 If there is, invalidate the entry for it since we're not capable
6633 of fixing it up just yet. We have to be sure we know about ALL
6634 loads since the aliasing code will assume the entries in the
6635 ld_motion list do not alias one another. If we miss a load, we will get
6636 the wrong value since gcse might common it and we won't know to
6637 fix it up. */
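/* A hedged example of a "buried" reference: in

       (set (reg:SI 100) (plus:SI (mem:SI (symbol_ref "i"))
                                  (const_int 1)))

   the MEM is a simple load but it sits inside the PLUS rather than
   being the whole SET_SRC, so its ldst entry is marked invalid rather
   than risk a wrong commoning later.  */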
6639 static void
6640 invalidate_any_buried_refs (x)
6641 rtx x;
6643 const char * fmt;
6644 int i, j;
6645 struct ls_expr * ptr;
6647 /* Invalidate it in the list. */
6648 if (GET_CODE (x) == MEM && simple_mem (x))
6650 ptr = ldst_entry (x);
6651 ptr->invalid = 1;
6654 /* Recursively process the insn. */
6655 fmt = GET_RTX_FORMAT (GET_CODE (x));
6657 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
6659 if (fmt[i] == 'e')
6660 invalidate_any_buried_refs (XEXP (x, i));
6661 else if (fmt[i] == 'E')
6662 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
6663 invalidate_any_buried_refs (XVECEXP (x, i, j));
6667 /* Find all the 'simple' MEMs which are used in LOADs and STORES. Simple
6668 being defined as MEM loads and stores to symbols, with no
6669 side effects and no registers in the expression. If there are any
6670 uses/defs which don't match these criteria, the entry is invalidated and
6671 trimmed out later. */
6673 static void
6674 compute_ld_motion_mems ()
6676 struct ls_expr * ptr;
6677 basic_block bb;
6678 rtx insn;
6680 pre_ldst_mems = NULL;
6682 FOR_EACH_BB (bb)
6684 for (insn = bb->head;
6685 insn && insn != NEXT_INSN (bb->end);
6686 insn = NEXT_INSN (insn))
6688 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
6690 if (GET_CODE (PATTERN (insn)) == SET)
6692 rtx src = SET_SRC (PATTERN (insn));
6693 rtx dest = SET_DEST (PATTERN (insn));
6695 /* Check for a simple LOAD... */
6696 if (GET_CODE (src) == MEM && simple_mem (src))
6698 ptr = ldst_entry (src);
6699 if (GET_CODE (dest) == REG)
6700 ptr->loads = alloc_INSN_LIST (insn, ptr->loads);
6701 else
6702 ptr->invalid = 1;
6704 else
6706 /* Make sure there isn't a buried load somewhere. */
6707 invalidate_any_buried_refs (src);
6710 /* Check for stores. Don't worry about aliased ones, they
6711 will block any movement we might do later. We only care
6712 about this exact pattern since those are the only
6713 circumstance that we will ignore the aliasing info. */
6714 if (GET_CODE (dest) == MEM && simple_mem (dest))
6716 ptr = ldst_entry (dest);
6718 if (GET_CODE (src) != MEM
6719 && GET_CODE (src) != ASM_OPERANDS)
6720 ptr->stores = alloc_INSN_LIST (insn, ptr->stores);
6721 else
6722 ptr->invalid = 1;
6725 else
6726 invalidate_any_buried_refs (PATTERN (insn));
6732 /* Remove any references that have been either invalidated or are not in the
6733 expression list for pre gcse. */
6735 static void
6736 trim_ld_motion_mems ()
6738 struct ls_expr * last = NULL;
6739 struct ls_expr * ptr = first_ls_expr ();
6741 while (ptr != NULL)
6743 int del = ptr->invalid;
6744 struct expr * expr = NULL;
6746 /* Delete if entry has been made invalid. */
6747 if (!del)
6749 unsigned int i;
6751 del = 1;
6752 /* Delete if we cannot find this mem in the expression list. */
6753 for (i = 0; i < expr_hash_table.size && del; i++)
6755 for (expr = expr_hash_table.table[i];
6756 expr != NULL;
6757 expr = expr->next_same_hash)
6758 if (expr_equiv_p (expr->expr, ptr->pattern))
6760 del = 0;
6761 break;
6766 if (del)
6768 if (last != NULL)
6770 last->next = ptr->next;
6771 free_ldst_entry (ptr);
6772 ptr = last->next;
6774 else
6776 pre_ldst_mems = pre_ldst_mems->next;
6777 free_ldst_entry (ptr);
6778 ptr = pre_ldst_mems;
6781 else
6783 /* Set the expression field if we are keeping it. */
6784 last = ptr;
6785 ptr->expr = expr;
6786 ptr = ptr->next;
6790 /* Show the world what we've found. */
6791 if (gcse_file && pre_ldst_mems != NULL)
6792 print_ldst_list (gcse_file);
6795 /* This routine will take an expression which we are replacing with
6796 a reaching register, and update any stores that are needed if
6797 that expression is in the ld_motion list. Stores are updated by
6798 copying their SRC to the reaching register, and then storing
6799 the reaching register into the store location. This keeps the
6800 correct value in the reaching register for the loads. */
6802 static void
6803 update_ld_motion_stores (expr)
6804 struct expr * expr;
6806 struct ls_expr * mem_ptr;
6808 if ((mem_ptr = find_rtx_in_ldst (expr->expr)))
6810 /* We can try to find just the REACHED stores, but it shouldn't
6811 matter to set the reaching reg everywhere... some might be
6812 dead and should be eliminated later. */
6814 /* We replace SET mem = expr with
6815 SET reg = expr
6816 SET mem = reg , where reg is the
6817 reaching reg used in the load. */
6818 rtx list = mem_ptr->stores;
6820 for ( ; list != NULL_RTX; list = XEXP (list, 1))
6822 rtx insn = XEXP (list, 0);
6823 rtx pat = PATTERN (insn);
6824 rtx src = SET_SRC (pat);
6825 rtx reg = expr->reaching_reg;
6826 rtx copy, new;
6828 /* If we've already copied it, continue. */
6829 if (expr->reaching_reg == src)
6830 continue;
6832 if (gcse_file)
6834 fprintf (gcse_file, "PRE: store updated with reaching reg ");
6835 print_rtl (gcse_file, expr->reaching_reg);
6836 fprintf (gcse_file, ":\n ");
6837 print_inline_rtx (gcse_file, insn, 8);
6838 fprintf (gcse_file, "\n");
6841 copy = gen_move_insn ( reg, SET_SRC (pat));
6842 new = emit_insn_before (copy, insn);
6843 record_one_set (REGNO (reg), new);
6844 SET_SRC (pat) = reg;
6846 /* un-recognize this pattern since it's probably different now. */
6847 INSN_CODE (insn) = -1;
6848 gcse_create_count++;
6853 /* Store motion code. */
6855 /* This is used to communicate the target bitvector we want to use in the
6856 reg_set_info routine when called via the note_stores mechanism. */
6857 static sbitmap * regvec;
6859 /* Used in computing the reverse edge graph bit vectors. */
6860 static sbitmap * st_antloc;
6862 /* Global holding the number of store expressions we are dealing with. */
6863 static int num_stores;
6865 /* Check whether we need to mark a register as set. Called from note_stores. */
6867 static void
6868 reg_set_info (dest, setter, data)
6869 rtx dest, setter ATTRIBUTE_UNUSED;
6870 void * data ATTRIBUTE_UNUSED;
6872 if (GET_CODE (dest) == SUBREG)
6873 dest = SUBREG_REG (dest);
6875 if (GET_CODE (dest) == REG)
6876 SET_BIT (*regvec, REGNO (dest));
6879 /* Return nonzero if the register operands of expression X are OK, i.e.
6880 none of them is killed anywhere in basic block BB. */
6882 static int
6883 store_ops_ok (x, bb)
6884 rtx x;
6885 basic_block bb;
6887 int i;
6888 enum rtx_code code;
6889 const char * fmt;
6891 /* Repeat is used to turn tail-recursion into iteration. */
6892 repeat:
6894 if (x == 0)
6895 return 1;
6897 code = GET_CODE (x);
6898 switch (code)
6900 case REG:
6901 /* If a reg has changed after us in this
6902 block, the operand has been killed. */
6903 return ! TEST_BIT (reg_set_in_block[bb->index], REGNO (x));
6905 case MEM:
6906 x = XEXP (x, 0);
6907 goto repeat;
6909 case PRE_DEC:
6910 case PRE_INC:
6911 case POST_DEC:
6912 case POST_INC:
6913 return 0;
6915 case PC:
6916 case CC0: /*FIXME*/
6917 case CONST:
6918 case CONST_INT:
6919 case CONST_DOUBLE:
6920 case CONST_VECTOR:
6921 case SYMBOL_REF:
6922 case LABEL_REF:
6923 case ADDR_VEC:
6924 case ADDR_DIFF_VEC:
6925 return 1;
6927 default:
6928 break;
6931 i = GET_RTX_LENGTH (code) - 1;
6932 fmt = GET_RTX_FORMAT (code);
6934 for (; i >= 0; i--)
6936 if (fmt[i] == 'e')
6938 rtx tem = XEXP (x, i);
6940 /* If we are about to do the last recursive call
6941 needed at this level, change it into iteration.
6942 This function is called enough to be worth it. */
6943 if (i == 0)
6945 x = tem;
6946 goto repeat;
6949 if (! store_ops_ok (tem, bb))
6950 return 0;
6952 else if (fmt[i] == 'E')
6954 int j;
6956 for (j = 0; j < XVECLEN (x, i); j++)
6958 if (! store_ops_ok (XVECEXP (x, i, j), bb))
6959 return 0;
6964 return 1;
6967 /* Determine whether INSN is a MEM store pattern that we will consider moving. */
6969 static void
6970 find_moveable_store (insn)
6971 rtx insn;
6973 struct ls_expr * ptr;
6974 rtx dest = PATTERN (insn);
6976 if (GET_CODE (dest) != SET
6977 || GET_CODE (SET_SRC (dest)) == ASM_OPERANDS)
6978 return;
6980 dest = SET_DEST (dest);
6982 if (GET_CODE (dest) != MEM || MEM_VOLATILE_P (dest)
6983 || GET_MODE (dest) == BLKmode)
6984 return;
6986 if (GET_CODE (XEXP (dest, 0)) != SYMBOL_REF)
6987 return;
6989 if (rtx_varies_p (XEXP (dest, 0), 0))
6990 return;
6992 ptr = ldst_entry (dest);
6993 ptr->stores = alloc_INSN_LIST (insn, ptr->stores);
6996 /* Build the store table: find the stores we might consider moving and
6997 record the registers set in each block. Return the number of candidates. */
6999 static int
7000 compute_store_table ()
7002 int ret;
7003 basic_block bb;
7004 unsigned regno;
7005 rtx insn, pat;
7007 max_gcse_regno = max_reg_num ();
7009 reg_set_in_block = (sbitmap *) sbitmap_vector_alloc (last_basic_block,
7010 max_gcse_regno);
7011 sbitmap_vector_zero (reg_set_in_block, last_basic_block);
7012 pre_ldst_mems = 0;
7014 /* Find all the stores we care about. */
7015 FOR_EACH_BB (bb)
7017 regvec = & (reg_set_in_block[bb->index]);
7018 for (insn = bb->end;
7019 insn && insn != PREV_INSN (bb->head);
7020 insn = PREV_INSN (insn))
7022 /* Ignore anything that is not a normal insn. */
7023 if (! INSN_P (insn))
7024 continue;
7026 if (GET_CODE (insn) == CALL_INSN)
7028 bool clobbers_all = false;
7029 #ifdef NON_SAVING_SETJMP
7030 if (NON_SAVING_SETJMP
7031 && find_reg_note (insn, REG_SETJMP, NULL_RTX))
7032 clobbers_all = true;
7033 #endif
7035 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7036 if (clobbers_all
7037 || TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
7038 SET_BIT (reg_set_in_block[bb->index], regno);
7041 pat = PATTERN (insn);
7042 note_stores (pat, reg_set_info, NULL);
7044 /* Now that we've marked regs, look for stores. */
7045 if (GET_CODE (pat) == SET)
7046 find_moveable_store (insn);
7050 ret = enumerate_ldsts ();
7052 if (gcse_file)
7054 fprintf (gcse_file, "Store Motion Expressions.\n");
7055 print_ldst_list (gcse_file);
7058 return ret;
7061 /* Check to see if the load X is aliased with STORE_PATTERN. */
7063 static int
7064 load_kills_store (x, store_pattern)
7065 rtx x, store_pattern;
7067 if (true_dependence (x, GET_MODE (x), store_pattern, rtx_addr_varies_p))
7068 return 1;
7069 return 0;
7072 /* Go through the entire insn X, looking for any loads which might alias
7073 STORE_PATTERN. Return 1 if found. */
7075 static int
7076 find_loads (x, store_pattern)
7077 rtx x, store_pattern;
7079 const char * fmt;
7080 int i, j;
7081 int ret = 0;
7083 if (!x)
7084 return 0;
7086 if (GET_CODE (x) == SET)
7087 x = SET_SRC (x);
7089 if (GET_CODE (x) == MEM)
7091 if (load_kills_store (x, store_pattern))
7092 return 1;
7095 /* Recursively process the insn. */
7096 fmt = GET_RTX_FORMAT (GET_CODE (x));
7098 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0 && !ret; i--)
7100 if (fmt[i] == 'e')
7101 ret |= find_loads (XEXP (x, i), store_pattern);
7102 else if (fmt[i] == 'E')
7103 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
7104 ret |= find_loads (XVECEXP (x, i, j), store_pattern);
7106 return ret;
7109 /* Check if INSN kills the store pattern X (is aliased with it).
7110 Return 1 if it does. */
7112 static int
7113 store_killed_in_insn (x, insn)
7114 rtx x, insn;
7116 if (GET_RTX_CLASS (GET_CODE (insn)) != 'i')
7117 return 0;
7119 if (GET_CODE (insn) == CALL_INSN)
7121 /* A normal or pure call might read from pattern,
7122 but a const call will not. */
7123 return ! CONST_OR_PURE_CALL_P (insn) || pure_call_p (insn);
7126 if (GET_CODE (PATTERN (insn)) == SET)
7128 rtx pat = PATTERN (insn);
7129 /* Check for memory stores to aliased objects. */
7130 if (GET_CODE (SET_DEST (pat)) == MEM && !expr_equiv_p (SET_DEST (pat), x))
7131 /* Pretend it's a load and check for aliasing. */
7132 if (find_loads (SET_DEST (pat), x))
7133 return 1;
7134 return find_loads (SET_SRC (pat), x);
7136 else
7137 return find_loads (PATTERN (insn), x);
7140 /* Returns 1 if the expression X is loaded or clobbered on or after INSN
7141 within basic block BB. */
7143 static int
7144 store_killed_after (x, insn, bb)
7145 rtx x, insn;
7146 basic_block bb;
7148 rtx last = bb->end;
7150 if (insn == last)
7151 return 0;
7153 /* Check if the register operands of the store are OK in this block.
7154 Note that if registers are changed ANYWHERE in the block, we'll
7155 decide we can't move it, regardless of whether it changed above
7156 or below the store. This could be improved by checking the register
7157 operands while looking for aliasing in each insn. */
7158 if (!store_ops_ok (XEXP (x, 0), bb))
7159 return 1;
7161 for ( ; insn && insn != NEXT_INSN (last); insn = NEXT_INSN (insn))
7162 if (store_killed_in_insn (x, insn))
7163 return 1;
7165 return 0;
7168 /* Returns 1 if the expression X is loaded or clobbered on or before INSN
7169 within basic block BB. */
7170 static int
7171 store_killed_before (x, insn, bb)
7172 rtx x, insn;
7173 basic_block bb;
7175 rtx first = bb->head;
7177 if (insn == first)
7178 return store_killed_in_insn (x, insn);
7180 /* Check if the register operands of the store are OK in this block.
7181 Note that if registers are changed ANYWHERE in the block, we'll
7182 decide we can't move it, regardless of whether it changed above
7183 or below the store. This could be improved by checking the register
7184 operands while looking for aliasing in each insn. */
7185 if (!store_ops_ok (XEXP (x, 0), bb))
7186 return 1;
7188 for ( ; insn && insn != PREV_INSN (first); insn = PREV_INSN (insn))
7189 if (store_killed_in_insn (x, insn))
7190 return 1;
7192 return 0;
7195 #define ANTIC_STORE_LIST(x) ((x)->loads)
7196 #define AVAIL_STORE_LIST(x) ((x)->stores)
7198 /* Given the table of available store insns at the end of blocks,
7199 determine which ones are not killed by aliasing, and generate
7200 the appropriate vectors for gen and killed. */
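/* A hedged single-block example of the two properties:

       ST mem = x      (not killed before it  -> anticipatable)
       L  y = mem      (aliasing load)
       ST mem = z      (not killed after it   -> available)

   The first store is anticipatable only, since the load kills it
   afterwards; the second is available only, since the load kills it
   beforehand.  */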
7201 static void
7202 build_store_vectors ()
7204 basic_block bb, b;
7205 rtx insn, st;
7206 struct ls_expr * ptr;
7208 /* Build the gen_vector. This is any store in the table which is not killed
7209 by aliasing later in its block. */
7210 ae_gen = (sbitmap *) sbitmap_vector_alloc (last_basic_block, num_stores);
7211 sbitmap_vector_zero (ae_gen, last_basic_block);
7213 st_antloc = (sbitmap *) sbitmap_vector_alloc (last_basic_block, num_stores);
7214 sbitmap_vector_zero (st_antloc, last_basic_block);
7216 for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
7218 /* Put all the stores into either the antic list, or the avail list,
7219 or both. */
7220 rtx store_list = ptr->stores;
7221 ptr->stores = NULL_RTX;
7223 for (st = store_list; st != NULL; st = XEXP (st, 1))
7225 insn = XEXP (st, 0);
7226 bb = BLOCK_FOR_INSN (insn);
7228 if (!store_killed_after (ptr->pattern, insn, bb))
7230 /* If we've already seen an available expression in this block,
7231 we can delete the one we saw already (it occurs earlier in
7232 the block) and replace it with this one. We'll copy the
7233 old SRC expression to an unused register in case there
7234 are any side effects. */
7235 if (TEST_BIT (ae_gen[bb->index], ptr->index))
7237 /* Find previous store. */
7238 rtx st;
7239 for (st = AVAIL_STORE_LIST (ptr); st ; st = XEXP (st, 1))
7240 if (BLOCK_FOR_INSN (XEXP (st, 0)) == bb)
7241 break;
7242 if (st)
7244 rtx r = gen_reg_rtx (GET_MODE (ptr->pattern));
7245 if (gcse_file)
7246 fprintf (gcse_file, "Removing redundant store:\n");
7247 replace_store_insn (r, XEXP (st, 0), bb);
7248 XEXP (st, 0) = insn;
7249 continue;
7252 SET_BIT (ae_gen[bb->index], ptr->index);
7253 AVAIL_STORE_LIST (ptr) = alloc_INSN_LIST (insn,
7254 AVAIL_STORE_LIST (ptr));
7257 if (!store_killed_before (ptr->pattern, insn, bb))
7259 SET_BIT (st_antloc[BLOCK_NUM (insn)], ptr->index);
7260 ANTIC_STORE_LIST (ptr) = alloc_INSN_LIST (insn,
7261 ANTIC_STORE_LIST (ptr));
7265 /* Free the original list of store insns. */
7266 free_INSN_LIST_list (&store_list);
7269 ae_kill = (sbitmap *) sbitmap_vector_alloc (last_basic_block, num_stores);
7270 sbitmap_vector_zero (ae_kill, last_basic_block);
7272 transp = (sbitmap *) sbitmap_vector_alloc (last_basic_block, num_stores);
7273 sbitmap_vector_zero (transp, last_basic_block);
7275 for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
7276 FOR_EACH_BB (b)
7278 if (store_killed_after (ptr->pattern, b->head, b))
7280 /* The anticipatable expression is not killed if it's gen'd. */
7282 We leave this check out for now. If we have a code sequence
7283 in a block which looks like:
7284 ST MEMa = x
7285 L y = MEMa
7286 ST MEMa = z
7287 We should flag this as having an ANTIC expression, NOT
7288 transparent, NOT killed, and AVAIL.
7289 Unfortunately, since we haven't re-written all loads to
7290 use the reaching reg, we'll end up doing an incorrect
7291 load in the middle here if we push the store down. It happens in
7292 gcc.c-torture/execute/960311-1.c with -O3
7293 If we always kill it in this case, we'll sometimes do
7294 unnecessary work, but it shouldn't actually hurt anything.
7295 if (!TEST_BIT (ae_gen[b], ptr->index)). */
7296 SET_BIT (ae_kill[b->index], ptr->index);
7298 else
7299 SET_BIT (transp[b->index], ptr->index);
7302 /* Any block with no exits calls some non-returning function, so
7303 we'd better mark the store killed here, or we might not store to
7304 it at all. If we knew it was abort, we wouldn't have to store,
7305 but we don't know that for sure. */
7306 if (gcse_file)
7308 fprintf (gcse_file, "ST_avail and ST_antic (shown under loads..)\n");
7309 print_ldst_list (gcse_file);
7310 dump_sbitmap_vector (gcse_file, "st_antloc", "", st_antloc, last_basic_block);
7311 dump_sbitmap_vector (gcse_file, "st_kill", "", ae_kill, last_basic_block);
7312 dump_sbitmap_vector (gcse_file, "Transpt", "", transp, last_basic_block);
7313 dump_sbitmap_vector (gcse_file, "st_avloc", "", ae_gen, last_basic_block);
7317 /* Insert an instruction at the beginning of a basic block, and update
7318 the BLOCK_HEAD if needed. */
7320 static void
7321 insert_insn_start_bb (insn, bb)
7322 rtx insn;
7323 basic_block bb;
7325 /* Insert at start of successor block. */
7326 rtx prev = PREV_INSN (bb->head);
7327 rtx before = bb->head;
7328 while (before != 0)
7330 if (GET_CODE (before) != CODE_LABEL
7331 && (GET_CODE (before) != NOTE
7332 || NOTE_LINE_NUMBER (before) != NOTE_INSN_BASIC_BLOCK))
7333 break;
7334 prev = before;
7335 if (prev == bb->end)
7336 break;
7337 before = NEXT_INSN (before);
7340 insn = emit_insn_after (insn, prev);
7342 if (gcse_file)
7344 fprintf (gcse_file, "STORE_MOTION insert store at start of BB %d:\n",
7345 bb->index);
7346 print_inline_rtx (gcse_file, insn, 6);
7347 fprintf (gcse_file, "\n");
7351 /* This routine will insert a store on an edge. EXPR is the ldst entry for
7352 the memory reference, and E is the edge to insert it on. Returns nonzero
7353 if an edge insertion was performed. */
7355 static int
7356 insert_store (expr, e)
7357 struct ls_expr * expr;
7358 edge e;
7360 rtx reg, insn;
7361 basic_block bb;
7362 edge tmp;
7364 /* We did all the deletions before this insert, so if we didn't delete a
7365 store, then we haven't set the reaching reg yet either. */
7366 if (expr->reaching_reg == NULL_RTX)
7367 return 0;
7369 reg = expr->reaching_reg;
7370 insn = gen_move_insn (expr->pattern, reg);
7372 /* If we are inserting this expression on ALL predecessor edges of a BB,
7373 insert it at the start of the BB, and reset the insert bits on the other
7374 edges so we don't try to insert it on the other edges. */
7375 bb = e->dest;
7376 for (tmp = e->dest->pred; tmp ; tmp = tmp->pred_next)
7378 int index = EDGE_INDEX (edge_list, tmp->src, tmp->dest);
7379 if (index == EDGE_INDEX_NO_EDGE)
7380 abort ();
7381 if (! TEST_BIT (pre_insert_map[index], expr->index))
7382 break;
7385 /* If tmp is NULL, we found an insertion on every edge, blank the
7386 insertion vector for these edges, and insert at the start of the BB. */
7387 if (!tmp && bb != EXIT_BLOCK_PTR)
7389 for (tmp = e->dest->pred; tmp ; tmp = tmp->pred_next)
7391 int index = EDGE_INDEX (edge_list, tmp->src, tmp->dest);
7392 RESET_BIT (pre_insert_map[index], expr->index);
7394 insert_insn_start_bb (insn, bb);
7395 return 0;
7398 /* We can't insert on this edge, so we'll insert at the head of the
7399 successor block. See Morgan, sec 10.5. */
7400 if ((e->flags & EDGE_ABNORMAL) == EDGE_ABNORMAL)
7402 insert_insn_start_bb (insn, bb);
7403 return 0;
7406 insert_insn_on_edge (insn, e);
7408 if (gcse_file)
7410 fprintf (gcse_file, "STORE_MOTION insert insn on edge (%d, %d):\n",
7411 e->src->index, e->dest->index);
7412 print_inline_rtx (gcse_file, insn, 6);
7413 fprintf (gcse_file, "\n");
7416 return 1;
7419 /* This routine will replace a store with a SET to a specified register. */
7421 static void
7422 replace_store_insn (reg, del, bb)
7423 rtx reg, del;
7424 basic_block bb;
7426 rtx insn;
7428 insn = gen_move_insn (reg, SET_SRC (PATTERN (del)));
7429 insn = emit_insn_after (insn, del);
7431 if (gcse_file)
7433 fprintf (gcse_file,
7434 "STORE_MOTION delete insn in BB %d:\n ", bb->index);
7435 print_inline_rtx (gcse_file, del, 6);
7436 fprintf (gcse_file, "\nSTORE MOTION replaced with insn:\n ");
7437 print_inline_rtx (gcse_file, insn, 6);
7438 fprintf (gcse_file, "\n");
7441 delete_insn (del);
7445 /* Delete a store, but copy the value that would have been stored into
7446 the reaching_reg for later storing. */
7448 static void
7449 delete_store (expr, bb)
7450 struct ls_expr * expr;
7451 basic_block bb;
7453 rtx reg, i, del;
7455 if (expr->reaching_reg == NULL_RTX)
7456 expr->reaching_reg = gen_reg_rtx (GET_MODE (expr->pattern));
7459 /* If there is more than 1 store, the earlier ones will be dead,
7460 but it doesn't hurt to replace them here. */
7461 reg = expr->reaching_reg;
7463 for (i = AVAIL_STORE_LIST (expr); i; i = XEXP (i, 1))
7465 del = XEXP (i, 0);
7466 if (BLOCK_FOR_INSN (del) == bb)
7468 /* We know there is only one since we deleted redundant
7469 ones during the available computation. */
7470 replace_store_insn (reg, del, bb);
7471 break;
7476 /* Free memory used by store motion. */
7478 static void
7479 free_store_memory ()
7481 free_ldst_mems ();
7483 if (ae_gen)
7484 sbitmap_vector_free (ae_gen);
7485 if (ae_kill)
7486 sbitmap_vector_free (ae_kill);
7487 if (transp)
7488 sbitmap_vector_free (transp);
7489 if (st_antloc)
7490 sbitmap_vector_free (st_antloc);
7491 if (pre_insert_map)
7492 sbitmap_vector_free (pre_insert_map);
7493 if (pre_delete_map)
7494 sbitmap_vector_free (pre_delete_map);
7495 if (reg_set_in_block)
7496 sbitmap_vector_free (reg_set_in_block);
7498 ae_gen = ae_kill = transp = st_antloc = NULL;
7499 pre_insert_map = pre_delete_map = reg_set_in_block = NULL;
7502 /* Perform store motion. Much like gcse, except we move expressions the
7503 other way by looking at the flowgraph in reverse. */
7505 static void
7506 store_motion ()
7508 basic_block bb;
7509 int x;
7510 struct ls_expr * ptr;
7511 int update_flow = 0;
7513 if (gcse_file)
7515 fprintf (gcse_file, "before store motion\n");
7516 print_rtl (gcse_file, get_insns ());
7520 init_alias_analysis ();
7522 /* Find all the stores that are live to the end of their block. */
7523 num_stores = compute_store_table ();
7524 if (num_stores == 0)
7526 sbitmap_vector_free (reg_set_in_block);
7527 end_alias_analysis ();
7528 return;
7531 /* Now compute what's actually available to move. */
7532 add_noreturn_fake_exit_edges ();
7533 build_store_vectors ();
7535 edge_list = pre_edge_rev_lcm (gcse_file, num_stores, transp, ae_gen,
7536 st_antloc, ae_kill, &pre_insert_map,
7537 &pre_delete_map);
7539 /* Now we want to insert the new stores which are going to be needed. */
7540 for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
7542 FOR_EACH_BB (bb)
7543 if (TEST_BIT (pre_delete_map[bb->index], ptr->index))
7544 delete_store (ptr, bb);
7546 for (x = 0; x < NUM_EDGES (edge_list); x++)
7547 if (TEST_BIT (pre_insert_map[x], ptr->index))
7548 update_flow |= insert_store (ptr, INDEX_EDGE (edge_list, x));
7551 if (update_flow)
7552 commit_edge_insertions ();
7554 free_store_memory ();
7555 free_edge_list (edge_list);
7556 remove_fake_edges ();
7557 end_alias_analysis ();
7561 /* Entry point for jump bypassing optimization pass. */
7563 int
7564 bypass_jumps (file)
7565 FILE *file;
7567 int changed;
7569 /* We do not construct an accurate cfg in functions which call
7570 setjmp, so just punt to be safe. */
7571 if (current_function_calls_setjmp)
7572 return 0;
7574 /* For calling dump_foo fns from gdb. */
7575 debug_stderr = stderr;
7576 gcse_file = file;
7578 /* Identify the basic block information for this function, including
7579 successors and predecessors. */
7580 max_gcse_regno = max_reg_num ();
7582 if (file)
7583 dump_flow_info (file);
7585 /* Return if there's nothing to do. */
7586 if (n_basic_blocks <= 1)
7587 return 0;
7589 /* Trying to perform global optimizations on flow graphs which have
7590 a high connectivity will take a long time and is unlikely to be
7591 particularly useful.
7593 In normal circumstances a cfg should have about twice as many edges
7594 as blocks. But we do not want to punish small functions which have
7595 a couple switch statements. So we require a relatively large number
7596 of basic blocks and the ratio of edges to blocks to be high. */
7597 if (n_basic_blocks > 1000 && n_edges / n_basic_blocks >= 20)
7599 if (warn_disabled_optimization)
7600 warning ("BYPASS disabled: %d > 1000 basic blocks and %d >= 20 edges/basic block",
7601 n_basic_blocks, n_edges / n_basic_blocks);
7602 return 0;
7605 /* If allocating memory for the cprop bitmap would take up too much
7606 storage it's better just to disable the optimization. */
7607 if ((n_basic_blocks
7608 * SBITMAP_SET_SIZE (max_gcse_regno)
7609 * sizeof (SBITMAP_ELT_TYPE)) > MAX_GCSE_MEMORY)
7611 if (warn_disabled_optimization)
7612 warning ("GCSE disabled: %d basic blocks and %d registers",
7613 n_basic_blocks, max_gcse_regno);
7615 return 0;
7618 /* See what modes support reg/reg copy operations. */
7619 if (! can_copy_init_p)
7621 compute_can_copy ();
7622 can_copy_init_p = 1;
7625 gcc_obstack_init (&gcse_obstack);
7626 bytes_used = 0;
7628 /* We need alias. */
7629 init_alias_analysis ();
7631 /* Record where pseudo-registers are set. This data is kept accurate
7632 during each pass. ??? We could also record hard-reg information here
7633 [since it's unchanging], however it is currently done during hash table
7634 computation.
7636 It may be tempting to compute MEM set information here too, but MEM sets
7637 will be subject to code motion one day and thus we need to compute
7638 information about memory sets when we build the hash tables. */
7640 alloc_reg_set_mem (max_gcse_regno);
7641 compute_sets (get_insns ());
7643 max_gcse_regno = max_reg_num ();
7644 alloc_gcse_mem (get_insns ());
7645 changed = one_cprop_pass (1, 1, 1);
7646 free_gcse_mem ();
7648 if (file)
7650 fprintf (file, "BYPASS of %s: %d basic blocks, ",
7651 current_function_name, n_basic_blocks);
7652 fprintf (file, "%d bytes\n\n", bytes_used);
7655 obstack_free (&gcse_obstack, NULL);
7656 free_reg_set_mem ();
7658 /* We are finished with alias. */
7659 end_alias_analysis ();
7660 allocate_reg_info (max_reg_num (), FALSE, FALSE);
7662 return changed;
7665 #include "gt-gcse.h"