1 /* Global common subexpression elimination/Partial redundancy elimination
2 and global constant/copy propagation for GNU compiler.
3 Copyright (C) 1997, 1998, 1999, 2000, 2001, 2002
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 2, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the Free
20 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
21 02111-1307, USA. */
23 /* TODO
24 - reordering of memory allocation and freeing to be more space efficient
25 - do rough calc of how many regs are needed in each block, and a rough
26 calc of how many regs are available in each class and use that to
27 throttle back the code in cases where RTX_COST is minimal.
28 - a store to the same address as a load does not kill the load if the
29 source of the store is also the destination of the load. Handling this
30 allows more load motion, particularly out of loops.
31 - ability to realloc sbitmap vectors would allow one initial computation
32 of reg_set_in_block with only subsequent additions, rather than
33 recomputing it for each pass
37 /* References searched while implementing this.
39 Compilers Principles, Techniques and Tools
40 Aho, Sethi, Ullman
41 Addison-Wesley, 1988
43 Global Optimization by Suppression of Partial Redundancies
44 E. Morel, C. Renvoise
45 Communications of the ACM, Vol. 22, Num. 2, Feb. 1979
47 A Portable Machine-Independent Global Optimizer - Design and Measurements
48 Frederick Chow
49 Stanford Ph.D. thesis, Dec. 1983
51 A Fast Algorithm for Code Movement Optimization
52 D.M. Dhamdhere
53 SIGPLAN Notices, Vol. 23, Num. 10, Oct. 1988
55 A Solution to a Problem with Morel and Renvoise's
56 Global Optimization by Suppression of Partial Redundancies
57 K-H Drechsler, M.P. Stadel
58 ACM TOPLAS, Vol. 10, Num. 4, Oct. 1988
60 Practical Adaptation of the Global Optimization
61 Algorithm of Morel and Renvoise
62 D.M. Dhamdhere
63 ACM TOPLAS, Vol. 13, Num. 2. Apr. 1991
65 Efficiently Computing Static Single Assignment Form and the Control
66 Dependence Graph
67 R. Cytron, J. Ferrante, B.K. Rosen, M.N. Wegman, and F.K. Zadeck
68 ACM TOPLAS, Vol. 13, Num. 4, Oct. 1991
70 Lazy Code Motion
71 J. Knoop, O. Ruthing, B. Steffen
72 ACM SIGPLAN Notices Vol. 27, Num. 7, Jul. 1992, '92 Conference on PLDI
74 What's In a Region? Or Computing Control Dependence Regions in Near-Linear
75 Time for Reducible Flow Control
76 Thomas Ball
77 ACM Letters on Programming Languages and Systems,
78 Vol. 2, Num. 1-4, Mar-Dec 1993
80 An Efficient Representation for Sparse Sets
81 Preston Briggs, Linda Torczon
82 ACM Letters on Programming Languages and Systems,
83 Vol. 2, Num. 1-4, Mar-Dec 1993
85 A Variation of Knoop, Ruthing, and Steffen's Lazy Code Motion
86 K-H Drechsler, M.P. Stadel
87 ACM SIGPLAN Notices, Vol. 28, Num. 5, May 1993
89 Partial Dead Code Elimination
90 J. Knoop, O. Ruthing, B. Steffen
91 ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994
93 Effective Partial Redundancy Elimination
94 P. Briggs, K.D. Cooper
95 ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994
97 The Program Structure Tree: Computing Control Regions in Linear Time
98 R. Johnson, D. Pearson, K. Pingali
99 ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994
101 Optimal Code Motion: Theory and Practice
102 J. Knoop, O. Ruthing, B. Steffen
103 ACM TOPLAS, Vol. 16, Num. 4, Jul. 1994
105 The power of assignment motion
106 J. Knoop, O. Ruthing, B. Steffen
107 ACM SIGPLAN Notices Vol. 30, Num. 6, Jun. 1995, '95 Conference on PLDI
109 Global code motion / global value numbering
110 C. Click
111 ACM SIGPLAN Notices Vol. 30, Num. 6, Jun. 1995, '95 Conference on PLDI
113 Value Driven Redundancy Elimination
114 L.T. Simpson
115 Rice University Ph.D. thesis, Apr. 1996
117 Value Numbering
118 L.T. Simpson
119 Massively Scalar Compiler Project, Rice University, Sep. 1996
121 High Performance Compilers for Parallel Computing
122 Michael Wolfe
123 Addison-Wesley, 1996
125 Advanced Compiler Design and Implementation
126 Steven Muchnick
127 Morgan Kaufmann, 1997
129 Building an Optimizing Compiler
130 Robert Morgan
131 Digital Press, 1998
133 People wishing to speed up the code here should read:
134 Elimination Algorithms for Data Flow Analysis
135 B.G. Ryder, M.C. Paull
136 ACM Computing Surveys, Vol. 18, Num. 3, Sep. 1986
138 How to Analyze Large Programs Efficiently and Informatively
139 D.M. Dhamdhere, B.K. Rosen, F.K. Zadeck
140 ACM SIGPLAN Notices Vol. 27, Num. 7, Jul. 1992, '92 Conference on PLDI
142 People wishing to do something different can find various possibilities
143 in the above papers and elsewhere.
146 #include "config.h"
147 #include "system.h"
148 #include "coretypes.h"
149 #include "tm.h"
150 #include "toplev.h"
152 #include "rtl.h"
153 #include "tm_p.h"
154 #include "regs.h"
155 #include "hard-reg-set.h"
156 #include "flags.h"
157 #include "real.h"
158 #include "insn-config.h"
159 #include "recog.h"
160 #include "basic-block.h"
161 #include "output.h"
162 #include "function.h"
163 #include "expr.h"
164 #include "except.h"
165 #include "ggc.h"
166 #include "params.h"
167 #include "cselib.h"
169 #include "obstack.h"
171 /* Propagate flow information through back edges and thus enable PRE's
172 moving loop invariant calculations out of loops.
174 Originally this tended to create worse overall code, but several
175 improvements during the development of PRE seem to have made following
176 back edges generally a win.
178 Note much of the loop invariant code motion done here would normally
179 be done by loop.c, which has more heuristics for when to move invariants
180 out of loops. At some point we might need to move some of those
181 heuristics into gcse.c. */
183 /* We support GCSE via Partial Redundancy Elimination. PRE optimizations
184 are a superset of those done by GCSE.
186 We perform the following steps:
188 1) Compute basic block information.
190 2) Compute table of places where registers are set.
192 3) Perform copy/constant propagation.
194 4) Perform global cse.
196 5) Perform another pass of copy/constant propagation.
198 Two passes of copy/constant propagation are done because the first one
199 enables more GCSE and the second one helps to clean up the copies that
200 GCSE creates. This is needed more for PRE than for Classic because Classic
201 GCSE will try to use an existing register containing the common
202 subexpression rather than create a new one. This is harder to do for PRE
203 because of the code motion (which Classic GCSE doesn't do).
205 Expressions we are interested in GCSE-ing are of the form
206 (set (pseudo-reg) (expression)).
207 Function want_to_gcse_p says what these are.
209 PRE handles moving invariant expressions out of loops (by treating them as
210 partially redundant).
212 Eventually it would be nice to replace cse.c/gcse.c with SSA (static single
213 assignment) based GVN (global value numbering). L. T. Simpson's paper
214 (Rice University) on value numbering is a useful reference for this.
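   As an illustrative sketch (the register numbers are invented, not taken
   from the papers above), a typical candidate is a set of a pseudo register
   from a general expression, e.g. in RTL form

     (set (reg:SI 123) (plus:SI (reg:SI 100) (const_int 4)))

   whereas sets whose source is a bare REG, SUBREG, constant or CALL are
   rejected by want_to_gcse_p, since there is nothing worth sharing.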
216 **********************
218 We used to support multiple passes but there are diminishing returns in
219 doing so. The first pass usually makes 90% of the changes that are doable.
220 A second pass can make a few more changes made possible by the first pass.
221 Experiments show any further passes don't make enough changes to justify
222 the expense.
224 A study of spec92 using an unlimited number of passes:
225 [1 pass] = 1208 substitutions, [2] = 577, [3] = 202, [4] = 192, [5] = 83,
226 [6] = 34, [7] = 17, [8] = 9, [9] = 4, [10] = 4, [11] = 2,
227 [12] = 2, [13] = 1, [15] = 1, [16] = 2, [41] = 1
229 It was found that doing copy propagation between each pass enables further
230 substitutions.
232 PRE is quite expensive in complicated functions because the DFA can take
233 a while to converge. Hence we only perform one pass. The parameter
234 max-gcse-passes can be modified if one wants to experiment.
236 **********************
238 The steps for PRE are:
240 1) Build the hash table of expressions we wish to GCSE (expr_hash_table).
242 2) Perform the data flow analysis for PRE.
244 3) Delete the redundant instructions
246 4) Insert the required copies [if any] that make the partially
247 redundant instructions fully redundant.
249 5) For other reaching expressions, insert an instruction to copy the value
250 to a newly created pseudo that will reach the redundant instruction.
252 The deletion is done first so that when we do insertions we
253 know which pseudo reg to use.
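   As a hedged illustration (block and register numbers are invented),
   suppose blocks B1 and B2 both flow into B3, and r1 + r2 is computed in
   B1 and B3 but not in B2:

     before PRE                       after PRE

     B1: r5 = r1 + r2                 B1: r5 = r1 + r2 ; r9 = r5
     B2: ...                          B2: r9 = r1 + r2     <- inserted copy
     B3: r6 = r1 + r2                 B3: r6 = r9          <- deleted, uses r9

   Here r9 plays the role of the expression's reaching_reg.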
255 Various papers have argued that PRE DFA is expensive (O(n^2)) and others
256 argue it is not. The number of iterations for the algorithm to converge
257 is typically 2-4 so I don't view it as that expensive (relatively speaking).
259 PRE GCSE depends heavily on the second CSE pass to clean up the copies
260 we create. To make an expression reach the place where it's redundant,
261 the result of the expression is copied to a new register, and the redundant
262 expression is deleted by replacing it with this new register. Classic GCSE
263 doesn't have this problem as much as it computes the reaching defs of
264 each register in each block and thus can try to use an existing register.
266 **********************
268 A fair bit of simplicity is gained by writing small functions for simple
269 tasks, even when a function is only called in one place. This may
270 measurably slow things down [or may not] by creating more function call
271 overhead than is necessary. The source is laid out so that it's trivial
272 to make the affected functions inline, so that one can measure what
273 speedup, if any, can be achieved; later, when things settle down, they
274 can be rearranged.
276 Help stamp out big monolithic functions! */
278 /* GCSE global vars. */
280 /* -dG dump file. */
281 static FILE *gcse_file;
283 /* Note whether or not we should run jump optimization after gcse. We
284 want to do this for two cases.
286 * If we changed any jumps via cprop.
288 * If we added any labels via edge splitting. */
290 static int run_jump_opt_after_gcse;
292 /* Bitmaps are normally not included in debugging dumps.
293 However it's useful to be able to print them from GDB.
294 We could create special functions for this, but it's simpler to
295 just allow passing stderr to the dump_foo fns. Since stderr can
296 be a macro, we store a copy here. */
297 static FILE *debug_stderr;
299 /* An obstack for our working variables. */
300 static struct obstack gcse_obstack;
302 /* Nonzero for each mode that supports (set (reg) (reg)).
303 This is trivially true for integer and floating point values.
304 It may or may not be true for condition codes. */
305 static char can_copy_p[(int) NUM_MACHINE_MODES];
307 /* Nonzero if can_copy_p has been initialized. */
308 static int can_copy_init_p;
310 struct reg_use {rtx reg_rtx; };
312 /* Hash table of expressions. */
314 struct expr
316 /* The expression (SET_SRC for expressions, PATTERN for assignments). */
317 rtx expr;
318 /* Index in the available expression bitmaps. */
319 int bitmap_index;
320 /* Next entry with the same hash. */
321 struct expr *next_same_hash;
322 /* List of anticipatable occurrences in basic blocks in the function.
323 An "anticipatable occurrence" is one that is the first occurrence in the
324 basic block, the operands are not modified in the basic block prior
325 to the occurrence and the output is not used between the start of
326 the block and the occurrence. */
327 struct occr *antic_occr;
328 /* List of available occurrences in basic blocks in the function.
329 An "available occurrence" is one that is the last occurrence in the
330 basic block and the operands are not modified by following statements in
331 the basic block [including this insn]. */
332 struct occr *avail_occr;
333 /* Non-null if the computation is PRE redundant.
334 The value is the newly created pseudo-reg to record a copy of the
335 expression in all the places that reach the redundant copy. */
336 rtx reaching_reg;
339 /* Occurrence of an expression.
340 There is one per basic block. If a pattern appears more than once the
341 last appearance is used [or first for anticipatable expressions]. */
343 struct occr
345 /* Next occurrence of this expression. */
346 struct occr *next;
347 /* The insn that computes the expression. */
348 rtx insn;
349 /* Nonzero if this [anticipatable] occurrence has been deleted. */
350 char deleted_p;
351 /* Nonzero if this [available] occurrence has been copied to
352 reaching_reg. */
353 /* ??? This is mutually exclusive with deleted_p, so they could share
354 the same byte. */
355 char copied_p;
358 /* Expression and copy propagation hash tables.
359 Each hash table is an array of buckets.
360 ??? It is known that if it were an array of entries, structure elements
361 `next_same_hash' and `bitmap_index' wouldn't be necessary. However, it is
362 not clear whether in the final analysis a sufficient amount of memory would
363 be saved as the size of the available expression bitmaps would be larger
364 [one could build a mapping table without holes afterwards though].
365 Someday I'll perform the computation and figure it out. */
367 struct hash_table
369 /* The table itself.
370 This is an array of `expr_hash_table_size' elements. */
371 struct expr **table;
373 /* Size of the hash table, in elements. */
374 unsigned int size;
376 /* Number of hash table elements. */
377 unsigned int n_elems;
379 /* Whether the table is the expression or the copy propagation one. */
380 int set_p;
383 /* Expression hash table. */
384 static struct hash_table expr_hash_table;
386 /* Copy propagation hash table. */
387 static struct hash_table set_hash_table;
389 /* Mapping of uids to cuids.
390 Only real insns get cuids. */
391 static int *uid_cuid;
393 /* Highest UID in UID_CUID. */
394 static int max_uid;
396 /* Get the cuid of an insn. */
397 #ifdef ENABLE_CHECKING
398 #define INSN_CUID(INSN) (INSN_UID (INSN) > max_uid ? (abort (), 0) : uid_cuid[INSN_UID (INSN)])
399 #else
400 #define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])
401 #endif
403 /* Number of cuids. */
404 static int max_cuid;
406 /* Mapping of cuids to insns. */
407 static rtx *cuid_insn;
409 /* Get insn from cuid. */
410 #define CUID_INSN(CUID) (cuid_insn[CUID])
412 /* Maximum register number in function prior to doing gcse + 1.
413 Registers created during this pass have regno >= max_gcse_regno.
414 This is named with "gcse" to not collide with global of same name. */
415 static unsigned int max_gcse_regno;
417 /* Table of registers that are modified.
419 For each register, each element is a list of places where the pseudo-reg
420 is set.
422 For simplicity, GCSE is done on sets of pseudo-regs only. PRE GCSE only
423 requires knowledge of which blocks kill which regs [and thus could use
424 a bitmap instead of the lists `reg_set_table' uses].
426 `reg_set_table' could be turned into an array of bitmaps (num-bbs x
427 num-regs) [however perhaps it may be useful to keep the data as is]. One
428 advantage of recording things this way is that `reg_set_table' is fairly
429 sparse with respect to pseudo regs but for hard regs could be fairly dense
430 [relatively speaking]. And recording sets of pseudo-regs in lists speeds
431 up functions like compute_transp since in the case of pseudo-regs we only
432 need to iterate over the number of times a pseudo-reg is set, not over the
433 number of basic blocks [clearly there is a bit of a slow down in the cases
434 where a pseudo is set more than once in a block, however it is believed
435 that the net effect is to speed things up]. This isn't done for hard-regs
436 because recording call-clobbered hard-regs in `reg_set_table' at each
437 function call can consume a fair bit of memory, and iterating over
438 hard-regs stored this way in compute_transp will be more expensive. */
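/* As an illustrative sketch (register name invented): after compute_sets has
   run, reg_set_table[REGNO (x)] for a pseudo x points to a chain of reg_set
   nodes, one per insn that sets x, with the most recently recorded set at
   the head of the list.  */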
440 typedef struct reg_set
442 /* The next setting of this register. */
443 struct reg_set *next;
444 /* The insn where it was set. */
445 rtx insn;
446 } reg_set;
448 static reg_set **reg_set_table;
450 /* Size of `reg_set_table'.
451 The table starts out at max_gcse_regno + slop, and is enlarged as
452 necessary. */
453 static int reg_set_table_size;
455 /* Amount to grow `reg_set_table' by when it's full. */
456 #define REG_SET_TABLE_SLOP 100
458 /* This is a list of expressions which are MEMs and will be used by load
459 or store motion.
460 Load motion tracks MEMs which aren't killed by
461 anything except themselves (i.e., loads and stores to a single location).
462 We can then allow movement of these MEM refs with a little special
463 allowance (all stores copy the same value to the reaching reg used
464 for the loads). This means all values used to store into memory must have
465 no side effects so we can re-issue the setter value.
466 Store Motion uses this structure as an expression table to track stores
467 which look interesting, and might be moveable towards the exit block. */
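/* As a rough, hedged illustration: a reference such as
   (mem:SI (symbol_ref "x")) that is only ever loaded from and stored to as
   a whole is the kind of pattern recorded here; once an ls_expr is entered,
   its `loads' and `stores' fields accumulate the insns that access it.  */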
469 struct ls_expr
471 struct expr * expr; /* Gcse expression reference for LM. */
472 rtx pattern; /* Pattern of this mem. */
473 rtx loads; /* INSN list of loads seen. */
474 rtx stores; /* INSN list of stores seen. */
475 struct ls_expr * next; /* Next in the list. */
476 int invalid; /* Invalid for some reason. */
477 int index; /* If it maps to a bitmap index. */
478 int hash_index; /* Index when in a hash table. */
479 rtx reaching_reg; /* Register to use when re-writing. */
482 /* Head of the list of load/store memory refs. */
483 static struct ls_expr * pre_ldst_mems = NULL;
485 /* Bitmap containing one bit for each register in the program.
486 Used when performing GCSE to track which registers have been set since
487 the start of the basic block. */
488 static regset reg_set_bitmap;
490 /* For each block, a bitmap of registers set in the block.
491 This is used by expr_killed_p and compute_transp.
492 It is computed during hash table computation and not by compute_sets
493 as it includes registers added since the last pass (or between cprop and
494 gcse) and it's currently not easy to realloc sbitmap vectors. */
495 static sbitmap *reg_set_in_block;
497 /* Array, indexed by basic block number, of lists of insns which modify
498 memory within that block. */
499 static rtx * modify_mem_list;
500 bitmap modify_mem_list_set;
502 /* This array parallels modify_mem_list, but is kept canonicalized. */
503 static rtx * canon_modify_mem_list;
504 bitmap canon_modify_mem_list_set;
505 /* Various variables for statistics gathering. */
507 /* Memory used in a pass.
508 This isn't intended to be absolutely precise. Its intent is only
509 to keep an eye on memory usage. */
510 static int bytes_used;
512 /* GCSE substitutions made. */
513 static int gcse_subst_count;
514 /* Number of copy instructions created. */
515 static int gcse_create_count;
516 /* Number of constants propagated. */
517 static int const_prop_count;
518 /* Number of copies propagated. */
519 static int copy_prop_count;
521 /* These variables are used by classic GCSE.
522 Normally they'd be defined a bit later, but `rd_gen' needs to
523 be declared sooner. */
525 /* Each block has a bitmap of each type.
526 The length of each block's bitmap is:
528 max_cuid - for reaching definitions
529 n_exprs - for available expressions
531 Thus we view the bitmaps as two-dimensional arrays, i.e.
532 rd_kill[block_num][cuid_num]
533 ae_kill[block_num][expr_num] */
535 /* For reaching defs */
536 static sbitmap *rd_kill, *rd_gen, *reaching_defs, *rd_out;
538 /* for available exprs */
539 static sbitmap *ae_kill, *ae_gen, *ae_in, *ae_out;
541 /* Objects of this type are passed around by the null-pointer check
542 removal routines. */
543 struct null_pointer_info
545 /* The basic block being processed. */
546 basic_block current_block;
547 /* The first register to be handled in this pass. */
548 unsigned int min_reg;
549 /* One greater than the last register to be handled in this pass. */
550 unsigned int max_reg;
551 sbitmap *nonnull_local;
552 sbitmap *nonnull_killed;
555 static void compute_can_copy PARAMS ((void));
556 static char *gmalloc PARAMS ((unsigned int));
557 static char *grealloc PARAMS ((char *, unsigned int));
558 static char *gcse_alloc PARAMS ((unsigned long));
559 static void alloc_gcse_mem PARAMS ((rtx));
560 static void free_gcse_mem PARAMS ((void));
561 static void alloc_reg_set_mem PARAMS ((int));
562 static void free_reg_set_mem PARAMS ((void));
563 static int get_bitmap_width PARAMS ((int, int, int));
564 static void record_one_set PARAMS ((int, rtx));
565 static void record_set_info PARAMS ((rtx, rtx, void *));
566 static void compute_sets PARAMS ((rtx));
567 static void hash_scan_insn PARAMS ((rtx, struct hash_table *, int));
568 static void hash_scan_set PARAMS ((rtx, rtx, struct hash_table *));
569 static void hash_scan_clobber PARAMS ((rtx, rtx, struct hash_table *));
570 static void hash_scan_call PARAMS ((rtx, rtx, struct hash_table *));
571 static int want_to_gcse_p PARAMS ((rtx));
572 static int oprs_unchanged_p PARAMS ((rtx, rtx, int));
573 static int oprs_anticipatable_p PARAMS ((rtx, rtx));
574 static int oprs_available_p PARAMS ((rtx, rtx));
575 static void insert_expr_in_table PARAMS ((rtx, enum machine_mode, rtx,
576 int, int, struct hash_table *));
577 static void insert_set_in_table PARAMS ((rtx, rtx, struct hash_table *));
578 static unsigned int hash_expr PARAMS ((rtx, enum machine_mode, int *, int));
579 static unsigned int hash_expr_1 PARAMS ((rtx, enum machine_mode, int *));
580 static unsigned int hash_string_1 PARAMS ((const char *));
581 static unsigned int hash_set PARAMS ((int, int));
582 static int expr_equiv_p PARAMS ((rtx, rtx));
583 static void record_last_reg_set_info PARAMS ((rtx, int));
584 static void record_last_mem_set_info PARAMS ((rtx));
585 static void record_last_set_info PARAMS ((rtx, rtx, void *));
586 static void compute_hash_table PARAMS ((struct hash_table *));
587 static void alloc_hash_table PARAMS ((int, struct hash_table *, int));
588 static void free_hash_table PARAMS ((struct hash_table *));
589 static void compute_hash_table_work PARAMS ((struct hash_table *));
590 static void dump_hash_table PARAMS ((FILE *, const char *,
591 struct hash_table *));
592 static struct expr *lookup_expr PARAMS ((rtx, struct hash_table *));
593 static struct expr *lookup_set PARAMS ((unsigned int, rtx, struct hash_table *));
594 static struct expr *next_set PARAMS ((unsigned int, struct expr *));
595 static void reset_opr_set_tables PARAMS ((void));
596 static int oprs_not_set_p PARAMS ((rtx, rtx));
597 static void mark_call PARAMS ((rtx));
598 static void mark_set PARAMS ((rtx, rtx));
599 static void mark_clobber PARAMS ((rtx, rtx));
600 static void mark_oprs_set PARAMS ((rtx));
601 static void alloc_cprop_mem PARAMS ((int, int));
602 static void free_cprop_mem PARAMS ((void));
603 static void compute_transp PARAMS ((rtx, int, sbitmap *, int));
604 static void compute_transpout PARAMS ((void));
605 static void compute_local_properties PARAMS ((sbitmap *, sbitmap *, sbitmap *,
606 struct hash_table *));
607 static void compute_cprop_data PARAMS ((void));
608 static void find_used_regs PARAMS ((rtx *, void *));
609 static int try_replace_reg PARAMS ((rtx, rtx, rtx));
610 static struct expr *find_avail_set PARAMS ((int, rtx));
611 static int cprop_jump PARAMS ((basic_block, rtx, rtx, rtx, rtx));
612 static void mems_conflict_for_gcse_p PARAMS ((rtx, rtx, void *));
613 static int load_killed_in_block_p PARAMS ((basic_block, int, rtx, int));
614 static void canon_list_insert PARAMS ((rtx, rtx, void *));
615 static int cprop_insn PARAMS ((rtx, int));
616 static int cprop PARAMS ((int));
617 static int one_cprop_pass PARAMS ((int, int));
618 static bool constprop_register PARAMS ((rtx, rtx, rtx, int));
619 static struct expr *find_bypass_set PARAMS ((int, int));
620 static int bypass_block PARAMS ((basic_block, rtx, rtx));
621 static int bypass_conditional_jumps PARAMS ((void));
622 static void alloc_pre_mem PARAMS ((int, int));
623 static void free_pre_mem PARAMS ((void));
624 static void compute_pre_data PARAMS ((void));
625 static int pre_expr_reaches_here_p PARAMS ((basic_block, struct expr *,
626 basic_block));
627 static void insert_insn_end_bb PARAMS ((struct expr *, basic_block, int));
628 static void pre_insert_copy_insn PARAMS ((struct expr *, rtx));
629 static void pre_insert_copies PARAMS ((void));
630 static int pre_delete PARAMS ((void));
631 static int pre_gcse PARAMS ((void));
632 static int one_pre_gcse_pass PARAMS ((int));
633 static void add_label_notes PARAMS ((rtx, rtx));
634 static void alloc_code_hoist_mem PARAMS ((int, int));
635 static void free_code_hoist_mem PARAMS ((void));
636 static void compute_code_hoist_vbeinout PARAMS ((void));
637 static void compute_code_hoist_data PARAMS ((void));
638 static int hoist_expr_reaches_here_p PARAMS ((basic_block, int, basic_block,
639 char *));
640 static void hoist_code PARAMS ((void));
641 static int one_code_hoisting_pass PARAMS ((void));
642 static void alloc_rd_mem PARAMS ((int, int));
643 static void free_rd_mem PARAMS ((void));
644 static void handle_rd_kill_set PARAMS ((rtx, int, basic_block));
645 static void compute_kill_rd PARAMS ((void));
646 static void compute_rd PARAMS ((void));
647 static void alloc_avail_expr_mem PARAMS ((int, int));
648 static void free_avail_expr_mem PARAMS ((void));
649 static void compute_ae_gen PARAMS ((struct hash_table *));
650 static int expr_killed_p PARAMS ((rtx, basic_block));
651 static void compute_ae_kill PARAMS ((sbitmap *, sbitmap *, struct hash_table *));
652 static int expr_reaches_here_p PARAMS ((struct occr *, struct expr *,
653 basic_block, int));
654 static rtx computing_insn PARAMS ((struct expr *, rtx));
655 static int def_reaches_here_p PARAMS ((rtx, rtx));
656 static int can_disregard_other_sets PARAMS ((struct reg_set **, rtx, int));
657 static int handle_avail_expr PARAMS ((rtx, struct expr *));
658 static int classic_gcse PARAMS ((void));
659 static int one_classic_gcse_pass PARAMS ((int));
660 static void invalidate_nonnull_info PARAMS ((rtx, rtx, void *));
661 static int delete_null_pointer_checks_1 PARAMS ((unsigned int *,
662 sbitmap *, sbitmap *,
663 struct null_pointer_info *));
664 static rtx process_insert_insn PARAMS ((struct expr *));
665 static int pre_edge_insert PARAMS ((struct edge_list *, struct expr **));
666 static int expr_reaches_here_p_work PARAMS ((struct occr *, struct expr *,
667 basic_block, int, char *));
668 static int pre_expr_reaches_here_p_work PARAMS ((basic_block, struct expr *,
669 basic_block, char *));
670 static struct ls_expr * ldst_entry PARAMS ((rtx));
671 static void free_ldst_entry PARAMS ((struct ls_expr *));
672 static void free_ldst_mems PARAMS ((void));
673 static void print_ldst_list PARAMS ((FILE *));
674 static struct ls_expr * find_rtx_in_ldst PARAMS ((rtx));
675 static int enumerate_ldsts PARAMS ((void));
676 static inline struct ls_expr * first_ls_expr PARAMS ((void));
677 static inline struct ls_expr * next_ls_expr PARAMS ((struct ls_expr *));
678 static int simple_mem PARAMS ((rtx));
679 static void invalidate_any_buried_refs PARAMS ((rtx));
680 static void compute_ld_motion_mems PARAMS ((void));
681 static void trim_ld_motion_mems PARAMS ((void));
682 static void update_ld_motion_stores PARAMS ((struct expr *));
683 static void reg_set_info PARAMS ((rtx, rtx, void *));
684 static int store_ops_ok PARAMS ((rtx, basic_block));
685 static void find_moveable_store PARAMS ((rtx));
686 static int compute_store_table PARAMS ((void));
687 static int load_kills_store PARAMS ((rtx, rtx));
688 static int find_loads PARAMS ((rtx, rtx));
689 static int store_killed_in_insn PARAMS ((rtx, rtx));
690 static int store_killed_after PARAMS ((rtx, rtx, basic_block));
691 static int store_killed_before PARAMS ((rtx, rtx, basic_block));
692 static void build_store_vectors PARAMS ((void));
693 static void insert_insn_start_bb PARAMS ((rtx, basic_block));
694 static int insert_store PARAMS ((struct ls_expr *, edge));
695 static void replace_store_insn PARAMS ((rtx, rtx, basic_block));
696 static void delete_store PARAMS ((struct ls_expr *,
697 basic_block));
698 static void free_store_memory PARAMS ((void));
699 static void store_motion PARAMS ((void));
700 static void free_insn_expr_list_list PARAMS ((rtx *));
701 static void clear_modify_mem_tables PARAMS ((void));
702 static void free_modify_mem_tables PARAMS ((void));
703 static rtx gcse_emit_move_after PARAMS ((rtx, rtx, rtx));
704 static bool do_local_cprop PARAMS ((rtx, rtx, int, rtx*));
705 static bool adjust_libcall_notes PARAMS ((rtx, rtx, rtx, rtx*));
706 static void local_cprop_pass PARAMS ((int));
708 /* Entry point for global common subexpression elimination.
709 F is the first instruction in the function. */
712 gcse_main (f, file)
713 rtx f;
714 FILE *file;
716 int changed, pass;
717 /* Bytes used at start of pass. */
718 int initial_bytes_used;
719 /* Maximum number of bytes used by a pass. */
720 int max_pass_bytes;
721 /* Point to release obstack data from for each pass. */
722 char *gcse_obstack_bottom;
724 /* We do not construct an accurate cfg in functions which call
725 setjmp, so just punt to be safe. */
726 if (current_function_calls_setjmp)
727 return 0;
729 /* Assume that we do not need to run jump optimizations after gcse. */
730 run_jump_opt_after_gcse = 0;
732 /* For calling dump_foo fns from gdb. */
733 debug_stderr = stderr;
734 gcse_file = file;
736 /* Identify the basic block information for this function, including
737 successors and predecessors. */
738 max_gcse_regno = max_reg_num ();
740 if (file)
741 dump_flow_info (file);
743 /* Return if there's nothing to do. */
744 if (n_basic_blocks <= 1)
745 return 0;
747 /* Trying to perform global optimizations on flow graphs which have
748 a high connectivity will take a long time and is unlikely to be
749 particularly useful.
751 In normal circumstances a cfg should have about twice as many edges
752 as blocks. But we do not want to punish small functions which have
753 a couple switch statements. So we require a relatively large number
754 of basic blocks and the ratio of edges to blocks to be high. */
755 if (n_basic_blocks > 1000 && n_edges / n_basic_blocks >= 20)
757 if (warn_disabled_optimization)
758 warning ("GCSE disabled: %d > 1000 basic blocks and %d >= 20 edges/basic block",
759 n_basic_blocks, n_edges / n_basic_blocks);
760 return 0;
763 /* If allocating memory for the cprop bitmap would take up too much
764 storage it's better just to disable the optimization. */
765 if ((n_basic_blocks
766 * SBITMAP_SET_SIZE (max_gcse_regno)
767 * sizeof (SBITMAP_ELT_TYPE)) > MAX_GCSE_MEMORY)
769 if (warn_disabled_optimization)
770 warning ("GCSE disabled: %d basic blocks and %d registers",
771 n_basic_blocks, max_gcse_regno);
773 return 0;
776 /* See what modes support reg/reg copy operations. */
777 if (! can_copy_init_p)
779 compute_can_copy ();
780 can_copy_init_p = 1;
783 gcc_obstack_init (&gcse_obstack);
784 bytes_used = 0;
786 /* We need alias. */
787 init_alias_analysis ();
788 /* Record where pseudo-registers are set. This data is kept accurate
789 during each pass. ??? We could also record hard-reg information here
790 [since it's unchanging], however it is currently done during hash table
791 computation.
793 It may be tempting to compute MEM set information here too, but MEM sets
794 will be subject to code motion one day and thus we need to compute
795 information about memory sets when we build the hash tables. */
797 alloc_reg_set_mem (max_gcse_regno);
798 compute_sets (f);
800 pass = 0;
801 initial_bytes_used = bytes_used;
802 max_pass_bytes = 0;
803 gcse_obstack_bottom = gcse_alloc (1);
804 changed = 1;
805 while (changed && pass < MAX_GCSE_PASSES)
807 changed = 0;
808 if (file)
809 fprintf (file, "GCSE pass %d\n\n", pass + 1);
811 /* Initialize bytes_used to the space for the pred/succ lists,
812 and the reg_set_table data. */
813 bytes_used = initial_bytes_used;
815 /* Each pass may create new registers, so recalculate each time. */
816 max_gcse_regno = max_reg_num ();
818 alloc_gcse_mem (f);
820 /* Don't allow constant propagation to modify jumps
821 during this pass. */
822 changed = one_cprop_pass (pass + 1, 0);
824 if (optimize_size)
825 changed |= one_classic_gcse_pass (pass + 1);
826 else
828 changed |= one_pre_gcse_pass (pass + 1);
829 /* We may have just created new basic blocks. Release and
830 recompute various things which are sized on the number of
831 basic blocks. */
832 if (changed)
834 free_modify_mem_tables ();
835 modify_mem_list
836 = (rtx *) gmalloc (last_basic_block * sizeof (rtx));
837 canon_modify_mem_list
838 = (rtx *) gmalloc (last_basic_block * sizeof (rtx));
839 memset ((char *) modify_mem_list, 0, last_basic_block * sizeof (rtx));
840 memset ((char *) canon_modify_mem_list, 0, last_basic_block * sizeof (rtx));
842 free_reg_set_mem ();
843 alloc_reg_set_mem (max_reg_num ());
844 compute_sets (f);
845 run_jump_opt_after_gcse = 1;
848 if (max_pass_bytes < bytes_used)
849 max_pass_bytes = bytes_used;
851 /* Free up memory, then reallocate for code hoisting. We can
852 not re-use the existing allocated memory because the tables
853 will not have info for the insns or registers created by
854 partial redundancy elimination. */
855 free_gcse_mem ();
857 /* It does not make sense to run code hoisting unless we are optimizing
858 for code size -- it rarely makes programs faster, and can make
859 them bigger if we did partial redundancy elimination (when optimizing
860 for space, we use a classic gcse algorithm instead of partial
861 redundancy algorithms). */
862 if (optimize_size)
864 max_gcse_regno = max_reg_num ();
865 alloc_gcse_mem (f);
866 changed |= one_code_hoisting_pass ();
867 free_gcse_mem ();
869 if (max_pass_bytes < bytes_used)
870 max_pass_bytes = bytes_used;
873 if (file)
875 fprintf (file, "\n");
876 fflush (file);
879 obstack_free (&gcse_obstack, gcse_obstack_bottom);
880 pass++;
883 /* Do one last pass of copy propagation, including cprop into
884 conditional jumps. */
886 max_gcse_regno = max_reg_num ();
887 alloc_gcse_mem (f);
888 /* This time, go ahead and allow cprop to alter jumps. */
889 one_cprop_pass (pass + 1, 1);
890 free_gcse_mem ();
892 if (file)
894 fprintf (file, "GCSE of %s: %d basic blocks, ",
895 current_function_name, n_basic_blocks);
896 fprintf (file, "%d pass%s, %d bytes\n\n",
897 pass, pass > 1 ? "es" : "", max_pass_bytes);
900 obstack_free (&gcse_obstack, NULL);
901 free_reg_set_mem ();
902 /* We are finished with alias. */
903 end_alias_analysis ();
904 allocate_reg_info (max_reg_num (), FALSE, FALSE);
906 /* Store motion disabled until it is fixed. */
907 if (0 && !optimize_size && flag_gcse_sm)
908 store_motion ();
909 /* Record where pseudo-registers are set. */
910 return run_jump_opt_after_gcse;
913 /* Misc. utilities. */
915 /* Compute which modes support reg/reg copy operations. */
917 static void
918 compute_can_copy ()
920 int i;
921 #ifndef AVOID_CCMODE_COPIES
922 rtx reg, insn;
923 #endif
924 memset (can_copy_p, 0, NUM_MACHINE_MODES);
926 start_sequence ();
927 for (i = 0; i < NUM_MACHINE_MODES; i++)
928 if (GET_MODE_CLASS (i) == MODE_CC)
930 #ifdef AVOID_CCMODE_COPIES
931 can_copy_p[i] = 0;
932 #else
933 reg = gen_rtx_REG ((enum machine_mode) i, LAST_VIRTUAL_REGISTER + 1);
934 insn = emit_insn (gen_rtx_SET (VOIDmode, reg, reg));
935 if (recog (PATTERN (insn), insn, NULL) >= 0)
936 can_copy_p[i] = 1;
937 #endif
939 else
940 can_copy_p[i] = 1;
942 end_sequence ();
945 /* Cover function to xmalloc to record bytes allocated. */
947 static char *
948 gmalloc (size)
949 unsigned int size;
951 bytes_used += size;
952 return xmalloc (size);
955 /* Cover function to xrealloc.
956 We don't record the additional size since we don't know it.
957 It won't affect memory usage stats much anyway. */
959 static char *
960 grealloc (ptr, size)
961 char *ptr;
962 unsigned int size;
964 return xrealloc (ptr, size);
967 /* Cover function to obstack_alloc. */
969 static char *
970 gcse_alloc (size)
971 unsigned long size;
973 bytes_used += size;
974 return (char *) obstack_alloc (&gcse_obstack, size);
977 /* Allocate memory for the cuid mapping array,
978 and reg/memory set tracking tables.
980 This is called at the start of each pass. */
982 static void
983 alloc_gcse_mem (f)
984 rtx f;
986 int i, n;
987 rtx insn;
989 /* Find the largest UID and create a mapping from UIDs to CUIDs.
990 CUIDs are like UIDs except they increase monotonically, have no gaps,
991 and only apply to real insns. */
993 max_uid = get_max_uid ();
994 n = (max_uid + 1) * sizeof (int);
995 uid_cuid = (int *) gmalloc (n);
996 memset ((char *) uid_cuid, 0, n);
997 for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
999 if (INSN_P (insn))
1000 uid_cuid[INSN_UID (insn)] = i++;
1001 else
1002 uid_cuid[INSN_UID (insn)] = i;
1005 /* Create a table mapping cuids to insns. */
1007 max_cuid = i;
1008 n = (max_cuid + 1) * sizeof (rtx);
1009 cuid_insn = (rtx *) gmalloc (n);
1010 memset ((char *) cuid_insn, 0, n);
1011 for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
1012 if (INSN_P (insn))
1013 CUID_INSN (i++) = insn;
1015 /* Allocate vars to track sets of regs. */
1016 reg_set_bitmap = BITMAP_XMALLOC ();
1018 /* Allocate vars to track sets of regs, memory per block. */
1019 reg_set_in_block = (sbitmap *) sbitmap_vector_alloc (last_basic_block,
1020 max_gcse_regno);
1021 /* Allocate array to keep a list of insns which modify memory in each
1022 basic block. */
1023 modify_mem_list = (rtx *) gmalloc (last_basic_block * sizeof (rtx));
1024 canon_modify_mem_list = (rtx *) gmalloc (last_basic_block * sizeof (rtx));
1025 memset ((char *) modify_mem_list, 0, last_basic_block * sizeof (rtx));
1026 memset ((char *) canon_modify_mem_list, 0, last_basic_block * sizeof (rtx));
1027 modify_mem_list_set = BITMAP_XMALLOC ();
1028 canon_modify_mem_list_set = BITMAP_XMALLOC ();
1031 /* Free memory allocated by alloc_gcse_mem. */
1033 static void
1034 free_gcse_mem ()
1036 free (uid_cuid);
1037 free (cuid_insn);
1039 BITMAP_XFREE (reg_set_bitmap);
1041 sbitmap_vector_free (reg_set_in_block);
1042 free_modify_mem_tables ();
1043 BITMAP_XFREE (modify_mem_list_set);
1044 BITMAP_XFREE (canon_modify_mem_list_set);
1047 /* Many of the global optimization algorithms work by solving dataflow
1048 equations for various expressions. Initially, some local value is
1049 computed for each expression in each block. Then, the values across the
1050 various blocks are combined (by following flow graph edges) to arrive at
1051 global values. Conceptually, each set of equations is independent. We
1052 may therefore solve all the equations in parallel, solve them one at a
1053 time, or pick any intermediate approach.
1055 When you're going to need N two-dimensional bitmaps, each X (say, the
1056 number of blocks) by Y (say, the number of expressions), call this
1057 function. It's not important what X and Y represent; only that Y
1058 corresponds to the things that can be done in parallel. This function will
1059 return an appropriate chunking factor C; you should solve C sets of
1060 equations in parallel. By going through this function, we can easily
1061 trade space against time; by solving fewer equations in parallel we use
1062 less space. */
1064 static int
1065 get_bitmap_width (n, x, y)
1066 int n;
1067 int x;
1068 int y;
1070 /* It's not really worth figuring out *exactly* how much memory will
1071 be used by a particular choice. The important thing is to get
1072 something approximately right. */
1073 size_t max_bitmap_memory = 10 * 1024 * 1024;
1075 /* The number of bytes we'd use for a single column of minimum
1076 width. */
1077 size_t column_size = n * x * sizeof (SBITMAP_ELT_TYPE);
1079 /* Often, it's reasonable just to solve all the equations in
1080 parallel. */
1081 if (column_size * SBITMAP_SET_SIZE (y) <= max_bitmap_memory)
1082 return y;
1084 /* Otherwise, pick the largest width we can, without going over the
1085 limit. */
1086 return SBITMAP_ELT_BITS * ((max_bitmap_memory + column_size - 1)
1087 / column_size);
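/* A hedged numeric sketch (the figures are invented): with n == 2 bitmaps,
   x == 500 blocks and y == 20000 expressions, the estimate is roughly
   n * x * (bytes needed to hold y bits).  If that fits within the ~10MB
   budget the function returns y and all equations are solved in parallel;
   otherwise it returns roughly the largest multiple of SBITMAP_ELT_BITS
   whose columns still fit within the budget.  */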
1090 /* Compute the local properties of each recorded expression.
1092 Local properties are those that are defined by the block, irrespective of
1093 other blocks.
1095 An expression is transparent in a block if its operands are not modified
1096 in the block.
1098 An expression is computed (locally available) in a block if it is computed
1099 at least once and the expression would contain the same value if the
1100 computation was moved to the end of the block.
1102 An expression is locally anticipatable in a block if it is computed at
1103 least once and the expression would contain the same value if the computation
1104 was moved to the beginning of the block.
1106 We call this routine for cprop, pre and code hoisting. They all compute
1107 basically the same information and thus can easily share this code.
1109 TRANSP, COMP, and ANTLOC are destination sbitmaps for recording local
1110 properties. If NULL, then it is not necessary to compute or record that
1111 particular property.
1113 TABLE controls which hash table to look at. If it is set hash table,
1114 additionally, TRANSP is computed as ~TRANSP, since this is really cprop's
1115 ABSALTERED. */
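/* For instance (an illustrative sketch, register numbers invented), in a
   block containing

       r3 = r1 + r2
       r1 = ...
       r4 = r1 + r2

   the expression r1 + r2 is locally anticipatable (its first occurrence sees
   operands unmodified since the block start), locally available (the last
   occurrence's operands are not modified before the block end), but not
   transparent, because r1 is set within the block.  */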
1117 static void
1118 compute_local_properties (transp, comp, antloc, table)
1119 sbitmap *transp;
1120 sbitmap *comp;
1121 sbitmap *antloc;
1122 struct hash_table *table;
1124 unsigned int i;
1126 /* Initialize any bitmaps that were passed in. */
1127 if (transp)
1129 if (table->set_p)
1130 sbitmap_vector_zero (transp, last_basic_block);
1131 else
1132 sbitmap_vector_ones (transp, last_basic_block);
1135 if (comp)
1136 sbitmap_vector_zero (comp, last_basic_block);
1137 if (antloc)
1138 sbitmap_vector_zero (antloc, last_basic_block);
1140 for (i = 0; i < table->size; i++)
1142 struct expr *expr;
1144 for (expr = table->table[i]; expr != NULL; expr = expr->next_same_hash)
1146 int indx = expr->bitmap_index;
1147 struct occr *occr;
1149 /* The expression is transparent in this block if it is not killed.
1150 We start by assuming all are transparent [none are killed], and
1151 then reset the bits for those that are. */
1152 if (transp)
1153 compute_transp (expr->expr, indx, transp, table->set_p);
1155 /* The occurrences recorded in antic_occr are exactly those that
1156 we want to set to nonzero in ANTLOC. */
1157 if (antloc)
1158 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
1160 SET_BIT (antloc[BLOCK_NUM (occr->insn)], indx);
1162 /* While we're scanning the table, this is a good place to
1163 initialize this. */
1164 occr->deleted_p = 0;
1167 /* The occurrences recorded in avail_occr are exactly those that
1168 we want to set to nonzero in COMP. */
1169 if (comp)
1170 for (occr = expr->avail_occr; occr != NULL; occr = occr->next)
1172 SET_BIT (comp[BLOCK_NUM (occr->insn)], indx);
1174 /* While we're scanning the table, this is a good place to
1175 initialize this. */
1176 occr->copied_p = 0;
1179 /* While we're scanning the table, this is a good place to
1180 initialize this. */
1181 expr->reaching_reg = 0;
1186 /* Register set information.
1188 `reg_set_table' records where each register is set or otherwise
1189 modified. */
1191 static struct obstack reg_set_obstack;
1193 static void
1194 alloc_reg_set_mem (n_regs)
1195 int n_regs;
1197 unsigned int n;
1199 reg_set_table_size = n_regs + REG_SET_TABLE_SLOP;
1200 n = reg_set_table_size * sizeof (struct reg_set *);
1201 reg_set_table = (struct reg_set **) gmalloc (n);
1202 memset ((char *) reg_set_table, 0, n);
1204 gcc_obstack_init (&reg_set_obstack);
1207 static void
1208 free_reg_set_mem ()
1210 free (reg_set_table);
1211 obstack_free (&reg_set_obstack, NULL);
1214 /* Record REGNO in the reg_set table. */
1216 static void
1217 record_one_set (regno, insn)
1218 int regno;
1219 rtx insn;
1221 /* Allocate a new reg_set element and link it onto the list. */
1222 struct reg_set *new_reg_info;
1224 /* If the table isn't big enough, enlarge it. */
1225 if (regno >= reg_set_table_size)
1227 int new_size = regno + REG_SET_TABLE_SLOP;
1229 reg_set_table
1230 = (struct reg_set **) grealloc ((char *) reg_set_table,
1231 new_size * sizeof (struct reg_set *));
1232 memset ((char *) (reg_set_table + reg_set_table_size), 0,
1233 (new_size - reg_set_table_size) * sizeof (struct reg_set *));
1234 reg_set_table_size = new_size;
1237 new_reg_info = (struct reg_set *) obstack_alloc (&reg_set_obstack,
1238 sizeof (struct reg_set));
1239 bytes_used += sizeof (struct reg_set);
1240 new_reg_info->insn = insn;
1241 new_reg_info->next = reg_set_table[regno];
1242 reg_set_table[regno] = new_reg_info;
1245 /* Called from compute_sets via note_stores to handle one SET or CLOBBER in
1246 an insn. The DATA is really the instruction in which the SET is
1247 occurring. */
1249 static void
1250 record_set_info (dest, setter, data)
1251 rtx dest, setter ATTRIBUTE_UNUSED;
1252 void *data;
1254 rtx record_set_insn = (rtx) data;
1256 if (GET_CODE (dest) == REG && REGNO (dest) >= FIRST_PSEUDO_REGISTER)
1257 record_one_set (REGNO (dest), record_set_insn);
1260 /* Scan the function and record each set of each pseudo-register.
1262 This is called once, at the start of the gcse pass. See the comments for
1263 `reg_set_table' for further documentation. */
1265 static void
1266 compute_sets (f)
1267 rtx f;
1269 rtx insn;
1271 for (insn = f; insn != 0; insn = NEXT_INSN (insn))
1272 if (INSN_P (insn))
1273 note_stores (PATTERN (insn), record_set_info, insn);
1276 /* Hash table support. */
1278 struct reg_avail_info
1280 basic_block last_bb;
1281 int first_set;
1282 int last_set;
1285 static struct reg_avail_info *reg_avail_info;
1286 static basic_block current_bb;
1289 /* See whether X, the source of a set, is something we want to consider for
1290 GCSE. */
1292 static GTY(()) rtx test_insn;
1293 static int
1294 want_to_gcse_p (x)
1295 rtx x;
1297 int num_clobbers = 0;
1298 int icode;
1300 switch (GET_CODE (x))
1302 case REG:
1303 case SUBREG:
1304 case CONST_INT:
1305 case CONST_DOUBLE:
1306 case CONST_VECTOR:
1307 case CALL:
1308 return 0;
1310 default:
1311 break;
1314 /* If this is a valid operand, we are OK. If it's VOIDmode, we aren't. */
1315 if (general_operand (x, GET_MODE (x)))
1316 return 1;
1317 else if (GET_MODE (x) == VOIDmode)
1318 return 0;
1320 /* Otherwise, check if we can make a valid insn from it. First initialize
1321 our test insn if we haven't already. */
1322 if (test_insn == 0)
1324 test_insn
1325 = make_insn_raw (gen_rtx_SET (VOIDmode,
1326 gen_rtx_REG (word_mode,
1327 FIRST_PSEUDO_REGISTER * 2),
1328 const0_rtx));
1329 NEXT_INSN (test_insn) = PREV_INSN (test_insn) = 0;
1332 /* Now make an insn like the one we would make when GCSE'ing and see if
1333 valid. */
1334 PUT_MODE (SET_DEST (PATTERN (test_insn)), GET_MODE (x));
1335 SET_SRC (PATTERN (test_insn)) = x;
1336 return ((icode = recog (PATTERN (test_insn), test_insn, &num_clobbers)) >= 0
1337 && (num_clobbers == 0 || ! added_clobbers_hard_reg_p (icode)));
1340 /* Return nonzero if the operands of expression X are unchanged from the
1341 start of INSN's basic block up to but not including INSN (if AVAIL_P == 0),
1342 or from INSN to the end of INSN's basic block (if AVAIL_P != 0). */
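/* A hedged example (register numbers invented): for
   X == (plus:SI (reg:SI 100) (reg:SI 101)) and AVAIL_P == 0, the result is
   nonzero only if neither reg 100 nor reg 101 is set between the start of
   INSN's basic block and INSN itself.  */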
1344 static int
1345 oprs_unchanged_p (x, insn, avail_p)
1346 rtx x, insn;
1347 int avail_p;
1349 int i, j;
1350 enum rtx_code code;
1351 const char *fmt;
1353 if (x == 0)
1354 return 1;
1356 code = GET_CODE (x);
1357 switch (code)
1359 case REG:
1361 struct reg_avail_info *info = &reg_avail_info[REGNO (x)];
1363 if (info->last_bb != current_bb)
1364 return 1;
1365 if (avail_p)
1366 return info->last_set < INSN_CUID (insn);
1367 else
1368 return info->first_set >= INSN_CUID (insn);
1371 case MEM:
1372 if (load_killed_in_block_p (current_bb, INSN_CUID (insn),
1373 x, avail_p))
1374 return 0;
1375 else
1376 return oprs_unchanged_p (XEXP (x, 0), insn, avail_p);
1378 case PRE_DEC:
1379 case PRE_INC:
1380 case POST_DEC:
1381 case POST_INC:
1382 case PRE_MODIFY:
1383 case POST_MODIFY:
1384 return 0;
1386 case PC:
1387 case CC0: /*FIXME*/
1388 case CONST:
1389 case CONST_INT:
1390 case CONST_DOUBLE:
1391 case CONST_VECTOR:
1392 case SYMBOL_REF:
1393 case LABEL_REF:
1394 case ADDR_VEC:
1395 case ADDR_DIFF_VEC:
1396 return 1;
1398 default:
1399 break;
1402 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
1404 if (fmt[i] == 'e')
1406 /* If we are about to do the last recursive call needed at this
1407 level, change it into iteration. This function is called enough
1408 to be worth it. */
1409 if (i == 0)
1410 return oprs_unchanged_p (XEXP (x, i), insn, avail_p);
1412 else if (! oprs_unchanged_p (XEXP (x, i), insn, avail_p))
1413 return 0;
1415 else if (fmt[i] == 'E')
1416 for (j = 0; j < XVECLEN (x, i); j++)
1417 if (! oprs_unchanged_p (XVECEXP (x, i, j), insn, avail_p))
1418 return 0;
1421 return 1;
1424 /* Used for communication between mems_conflict_for_gcse_p and
1425 load_killed_in_block_p. Nonzero if mems_conflict_for_gcse_p finds a
1426 conflict between two memory references. */
1427 static int gcse_mems_conflict_p;
1429 /* Used for communication between mems_conflict_for_gcse_p and
1430 load_killed_in_block_p. This is the memory reference of a load insn;
1431 mems_conflict_for_gcse_p will check whether a memory store conflicts with
1432 this memory load. */
1433 static rtx gcse_mem_operand;
1435 /* DEST is the output of an instruction. If it is a memory reference, and
1436 possibly conflicts with the load found in gcse_mem_operand, then set
1437 gcse_mems_conflict_p to a nonzero value. */
1439 static void
1440 mems_conflict_for_gcse_p (dest, setter, data)
1441 rtx dest, setter ATTRIBUTE_UNUSED;
1442 void *data ATTRIBUTE_UNUSED;
1444 while (GET_CODE (dest) == SUBREG
1445 || GET_CODE (dest) == ZERO_EXTRACT
1446 || GET_CODE (dest) == SIGN_EXTRACT
1447 || GET_CODE (dest) == STRICT_LOW_PART)
1448 dest = XEXP (dest, 0);
1450 /* If DEST is not a MEM, then it will not conflict with the load. Note
1451 that function calls are assumed to clobber memory, but are handled
1452 elsewhere. */
1453 if (GET_CODE (dest) != MEM)
1454 return;
1456 /* If we are setting a MEM in our list of specially recognized MEMs,
1457 don't mark as killed this time. */
1459 if (dest == gcse_mem_operand && pre_ldst_mems != NULL)
1461 if (!find_rtx_in_ldst (dest))
1462 gcse_mems_conflict_p = 1;
1463 return;
1466 if (true_dependence (dest, GET_MODE (dest), gcse_mem_operand,
1467 rtx_addr_varies_p))
1468 gcse_mems_conflict_p = 1;
1471 /* Return nonzero if the expression in X (a memory reference) is killed
1472 in block BB before or after the insn with the CUID in UID_LIMIT.
1473 AVAIL_P is nonzero for kills after UID_LIMIT, and zero for kills
1474 before UID_LIMIT.
1476 To check the entire block, set UID_LIMIT to max_uid + 1 and
1477 AVAIL_P to 0. */
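/* For example (a hedged sketch): when computing availability this is called
   with AVAIL_P nonzero, asking whether any store or call recorded in
   modify_mem_list for BB at or after UID_LIMIT might clobber the memory
   referenced by X.  */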
1479 static int
1480 load_killed_in_block_p (bb, uid_limit, x, avail_p)
1481 basic_block bb;
1482 int uid_limit;
1483 rtx x;
1484 int avail_p;
1486 rtx list_entry = modify_mem_list[bb->index];
1487 while (list_entry)
1489 rtx setter;
1490 /* Ignore entries in the list that do not apply. */
1491 if ((avail_p
1492 && INSN_CUID (XEXP (list_entry, 0)) < uid_limit)
1493 || (! avail_p
1494 && INSN_CUID (XEXP (list_entry, 0)) > uid_limit))
1496 list_entry = XEXP (list_entry, 1);
1497 continue;
1500 setter = XEXP (list_entry, 0);
1502 /* If SETTER is a call everything is clobbered. Note that calls
1503 to pure functions are never put on the list, so we need not
1504 worry about them. */
1505 if (GET_CODE (setter) == CALL_INSN)
1506 return 1;
1508 /* SETTER must be an INSN of some kind that sets memory. Call
1509 note_stores to examine each hunk of memory that is modified.
1511 The note_stores interface is pretty limited, so we have to
1512 communicate via global variables. Yuk. */
1513 gcse_mem_operand = x;
1514 gcse_mems_conflict_p = 0;
1515 note_stores (PATTERN (setter), mems_conflict_for_gcse_p, NULL);
1516 if (gcse_mems_conflict_p)
1517 return 1;
1518 list_entry = XEXP (list_entry, 1);
1520 return 0;
1523 /* Return nonzero if the operands of expression X are unchanged from
1524 the start of INSN's basic block up to but not including INSN. */
1526 static int
1527 oprs_anticipatable_p (x, insn)
1528 rtx x, insn;
1530 return oprs_unchanged_p (x, insn, 0);
1533 /* Return nonzero if the operands of expression X are unchanged from
1534 INSN to the end of INSN's basic block. */
1536 static int
1537 oprs_available_p (x, insn)
1538 rtx x, insn;
1540 return oprs_unchanged_p (x, insn, 1);
1543 /* Hash expression X.
1545 MODE is only used if X is a CONST_INT. DO_NOT_RECORD_P is a boolean
1546 indicating if a volatile operand is found or if the expression contains
1547 something we don't want to insert in the table.
1549 ??? One might want to merge this with canon_hash. Later. */
1551 static unsigned int
1552 hash_expr (x, mode, do_not_record_p, hash_table_size)
1553 rtx x;
1554 enum machine_mode mode;
1555 int *do_not_record_p;
1556 int hash_table_size;
1558 unsigned int hash;
1560 *do_not_record_p = 0;
1562 hash = hash_expr_1 (x, mode, do_not_record_p);
1563 return hash % hash_table_size;
1566 /* Hash a string. Just add its bytes up. */
1568 static inline unsigned
1569 hash_string_1 (ps)
1570 const char *ps;
1572 unsigned hash = 0;
1573 const unsigned char *p = (const unsigned char *) ps;
1575 if (p)
1576 while (*p)
1577 hash += *p++;
1579 return hash;
1582 /* Subroutine of hash_expr to do the actual work. */
1584 static unsigned int
1585 hash_expr_1 (x, mode, do_not_record_p)
1586 rtx x;
1587 enum machine_mode mode;
1588 int *do_not_record_p;
1590 int i, j;
1591 unsigned hash = 0;
1592 enum rtx_code code;
1593 const char *fmt;
1595 /* Used to turn recursion into iteration. We can't rely on GCC's
1596 tail-recursion elimination since we need to keep accumulating values
1597 in HASH. */
1599 if (x == 0)
1600 return hash;
1602 repeat:
1603 code = GET_CODE (x);
1604 switch (code)
1606 case REG:
1607 hash += ((unsigned int) REG << 7) + REGNO (x);
1608 return hash;
1610 case CONST_INT:
1611 hash += (((unsigned int) CONST_INT << 7) + (unsigned int) mode
1612 + (unsigned int) INTVAL (x));
1613 return hash;
1615 case CONST_DOUBLE:
1616 /* This is like the general case, except that it only counts
1617 the integers representing the constant. */
1618 hash += (unsigned int) code + (unsigned int) GET_MODE (x);
1619 if (GET_MODE (x) != VOIDmode)
1620 for (i = 2; i < GET_RTX_LENGTH (CONST_DOUBLE); i++)
1621 hash += (unsigned int) XWINT (x, i);
1622 else
1623 hash += ((unsigned int) CONST_DOUBLE_LOW (x)
1624 + (unsigned int) CONST_DOUBLE_HIGH (x));
1625 return hash;
1627 case CONST_VECTOR:
1629 int units;
1630 rtx elt;
1632 units = CONST_VECTOR_NUNITS (x);
1634 for (i = 0; i < units; ++i)
1636 elt = CONST_VECTOR_ELT (x, i);
1637 hash += hash_expr_1 (elt, GET_MODE (elt), do_not_record_p);
1640 return hash;
1643 /* Assume there is only one rtx object for any given label. */
1644 case LABEL_REF:
1645 /* We don't hash on the address of the CODE_LABEL to avoid bootstrap
1646 differences and differences between each stage's debugging dumps. */
1647 hash += (((unsigned int) LABEL_REF << 7)
1648 + CODE_LABEL_NUMBER (XEXP (x, 0)));
1649 return hash;
1651 case SYMBOL_REF:
1653 /* Don't hash on the symbol's address to avoid bootstrap differences.
1654 Different hash values may cause expressions to be recorded in
1655 different orders and thus different registers to be used in the
1656 final assembler. This also avoids differences in the dump files
1657 between various stages. */
1658 unsigned int h = 0;
1659 const unsigned char *p = (const unsigned char *) XSTR (x, 0);
1661 while (*p)
1662 h += (h << 7) + *p++; /* ??? revisit */
1664 hash += ((unsigned int) SYMBOL_REF << 7) + h;
1665 return hash;
1668 case MEM:
1669 if (MEM_VOLATILE_P (x))
1671 *do_not_record_p = 1;
1672 return 0;
1675 hash += (unsigned int) MEM;
1676 /* We used to hash on the alias set, but this is not good, since the alias
1677 set may differ between -fprofile-arcs and -fbranch-probabilities compilations,
1678 causing the profiles to fail to match. */
1679 x = XEXP (x, 0);
1680 goto repeat;
1682 case PRE_DEC:
1683 case PRE_INC:
1684 case POST_DEC:
1685 case POST_INC:
1686 case PC:
1687 case CC0:
1688 case CALL:
1689 case UNSPEC_VOLATILE:
1690 *do_not_record_p = 1;
1691 return 0;
1693 case ASM_OPERANDS:
1694 if (MEM_VOLATILE_P (x))
1696 *do_not_record_p = 1;
1697 return 0;
1699 else
1701 /* We don't want to take the filename and line into account. */
1702 hash += (unsigned) code + (unsigned) GET_MODE (x)
1703 + hash_string_1 (ASM_OPERANDS_TEMPLATE (x))
1704 + hash_string_1 (ASM_OPERANDS_OUTPUT_CONSTRAINT (x))
1705 + (unsigned) ASM_OPERANDS_OUTPUT_IDX (x);
1707 if (ASM_OPERANDS_INPUT_LENGTH (x))
1709 for (i = 1; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
1711 hash += (hash_expr_1 (ASM_OPERANDS_INPUT (x, i),
1712 GET_MODE (ASM_OPERANDS_INPUT (x, i)),
1713 do_not_record_p)
1714 + hash_string_1 (ASM_OPERANDS_INPUT_CONSTRAINT
1715 (x, i)));
1718 hash += hash_string_1 (ASM_OPERANDS_INPUT_CONSTRAINT (x, 0));
1719 x = ASM_OPERANDS_INPUT (x, 0);
1720 mode = GET_MODE (x);
1721 goto repeat;
1723 return hash;
1726 default:
1727 break;
1730 hash += (unsigned) code + (unsigned) GET_MODE (x);
1731 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
1733 if (fmt[i] == 'e')
1735 /* If we are about to do the last recursive call
1736 needed at this level, change it into iteration.
1737 This function is called enough to be worth it. */
1738 if (i == 0)
1740 x = XEXP (x, i);
1741 goto repeat;
1744 hash += hash_expr_1 (XEXP (x, i), 0, do_not_record_p);
1745 if (*do_not_record_p)
1746 return 0;
1749 else if (fmt[i] == 'E')
1750 for (j = 0; j < XVECLEN (x, i); j++)
1752 hash += hash_expr_1 (XVECEXP (x, i, j), 0, do_not_record_p);
1753 if (*do_not_record_p)
1754 return 0;
1757 else if (fmt[i] == 's')
1758 hash += hash_string_1 (XSTR (x, i));
1759 else if (fmt[i] == 'i')
1760 hash += (unsigned int) XINT (x, i);
1761 else
1762 abort ();
1765 return hash;
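/* A standalone sketch (not GCC code) of the "goto repeat" idiom used by
   hash_expr_1 above: the last recursive call at each level is turned into
   iteration by overwriting the argument and jumping back to the top, while
   the remaining operands still recurse normally.  The toy `node' type below
   is purely illustrative.  */
#if 0
struct node
{
  int value;
  int n_kids;
  struct node *kid[4];
};

static unsigned
sum_tree (struct node *x)
{
  unsigned sum = 0;
  int i;

 repeat:
  if (x == 0)
    return sum;

  sum += (unsigned) x->value;

  /* Recurse on all children but the first; the first is handled by
     iteration, exactly as hash_expr_1 does for its i == 0 operand.  */
  for (i = x->n_kids - 1; i > 0; i--)
    sum += sum_tree (x->kid[i]);

  if (x->n_kids > 0)
    {
      x = x->kid[0];
      goto repeat;
    }

  return sum;
}
#endif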
1768 /* Hash a set of register REGNO.
1770 Sets are hashed on the register that is set. This simplifies the PRE copy
1771 propagation code.
1773 ??? May need to make things more elaborate. Later, as necessary. */
1775 static unsigned int
1776 hash_set (regno, hash_table_size)
1777 int regno;
1778 int hash_table_size;
1780 unsigned int hash;
1782 hash = regno;
1783 return hash % hash_table_size;
1786 /* Return nonzero if exp1 is equivalent to exp2.
1787 ??? Borrowed from cse.c. Might want to remerge with cse.c. Later. */
1789 static int
1790 expr_equiv_p (x, y)
1791 rtx x, y;
1793 int i, j;
1794 enum rtx_code code;
1795 const char *fmt;
1797 if (x == y)
1798 return 1;
1800 if (x == 0 || y == 0)
1801 return x == y;
1803 code = GET_CODE (x);
1804 if (code != GET_CODE (y))
1805 return 0;
1807 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
1808 if (GET_MODE (x) != GET_MODE (y))
1809 return 0;
1811 switch (code)
1813 case PC:
1814 case CC0:
1815 return x == y;
1817 case CONST_INT:
1818 return INTVAL (x) == INTVAL (y);
1820 case LABEL_REF:
1821 return XEXP (x, 0) == XEXP (y, 0);
1823 case SYMBOL_REF:
1824 return XSTR (x, 0) == XSTR (y, 0);
1826 case REG:
1827 return REGNO (x) == REGNO (y);
1829 case MEM:
1830 /* Can't merge two expressions in different alias sets, since we can
1831 decide that the expression is transparent in a block when it isn't,
1832 due to it being set with the different alias set. */
1833 if (MEM_ALIAS_SET (x) != MEM_ALIAS_SET (y))
1834 return 0;
1835 break;
1837 /* For commutative operations, check both orders. */
1838 case PLUS:
1839 case MULT:
1840 case AND:
1841 case IOR:
1842 case XOR:
1843 case NE:
1844 case EQ:
1845 return ((expr_equiv_p (XEXP (x, 0), XEXP (y, 0))
1846 && expr_equiv_p (XEXP (x, 1), XEXP (y, 1)))
1847 || (expr_equiv_p (XEXP (x, 0), XEXP (y, 1))
1848 && expr_equiv_p (XEXP (x, 1), XEXP (y, 0))));
1850 case ASM_OPERANDS:
1851 /* We don't use the generic code below because we want to
1852 disregard filename and line numbers. */
1854 /* A volatile asm isn't equivalent to any other. */
1855 if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
1856 return 0;
1858 if (GET_MODE (x) != GET_MODE (y)
1859 || strcmp (ASM_OPERANDS_TEMPLATE (x), ASM_OPERANDS_TEMPLATE (y))
1860 || strcmp (ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
1861 ASM_OPERANDS_OUTPUT_CONSTRAINT (y))
1862 || ASM_OPERANDS_OUTPUT_IDX (x) != ASM_OPERANDS_OUTPUT_IDX (y)
1863 || ASM_OPERANDS_INPUT_LENGTH (x) != ASM_OPERANDS_INPUT_LENGTH (y))
1864 return 0;
1866 if (ASM_OPERANDS_INPUT_LENGTH (x))
1868 for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
1869 if (! expr_equiv_p (ASM_OPERANDS_INPUT (x, i),
1870 ASM_OPERANDS_INPUT (y, i))
1871 || strcmp (ASM_OPERANDS_INPUT_CONSTRAINT (x, i),
1872 ASM_OPERANDS_INPUT_CONSTRAINT (y, i)))
1873 return 0;
1876 return 1;
1878 default:
1879 break;
1882 /* Compare the elements. If any pair of corresponding elements
1883 fail to match, return 0 for the whole thing. */
1885 fmt = GET_RTX_FORMAT (code);
1886 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1888 switch (fmt[i])
1890 case 'e':
1891 if (! expr_equiv_p (XEXP (x, i), XEXP (y, i)))
1892 return 0;
1893 break;
1895 case 'E':
1896 if (XVECLEN (x, i) != XVECLEN (y, i))
1897 return 0;
1898 for (j = 0; j < XVECLEN (x, i); j++)
1899 if (! expr_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j)))
1900 return 0;
1901 break;
1903 case 's':
1904 if (strcmp (XSTR (x, i), XSTR (y, i)))
1905 return 0;
1906 break;
1908 case 'i':
1909 if (XINT (x, i) != XINT (y, i))
1910 return 0;
1911 break;
1913 case 'w':
1914 if (XWINT (x, i) != XWINT (y, i))
1915 return 0;
1916 break;
1918 case '0':
1919 break;
1921 default:
1922 abort ();
1926 return 1;
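/* A standalone sketch (not GCC code) of the commutative-operand check used
   in expr_equiv_p above: for PLUS, MULT and friends, two expressions match
   if their operands agree in either order.  The toy `binop' type and its
   integer operand ids are illustrative only.  */
#if 0
struct binop
{
  int op;       /* operator code */
  int lhs, rhs; /* operand ids standing in for sub-expressions */
};

static int
binop_equiv_p (const struct binop *x, const struct binop *y)
{
  if (x->op != y->op)
    return 0;

  /* Check both operand orders, as done for commutative codes above.  */
  return ((x->lhs == y->lhs && x->rhs == y->rhs)
          || (x->lhs == y->rhs && x->rhs == y->lhs));
}
#endif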
1929 /* Insert expression X in INSN in the hash TABLE.
1930 If it is already present, record it as the last occurrence in INSN's
1931 basic block.
1933 MODE is the mode of the value X is being stored into.
1934 It is only used if X is a CONST_INT.
1936 ANTIC_P is nonzero if X is an anticipatable expression.
1937 AVAIL_P is nonzero if X is an available expression. */
1939 static void
1940 insert_expr_in_table (x, mode, insn, antic_p, avail_p, table)
1941 rtx x;
1942 enum machine_mode mode;
1943 rtx insn;
1944 int antic_p, avail_p;
1945 struct hash_table *table;
1947 int found, do_not_record_p;
1948 unsigned int hash;
1949 struct expr *cur_expr, *last_expr = NULL;
1950 struct occr *antic_occr, *avail_occr;
1951 struct occr *last_occr = NULL;
1953 hash = hash_expr (x, mode, &do_not_record_p, table->size);
1955 /* Do not insert expression in table if it contains volatile operands,
1956 or if hash_expr determines the expression is something we don't want
1957 to or can't handle. */
1958 if (do_not_record_p)
1959 return;
1961 cur_expr = table->table[hash];
1962 found = 0;
1964 while (cur_expr && 0 == (found = expr_equiv_p (cur_expr->expr, x)))
1966 /* If the expression isn't found, save a pointer to the end of
1967 the list. */
1968 last_expr = cur_expr;
1969 cur_expr = cur_expr->next_same_hash;
1972 if (! found)
1974 cur_expr = (struct expr *) gcse_alloc (sizeof (struct expr));
1975 bytes_used += sizeof (struct expr);
1976 if (table->table[hash] == NULL)
1977 /* This is the first pattern that hashed to this index. */
1978 table->table[hash] = cur_expr;
1979 else
1980 /* Add EXPR to end of this hash chain. */
1981 last_expr->next_same_hash = cur_expr;
1983 /* Set the fields of the expr element. */
1984 cur_expr->expr = x;
1985 cur_expr->bitmap_index = table->n_elems++;
1986 cur_expr->next_same_hash = NULL;
1987 cur_expr->antic_occr = NULL;
1988 cur_expr->avail_occr = NULL;
1991 /* Now record the occurrence(s). */
1992 if (antic_p)
1994 antic_occr = cur_expr->antic_occr;
1996 /* Search for another occurrence in the same basic block. */
1997 while (antic_occr && BLOCK_NUM (antic_occr->insn) != BLOCK_NUM (insn))
1999 /* If an occurrence isn't found, save a pointer to the end of
2000 the list. */
2001 last_occr = antic_occr;
2002 antic_occr = antic_occr->next;
2005 if (antic_occr)
2006 /* Found another instance of the expression in the same basic block.
2007 Prefer the currently recorded one. We want the first one in the
2008 block and the block is scanned from start to end. */
2009 ; /* nothing to do */
2010 else
2012 /* First occurrence of this expression in this basic block. */
2013 antic_occr = (struct occr *) gcse_alloc (sizeof (struct occr));
2014 bytes_used += sizeof (struct occr);
2015 /* First occurrence of this expression in any block? */
2016 if (cur_expr->antic_occr == NULL)
2017 cur_expr->antic_occr = antic_occr;
2018 else
2019 last_occr->next = antic_occr;
2021 antic_occr->insn = insn;
2022 antic_occr->next = NULL;
2026 if (avail_p)
2028 avail_occr = cur_expr->avail_occr;
2030 /* Search for another occurrence in the same basic block. */
2031 while (avail_occr && BLOCK_NUM (avail_occr->insn) != BLOCK_NUM (insn))
2033 /* If an occurrence isn't found, save a pointer to the end of
2034 the list. */
2035 last_occr = avail_occr;
2036 avail_occr = avail_occr->next;
2039 if (avail_occr)
2040 /* Found another instance of the expression in the same basic block.
2041 Prefer this occurrence to the currently recorded one. We want
2042 the last one in the block and the block is scanned from start
2043 to end. */
2044 avail_occr->insn = insn;
2045 else
2047 /* First occurrence of this expression in this basic block. */
2048 avail_occr = (struct occr *) gcse_alloc (sizeof (struct occr));
2049 bytes_used += sizeof (struct occr);
2051 /* First occurrence of this expression in any block? */
2052 if (cur_expr->avail_occr == NULL)
2053 cur_expr->avail_occr = avail_occr;
2054 else
2055 last_occr->next = avail_occr;
2057 avail_occr->insn = insn;
2058 avail_occr->next = NULL;
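/* A standalone sketch (not GCC code) of the separate-chaining insertion
   pattern used by insert_expr_in_table above: walk the bucket chain looking
   for an equivalent entry, remembering the tail so a new node can be
   appended at the end, which keeps bitmap indices in first-seen order.  The
   simplified `entry' type and integer key comparison are illustrative only. */
#if 0
#include <stdlib.h>

struct entry
{
  int key;
  int bitmap_index;
  struct entry *next_same_hash;
};

struct table
{
  struct entry **buckets;
  unsigned size;
  unsigned n_elems;
};

static struct entry *
insert_key (struct table *t, int key)
{
  unsigned hash = (unsigned) key % t->size;
  struct entry *cur = t->buckets[hash];
  struct entry *last = NULL;

  while (cur && cur->key != key)
    {
      last = cur;
      cur = cur->next_same_hash;
    }

  if (cur == NULL)
    {
      cur = (struct entry *) calloc (1, sizeof (struct entry));
      cur->key = key;
      cur->bitmap_index = t->n_elems++;
      if (t->buckets[hash] == NULL)
        t->buckets[hash] = cur;      /* first entry in this bucket */
      else
        last->next_same_hash = cur;  /* append to end of chain */
    }

  return cur;
}
#endif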
2063 /* Insert pattern X in INSN in the hash table.
2064 X is a SET of a reg to either another reg or a constant.
2065 If it is already present, record it as the last occurrence in INSN's
2066 basic block. */
2068 static void
2069 insert_set_in_table (x, insn, table)
2070 rtx x;
2071 rtx insn;
2072 struct hash_table *table;
2074 int found;
2075 unsigned int hash;
2076 struct expr *cur_expr, *last_expr = NULL;
2077 struct occr *cur_occr, *last_occr = NULL;
2079 if (GET_CODE (x) != SET
2080 || GET_CODE (SET_DEST (x)) != REG)
2081 abort ();
2083 hash = hash_set (REGNO (SET_DEST (x)), table->size);
2085 cur_expr = table->table[hash];
2086 found = 0;
2088 while (cur_expr && 0 == (found = expr_equiv_p (cur_expr->expr, x)))
2090 /* If the expression isn't found, save a pointer to the end of
2091 the list. */
2092 last_expr = cur_expr;
2093 cur_expr = cur_expr->next_same_hash;
2096 if (! found)
2098 cur_expr = (struct expr *) gcse_alloc (sizeof (struct expr));
2099 bytes_used += sizeof (struct expr);
2100 if (table->table[hash] == NULL)
2101 /* This is the first pattern that hashed to this index. */
2102 table->table[hash] = cur_expr;
2103 else
2104 /* Add EXPR to end of this hash chain. */
2105 last_expr->next_same_hash = cur_expr;
2107 /* Set the fields of the expr element.
2108 We must copy X because it can be modified when copy propagation is
2109 performed on its operands. */
2110 cur_expr->expr = copy_rtx (x);
2111 cur_expr->bitmap_index = table->n_elems++;
2112 cur_expr->next_same_hash = NULL;
2113 cur_expr->antic_occr = NULL;
2114 cur_expr->avail_occr = NULL;
2117 /* Now record the occurrence. */
2118 cur_occr = cur_expr->avail_occr;
2120 /* Search for another occurrence in the same basic block. */
2121 while (cur_occr && BLOCK_NUM (cur_occr->insn) != BLOCK_NUM (insn))
2123 /* If an occurrence isn't found, save a pointer to the end of
2124 the list. */
2125 last_occr = cur_occr;
2126 cur_occr = cur_occr->next;
2129 if (cur_occr)
2130 /* Found another instance of the expression in the same basic block.
2131 Prefer this occurrence to the currently recorded one. We want the
2132 last one in the block and the block is scanned from start to end. */
2133 cur_occr->insn = insn;
2134 else
2136 /* First occurrence of this expression in this basic block. */
2137 cur_occr = (struct occr *) gcse_alloc (sizeof (struct occr));
2138 bytes_used += sizeof (struct occr);
2140 /* First occurrence of this expression in any block? */
2141 if (cur_expr->avail_occr == NULL)
2142 cur_expr->avail_occr = cur_occr;
2143 else
2144 last_occr->next = cur_occr;
2146 cur_occr->insn = insn;
2147 cur_occr->next = NULL;
2151 /* Scan pattern PAT of INSN and add an entry to the hash TABLE (set or
2152 expression one). */
2154 static void
2155 hash_scan_set (pat, insn, table)
2156 rtx pat, insn;
2157 struct hash_table *table;
2159 rtx src = SET_SRC (pat);
2160 rtx dest = SET_DEST (pat);
2161 rtx note;
2163 if (GET_CODE (src) == CALL)
2164 hash_scan_call (src, insn, table);
2166 else if (GET_CODE (dest) == REG)
2168 unsigned int regno = REGNO (dest);
2169 rtx tmp;
2171 /* If this is a single set and we are doing constant propagation,
2172 see if a REG_NOTE shows this equivalent to a constant. */
2173 if (table->set_p && (note = find_reg_equal_equiv_note (insn)) != 0
2174 && CONSTANT_P (XEXP (note, 0)))
2175 src = XEXP (note, 0), pat = gen_rtx_SET (VOIDmode, dest, src);
2177 /* Only record sets of pseudo-regs in the hash table. */
2178 if (! table->set_p
2179 && regno >= FIRST_PSEUDO_REGISTER
2180 /* Don't GCSE something if we can't do a reg/reg copy. */
2181 && can_copy_p [GET_MODE (dest)]
2182 /* GCSE commonly inserts instructions after the insn. We can't
2183 do that easily for EH_REGION notes so disable GCSE on these
2184 for now. */
2185 && !find_reg_note (insn, REG_EH_REGION, NULL_RTX)
2186 /* Is SET_SRC something we want to gcse? */
2187 && want_to_gcse_p (src)
2188 /* Don't CSE a nop. */
2189 && ! set_noop_p (pat)
2190 /* Don't GCSE if it has an attached REG_EQUIV note.
2191 At this point only function parameters should have
2192 REG_EQUIV notes, and if the argument slot is used somewhere
2193 explicitly, it means the address of the parameter has been taken,
2194 so we should not extend the lifetime of the pseudo. */
2195 && ((note = find_reg_note (insn, REG_EQUIV, NULL_RTX)) == 0
2196 || GET_CODE (XEXP (note, 0)) != MEM))
2198 /* An expression is not anticipatable if its operands are
2199 modified before this insn or if this is not the only SET in
2200 this insn. */
2201 int antic_p = oprs_anticipatable_p (src, insn) && single_set (insn);
2202 /* An expression is not available if its operands are
2203 subsequently modified, including this insn. It's also not
2204 available if this is a branch, because we can't insert
2205 a set after the branch. */
2206 int avail_p = (oprs_available_p (src, insn)
2207 && ! JUMP_P (insn));
2209 insert_expr_in_table (src, GET_MODE (dest), insn, antic_p, avail_p, table);
2212 /* Record sets for constant/copy propagation. */
2213 else if (table->set_p
2214 && regno >= FIRST_PSEUDO_REGISTER
2215 && ((GET_CODE (src) == REG
2216 && REGNO (src) >= FIRST_PSEUDO_REGISTER
2217 && can_copy_p [GET_MODE (dest)]
2218 && REGNO (src) != regno)
2219 || CONSTANT_P (src))
2220 /* A copy is not available if its src or dest is subsequently
2221 modified. Here we want to search from INSN+1 on, but
2222 oprs_available_p searches from INSN on. */
2223 && (insn == BLOCK_END (BLOCK_NUM (insn))
2224 || ((tmp = next_nonnote_insn (insn)) != NULL_RTX
2225 && oprs_available_p (pat, tmp))))
2226 insert_set_in_table (pat, insn, table);
2230 static void
2231 hash_scan_clobber (x, insn, table)
2232 rtx x ATTRIBUTE_UNUSED, insn ATTRIBUTE_UNUSED;
2233 struct hash_table *table ATTRIBUTE_UNUSED;
2235 /* Currently nothing to do. */
2238 static void
2239 hash_scan_call (x, insn, table)
2240 rtx x ATTRIBUTE_UNUSED, insn ATTRIBUTE_UNUSED;
2241 struct hash_table *table ATTRIBUTE_UNUSED;
2243 /* Currently nothing to do. */
2246 /* Process INSN and add hash table entries as appropriate.
2248 Only available expressions that set a single pseudo-reg are recorded.
2250 Single sets in a PARALLEL could be handled, but it's an extra complication
2251 that isn't dealt with right now. The trick is handling the CLOBBERs that
2252 are also in the PARALLEL. Later.
2254 If SET_P is nonzero, this is for the assignment hash table,
2255 otherwise it is for the expression hash table.
2256 If IN_LIBCALL_BLOCK is nonzero, we are in a libcall block, and should
2257 not record any expressions. */
2259 static void
2260 hash_scan_insn (insn, table, in_libcall_block)
2261 rtx insn;
2262 struct hash_table *table;
2263 int in_libcall_block;
2265 rtx pat = PATTERN (insn);
2266 int i;
2268 if (in_libcall_block)
2269 return;
2271 /* Pick out the sets of INSN and for other forms of instructions record
2272 what's been modified. */
2274 if (GET_CODE (pat) == SET)
2275 hash_scan_set (pat, insn, table);
2276 else if (GET_CODE (pat) == PARALLEL)
2277 for (i = 0; i < XVECLEN (pat, 0); i++)
2279 rtx x = XVECEXP (pat, 0, i);
2281 if (GET_CODE (x) == SET)
2282 hash_scan_set (x, insn, table);
2283 else if (GET_CODE (x) == CLOBBER)
2284 hash_scan_clobber (x, insn, table);
2285 else if (GET_CODE (x) == CALL)
2286 hash_scan_call (x, insn, table);
2289 else if (GET_CODE (pat) == CLOBBER)
2290 hash_scan_clobber (pat, insn, table);
2291 else if (GET_CODE (pat) == CALL)
2292 hash_scan_call (pat, insn, table);
2295 static void
2296 dump_hash_table (file, name, table)
2297 FILE *file;
2298 const char *name;
2299 struct hash_table *table;
2301 int i;
2302 /* Flattened out table, so it's printed in proper order. */
2303 struct expr **flat_table;
2304 unsigned int *hash_val;
2305 struct expr *expr;
2307 flat_table
2308 = (struct expr **) xcalloc (table->n_elems, sizeof (struct expr *));
2309 hash_val = (unsigned int *) xmalloc (table->n_elems * sizeof (unsigned int));
2311 for (i = 0; i < (int) table->size; i++)
2312 for (expr = table->table[i]; expr != NULL; expr = expr->next_same_hash)
2314 flat_table[expr->bitmap_index] = expr;
2315 hash_val[expr->bitmap_index] = i;
2318 fprintf (file, "%s hash table (%d buckets, %d entries)\n",
2319 name, table->size, table->n_elems);
2321 for (i = 0; i < (int) table->n_elems; i++)
2322 if (flat_table[i] != 0)
2324 expr = flat_table[i];
2325 fprintf (file, "Index %d (hash value %d)\n ",
2326 expr->bitmap_index, hash_val[i]);
2327 print_rtl (file, expr->expr);
2328 fprintf (file, "\n");
2331 fprintf (file, "\n");
2333 free (flat_table);
2334 free (hash_val);
2337 /* Record register first/last/block set information for REGNO in INSN.
2339 first_set records the first place in the block where the register
2340 is set and is used to compute "anticipatability".
2342 last_set records the last place in the block where the register
2343 is set and is used to compute "availability".
2345 last_bb records the block for which first_set and last_set are
2346 valid, as a quick test to invalidate them.
2348 reg_set_in_block records whether the register is set in the block
2349 and is used to compute "transparency". */
2351 static void
2352 record_last_reg_set_info (insn, regno)
2353 rtx insn;
2354 int regno;
2356 struct reg_avail_info *info = &reg_avail_info[regno];
2357 int cuid = INSN_CUID (insn);
2359 info->last_set = cuid;
2360 if (info->last_bb != current_bb)
2362 info->last_bb = current_bb;
2363 info->first_set = cuid;
2364 SET_BIT (reg_set_in_block[current_bb->index], regno);
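/* A standalone sketch (not GCC code) of the lazy first/last-set tracking
   used above: the first_set field is only rewritten when the block changes,
   so first_set and last_set are valid exactly when last_bb matches the
   block being scanned.  The simplified types are illustrative only.  */
#if 0
struct avail_info
{
  int first_set;  /* position of the first set in last_bb */
  int last_set;   /* position of the last set in last_bb */
  int last_bb;    /* block for which the two fields above are valid */
};

static void
record_set (struct avail_info *info, int bb, int position)
{
  info->last_set = position;
  if (info->last_bb != bb)
    {
      /* First set of this register seen in BB; both fields start here.  */
      info->last_bb = bb;
      info->first_set = position;
    }
}
#endif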
2369 /* Record all of the canonicalized MEMs of record_last_mem_set_info's insn.
2370 Note we store a pair of elements in the list, so they have to be
2371 taken off pairwise. */
2373 static void
2374 canon_list_insert (dest, unused1, v_insn)
2375 rtx dest ATTRIBUTE_UNUSED;
2376 rtx unused1 ATTRIBUTE_UNUSED;
2377 void * v_insn;
2379 rtx dest_addr, insn;
2380 int bb;
2382 while (GET_CODE (dest) == SUBREG
2383 || GET_CODE (dest) == ZERO_EXTRACT
2384 || GET_CODE (dest) == SIGN_EXTRACT
2385 || GET_CODE (dest) == STRICT_LOW_PART)
2386 dest = XEXP (dest, 0);
2388 /* If DEST is not a MEM, then it will not conflict with a load. Note
2389 that function calls are assumed to clobber memory, but are handled
2390 elsewhere. */
2392 if (GET_CODE (dest) != MEM)
2393 return;
2395 dest_addr = get_addr (XEXP (dest, 0));
2396 dest_addr = canon_rtx (dest_addr);
2397 insn = (rtx) v_insn;
2398 bb = BLOCK_NUM (insn);
2400 canon_modify_mem_list[bb] =
2401 alloc_EXPR_LIST (VOIDmode, dest_addr, canon_modify_mem_list[bb]);
2402 canon_modify_mem_list[bb] =
2403 alloc_EXPR_LIST (VOIDmode, dest, canon_modify_mem_list[bb]);
2404 bitmap_set_bit (canon_modify_mem_list_set, bb);
2407 /* Record memory modification information for INSN. We do not actually care
2408 about the memory location(s) that are set, or even how they are set (consider
2409 a CALL_INSN). We merely need to record which insns modify memory. */
2411 static void
2412 record_last_mem_set_info (insn)
2413 rtx insn;
2415 int bb = BLOCK_NUM (insn);
2417 /* load_killed_in_block_p will handle the case of calls clobbering
2418 everything. */
2419 modify_mem_list[bb] = alloc_INSN_LIST (insn, modify_mem_list[bb]);
2420 bitmap_set_bit (modify_mem_list_set, bb);
2422 if (GET_CODE (insn) == CALL_INSN)
2424 /* Note that traversals of this loop (other than for freeing)
2425 will break after encountering a CALL_INSN. So, there's no
2426 need to insert a pair of items, as canon_list_insert does. */
2427 canon_modify_mem_list[bb] =
2428 alloc_INSN_LIST (insn, canon_modify_mem_list[bb]);
2429 bitmap_set_bit (canon_modify_mem_list_set, bb);
2431 else
2432 note_stores (PATTERN (insn), canon_list_insert, (void*) insn);
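/* A standalone sketch (not GCC code) of walking a list whose elements are
   stored pairwise, as canon_list_insert above builds it (a (dest, dest_addr)
   pair becomes two consecutive nodes) and as compute_transp later consumes
   it.  The toy cons cell is illustrative only, and the list is assumed to
   hold complete pairs.  */
#if 0
struct cell
{
  void *car;
  struct cell *cdr;
};

static void
walk_pairs (struct cell *list,
            void (*visit) (void *dest, void *dest_addr))
{
  void *dest, *dest_addr;

  while (list)
    {
      dest = list->car;
      list = list->cdr;          /* second node of the pair */
      dest_addr = list->car;
      visit (dest, dest_addr);
      list = list->cdr;          /* advance past the pair */
    }
}
#endif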
2435 /* Called from compute_hash_table via note_stores to handle one
2436 SET or CLOBBER in an insn. DATA is really the instruction in which
2437 the SET is taking place. */
2439 static void
2440 record_last_set_info (dest, setter, data)
2441 rtx dest, setter ATTRIBUTE_UNUSED;
2442 void *data;
2444 rtx last_set_insn = (rtx) data;
2446 if (GET_CODE (dest) == SUBREG)
2447 dest = SUBREG_REG (dest);
2449 if (GET_CODE (dest) == REG)
2450 record_last_reg_set_info (last_set_insn, REGNO (dest));
2451 else if (GET_CODE (dest) == MEM
2452 /* Ignore pushes, they clobber nothing. */
2453 && ! push_operand (dest, GET_MODE (dest)))
2454 record_last_mem_set_info (last_set_insn);
2457 /* Top level function to create an expression or assignment hash table.
2459 Expression entries are placed in the hash table if
2460 - they are of the form (set (pseudo-reg) src),
2461 - src is something we want to perform GCSE on,
2462 - none of the operands are subsequently modified in the block
2464 Assignment entries are placed in the hash table if
2465 - they are of the form (set (pseudo-reg) src),
2466 - src is something we want to perform const/copy propagation on,
2467 - none of the operands or target are subsequently modified in the block
2469 Currently src must be a pseudo-reg or a const_int.
2471 F is the first insn.
2472 TABLE is the table computed. */
2474 static void
2475 compute_hash_table_work (table)
2476 struct hash_table *table;
2478 unsigned int i;
2480 /* While we compute the hash table we also compute a bit array of which
2481 registers are set in which blocks.
2482 ??? This isn't needed during const/copy propagation, but it's cheap to
2483 compute. Later. */
2484 sbitmap_vector_zero (reg_set_in_block, last_basic_block);
2486 /* Re-cache any INSN_LIST nodes we have allocated. */
2487 clear_modify_mem_tables ();
2488 /* Some working arrays used to track first and last set in each block. */
2489 reg_avail_info = (struct reg_avail_info*)
2490 gmalloc (max_gcse_regno * sizeof (struct reg_avail_info));
2492 for (i = 0; i < max_gcse_regno; ++i)
2493 reg_avail_info[i].last_bb = NULL;
2495 FOR_EACH_BB (current_bb)
2497 rtx insn;
2498 unsigned int regno;
2499 int in_libcall_block;
2501 /* First pass over the instructions records information used to
2502 determine when registers and memory are first and last set.
2503 ??? hard-reg reg_set_in_block computation
2504 could be moved to compute_sets since they currently don't change. */
2506 for (insn = current_bb->head;
2507 insn && insn != NEXT_INSN (current_bb->end);
2508 insn = NEXT_INSN (insn))
2510 if (! INSN_P (insn))
2511 continue;
2513 if (GET_CODE (insn) == CALL_INSN)
2515 bool clobbers_all = false;
2516 #ifdef NON_SAVING_SETJMP
2517 if (NON_SAVING_SETJMP
2518 && find_reg_note (insn, REG_SETJMP, NULL_RTX))
2519 clobbers_all = true;
2520 #endif
2522 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2523 if (clobbers_all
2524 || TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
2525 record_last_reg_set_info (insn, regno);
2527 mark_call (insn);
2530 note_stores (PATTERN (insn), record_last_set_info, insn);
2533 /* The next pass builds the hash table. */
2535 for (insn = current_bb->head, in_libcall_block = 0;
2536 insn && insn != NEXT_INSN (current_bb->end);
2537 insn = NEXT_INSN (insn))
2538 if (INSN_P (insn))
2540 if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
2541 in_libcall_block = 1;
2542 else if (table->set_p && find_reg_note (insn, REG_RETVAL, NULL_RTX))
2543 in_libcall_block = 0;
2544 hash_scan_insn (insn, table, in_libcall_block);
2545 if (!table->set_p && find_reg_note (insn, REG_RETVAL, NULL_RTX))
2546 in_libcall_block = 0;
2550 free (reg_avail_info);
2551 reg_avail_info = NULL;
2554 /* Allocate space for the set/expr hash TABLE.
2555 N_INSNS is the number of instructions in the function.
2556 It is used to determine the number of buckets to use.
2557 SET_P determines whether set or expression table will
2558 be created. */
2560 static void
2561 alloc_hash_table (n_insns, table, set_p)
2562 int n_insns;
2563 struct hash_table *table;
2564 int set_p;
2566 int n;
2568 table->size = n_insns / 4;
2569 if (table->size < 11)
2570 table->size = 11;
2572 /* Attempt to maintain efficient use of hash table.
2573 Making it an odd number is simplest for now.
2574 ??? Later take some measurements. */
2575 table->size |= 1;
2576 n = table->size * sizeof (struct expr *);
2577 table->table = (struct expr **) gmalloc (n);
2578 table->set_p = set_p;
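/* A standalone sketch (not GCC code) of the bucket-count heuristic used by
   alloc_hash_table above: one bucket per four insns, never fewer than 11,
   and forced odd so that simple modulo hashing spreads entries a little
   better.  */
#if 0
static int
bucket_count (int n_insns)
{
  int size = n_insns / 4;

  if (size < 11)
    size = 11;

  return size | 1;   /* make it odd */
}

/* Example: bucket_count (100) == 25, bucket_count (8) == 11.  */
#endif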
2581 /* Free things allocated by alloc_hash_table. */
2583 static void
2584 free_hash_table (table)
2585 struct hash_table *table;
2587 free (table->table);
2590 /* Compute the hash TABLE for doing copy/const propagation or
2591 expression hash table. */
2593 static void
2594 compute_hash_table (table)
2595 struct hash_table *table;
2597 /* Initialize count of number of entries in hash table. */
2598 table->n_elems = 0;
2599 memset ((char *) table->table, 0,
2600 table->size * sizeof (struct expr *));
2602 compute_hash_table_work (table);
2605 /* Expression tracking support. */
2607 /* Lookup pattern PAT in the expression TABLE.
2608 The result is a pointer to the table entry, or NULL if not found. */
2610 static struct expr *
2611 lookup_expr (pat, table)
2612 rtx pat;
2613 struct hash_table *table;
2615 int do_not_record_p;
2616 unsigned int hash = hash_expr (pat, GET_MODE (pat), &do_not_record_p,
2617 table->size);
2618 struct expr *expr;
2620 if (do_not_record_p)
2621 return NULL;
2623 expr = table->table[hash];
2625 while (expr && ! expr_equiv_p (expr->expr, pat))
2626 expr = expr->next_same_hash;
2628 return expr;
2631 /* Lookup REGNO in the set TABLE. If PAT is non-NULL look for the entry that
2632 matches it, otherwise return the first entry for REGNO. The result is a
2633 pointer to the table entry, or NULL if not found. */
2635 static struct expr *
2636 lookup_set (regno, pat, table)
2637 unsigned int regno;
2638 rtx pat;
2639 struct hash_table *table;
2641 unsigned int hash = hash_set (regno, table->size);
2642 struct expr *expr;
2644 expr = table->table[hash];
2646 if (pat)
2648 while (expr && ! expr_equiv_p (expr->expr, pat))
2649 expr = expr->next_same_hash;
2651 else
2653 while (expr && REGNO (SET_DEST (expr->expr)) != regno)
2654 expr = expr->next_same_hash;
2657 return expr;
2660 /* Return the next entry for REGNO in list EXPR. */
2662 static struct expr *
2663 next_set (regno, expr)
2664 unsigned int regno;
2665 struct expr *expr;
2668 expr = expr->next_same_hash;
2669 while (expr && REGNO (SET_DEST (expr->expr)) != regno);
2671 return expr;
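/* A sketch (not a quotation of code elsewhere in this file) of how
   lookup_set and next_set above can be combined to visit every recorded SET
   of a given pseudo register in the set hash table; the `process_set'
   callback is hypothetical.  */
#if 0
static void
visit_sets_of_reg (unsigned int regno, struct hash_table *table,
                   void (*process_set) (struct expr *))
{
  struct expr *set;

  for (set = lookup_set (regno, NULL_RTX, table);
       set != NULL;
       set = next_set (regno, set))
    process_set (set);
}
#endif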
2674 /* Like free_INSN_LIST_list or free_EXPR_LIST_list, except that the node
2675 types may be mixed. */
2677 static void
2678 free_insn_expr_list_list (listp)
2679 rtx *listp;
2681 rtx list, next;
2683 for (list = *listp; list ; list = next)
2685 next = XEXP (list, 1);
2686 if (GET_CODE (list) == EXPR_LIST)
2687 free_EXPR_LIST_node (list);
2688 else
2689 free_INSN_LIST_node (list);
2692 *listp = NULL;
2695 /* Clear canon_modify_mem_list and modify_mem_list tables. */
2696 static void
2697 clear_modify_mem_tables ()
2699 int i;
2701 EXECUTE_IF_SET_IN_BITMAP
2702 (modify_mem_list_set, 0, i, free_INSN_LIST_list (modify_mem_list + i));
2703 bitmap_clear (modify_mem_list_set);
2705 EXECUTE_IF_SET_IN_BITMAP
2706 (canon_modify_mem_list_set, 0, i,
2707 free_insn_expr_list_list (canon_modify_mem_list + i));
2708 bitmap_clear (canon_modify_mem_list_set);
2711 /* Release memory used by modify_mem_list_set and canon_modify_mem_list_set. */
2713 static void
2714 free_modify_mem_tables ()
2716 clear_modify_mem_tables ();
2717 free (modify_mem_list);
2718 free (canon_modify_mem_list);
2719 modify_mem_list = 0;
2720 canon_modify_mem_list = 0;
2723 /* Reset tables used to keep track of what's still available [since the
2724 start of the block]. */
2726 static void
2727 reset_opr_set_tables ()
2729 /* Maintain a bitmap of which regs have been set since beginning of
2730 the block. */
2731 CLEAR_REG_SET (reg_set_bitmap);
2733 /* Also keep a record of the last instruction to modify memory.
2734 For now this is very trivial; we only record whether any memory
2735 location has been modified. */
2736 clear_modify_mem_tables ();
2739 /* Return nonzero if the operands of X are not set before INSN in
2740 INSN's basic block. */
2742 static int
2743 oprs_not_set_p (x, insn)
2744 rtx x, insn;
2746 int i, j;
2747 enum rtx_code code;
2748 const char *fmt;
2750 if (x == 0)
2751 return 1;
2753 code = GET_CODE (x);
2754 switch (code)
2756 case PC:
2757 case CC0:
2758 case CONST:
2759 case CONST_INT:
2760 case CONST_DOUBLE:
2761 case CONST_VECTOR:
2762 case SYMBOL_REF:
2763 case LABEL_REF:
2764 case ADDR_VEC:
2765 case ADDR_DIFF_VEC:
2766 return 1;
2768 case MEM:
2769 if (load_killed_in_block_p (BLOCK_FOR_INSN (insn),
2770 INSN_CUID (insn), x, 0))
2771 return 0;
2772 else
2773 return oprs_not_set_p (XEXP (x, 0), insn);
2775 case REG:
2776 return ! REGNO_REG_SET_P (reg_set_bitmap, REGNO (x));
2778 default:
2779 break;
2782 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
2784 if (fmt[i] == 'e')
2786 /* If we are about to do the last recursive call
2787 needed at this level, change it into iteration.
2788 This function is called enough to be worth it. */
2789 if (i == 0)
2790 return oprs_not_set_p (XEXP (x, i), insn);
2792 if (! oprs_not_set_p (XEXP (x, i), insn))
2793 return 0;
2795 else if (fmt[i] == 'E')
2796 for (j = 0; j < XVECLEN (x, i); j++)
2797 if (! oprs_not_set_p (XVECEXP (x, i, j), insn))
2798 return 0;
2801 return 1;
2804 /* Mark things set by a CALL. */
2806 static void
2807 mark_call (insn)
2808 rtx insn;
2810 if (! CONST_OR_PURE_CALL_P (insn))
2811 record_last_mem_set_info (insn);
2814 /* Mark things set by a SET. */
2816 static void
2817 mark_set (pat, insn)
2818 rtx pat, insn;
2820 rtx dest = SET_DEST (pat);
2822 while (GET_CODE (dest) == SUBREG
2823 || GET_CODE (dest) == ZERO_EXTRACT
2824 || GET_CODE (dest) == SIGN_EXTRACT
2825 || GET_CODE (dest) == STRICT_LOW_PART)
2826 dest = XEXP (dest, 0);
2828 if (GET_CODE (dest) == REG)
2829 SET_REGNO_REG_SET (reg_set_bitmap, REGNO (dest));
2830 else if (GET_CODE (dest) == MEM)
2831 record_last_mem_set_info (insn);
2833 if (GET_CODE (SET_SRC (pat)) == CALL)
2834 mark_call (insn);
2837 /* Record things set by a CLOBBER. */
2839 static void
2840 mark_clobber (pat, insn)
2841 rtx pat, insn;
2843 rtx clob = XEXP (pat, 0);
2845 while (GET_CODE (clob) == SUBREG || GET_CODE (clob) == STRICT_LOW_PART)
2846 clob = XEXP (clob, 0);
2848 if (GET_CODE (clob) == REG)
2849 SET_REGNO_REG_SET (reg_set_bitmap, REGNO (clob));
2850 else
2851 record_last_mem_set_info (insn);
2854 /* Record things set by INSN.
2855 This data is used by oprs_not_set_p. */
2857 static void
2858 mark_oprs_set (insn)
2859 rtx insn;
2861 rtx pat = PATTERN (insn);
2862 int i;
2864 if (GET_CODE (pat) == SET)
2865 mark_set (pat, insn);
2866 else if (GET_CODE (pat) == PARALLEL)
2867 for (i = 0; i < XVECLEN (pat, 0); i++)
2869 rtx x = XVECEXP (pat, 0, i);
2871 if (GET_CODE (x) == SET)
2872 mark_set (x, insn);
2873 else if (GET_CODE (x) == CLOBBER)
2874 mark_clobber (x, insn);
2875 else if (GET_CODE (x) == CALL)
2876 mark_call (insn);
2879 else if (GET_CODE (pat) == CLOBBER)
2880 mark_clobber (pat, insn);
2881 else if (GET_CODE (pat) == CALL)
2882 mark_call (insn);
2886 /* Classic GCSE reaching definition support. */
2888 /* Allocate reaching def variables. */
2890 static void
2891 alloc_rd_mem (n_blocks, n_insns)
2892 int n_blocks, n_insns;
2894 rd_kill = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_insns);
2895 sbitmap_vector_zero (rd_kill, n_blocks);
2897 rd_gen = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_insns);
2898 sbitmap_vector_zero (rd_gen, n_blocks);
2900 reaching_defs = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_insns);
2901 sbitmap_vector_zero (reaching_defs, n_blocks);
2903 rd_out = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_insns);
2904 sbitmap_vector_zero (rd_out, n_blocks);
2907 /* Free reaching def variables. */
2909 static void
2910 free_rd_mem ()
2912 sbitmap_vector_free (rd_kill);
2913 sbitmap_vector_free (rd_gen);
2914 sbitmap_vector_free (reaching_defs);
2915 sbitmap_vector_free (rd_out);
2918 /* Add INSN to the kills of BB. REGNO, set in BB, is killed by INSN. */
2920 static void
2921 handle_rd_kill_set (insn, regno, bb)
2922 rtx insn;
2923 int regno;
2924 basic_block bb;
2926 struct reg_set *this_reg;
2928 for (this_reg = reg_set_table[regno]; this_reg; this_reg = this_reg ->next)
2929 if (BLOCK_NUM (this_reg->insn) != BLOCK_NUM (insn))
2930 SET_BIT (rd_kill[bb->index], INSN_CUID (this_reg->insn));
2933 /* Compute the set of kill's for reaching definitions. */
2935 static void
2936 compute_kill_rd ()
2938 int cuid;
2939 unsigned int regno;
2940 int i;
2941 basic_block bb;
2943 /* For each block
2944 For each set bit in `gen' of the block (i.e. each insn which
2945 generates a definition in the block)
2946 Call the reg set by the insn corresponding to that bit regx
2947 Look at the linked list starting at reg_set_table[regx]
2948 For each setting of regx in the linked list, which is not in
2949 this block
2950 Set the bit in `kill' corresponding to that insn. */
2951 FOR_EACH_BB (bb)
2952 for (cuid = 0; cuid < max_cuid; cuid++)
2953 if (TEST_BIT (rd_gen[bb->index], cuid))
2955 rtx insn = CUID_INSN (cuid);
2956 rtx pat = PATTERN (insn);
2958 if (GET_CODE (insn) == CALL_INSN)
2960 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2961 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
2962 handle_rd_kill_set (insn, regno, bb);
2965 if (GET_CODE (pat) == PARALLEL)
2967 for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
2969 enum rtx_code code = GET_CODE (XVECEXP (pat, 0, i));
2971 if ((code == SET || code == CLOBBER)
2972 && GET_CODE (XEXP (XVECEXP (pat, 0, i), 0)) == REG)
2973 handle_rd_kill_set (insn,
2974 REGNO (XEXP (XVECEXP (pat, 0, i), 0)),
2975 bb);
2978 else if (GET_CODE (pat) == SET && GET_CODE (SET_DEST (pat)) == REG)
2979 /* Each setting of this register outside of this block
2980 must be marked in the set of kills in this block. */
2981 handle_rd_kill_set (insn, REGNO (SET_DEST (pat)), bb);
2985 /* Compute the reaching definitions as in
2986 Compilers Principles, Techniques, and Tools. Aho, Sethi, Ullman,
2987 Chapter 10. It is the same algorithm as used for computing available
2988 expressions but applied to the gens and kills of reaching definitions. */
2990 static void
2991 compute_rd ()
2993 int changed, passes;
2994 basic_block bb;
2996 FOR_EACH_BB (bb)
2997 sbitmap_copy (rd_out[bb->index] /*dst*/, rd_gen[bb->index] /*src*/);
2999 passes = 0;
3000 changed = 1;
3001 while (changed)
3003 changed = 0;
3004 FOR_EACH_BB (bb)
3006 sbitmap_union_of_preds (reaching_defs[bb->index], rd_out, bb->index);
3007 changed |= sbitmap_union_of_diff_cg (rd_out[bb->index], rd_gen[bb->index],
3008 reaching_defs[bb->index], rd_kill[bb->index]);
3010 passes++;
3013 if (gcse_file)
3014 fprintf (gcse_file, "reaching def computation: %d passes\n", passes);
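/* A standalone sketch (not GCC code) of the fixpoint iteration performed by
   compute_rd above, using plain unsigned bit masks instead of sbitmaps.
   Each block computes in = union of its predecessors' out, then
   out = gen | (in & ~kill), and the whole thing repeats until no out set
   changes.  The CFG representation below is purely illustrative.  */
#if 0
#define N_BLOCKS 4

static void
iterate_reaching_defs (unsigned gen[N_BLOCKS], unsigned kill[N_BLOCKS],
                       unsigned in[N_BLOCKS], unsigned out[N_BLOCKS],
                       int n_preds[N_BLOCKS], int preds[N_BLOCKS][N_BLOCKS])
{
  int b, p, changed;

  for (b = 0; b < N_BLOCKS; b++)
    out[b] = gen[b];

  do
    {
      changed = 0;
      for (b = 0; b < N_BLOCKS; b++)
        {
          unsigned new_in = 0, new_out;

          for (p = 0; p < n_preds[b]; p++)
            new_in |= out[preds[b][p]];

          new_out = gen[b] | (new_in & ~kill[b]);
          if (new_out != out[b])
            changed = 1;

          in[b] = new_in;
          out[b] = new_out;
        }
    }
  while (changed);
}
#endif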
3017 /* Classic GCSE available expression support. */
3019 /* Allocate memory for available expression computation. */
3021 static void
3022 alloc_avail_expr_mem (n_blocks, n_exprs)
3023 int n_blocks, n_exprs;
3025 ae_kill = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_exprs);
3026 sbitmap_vector_zero (ae_kill, n_blocks);
3028 ae_gen = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_exprs);
3029 sbitmap_vector_zero (ae_gen, n_blocks);
3031 ae_in = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_exprs);
3032 sbitmap_vector_zero (ae_in, n_blocks);
3034 ae_out = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_exprs);
3035 sbitmap_vector_zero (ae_out, n_blocks);
3038 static void
3039 free_avail_expr_mem ()
3041 sbitmap_vector_free (ae_kill);
3042 sbitmap_vector_free (ae_gen);
3043 sbitmap_vector_free (ae_in);
3044 sbitmap_vector_free (ae_out);
3047 /* Compute the set of available expressions generated in each basic block. */
3049 static void
3050 compute_ae_gen (expr_hash_table)
3051 struct hash_table *expr_hash_table;
3053 unsigned int i;
3054 struct expr *expr;
3055 struct occr *occr;
3057 /* For each recorded occurrence of each expression, set ae_gen[bb][expr].
3058 This is all we have to do because an expression is not recorded if it
3059 is not available, and the only expressions we want to work with are the
3060 ones that are recorded. */
3061 for (i = 0; i < expr_hash_table->size; i++)
3062 for (expr = expr_hash_table->table[i]; expr != 0; expr = expr->next_same_hash)
3063 for (occr = expr->avail_occr; occr != 0; occr = occr->next)
3064 SET_BIT (ae_gen[BLOCK_NUM (occr->insn)], expr->bitmap_index);
3067 /* Return nonzero if expression X is killed in BB. */
3069 static int
3070 expr_killed_p (x, bb)
3071 rtx x;
3072 basic_block bb;
3074 int i, j;
3075 enum rtx_code code;
3076 const char *fmt;
3078 if (x == 0)
3079 return 1;
3081 code = GET_CODE (x);
3082 switch (code)
3084 case REG:
3085 return TEST_BIT (reg_set_in_block[bb->index], REGNO (x));
3087 case MEM:
3088 if (load_killed_in_block_p (bb, get_max_uid () + 1, x, 0))
3089 return 1;
3090 else
3091 return expr_killed_p (XEXP (x, 0), bb);
3093 case PC:
3094 case CC0: /*FIXME*/
3095 case CONST:
3096 case CONST_INT:
3097 case CONST_DOUBLE:
3098 case CONST_VECTOR:
3099 case SYMBOL_REF:
3100 case LABEL_REF:
3101 case ADDR_VEC:
3102 case ADDR_DIFF_VEC:
3103 return 0;
3105 default:
3106 break;
3109 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
3111 if (fmt[i] == 'e')
3113 /* If we are about to do the last recursive call
3114 needed at this level, change it into iteration.
3115 This function is called enough to be worth it. */
3116 if (i == 0)
3117 return expr_killed_p (XEXP (x, i), bb);
3118 else if (expr_killed_p (XEXP (x, i), bb))
3119 return 1;
3121 else if (fmt[i] == 'E')
3122 for (j = 0; j < XVECLEN (x, i); j++)
3123 if (expr_killed_p (XVECEXP (x, i, j), bb))
3124 return 1;
3127 return 0;
3130 /* Compute the set of available expressions killed in each basic block. */
3132 static void
3133 compute_ae_kill (ae_gen, ae_kill, expr_hash_table)
3134 sbitmap *ae_gen, *ae_kill;
3135 struct hash_table *expr_hash_table;
3137 basic_block bb;
3138 unsigned int i;
3139 struct expr *expr;
3141 FOR_EACH_BB (bb)
3142 for (i = 0; i < expr_hash_table->size; i++)
3143 for (expr = expr_hash_table->table[i]; expr; expr = expr->next_same_hash)
3145 /* Skip EXPR if generated in this block. */
3146 if (TEST_BIT (ae_gen[bb->index], expr->bitmap_index))
3147 continue;
3149 if (expr_killed_p (expr->expr, bb))
3150 SET_BIT (ae_kill[bb->index], expr->bitmap_index);
3154 /* Actually perform the Classic GCSE optimizations. */
3156 /* Return nonzero if occurrence OCCR of expression EXPR reaches block BB.
3158 CHECK_SELF_LOOP is nonzero if we should consider a block reaching itself
3159 as a positive reach. We want to do this when there are two computations
3160 of the expression in the block.
3162 VISITED is a pointer to a working buffer for tracking which BB's have
3163 been visited. It is NULL for the top-level call.
3165 We treat reaching expressions that go through blocks containing the same
3166 reaching expression as "not reaching". E.g. if EXPR is generated in blocks
3167 2 and 3, INSN is in block 4, and 2->3->4, we treat the expression in block
3168 2 as not reaching. The intent is to improve the probability of finding
3169 only one reaching expression and to reduce register lifetimes by picking
3170 the closest such expression. */
3172 static int
3173 expr_reaches_here_p_work (occr, expr, bb, check_self_loop, visited)
3174 struct occr *occr;
3175 struct expr *expr;
3176 basic_block bb;
3177 int check_self_loop;
3178 char *visited;
3180 edge pred;
3182 for (pred = bb->pred; pred != NULL; pred = pred->pred_next)
3184 basic_block pred_bb = pred->src;
3186 if (visited[pred_bb->index])
3187 /* This predecessor has already been visited. Nothing to do. */
3189 else if (pred_bb == bb)
3191 /* BB loops on itself. */
3192 if (check_self_loop
3193 && TEST_BIT (ae_gen[pred_bb->index], expr->bitmap_index)
3194 && BLOCK_NUM (occr->insn) == pred_bb->index)
3195 return 1;
3197 visited[pred_bb->index] = 1;
3200 /* Ignore this predecessor if it kills the expression. */
3201 else if (TEST_BIT (ae_kill[pred_bb->index], expr->bitmap_index))
3202 visited[pred_bb->index] = 1;
3204 /* Does this predecessor generate this expression? */
3205 else if (TEST_BIT (ae_gen[pred_bb->index], expr->bitmap_index))
3207 /* Is this the occurrence we're looking for?
3208 Note that there's only one generating occurrence per block
3209 so we just need to check the block number. */
3210 if (BLOCK_NUM (occr->insn) == pred_bb->index)
3211 return 1;
3213 visited[pred_bb->index] = 1;
3216 /* Neither gen nor kill. */
3217 else
3219 visited[pred_bb->index] = 1;
3220 if (expr_reaches_here_p_work (occr, expr, pred_bb, check_self_loop,
3221 visited))
3223 return 1;
3227 /* All paths have been checked. */
3228 return 0;
3231 /* This wrapper for expr_reaches_here_p_work() is to ensure that any
3232 memory allocated for that function is returned. */
3234 static int
3235 expr_reaches_here_p (occr, expr, bb, check_self_loop)
3236 struct occr *occr;
3237 struct expr *expr;
3238 basic_block bb;
3239 int check_self_loop;
3241 int rval;
3242 char *visited = (char *) xcalloc (last_basic_block, 1);
3244 rval = expr_reaches_here_p_work (occr, expr, bb, check_self_loop, visited);
3246 free (visited);
3247 return rval;
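/* A standalone sketch (not GCC code) of the predecessor walk pattern used
   by expr_reaches_here_p_work above: a recursive search over predecessor
   edges that marks each block visited so no block is examined twice, with a
   thin wrapper that owns the scratch `visited' array.  The toy CFG arrays
   are illustrative only.  */
#if 0
#include <stdlib.h>

static int
reaches_work (int target, int bb, int n_preds[], int preds[][8],
              char *visited)
{
  int p;

  for (p = 0; p < n_preds[bb]; p++)
    {
      int pred = preds[bb][p];

      if (visited[pred])
        continue;
      visited[pred] = 1;

      if (pred == target
          || reaches_work (target, pred, n_preds, preds, visited))
        return 1;
    }

  return 0;
}

static int
reaches_p (int target, int bb, int n_blocks, int n_preds[], int preds[][8])
{
  char *visited = (char *) calloc (n_blocks, 1);
  int rval = reaches_work (target, bb, n_preds, preds, visited);

  free (visited);
  return rval;
}
#endif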
3250 /* Return the instruction that computes EXPR that reaches INSN's basic block.
3251 If there is more than one such instruction, return NULL.
3253 Called only by handle_avail_expr. */
3255 static rtx
3256 computing_insn (expr, insn)
3257 struct expr *expr;
3258 rtx insn;
3260 basic_block bb = BLOCK_FOR_INSN (insn);
3262 if (expr->avail_occr->next == NULL)
3264 if (BLOCK_FOR_INSN (expr->avail_occr->insn) == bb)
3265 /* The available expression is actually itself
3266 (i.e. a loop in the flow graph) so do nothing. */
3267 return NULL;
3269 /* (FIXME) Case where we found a pattern that was created by
3270 a substitution that took place. */
3271 return expr->avail_occr->insn;
3273 else
3275 /* Pattern is computed more than once.
3276 Search backwards from this insn to see how many of these
3277 computations actually reach this insn. */
3278 struct occr *occr;
3279 rtx insn_computes_expr = NULL;
3280 int can_reach = 0;
3282 for (occr = expr->avail_occr; occr != NULL; occr = occr->next)
3284 if (BLOCK_FOR_INSN (occr->insn) == bb)
3286 /* The expression is generated in this block.
3287 The only time we care about this is when the expression
3288 is generated later in the block [and thus there's a loop].
3289 We let the normal cse pass handle the other cases. */
3290 if (INSN_CUID (insn) < INSN_CUID (occr->insn)
3291 && expr_reaches_here_p (occr, expr, bb, 1))
3293 can_reach++;
3294 if (can_reach > 1)
3295 return NULL;
3297 insn_computes_expr = occr->insn;
3300 else if (expr_reaches_here_p (occr, expr, bb, 0))
3302 can_reach++;
3303 if (can_reach > 1)
3304 return NULL;
3306 insn_computes_expr = occr->insn;
3310 if (insn_computes_expr == NULL)
3311 abort ();
3313 return insn_computes_expr;
3317 /* Return nonzero if the definition in DEF_INSN can reach INSN.
3318 Only called by can_disregard_other_sets. */
3320 static int
3321 def_reaches_here_p (insn, def_insn)
3322 rtx insn, def_insn;
3324 rtx reg;
3326 if (TEST_BIT (reaching_defs[BLOCK_NUM (insn)], INSN_CUID (def_insn)))
3327 return 1;
3329 if (BLOCK_NUM (insn) == BLOCK_NUM (def_insn))
3331 if (INSN_CUID (def_insn) < INSN_CUID (insn))
3333 if (GET_CODE (PATTERN (def_insn)) == PARALLEL)
3334 return 1;
3335 else if (GET_CODE (PATTERN (def_insn)) == CLOBBER)
3336 reg = XEXP (PATTERN (def_insn), 0);
3337 else if (GET_CODE (PATTERN (def_insn)) == SET)
3338 reg = SET_DEST (PATTERN (def_insn));
3339 else
3340 abort ();
3342 return ! reg_set_between_p (reg, NEXT_INSN (def_insn), insn);
3344 else
3345 return 0;
3348 return 0;
3351 /* Return nonzero if *ADDR_THIS_REG can only have one value at INSN. The
3352 value returned is the number of definitions that reach INSN. Returning a
3353 value of zero means that [maybe] more than one definition reaches INSN and
3354 the caller can't perform whatever optimization it is trying; i.e. it is
3355 always safe to return zero. */
3357 static int
3358 can_disregard_other_sets (addr_this_reg, insn, for_combine)
3359 struct reg_set **addr_this_reg;
3360 rtx insn;
3361 int for_combine;
3363 int number_of_reaching_defs = 0;
3364 struct reg_set *this_reg;
3366 for (this_reg = *addr_this_reg; this_reg != 0; this_reg = this_reg->next)
3367 if (def_reaches_here_p (insn, this_reg->insn))
3369 number_of_reaching_defs++;
3370 /* Ignore parallels for now. */
3371 if (GET_CODE (PATTERN (this_reg->insn)) == PARALLEL)
3372 return 0;
3374 if (!for_combine
3375 && (GET_CODE (PATTERN (this_reg->insn)) == CLOBBER
3376 || ! rtx_equal_p (SET_SRC (PATTERN (this_reg->insn)),
3377 SET_SRC (PATTERN (insn)))))
3378 /* A setting of the reg to a different value reaches INSN. */
3379 return 0;
3381 if (number_of_reaching_defs > 1)
3383 /* If, in this setting, the value the register is being set to is
3384 equal to the previous value the register was set to, and this
3385 setting reaches the insn we are trying to do the substitution
3386 on, then we are ok. */
3387 if (GET_CODE (PATTERN (this_reg->insn)) == CLOBBER)
3388 return 0;
3389 else if (! rtx_equal_p (SET_SRC (PATTERN (this_reg->insn)),
3390 SET_SRC (PATTERN (insn))))
3391 return 0;
3394 *addr_this_reg = this_reg;
3397 return number_of_reaching_defs;
3400 /* Expression computed by insn is available and the substitution is legal,
3401 so try to perform the substitution.
3403 The result is nonzero if any changes were made. */
3405 static int
3406 handle_avail_expr (insn, expr)
3407 rtx insn;
3408 struct expr *expr;
3410 rtx pat, insn_computes_expr, expr_set;
3411 rtx to;
3412 struct reg_set *this_reg;
3413 int found_setting, use_src;
3414 int changed = 0;
3416 /* We only handle the case where one computation of the expression
3417 reaches this instruction. */
3418 insn_computes_expr = computing_insn (expr, insn);
3419 if (insn_computes_expr == NULL)
3420 return 0;
3421 expr_set = single_set (insn_computes_expr);
3422 if (!expr_set)
3423 abort ();
3425 found_setting = 0;
3426 use_src = 0;
3428 /* At this point we know only one computation of EXPR outside of this
3429 block reaches this insn. Now try to find a register that the
3430 expression is computed into. */
3431 if (GET_CODE (SET_SRC (expr_set)) == REG)
3433 /* This is the case when the available expression that reaches
3434 here has already been handled as an available expression. */
3435 unsigned int regnum_for_replacing
3436 = REGNO (SET_SRC (expr_set));
3438 /* If the register was created by GCSE we can't use `reg_set_table';
3439 however, we know it's set only once. */
3440 if (regnum_for_replacing >= max_gcse_regno
3441 /* If the register the expression is computed into is set only once,
3442 or only one set reaches this insn, we can use it. */
3443 || (((this_reg = reg_set_table[regnum_for_replacing]),
3444 this_reg->next == NULL)
3445 || can_disregard_other_sets (&this_reg, insn, 0)))
3447 use_src = 1;
3448 found_setting = 1;
3452 if (!found_setting)
3454 unsigned int regnum_for_replacing
3455 = REGNO (SET_DEST (expr_set));
3457 /* This shouldn't happen. */
3458 if (regnum_for_replacing >= max_gcse_regno)
3459 abort ();
3461 this_reg = reg_set_table[regnum_for_replacing];
3463 /* If the register the expression is computed into is set only once,
3464 or only one set reaches this insn, use it. */
3465 if (this_reg->next == NULL
3466 || can_disregard_other_sets (&this_reg, insn, 0))
3467 found_setting = 1;
3470 if (found_setting)
3472 pat = PATTERN (insn);
3473 if (use_src)
3474 to = SET_SRC (expr_set);
3475 else
3476 to = SET_DEST (expr_set);
3477 changed = validate_change (insn, &SET_SRC (pat), to, 0);
3479 /* We should be able to ignore the return code from validate_change but
3480 to play it safe we check. */
3481 if (changed)
3483 gcse_subst_count++;
3484 if (gcse_file != NULL)
3486 fprintf (gcse_file, "GCSE: Replacing the source in insn %d with",
3487 INSN_UID (insn));
3488 fprintf (gcse_file, " reg %d %s insn %d\n",
3489 REGNO (to), use_src ? "from" : "set in",
3490 INSN_UID (insn_computes_expr));
3495 /* The register that the expr is computed into is set more than once. */
3496 else if (1 /*expensive_op(this_pattrn->op) && do_expensive_gcse)*/)
3498 /* Insert an insn after INSNX that copies the reg set in INSNX
3499 into a new pseudo register; call this new register REGN.
3500 From INSNB until the end of the basic block, or until REGB is set,
3501 replace all uses of REGB with REGN. */
3502 rtx new_insn;
3504 to = gen_reg_rtx (GET_MODE (SET_DEST (expr_set)));
3506 /* Generate the new insn. */
3507 /* ??? If the change fails, we return 0, even though we created
3508 an insn. I think this is ok. */
3509 new_insn
3510 = emit_insn_after (gen_rtx_SET (VOIDmode, to,
3511 SET_DEST (expr_set)),
3512 insn_computes_expr);
3514 /* Keep register set table up to date. */
3515 record_one_set (REGNO (to), new_insn);
3517 gcse_create_count++;
3518 if (gcse_file != NULL)
3520 fprintf (gcse_file, "GCSE: Creating insn %d to copy value of reg %d",
3521 INSN_UID (NEXT_INSN (insn_computes_expr)),
3522 REGNO (SET_SRC (PATTERN (NEXT_INSN (insn_computes_expr)))));
3523 fprintf (gcse_file, ", computed in insn %d,\n",
3524 INSN_UID (insn_computes_expr));
3525 fprintf (gcse_file, " into newly allocated reg %d\n",
3526 REGNO (to));
3529 pat = PATTERN (insn);
3531 /* Do register replacement for INSN. */
3532 changed = validate_change (insn, &SET_SRC (pat),
3533 SET_DEST (PATTERN
3534 (NEXT_INSN (insn_computes_expr))),
3537 /* We should be able to ignore the return code from validate_change but
3538 to play it safe we check. */
3539 if (changed)
3541 gcse_subst_count++;
3542 if (gcse_file != NULL)
3544 fprintf (gcse_file,
3545 "GCSE: Replacing the source in insn %d with reg %d ",
3546 INSN_UID (insn),
3547 REGNO (SET_DEST (PATTERN (NEXT_INSN
3548 (insn_computes_expr)))));
3549 fprintf (gcse_file, "set in insn %d\n",
3550 INSN_UID (insn_computes_expr));
3555 return changed;
3558 /* Perform classic GCSE. This is called by one_classic_gcse_pass after all
3559 the dataflow analysis has been done.
3561 The result is nonzero if a change was made. */
3563 static int
3564 classic_gcse ()
3566 int changed;
3567 rtx insn;
3568 basic_block bb;
3570 /* Note we start at block 1. */
3572 if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
3573 return 0;
3575 changed = 0;
3576 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb->next_bb, EXIT_BLOCK_PTR, next_bb)
3578 /* Reset tables used to keep track of what's still valid [since the
3579 start of the block]. */
3580 reset_opr_set_tables ();
3582 for (insn = bb->head;
3583 insn != NULL && insn != NEXT_INSN (bb->end);
3584 insn = NEXT_INSN (insn))
3586 /* Is insn of form (set (pseudo-reg) ...)? */
3587 if (GET_CODE (insn) == INSN
3588 && GET_CODE (PATTERN (insn)) == SET
3589 && GET_CODE (SET_DEST (PATTERN (insn))) == REG
3590 && REGNO (SET_DEST (PATTERN (insn))) >= FIRST_PSEUDO_REGISTER)
3592 rtx pat = PATTERN (insn);
3593 rtx src = SET_SRC (pat);
3594 struct expr *expr;
3596 if (want_to_gcse_p (src)
3597 /* Is the expression recorded? */
3598 && ((expr = lookup_expr (src, &expr_hash_table)) != NULL)
3599 /* Is the expression available [at the start of the
3600 block]? */
3601 && TEST_BIT (ae_in[bb->index], expr->bitmap_index)
3602 /* Are the operands unchanged since the start of the
3603 block? */
3604 && oprs_not_set_p (src, insn))
3605 changed |= handle_avail_expr (insn, expr);
3608 /* Keep track of everything modified by this insn. */
3609 /* ??? Need to be careful w.r.t. mods done to INSN. */
3610 if (INSN_P (insn))
3611 mark_oprs_set (insn);
3615 return changed;
3618 /* Top level routine to perform one classic GCSE pass.
3620 Return nonzero if a change was made. */
3622 static int
3623 one_classic_gcse_pass (pass)
3624 int pass;
3626 int changed = 0;
3628 gcse_subst_count = 0;
3629 gcse_create_count = 0;
3631 alloc_hash_table (max_cuid, &expr_hash_table, 0);
3632 alloc_rd_mem (last_basic_block, max_cuid);
3633 compute_hash_table (&expr_hash_table);
3634 if (gcse_file)
3635 dump_hash_table (gcse_file, "Expression", &expr_hash_table);
3637 if (expr_hash_table.n_elems > 0)
3639 compute_kill_rd ();
3640 compute_rd ();
3641 alloc_avail_expr_mem (last_basic_block, expr_hash_table.n_elems);
3642 compute_ae_gen (&expr_hash_table);
3643 compute_ae_kill (ae_gen, ae_kill, &expr_hash_table);
3644 compute_available (ae_gen, ae_kill, ae_out, ae_in);
3645 changed = classic_gcse ();
3646 free_avail_expr_mem ();
3649 free_rd_mem ();
3650 free_hash_table (&expr_hash_table);
3652 if (gcse_file)
3654 fprintf (gcse_file, "\n");
3655 fprintf (gcse_file, "GCSE of %s, pass %d: %d bytes needed, %d substs,",
3656 current_function_name, pass, bytes_used, gcse_subst_count);
3657 fprintf (gcse_file, "%d insns created\n", gcse_create_count);
3660 return changed;
3663 /* Compute copy/constant propagation working variables. */
3665 /* Local properties of assignments. */
3666 static sbitmap *cprop_pavloc;
3667 static sbitmap *cprop_absaltered;
3669 /* Global properties of assignments (computed from the local properties). */
3670 static sbitmap *cprop_avin;
3671 static sbitmap *cprop_avout;
3673 /* Allocate vars used for copy/const propagation. N_BLOCKS is the number of
3674 basic blocks. N_SETS is the number of sets. */
3676 static void
3677 alloc_cprop_mem (n_blocks, n_sets)
3678 int n_blocks, n_sets;
3680 cprop_pavloc = sbitmap_vector_alloc (n_blocks, n_sets);
3681 cprop_absaltered = sbitmap_vector_alloc (n_blocks, n_sets);
3683 cprop_avin = sbitmap_vector_alloc (n_blocks, n_sets);
3684 cprop_avout = sbitmap_vector_alloc (n_blocks, n_sets);
3687 /* Free vars used by copy/const propagation. */
3689 static void
3690 free_cprop_mem ()
3692 sbitmap_vector_free (cprop_pavloc);
3693 sbitmap_vector_free (cprop_absaltered);
3694 sbitmap_vector_free (cprop_avin);
3695 sbitmap_vector_free (cprop_avout);
3698 /* For each block, compute whether X is transparent. X is either an
3699 expression or an assignment [though we don't care which, for this context
3700 an assignment is treated as an expression]. For each block where an
3701 element of X is modified, set (SET_P == 1) or reset (SET_P == 0) the INDX
3702 bit in BMAP. */
3704 static void
3705 compute_transp (x, indx, bmap, set_p)
3706 rtx x;
3707 int indx;
3708 sbitmap *bmap;
3709 int set_p;
3711 int i, j;
3712 basic_block bb;
3713 enum rtx_code code;
3714 reg_set *r;
3715 const char *fmt;
3717 /* repeat is used to turn tail-recursion into iteration since GCC
3718 can't do it when there's no return value. */
3719 repeat:
3721 if (x == 0)
3722 return;
3724 code = GET_CODE (x);
3725 switch (code)
3727 case REG:
3728 if (set_p)
3730 if (REGNO (x) < FIRST_PSEUDO_REGISTER)
3732 FOR_EACH_BB (bb)
3733 if (TEST_BIT (reg_set_in_block[bb->index], REGNO (x)))
3734 SET_BIT (bmap[bb->index], indx);
3736 else
3738 for (r = reg_set_table[REGNO (x)]; r != NULL; r = r->next)
3739 SET_BIT (bmap[BLOCK_NUM (r->insn)], indx);
3742 else
3744 if (REGNO (x) < FIRST_PSEUDO_REGISTER)
3746 FOR_EACH_BB (bb)
3747 if (TEST_BIT (reg_set_in_block[bb->index], REGNO (x)))
3748 RESET_BIT (bmap[bb->index], indx);
3750 else
3752 for (r = reg_set_table[REGNO (x)]; r != NULL; r = r->next)
3753 RESET_BIT (bmap[BLOCK_NUM (r->insn)], indx);
3757 return;
3759 case MEM:
3760 FOR_EACH_BB (bb)
3762 rtx list_entry = canon_modify_mem_list[bb->index];
3764 while (list_entry)
3766 rtx dest, dest_addr;
3768 if (GET_CODE (XEXP (list_entry, 0)) == CALL_INSN)
3770 if (set_p)
3771 SET_BIT (bmap[bb->index], indx);
3772 else
3773 RESET_BIT (bmap[bb->index], indx);
3774 break;
3776 /* LIST_ENTRY must be an INSN of some kind that sets memory.
3777 Examine each hunk of memory that is modified. */
3779 dest = XEXP (list_entry, 0);
3780 list_entry = XEXP (list_entry, 1);
3781 dest_addr = XEXP (list_entry, 0);
3783 if (canon_true_dependence (dest, GET_MODE (dest), dest_addr,
3784 x, rtx_addr_varies_p))
3786 if (set_p)
3787 SET_BIT (bmap[bb->index], indx);
3788 else
3789 RESET_BIT (bmap[bb->index], indx);
3790 break;
3792 list_entry = XEXP (list_entry, 1);
3796 x = XEXP (x, 0);
3797 goto repeat;
3799 case PC:
3800 case CC0: /*FIXME*/
3801 case CONST:
3802 case CONST_INT:
3803 case CONST_DOUBLE:
3804 case CONST_VECTOR:
3805 case SYMBOL_REF:
3806 case LABEL_REF:
3807 case ADDR_VEC:
3808 case ADDR_DIFF_VEC:
3809 return;
3811 default:
3812 break;
3815 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
3817 if (fmt[i] == 'e')
3819 /* If we are about to do the last recursive call
3820 needed at this level, change it into iteration.
3821 This function is called enough to be worth it. */
3822 if (i == 0)
3824 x = XEXP (x, i);
3825 goto repeat;
3828 compute_transp (XEXP (x, i), indx, bmap, set_p);
3830 else if (fmt[i] == 'E')
3831 for (j = 0; j < XVECLEN (x, i); j++)
3832 compute_transp (XVECEXP (x, i, j), indx, bmap, set_p);
3836 /* Top level routine to do the dataflow analysis needed by copy/const
3837 propagation. */
3839 static void
3840 compute_cprop_data ()
3842 compute_local_properties (cprop_absaltered, cprop_pavloc, NULL, &set_hash_table);
3843 compute_available (cprop_pavloc, cprop_absaltered,
3844 cprop_avout, cprop_avin);
3847 /* Copy/constant propagation. */
3849 /* Maximum number of register uses in an insn that we handle. */
3850 #define MAX_USES 8
3852 /* Table of uses found in an insn.
3853 Allocated statically to avoid alloc/free complexity and overhead. */
3854 static struct reg_use reg_use_table[MAX_USES];
3856 /* Index into `reg_use_table' while building it. */
3857 static int reg_use_count;
3859 /* Set up a list of register numbers used in INSN. The found uses are stored
3860 in `reg_use_table'. `reg_use_count' is initialized to zero before entry,
3861 and contains the number of uses in the table upon exit.
3863 ??? If a register appears multiple times we will record it multiple times.
3864 This doesn't hurt anything but it will slow things down. */
3866 static void
3867 find_used_regs (xptr, data)
3868 rtx *xptr;
3869 void *data ATTRIBUTE_UNUSED;
3871 int i, j;
3872 enum rtx_code code;
3873 const char *fmt;
3874 rtx x = *xptr;
3876 /* repeat is used to turn tail-recursion into iteration since GCC
3877 can't do it when there's no return value. */
3878 repeat:
3879 if (x == 0)
3880 return;
3882 code = GET_CODE (x);
3883 if (REG_P (x))
3885 if (reg_use_count == MAX_USES)
3886 return;
3888 reg_use_table[reg_use_count].reg_rtx = x;
3889 reg_use_count++;
3892 /* Recursively scan the operands of this expression. */
3894 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
3896 if (fmt[i] == 'e')
3898 /* If we are about to do the last recursive call
3899 needed at this level, change it into iteration.
3900 This function is called enough to be worth it. */
3901 if (i == 0)
3903 x = XEXP (x, 0);
3904 goto repeat;
3907 find_used_regs (&XEXP (x, i), data);
3909 else if (fmt[i] == 'E')
3910 for (j = 0; j < XVECLEN (x, i); j++)
3911 find_used_regs (&XVECEXP (x, i, j), data);
3915 /* Try to replace all non-SET_DEST occurrences of FROM in INSN with TO.
3916 Returns nonzero if successful. */
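/* For example (illustrative): with FROM = (reg 100) and TO = (const_int 4),
   (set (reg 101) (plus:SI (reg 100) (reg 102))) becomes
   (set (reg 101) (plus:SI (const_int 4) (reg 102))), provided the modified
   insn is still recognized; if not, the simplified source is recorded in a
   REG_EQUAL note so the information is not lost.  */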
3918 static int
3919 try_replace_reg (from, to, insn)
3920 rtx from, to, insn;
3922 rtx note = find_reg_equal_equiv_note (insn);
3923 rtx src = 0;
3924 int success = 0;
3925 rtx set = single_set (insn);
3927 validate_replace_src_group (from, to, insn);
3928 if (num_changes_pending () && apply_change_group ())
3929 success = 1;
3931 if (!success && set && reg_mentioned_p (from, SET_SRC (set)))
3933 /* If the above failed and this is a single set, try to simplify the source of
3934 the set given our substitution. We could perhaps try this for multiple
3935 SETs, but it probably won't buy us anything. */
3936 src = simplify_replace_rtx (SET_SRC (set), from, to);
3938 if (!rtx_equal_p (src, SET_SRC (set))
3939 && validate_change (insn, &SET_SRC (set), src, 0))
3940 success = 1;
3942 /* If we've failed to do replacement, have a single SET, and don't already
3943 have a note, add a REG_EQUAL note to not lose information. */
3944 if (!success && note == 0 && set != 0)
3945 note = set_unique_reg_note (insn, REG_EQUAL, copy_rtx (src));
3948 /* If there is already a NOTE, update the expression in it with our
3949 replacement. */
3950 else if (note != 0)
3951 XEXP (note, 0) = simplify_replace_rtx (XEXP (note, 0), from, to);
3953 /* REG_EQUAL may get simplified into a register.
3954 We don't allow that.  Remove that note.  This code ought
3955 not to happen, because the previous code ought to synthesize a
3956 reg-reg move, but be on the safe side. */
3957 if (note && REG_P (XEXP (note, 0)))
3958 remove_note (insn, note);
3960 return success;
3963 /* Find a set of register REGNO that is available on entry to INSN's block.
3964 Returns NULL if no such set is found. */
3966 static struct expr *
3967 find_avail_set (regno, insn)
3968 int regno;
3969 rtx insn;
3971 /* SET1 contains the last set found that can be returned to the caller for
3972 use in a substitution. */
3973 struct expr *set1 = 0;
3975 /* Loops are not possible here. To get a loop we would need two sets
3976 available at the start of the block containing INSN, i.e. we would
3977 need two sets like this available at the start of the block:
3979 (set (reg X) (reg Y))
3980 (set (reg Y) (reg X))
3982 This can not happen since the set of (reg Y) would have killed the
3983 set of (reg X) making it unavailable at the start of this block. */
3984 while (1)
3986 rtx src;
3987 struct expr *set = lookup_set (regno, NULL_RTX, &set_hash_table);
3989 /* Find a set that is available at the start of the block
3990 which contains INSN. */
3991 while (set)
3993 if (TEST_BIT (cprop_avin[BLOCK_NUM (insn)], set->bitmap_index))
3994 break;
3995 set = next_set (regno, set);
3998 /* If no available set was found we've reached the end of the
3999 (possibly empty) copy chain. */
4000 if (set == 0)
4001 break;
4003 if (GET_CODE (set->expr) != SET)
4004 abort ();
4006 src = SET_SRC (set->expr);
4008 /* We know the set is available.
4009 Now check that SRC is ANTLOC (i.e. none of the source operands
4010 have changed since the start of the block).
4012 If the source operand changed, we may still use it for the next
4013 iteration of this loop, but we may not use it for substitutions. */
4015 if (CONSTANT_P (src) || oprs_not_set_p (src, insn))
4016 set1 = set;
4018 /* If the source of the set is anything except a register, then
4019 we have reached the end of the copy chain. */
4020 if (GET_CODE (src) != REG)
4021 break;
4023 /* Follow the copy chain, i.e. start another iteration of the loop
4024 and see if we have an available copy into SRC. */
4025 regno = REGNO (src);
4028 /* SET1 holds the last set that was available and anticipatable at
4029 INSN. */
4030 return set1;
4033 /* Subroutine of cprop_insn that tries to propagate constants into
4034 JUMP_INSNs.  JUMP must be a conditional jump.  If SETCC is non-NULL
4035 it is the instruction that immediately precedes JUMP, and must be a
4036 single SET of a register. FROM is what we will try to replace,
4037 SRC is the constant we will try to substitute for it. Returns nonzero
4038 if a change was made. */
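/* For example (illustrative): if SETCC computes
   (set (reg 100) (eq:SI (reg 101) (const_int 0))), JUMP branches on
   (ne (reg 100) (const_int 0)), FROM is (reg 101) and SRC is (const_int 0),
   then the substituted condition folds to a constant: the jump either
   becomes unconditional (its source reduces to a LABEL_REF) or, when the
   source reduces to pc_rtx, is deleted as a no-op.  */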
4040 static int
4041 cprop_jump (bb, setcc, jump, from, src)
4042 basic_block bb;
4043 rtx setcc;
4044 rtx jump;
4045 rtx from;
4046 rtx src;
4048 rtx new, new_set;
4049 rtx set = pc_set (jump);
4051 /* First substitute the condition computed by SETCC into the SET_SRC of the
4052 JUMP, then substitute SRC for FROM in that expanded expression. */
4053 if (setcc != NULL
4054 && !modified_between_p (from, setcc, jump)
4055 && !modified_between_p (src, setcc, jump))
4057 rtx setcc_set = single_set (setcc);
4058 new_set = simplify_replace_rtx (SET_SRC (set),
4059 SET_DEST (setcc_set),
4060 SET_SRC (setcc_set));
4062 else
4063 new_set = set;
4065 new = simplify_replace_rtx (new_set, from, src);
4067 /* If no simplification can be made, then try the next
4068 register. */
4069 if (rtx_equal_p (new, new_set) || rtx_equal_p (new, SET_SRC (set)))
4070 return 0;
4072 /* If this is now a no-op delete it, otherwise this must be a valid insn. */
4073 if (new == pc_rtx)
4074 delete_insn (jump);
4075 else
4077 /* Ensure that the value computed inside the jump insn is equivalent
4078 to the one computed by SETCC. */
4079 if (setcc
4080 && modified_in_p (new, setcc))
4081 return 0;
4082 if (! validate_change (jump, &SET_SRC (set), new, 0))
4083 return 0;
4085 /* If this has turned into an unconditional jump,
4086 then put a barrier after it so that the unreachable
4087 code will be deleted. */
4088 if (GET_CODE (SET_SRC (set)) == LABEL_REF)
4089 emit_barrier_after (jump);
4092 #ifdef HAVE_cc0
4093 /* Delete the cc0 setter. */
4094 if (setcc != NULL && CC0_P (SET_DEST (single_set (setcc))))
4095 delete_insn (setcc);
4096 #endif
4098 run_jump_opt_after_gcse = 1;
4100 const_prop_count++;
4101 if (gcse_file != NULL)
4103 fprintf (gcse_file,
4104 "CONST-PROP: Replacing reg %d in jump_insn %d with constant ",
4105 REGNO (from), INSN_UID (jump));
4106 print_rtl (gcse_file, src);
4107 fprintf (gcse_file, "\n");
4109 purge_dead_edges (bb);
4111 return 1;
4114 static bool
4115 constprop_register (insn, from, to, alter_jumps)
4116 rtx insn;
4117 rtx from;
4118 rtx to;
4119 int alter_jumps;
4121 rtx sset;
4123 /* Check for reg or cc0 setting instructions followed by
4124 conditional branch instructions first. */
4125 if (alter_jumps
4126 && (sset = single_set (insn)) != NULL
4127 && any_condjump_p (NEXT_INSN (insn)) && onlyjump_p (NEXT_INSN (insn)))
4129 rtx dest = SET_DEST (sset);
4130 if ((REG_P (dest) || CC0_P (dest))
4131 && cprop_jump (BLOCK_FOR_INSN (insn), insn, NEXT_INSN (insn), from, to))
4132 return 1;
4135 /* Handle normal insns next. */
4136 if (GET_CODE (insn) == INSN
4137 && try_replace_reg (from, to, insn))
4138 return 1;
4140 /* Try to propagate a CONST_INT into a conditional jump.
4141 We're pretty specific about what we will handle in this
4142 code; we can extend it as necessary over time.
4144 Right now the insn in question must look like
4145 (set (pc) (if_then_else ...)) */
4146 else if (alter_jumps && any_condjump_p (insn) && onlyjump_p (insn))
4147 return cprop_jump (BLOCK_FOR_INSN (insn), NULL, insn, from, to);
4148 return 0;
4151 /* Perform constant and copy propagation on INSN.
4152 The result is nonzero if a change was made. */
4154 static int
4155 cprop_insn (insn, alter_jumps)
4156 rtx insn;
4157 int alter_jumps;
4159 struct reg_use *reg_used;
4160 int changed = 0;
4161 rtx note;
4163 if (!INSN_P (insn))
4164 return 0;
4166 reg_use_count = 0;
4167 note_uses (&PATTERN (insn), find_used_regs, NULL);
4169 note = find_reg_equal_equiv_note (insn);
4171 /* We may win even when propagating constants into notes. */
4172 if (note)
4173 find_used_regs (&XEXP (note, 0), NULL);
4175 for (reg_used = &reg_use_table[0]; reg_use_count > 0;
4176 reg_used++, reg_use_count--)
4178 unsigned int regno = REGNO (reg_used->reg_rtx);
4179 rtx pat, src;
4180 struct expr *set;
4182 /* Ignore registers created by GCSE.
4183 We do this because ... */
4184 if (regno >= max_gcse_regno)
4185 continue;
4187 /* If the register has already been set in this block, there's
4188 nothing we can do. */
4189 if (! oprs_not_set_p (reg_used->reg_rtx, insn))
4190 continue;
4192 /* Find an assignment that sets reg_used and is available
4193 at the start of the block. */
4194 set = find_avail_set (regno, insn);
4195 if (! set)
4196 continue;
4198 pat = set->expr;
4199 /* ??? We might be able to handle PARALLELs. Later. */
4200 if (GET_CODE (pat) != SET)
4201 abort ();
4203 src = SET_SRC (pat);
4205 /* Constant propagation. */
4206 if (CONSTANT_P (src))
4208 if (constprop_register (insn, reg_used->reg_rtx, src, alter_jumps))
4210 changed = 1;
4211 const_prop_count++;
4212 if (gcse_file != NULL)
4214 fprintf (gcse_file, "GLOBAL CONST-PROP: Replacing reg %d in ", regno);
4215 fprintf (gcse_file, "insn %d with constant ", INSN_UID (insn));
4216 print_rtl (gcse_file, src);
4217 fprintf (gcse_file, "\n");
4221 else if (GET_CODE (src) == REG
4222 && REGNO (src) >= FIRST_PSEUDO_REGISTER
4223 && REGNO (src) != regno)
4225 if (try_replace_reg (reg_used->reg_rtx, src, insn))
4227 changed = 1;
4228 copy_prop_count++;
4229 if (gcse_file != NULL)
4231 fprintf (gcse_file, "GLOBAL COPY-PROP: Replacing reg %d in insn %d",
4232 regno, INSN_UID (insn));
4233 fprintf (gcse_file, " with reg %d\n", REGNO (src));
4236 /* The original insn setting reg_used may or may not now be
4237 deletable. We leave the deletion to flow. */
4238 /* FIXME: If it turns out that the insn isn't deletable,
4239 then we may have unnecessarily extended register lifetimes
4240 and made things worse. */
4245 return changed;
4248 /* LIBCALL_SP is a zero-terminated array of insns at the end of a libcall;
4249 their REG_EQUAL notes need updating. */
4251 static bool
4252 do_local_cprop (x, insn, alter_jumps, libcall_sp)
4253 rtx x;
4254 rtx insn;
4255 int alter_jumps;
4256 rtx *libcall_sp;
4258 rtx newreg = NULL, newcnst = NULL;
4260 /* Rule out USE instructions and ASM statements as we don't want to
4261 change the hard registers mentioned. */
4262 if (GET_CODE (x) == REG
4263 && (REGNO (x) >= FIRST_PSEUDO_REGISTER
4264 || (GET_CODE (PATTERN (insn)) != USE
4265 && asm_noperands (PATTERN (insn)) < 0)))
4267 cselib_val *val = cselib_lookup (x, GET_MODE (x), 0);
4268 struct elt_loc_list *l;
4270 if (!val)
4271 return false;
4272 for (l = val->locs; l; l = l->next)
4274 rtx this_rtx = l->loc;
4275 rtx note;
4277 if (l->in_libcall)
4278 continue;
4280 if (CONSTANT_P (this_rtx))
4281 newcnst = this_rtx;
4282 if (REG_P (this_rtx) && REGNO (this_rtx) >= FIRST_PSEUDO_REGISTER
4283 /* Don't copy propagate if it has an attached REG_EQUIV note.
4284 At this point only function parameters should have
4285 REG_EQUIV notes, and if the argument slot is used somewhere
4286 explicitly, it means the address of the parameter has been taken,
4287 so we should not extend the lifetime of the pseudo. */
4288 && (!(note = find_reg_note (l->setting_insn, REG_EQUIV, NULL_RTX))
4289 || GET_CODE (XEXP (note, 0)) != MEM))
4290 newreg = this_rtx;
4292 if (newcnst && constprop_register (insn, x, newcnst, alter_jumps))
4294 /* If we find a case where we can't fix the retval REG_EQUAL notes to
4295 match the new register, we either have to abandon this replacement,
4296 fix delete_trivially_dead_insns to preserve the setting insn,
4297 or make it delete the REG_EQUAL note, and fix up all passes that
4298 require the REG_EQUAL note there. */
4299 if (!adjust_libcall_notes (x, newcnst, insn, libcall_sp))
4300 abort ();
4301 if (gcse_file != NULL)
4303 fprintf (gcse_file, "LOCAL CONST-PROP: Replacing reg %d in ",
4304 REGNO (x));
4305 fprintf (gcse_file, "insn %d with constant ",
4306 INSN_UID (insn));
4307 print_rtl (gcse_file, newcnst);
4308 fprintf (gcse_file, "\n");
4310 const_prop_count++;
4311 return true;
4313 else if (newreg && newreg != x && try_replace_reg (x, newreg, insn))
4315 adjust_libcall_notes (x, newreg, insn, libcall_sp);
4316 if (gcse_file != NULL)
4318 fprintf (gcse_file,
4319 "LOCAL COPY-PROP: Replacing reg %d in insn %d",
4320 REGNO (x), INSN_UID (insn));
4321 fprintf (gcse_file, " with reg %d\n", REGNO (newreg));
4323 copy_prop_count++;
4324 return true;
4327 return false;
4330 /* LIBCALL_SP is a zero-terminated array of insns at the end of a libcall;
4331 their REG_EQUAL notes need updating to reflect that OLDREG has been
4332 replaced with NEWVAL in INSN. Return true if all substitutions could
4333 be made. */
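/* For example (illustrative): if the insn ending a libcall carries
   REG_EQUAL (plus:SI (reg 100) (const_int 1)) and OLDREG = (reg 100) has
   just been replaced by NEWVAL = (const_int 4) in INSN, the note is
   rewritten to (plus:SI (const_int 4) (const_int 1)) so that it stays
   consistent with the replaced body of the libcall.  */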
4334 static bool
4335 adjust_libcall_notes (oldreg, newval, insn, libcall_sp)
4336 rtx oldreg, newval, insn, *libcall_sp;
4338 rtx end;
4340 while ((end = *libcall_sp++))
4342 rtx note = find_reg_equal_equiv_note (end);
4344 if (! note)
4345 continue;
4347 if (REG_P (newval))
4349 if (reg_set_between_p (newval, PREV_INSN (insn), end))
4353 note = find_reg_equal_equiv_note (end);
4354 if (! note)
4355 continue;
4356 if (reg_mentioned_p (newval, XEXP (note, 0)))
4357 return false;
4359 while ((end = *libcall_sp++));
4360 return true;
4363 XEXP (note, 0) = replace_rtx (XEXP (note, 0), oldreg, newval);
4364 insn = end;
4366 return true;
4369 #define MAX_NESTED_LIBCALLS 9
4371 static void
4372 local_cprop_pass (alter_jumps)
4373 int alter_jumps;
4375 rtx insn;
4376 struct reg_use *reg_used;
4377 rtx libcall_stack[MAX_NESTED_LIBCALLS + 1], *libcall_sp;
4379 cselib_init ();
4380 libcall_sp = &libcall_stack[MAX_NESTED_LIBCALLS];
4381 *libcall_sp = 0;
4382 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4384 if (INSN_P (insn))
4386 rtx note = find_reg_note (insn, REG_LIBCALL, NULL_RTX);
4388 if (note)
4390 if (libcall_sp == libcall_stack)
4391 abort ();
4392 *--libcall_sp = XEXP (note, 0);
4394 note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
4395 if (note)
4396 libcall_sp++;
4397 note = find_reg_equal_equiv_note (insn);
4400 reg_use_count = 0;
4401 note_uses (&PATTERN (insn), find_used_regs, NULL);
4402 if (note)
4403 find_used_regs (&XEXP (note, 0), NULL);
4405 for (reg_used = &reg_use_table[0]; reg_use_count > 0;
4406 reg_used++, reg_use_count--)
4407 if (do_local_cprop (reg_used->reg_rtx, insn, alter_jumps,
4408 libcall_sp))
4409 break;
4411 while (reg_use_count);
4413 cselib_process_insn (insn);
4415 cselib_finish ();
4418 /* Forward propagate copies. This includes copies and constants. Return
4419 nonzero if a change was made. */
4421 static int
4422 cprop (alter_jumps)
4423 int alter_jumps;
4425 int changed;
4426 basic_block bb;
4427 rtx insn;
4429 /* Note we start at block 1. */
4430 if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
4432 if (gcse_file != NULL)
4433 fprintf (gcse_file, "\n");
4434 return 0;
4437 changed = 0;
4438 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb->next_bb, EXIT_BLOCK_PTR, next_bb)
4440 /* Reset tables used to keep track of what's still valid [since the
4441 start of the block]. */
4442 reset_opr_set_tables ();
4444 for (insn = bb->head;
4445 insn != NULL && insn != NEXT_INSN (bb->end);
4446 insn = NEXT_INSN (insn))
4447 if (INSN_P (insn))
4449 changed |= cprop_insn (insn, alter_jumps);
4451 /* Keep track of everything modified by this insn. */
4452 /* ??? Need to be careful w.r.t. mods done to INSN. Don't
4453 call mark_oprs_set if we turned the insn into a NOTE. */
4454 if (GET_CODE (insn) != NOTE)
4455 mark_oprs_set (insn);
4459 if (gcse_file != NULL)
4460 fprintf (gcse_file, "\n");
4462 return changed;
4465 /* Perform one copy/constant propagation pass.
4466 PASS is the pass count.  If ALTER_JUMPS is nonzero, conditional jumps
4467 are also candidates for propagation and bypassing. */
4469 static int
4470 one_cprop_pass (pass, alter_jumps)
4471 int pass;
4472 int alter_jumps;
4474 int changed = 0;
4476 const_prop_count = 0;
4477 copy_prop_count = 0;
4479 local_cprop_pass (alter_jumps);
4481 alloc_hash_table (max_cuid, &set_hash_table, 1);
4482 compute_hash_table (&set_hash_table);
4483 if (gcse_file)
4484 dump_hash_table (gcse_file, "SET", &set_hash_table);
4485 if (set_hash_table.n_elems > 0)
4487 alloc_cprop_mem (last_basic_block, set_hash_table.n_elems);
4488 compute_cprop_data ();
4489 changed = cprop (alter_jumps);
4490 if (alter_jumps)
4491 changed |= bypass_conditional_jumps ();
4492 free_cprop_mem ();
4495 free_hash_table (&set_hash_table);
4497 if (gcse_file)
4499 fprintf (gcse_file, "CPROP of %s, pass %d: %d bytes needed, ",
4500 current_function_name, pass, bytes_used);
4501 fprintf (gcse_file, "%d const props, %d copy props\n\n",
4502 const_prop_count, copy_prop_count);
4505 return changed;
4508 /* Bypass conditional jumps. */
4510 /* Find a set of REGNO to a constant that is available at the end of basic
4511 block BB. Returns NULL if no such set is found. Based heavily upon
4512 find_avail_set. */
4514 static struct expr *
4515 find_bypass_set (regno, bb)
4516 int regno;
4517 int bb;
4519 struct expr *result = 0;
4521 for (;;)
4523 rtx src;
4524 struct expr *set = lookup_set (regno, NULL_RTX, &set_hash_table);
4526 while (set)
4528 if (TEST_BIT (cprop_avout[bb], set->bitmap_index))
4529 break;
4530 set = next_set (regno, set);
4533 if (set == 0)
4534 break;
4536 if (GET_CODE (set->expr) != SET)
4537 abort ();
4539 src = SET_SRC (set->expr);
4540 if (CONSTANT_P (src))
4541 result = set;
4543 if (GET_CODE (src) != REG)
4544 break;
4546 regno = REGNO (src);
4548 return result;
4552 /* Subroutine of bypass_conditional_jumps that attempts to bypass the given
4553 basic block BB which has more than one predecessor. If not NULL, SETCC
4554 is the first instruction of BB, which is immediately followed by JUMP_INSN
4555 JUMP. Otherwise, SETCC is NULL, and JUMP is the first insn of BB.
4556 Returns nonzero if a change was made. */
4558 static int
4559 bypass_block (bb, setcc, jump)
4560 basic_block bb;
4561 rtx setcc, jump;
4563 rtx insn, note;
4564 edge e, enext;
4565 int i, change;
4567 insn = (setcc != NULL) ? setcc : jump;
4569 /* Determine set of register uses in INSN. */
4570 reg_use_count = 0;
4571 note_uses (&PATTERN (insn), find_used_regs, NULL);
4572 note = find_reg_equal_equiv_note (insn);
4573 if (note)
4574 find_used_regs (&XEXP (note, 0), NULL);
4576 change = 0;
4577 for (e = bb->pred; e; e = enext)
4579 enext = e->pred_next;
4580 for (i = 0; i < reg_use_count; i++)
4582 struct reg_use *reg_used = &reg_use_table[i];
4583 unsigned int regno = REGNO (reg_used->reg_rtx);
4584 basic_block dest, old_dest;
4585 struct expr *set;
4586 rtx src, new;
4588 if (regno >= max_gcse_regno)
4589 continue;
4591 set = find_bypass_set (regno, e->src->index);
4593 if (! set)
4594 continue;
4596 src = SET_SRC (pc_set (jump));
4598 if (setcc != NULL)
4599 src = simplify_replace_rtx (src,
4600 SET_DEST (PATTERN (setcc)),
4601 SET_SRC (PATTERN (setcc)));
4603 new = simplify_replace_rtx (src, reg_used->reg_rtx,
4604 SET_SRC (set->expr));
4606 if (new == pc_rtx)
4607 dest = FALLTHRU_EDGE (bb)->dest;
4608 else if (GET_CODE (new) == LABEL_REF)
4609 dest = BRANCH_EDGE (bb)->dest;
4610 else
4611 dest = NULL;
4613 /* Once basic block indices are stable, we should be able
4614 to use redirect_edge_and_branch_force instead. */
4615 old_dest = e->dest;
4616 if (dest != NULL && dest != old_dest
4617 && redirect_edge_and_branch (e, dest))
4619 /* Copy the register setter to the redirected edge.
4620 Don't copy CC0 setters, as CC0 is dead after jump. */
4621 if (setcc)
4623 rtx pat = PATTERN (setcc);
4624 if (!CC0_P (SET_DEST (pat)))
4625 insert_insn_on_edge (copy_insn (pat), e);
4628 if (gcse_file != NULL)
4630 fprintf (gcse_file, "JUMP-BYPASS: Proved reg %d in jump_insn %d equals constant ",
4631 regno, INSN_UID (jump));
4632 print_rtl (gcse_file, SET_SRC (set->expr));
4633 fprintf (gcse_file, "\nBypass edge from %d->%d to %d\n",
4634 e->src->index, old_dest->index, dest->index);
4636 change = 1;
4637 break;
4641 return change;
4644 /* Find basic blocks with more than one predecessor that only contain a
4645 single conditional jump. If the result of the comparison is known at
4646 compile-time from any incoming edge, redirect that edge to the
4647 appropriate target. Returns nonzero if a change was made. */
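/* For example (illustrative): suppose block B contains only
     (set (pc) (if_then_else (eq (reg 100) (const_int 0))
                             (label_ref L) (pc)))
   and one predecessor P has (set (reg 100) (const_int 0)) available at its
   end.  The condition is then known true along P->B, so that edge can be
   redirected straight to L's block.  */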
4649 static int
4650 bypass_conditional_jumps ()
4652 basic_block bb;
4653 int changed;
4654 rtx setcc;
4655 rtx insn;
4656 rtx dest;
4658 /* Note we start at block 1. */
4659 if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
4660 return 0;
4662 changed = 0;
4663 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb->next_bb,
4664 EXIT_BLOCK_PTR, next_bb)
4666 /* Check for more than one predecessor. */
4667 if (bb->pred && bb->pred->pred_next)
4669 setcc = NULL_RTX;
4670 for (insn = bb->head;
4671 insn != NULL && insn != NEXT_INSN (bb->end);
4672 insn = NEXT_INSN (insn))
4673 if (GET_CODE (insn) == INSN)
4675 if (setcc)
4676 break;
4677 if (GET_CODE (PATTERN (insn)) != SET)
4678 break;
4680 dest = SET_DEST (PATTERN (insn));
4681 if (REG_P (dest) || CC0_P (dest))
4682 setcc = insn;
4683 else
4684 break;
4686 else if (GET_CODE (insn) == JUMP_INSN)
4688 if (any_condjump_p (insn) && onlyjump_p (insn))
4689 changed |= bypass_block (bb, setcc, insn);
4690 break;
4692 else if (INSN_P (insn))
4693 break;
4697 /* If we bypassed any register setting insns, we inserted a
4698 copy on the redirected edge.  These need to be committed. */
4699 if (changed)
4700 commit_edge_insertions ();
4702 return changed;
4705 /* Compute PRE+LCM working variables. */
4707 /* Local properties of expressions. */
4708 /* Nonzero for expressions that are transparent in the block. */
4709 static sbitmap *transp;
4711 /* Nonzero for expressions that are transparent at the end of the block.
4712 This is only zero for expressions killed by an abnormal critical edge
4713 created by a call. */
4714 static sbitmap *transpout;
4716 /* Nonzero for expressions that are computed (available) in the block. */
4717 static sbitmap *comp;
4719 /* Nonzero for expressions that are locally anticipatable in the block. */
4720 static sbitmap *antloc;
4722 /* Nonzero for expressions where this block is an optimal computation
4723 point. */
4724 static sbitmap *pre_optimal;
4726 /* Nonzero for expressions which are redundant in a particular block. */
4727 static sbitmap *pre_redundant;
4729 /* Nonzero for expressions which should be inserted on a specific edge. */
4730 static sbitmap *pre_insert_map;
4732 /* Nonzero for expressions which should be deleted in a specific block. */
4733 static sbitmap *pre_delete_map;
4735 /* Contains the edge_list returned by pre_edge_lcm. */
4736 static struct edge_list *edge_list;
4738 /* Redundant insns. */
4739 static sbitmap pre_redundant_insns;
4741 /* Allocate vars used for PRE analysis. */
4743 static void
4744 alloc_pre_mem (n_blocks, n_exprs)
4745 int n_blocks, n_exprs;
4747 transp = sbitmap_vector_alloc (n_blocks, n_exprs);
4748 comp = sbitmap_vector_alloc (n_blocks, n_exprs);
4749 antloc = sbitmap_vector_alloc (n_blocks, n_exprs);
4751 pre_optimal = NULL;
4752 pre_redundant = NULL;
4753 pre_insert_map = NULL;
4754 pre_delete_map = NULL;
4755 ae_in = NULL;
4756 ae_out = NULL;
4757 ae_kill = sbitmap_vector_alloc (n_blocks, n_exprs);
4759 /* pre_insert and pre_delete are allocated later. */
4762 /* Free vars used for PRE analysis. */
4764 static void
4765 free_pre_mem ()
4767 sbitmap_vector_free (transp);
4768 sbitmap_vector_free (comp);
4770 /* ANTLOC and AE_KILL are freed just after pre_lcm finishes. */
4772 if (pre_optimal)
4773 sbitmap_vector_free (pre_optimal);
4774 if (pre_redundant)
4775 sbitmap_vector_free (pre_redundant);
4776 if (pre_insert_map)
4777 sbitmap_vector_free (pre_insert_map);
4778 if (pre_delete_map)
4779 sbitmap_vector_free (pre_delete_map);
4780 if (ae_in)
4781 sbitmap_vector_free (ae_in);
4782 if (ae_out)
4783 sbitmap_vector_free (ae_out);
4785 transp = comp = NULL;
4786 pre_optimal = pre_redundant = pre_insert_map = pre_delete_map = NULL;
4787 ae_in = ae_out = NULL;
4790 /* Top level routine to do the dataflow analysis needed by PRE. */
4792 static void
4793 compute_pre_data ()
4795 sbitmap trapping_expr;
4796 basic_block bb;
4797 unsigned int ui;
4799 compute_local_properties (transp, comp, antloc, &expr_hash_table);
4800 sbitmap_vector_zero (ae_kill, last_basic_block);
4802 /* Collect expressions which might trap. */
4803 trapping_expr = sbitmap_alloc (expr_hash_table.n_elems);
4804 sbitmap_zero (trapping_expr);
4805 for (ui = 0; ui < expr_hash_table.size; ui++)
4807 struct expr *e;
4808 for (e = expr_hash_table.table[ui]; e != NULL; e = e->next_same_hash)
4809 if (may_trap_p (e->expr))
4810 SET_BIT (trapping_expr, e->bitmap_index);
4813 /* Compute ae_kill for each basic block using:
4815 ~(TRANSP | COMP)
4817 This is significantly faster than compute_ae_kill. */
4819 FOR_EACH_BB (bb)
4821 edge e;
4823 /* If the current block is the destination of an abnormal edge, we
4824 kill all trapping expressions because we won't be able to properly
4825 place the instruction on the edge. So make them neither
4826 anticipatable nor transparent. This is fairly conservative. */
4827 for (e = bb->pred; e ; e = e->pred_next)
4828 if (e->flags & EDGE_ABNORMAL)
4830 sbitmap_difference (antloc[bb->index], antloc[bb->index], trapping_expr);
4831 sbitmap_difference (transp[bb->index], transp[bb->index], trapping_expr);
4832 break;
4835 sbitmap_a_or_b (ae_kill[bb->index], transp[bb->index], comp[bb->index]);
4836 sbitmap_not (ae_kill[bb->index], ae_kill[bb->index]);
4839 edge_list = pre_edge_lcm (gcse_file, expr_hash_table.n_elems, transp, comp, antloc,
4840 ae_kill, &pre_insert_map, &pre_delete_map);
4841 sbitmap_vector_free (antloc);
4842 antloc = NULL;
4843 sbitmap_vector_free (ae_kill);
4844 ae_kill = NULL;
4845 sbitmap_free (trapping_expr);
4848 /* PRE utilities */
4850 /* Return nonzero if an occurrence of expression EXPR in OCCR_BB would reach
4851 block BB.
4853 VISITED is a pointer to a working buffer for tracking which BB's have
4854 been visited. It is NULL for the top-level call.
4856 We treat reaching expressions that go through blocks containing the same
4857 reaching expression as "not reaching". E.g. if EXPR is generated in blocks
4858 2 and 3, INSN is in block 4, and 2->3->4, we treat the expression in block
4859 2 as not reaching. The intent is to improve the probability of finding
4860 only one reaching expression and to reduce register lifetimes by picking
4861 the closest such expression. */
4863 static int
4864 pre_expr_reaches_here_p_work (occr_bb, expr, bb, visited)
4865 basic_block occr_bb;
4866 struct expr *expr;
4867 basic_block bb;
4868 char *visited;
4870 edge pred;
4872 for (pred = bb->pred; pred != NULL; pred = pred->pred_next)
4874 basic_block pred_bb = pred->src;
4876 if (pred->src == ENTRY_BLOCK_PTR
4877 /* Has this predecessor already been visited? */
4878 || visited[pred_bb->index])
4879 ;/* Nothing to do. */
4881 /* Does this predecessor generate this expression? */
4882 else if (TEST_BIT (comp[pred_bb->index], expr->bitmap_index))
4884 /* Is this the occurrence we're looking for?
4885 Note that there's only one generating occurrence per block
4886 so we just need to check the block number. */
4887 if (occr_bb == pred_bb)
4888 return 1;
4890 visited[pred_bb->index] = 1;
4892 /* Ignore this predecessor if it kills the expression. */
4893 else if (! TEST_BIT (transp[pred_bb->index], expr->bitmap_index))
4894 visited[pred_bb->index] = 1;
4896 /* Neither gen nor kill. */
4897 else
4899 visited[pred_bb->index] = 1;
4900 if (pre_expr_reaches_here_p_work (occr_bb, expr, pred_bb, visited))
4901 return 1;
4905 /* All paths have been checked. */
4906 return 0;
4909 /* The wrapper for pre_expr_reaches_here_p_work that ensures that any
4910 memory allocated for that function is returned. */
4912 static int
4913 pre_expr_reaches_here_p (occr_bb, expr, bb)
4914 basic_block occr_bb;
4915 struct expr *expr;
4916 basic_block bb;
4918 int rval;
4919 char *visited = (char *) xcalloc (last_basic_block, 1);
4921 rval = pre_expr_reaches_here_p_work (occr_bb, expr, bb, visited);
4923 free (visited);
4924 return rval;
4928 /* Given an expr, generate RTL which we can insert at the end of a BB,
4929 or on an edge. Set the block number of any insns generated to
4930 the value of BB. */
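/* For example (illustrative): if EXPR is (plus:SI (reg 100) (const_int 4))
   and its reaching register is (reg 130), the generated sequence is
   (set (reg 130) (plus:SI (reg 100) (const_int 4))), with any CLOBBERs
   needed for recognition added automatically; an EXPR that is already a
   valid operand (e.g. a constant) simply becomes a move into (reg 130).  */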
4932 static rtx
4933 process_insert_insn (expr)
4934 struct expr *expr;
4936 rtx reg = expr->reaching_reg;
4937 rtx exp = copy_rtx (expr->expr);
4938 rtx pat;
4940 start_sequence ();
4942 /* If the expression is something that's an operand, like a constant,
4943 just copy it to a register. */
4944 if (general_operand (exp, GET_MODE (reg)))
4945 emit_move_insn (reg, exp);
4947 /* Otherwise, make a new insn to compute this expression and make sure the
4948 insn will be recognized (this also adds any needed CLOBBERs). Copy the
4949 expression to make sure we don't have any sharing issues. */
4950 else if (insn_invalid_p (emit_insn (gen_rtx_SET (VOIDmode, reg, exp))))
4951 abort ();
4953 pat = get_insns ();
4954 end_sequence ();
4956 return pat;
4959 /* Add EXPR to the end of basic block BB.
4961 This is used by both PRE and code hoisting.
4963 For PRE, we want to verify that the expr is either transparent
4964 or locally anticipatable in the target block. This check makes
4965 no sense for code hoisting. */
4967 static void
4968 insert_insn_end_bb (expr, bb, pre)
4969 struct expr *expr;
4970 basic_block bb;
4971 int pre;
4973 rtx insn = bb->end;
4974 rtx new_insn;
4975 rtx reg = expr->reaching_reg;
4976 int regno = REGNO (reg);
4977 rtx pat, pat_end;
4979 pat = process_insert_insn (expr);
4980 if (pat == NULL_RTX || ! INSN_P (pat))
4981 abort ();
4983 pat_end = pat;
4984 while (NEXT_INSN (pat_end) != NULL_RTX)
4985 pat_end = NEXT_INSN (pat_end);
4987 /* If the last insn is a jump, insert EXPR in front [taking care to
4988 handle cc0, etc. properly].  Similarly we need to take care of trapping
4989 instructions in the presence of non-call exceptions. */
4991 if (GET_CODE (insn) == JUMP_INSN
4992 || (GET_CODE (insn) == INSN
4993 && (bb->succ->succ_next || (bb->succ->flags & EDGE_ABNORMAL))))
4995 #ifdef HAVE_cc0
4996 rtx note;
4997 #endif
4998 /* It should always be the case that we can put these instructions
4999 anywhere in the basic block when performing PRE optimizations.
5000 Check this. */
5001 if (GET_CODE (insn) == INSN && pre
5002 && !TEST_BIT (antloc[bb->index], expr->bitmap_index)
5003 && !TEST_BIT (transp[bb->index], expr->bitmap_index))
5004 abort ();
5006 /* If this is a jump table, then we can't insert stuff here. Since
5007 we know the previous real insn must be the tablejump, we insert
5008 the new instruction just before the tablejump. */
5009 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
5010 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
5011 insn = prev_real_insn (insn);
5013 #ifdef HAVE_cc0
5014 /* FIXME: 'twould be nice to call prev_cc0_setter here but it aborts
5015 if cc0 isn't set. */
5016 note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
5017 if (note)
5018 insn = XEXP (note, 0);
5019 else
5021 rtx maybe_cc0_setter = prev_nonnote_insn (insn);
5022 if (maybe_cc0_setter
5023 && INSN_P (maybe_cc0_setter)
5024 && sets_cc0_p (PATTERN (maybe_cc0_setter)))
5025 insn = maybe_cc0_setter;
5027 #endif
5028 /* FIXME: What if something in cc0/jump uses value set in new insn? */
5029 new_insn = emit_insn_before (pat, insn);
5032 /* Likewise if the last insn is a call, as will happen in the presence
5033 of exception handling. */
5034 else if (GET_CODE (insn) == CALL_INSN
5035 && (bb->succ->succ_next || (bb->succ->flags & EDGE_ABNORMAL)))
5037 /* Keeping in mind SMALL_REGISTER_CLASSES and parameters in registers,
5038 we search backward and place the instructions before the first
5039 parameter is loaded. Do this for everyone for consistency and a
5040 presumption that we'll get better code elsewhere as well.
5042 It should always be the case that we can put these instructions
5043 anywhere in the basic block when performing PRE optimizations.
5044 Check this. */
5046 if (pre
5047 && !TEST_BIT (antloc[bb->index], expr->bitmap_index)
5048 && !TEST_BIT (transp[bb->index], expr->bitmap_index))
5049 abort ();
5051 /* Since different machines initialize their parameter registers
5052 in different orders, assume nothing. Collect the set of all
5053 parameter registers. */
5054 insn = find_first_parameter_load (insn, bb->head);
5056 /* If we found all the parameter loads, then we want to insert
5057 before the first parameter load.
5059 If we did not find all the parameter loads, then we might have
5060 stopped on the head of the block, which could be a CODE_LABEL.
5061 If we inserted before the CODE_LABEL, then we would be putting
5062 the insn in the wrong basic block. In that case, put the insn
5063 after the CODE_LABEL. Also, respect NOTE_INSN_BASIC_BLOCK. */
5064 while (GET_CODE (insn) == CODE_LABEL
5065 || NOTE_INSN_BASIC_BLOCK_P (insn))
5066 insn = NEXT_INSN (insn);
5068 new_insn = emit_insn_before (pat, insn);
5070 else
5071 new_insn = emit_insn_after (pat, insn);
5073 while (1)
5075 if (INSN_P (pat))
5077 add_label_notes (PATTERN (pat), new_insn);
5078 note_stores (PATTERN (pat), record_set_info, pat);
5080 if (pat == pat_end)
5081 break;
5082 pat = NEXT_INSN (pat);
5085 gcse_create_count++;
5087 if (gcse_file)
5089 fprintf (gcse_file, "PRE/HOIST: end of bb %d, insn %d, ",
5090 bb->index, INSN_UID (new_insn));
5091 fprintf (gcse_file, "copying expression %d to reg %d\n",
5092 expr->bitmap_index, regno);
5096 /* Insert partially redundant expressions on edges in the CFG to make
5097 the expressions fully redundant. */
5099 static int
5100 pre_edge_insert (edge_list, index_map)
5101 struct edge_list *edge_list;
5102 struct expr **index_map;
5104 int e, i, j, num_edges, set_size, did_insert = 0;
5105 sbitmap *inserted;
5107 /* Where PRE_INSERT_MAP is nonzero, we add the expression on that edge
5108 if it reaches any of the deleted expressions. */
5110 set_size = pre_insert_map[0]->size;
5111 num_edges = NUM_EDGES (edge_list);
5112 inserted = sbitmap_vector_alloc (num_edges, expr_hash_table.n_elems);
5113 sbitmap_vector_zero (inserted, num_edges);
5115 for (e = 0; e < num_edges; e++)
5117 int indx;
5118 basic_block bb = INDEX_EDGE_PRED_BB (edge_list, e);
5120 for (i = indx = 0; i < set_size; i++, indx += SBITMAP_ELT_BITS)
5122 SBITMAP_ELT_TYPE insert = pre_insert_map[e]->elms[i];
5124 for (j = indx; insert && j < (int) expr_hash_table.n_elems; j++, insert >>= 1)
5125 if ((insert & 1) != 0 && index_map[j]->reaching_reg != NULL_RTX)
5127 struct expr *expr = index_map[j];
5128 struct occr *occr;
5130 /* Now look at each deleted occurrence of this expression. */
5131 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
5133 if (! occr->deleted_p)
5134 continue;
5136 /* Insert this expression on this edge if it would
5137 reach the deleted occurrence in BB. */
5138 if (!TEST_BIT (inserted[e], j))
5140 rtx insn;
5141 edge eg = INDEX_EDGE (edge_list, e);
5143 /* We can't insert anything on an abnormal and
5144 critical edge, so we insert the insn at the end of
5145 the previous block. There are several alternatives
5146 detailed in Morgan's book, P277 (sec 10.5), for
5147 handling this situation. This one is easiest for
5148 now. */
5150 if ((eg->flags & EDGE_ABNORMAL) == EDGE_ABNORMAL)
5151 insert_insn_end_bb (index_map[j], bb, 0);
5152 else
5154 insn = process_insert_insn (index_map[j]);
5155 insert_insn_on_edge (insn, eg);
5158 if (gcse_file)
5160 fprintf (gcse_file, "PRE/HOIST: edge (%d,%d), ",
5161 bb->index,
5162 INDEX_EDGE_SUCC_BB (edge_list, e)->index);
5163 fprintf (gcse_file, "copy expression %d\n",
5164 expr->bitmap_index);
5167 update_ld_motion_stores (expr);
5168 SET_BIT (inserted[e], j);
5169 did_insert = 1;
5170 gcse_create_count++;
5177 sbitmap_vector_free (inserted);
5178 return did_insert;
5181 /* Copy the result of INSN to REG. INDX is the expression number. */
5183 static void
5184 pre_insert_copy_insn (expr, insn)
5185 struct expr *expr;
5186 rtx insn;
5188 rtx reg = expr->reaching_reg;
5189 int regno = REGNO (reg);
5190 int indx = expr->bitmap_index;
5191 rtx set = single_set (insn);
5192 rtx new_insn;
5194 if (!set)
5195 abort ();
5197 new_insn = emit_insn_after (gen_move_insn (reg, SET_DEST (set)), insn);
5199 /* Keep register set table up to date. */
5200 record_one_set (regno, new_insn);
5202 gcse_create_count++;
5204 if (gcse_file)
5205 fprintf (gcse_file,
5206 "PRE: bb %d, insn %d, copy expression %d in insn %d to reg %d\n",
5207 BLOCK_NUM (insn), INSN_UID (new_insn), indx,
5208 INSN_UID (insn), regno);
5209 update_ld_motion_stores (expr);
5212 /* Copy available expressions that reach the redundant expression
5213 to `reaching_reg'. */
5215 static void
5216 pre_insert_copies ()
5218 unsigned int i;
5219 struct expr *expr;
5220 struct occr *occr;
5221 struct occr *avail;
5223 /* For each available expression in the table, copy the result to
5224 `reaching_reg' if the expression reaches a deleted one.
5226 ??? The current algorithm is rather brute force.
5227 Need to do some profiling. */
5229 for (i = 0; i < expr_hash_table.size; i++)
5230 for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash)
5232 /* If the basic block isn't reachable, PPOUT will be TRUE. However,
5233 we don't want to insert a copy here because the expression may not
5234 really be redundant. So only insert an insn if the expression was
5235 deleted. This test also avoids further processing if the
5236 expression wasn't deleted anywhere. */
5237 if (expr->reaching_reg == NULL)
5238 continue;
5240 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
5242 if (! occr->deleted_p)
5243 continue;
5245 for (avail = expr->avail_occr; avail != NULL; avail = avail->next)
5247 rtx insn = avail->insn;
5249 /* No need to handle this one if handled already. */
5250 if (avail->copied_p)
5251 continue;
5253 /* Don't handle this one if it's a redundant one. */
5254 if (TEST_BIT (pre_redundant_insns, INSN_CUID (insn)))
5255 continue;
5257 /* Or if the expression doesn't reach the deleted one. */
5258 if (! pre_expr_reaches_here_p (BLOCK_FOR_INSN (avail->insn),
5259 expr,
5260 BLOCK_FOR_INSN (occr->insn)))
5261 continue;
5263 /* Copy the result of avail to reaching_reg. */
5264 pre_insert_copy_insn (expr, insn);
5265 avail->copied_p = 1;
5271 /* Emit move from SRC to DEST noting the equivalence with expression computed
5272 in INSN. */
5273 static rtx
5274 gcse_emit_move_after (src, dest, insn)
5275 rtx src, dest, insn;
5277 rtx new;
5278 rtx set = single_set (insn), set2;
5279 rtx note;
5280 rtx eqv;
5282 /* This should never fail since we're creating a reg->reg copy
5283 we've verified to be valid. */
5285 new = emit_insn_after (gen_move_insn (dest, src), insn);
5287 /* Note the equivalence for local CSE pass. */
5288 set2 = single_set (new);
5289 if (!set2 || !rtx_equal_p (SET_DEST (set2), dest))
5290 return new;
5291 if ((note = find_reg_equal_equiv_note (insn)))
5292 eqv = XEXP (note, 0);
5293 else
5294 eqv = SET_SRC (set);
5296 set_unique_reg_note (new, REG_EQUAL, copy_insn_1 (eqv));
5298 return new;
5301 /* Delete redundant computations.
5302 Deletion is done by changing the insn to copy the `reaching_reg' of
5303 the expression into the result of the SET. It is left to later passes
5304 (cprop, cse2, flow, combine, regmove) to propagate the copy or eliminate it.
5306 Returns nonzero if a change is made. */
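/* For example (illustrative): a redundant occurrence
     (set (reg 65) (plus:SI (reg 100) (reg 101)))
   is deleted and replaced by the copy (set (reg 65) (reg 130)), where
   (reg 130) is the `reaching_reg' pseudo that every remaining or newly
   inserted computation of the expression stores its result into.  */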
5308 static int
5309 pre_delete ()
5311 unsigned int i;
5312 int changed;
5313 struct expr *expr;
5314 struct occr *occr;
5316 changed = 0;
5317 for (i = 0; i < expr_hash_table.size; i++)
5318 for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash)
5320 int indx = expr->bitmap_index;
5322 /* We only need to search antic_occr since we require
5323 ANTLOC != 0. */
5325 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
5327 rtx insn = occr->insn;
5328 rtx set;
5329 basic_block bb = BLOCK_FOR_INSN (insn);
5331 if (TEST_BIT (pre_delete_map[bb->index], indx))
5333 set = single_set (insn);
5334 if (! set)
5335 abort ();
5337 /* Create a pseudo-reg to store the result of reaching
5338 expressions into. Get the mode for the new pseudo from
5339 the mode of the original destination pseudo. */
5340 if (expr->reaching_reg == NULL)
5341 expr->reaching_reg
5342 = gen_reg_rtx (GET_MODE (SET_DEST (set)));
5344 gcse_emit_move_after (expr->reaching_reg, SET_DEST (set), insn);
5345 delete_insn (insn);
5346 occr->deleted_p = 1;
5347 SET_BIT (pre_redundant_insns, INSN_CUID (insn));
5348 changed = 1;
5349 gcse_subst_count++;
5351 if (gcse_file)
5353 fprintf (gcse_file,
5354 "PRE: redundant insn %d (expression %d) in ",
5355 INSN_UID (insn), indx);
5356 fprintf (gcse_file, "bb %d, reaching reg is %d\n",
5357 bb->index, REGNO (expr->reaching_reg));
5363 return changed;
5366 /* Perform GCSE optimizations using PRE.
5367 This is called by one_pre_gcse_pass after all the dataflow analysis
5368 has been done.
5370 This is based on the original Morel-Renvoise paper, Fred Chow's thesis, and
5371 lazy code motion from Knoop, Ruthing and Steffen as described in Advanced
5372 Compiler Design and Implementation.
5374 ??? A new pseudo reg is created to hold the reaching expression. The nice
5375 thing about the classical approach is that it would try to use an existing
5376 reg. If the register can't be adequately optimized [i.e. we introduce
5377 reload problems], one could add a pass here to propagate the new register
5378 through the block.
5380 ??? We don't handle single sets in PARALLELs because we're [currently] not
5381 able to copy the rest of the parallel when we insert copies to create full
5382 redundancies from partial redundancies. However, there's no reason why we
5383 can't handle PARALLELs in the cases where there are no partial
5384 redundancies. */
5386 static int
5387 pre_gcse ()
5389 unsigned int i;
5390 int did_insert, changed;
5391 struct expr **index_map;
5392 struct expr *expr;
5394 /* Compute a mapping from expression number (`bitmap_index') to
5395 hash table entry. */
5397 index_map = (struct expr **) xcalloc (expr_hash_table.n_elems, sizeof (struct expr *));
5398 for (i = 0; i < expr_hash_table.size; i++)
5399 for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash)
5400 index_map[expr->bitmap_index] = expr;
5402 /* Reset bitmap used to track which insns are redundant. */
5403 pre_redundant_insns = sbitmap_alloc (max_cuid);
5404 sbitmap_zero (pre_redundant_insns);
5406 /* Delete the redundant insns first so that
5407 - we know what register to use for the new insns and for the other
5408 ones with reaching expressions
5409 - we know which insns are redundant when we go to create copies */
5411 changed = pre_delete ();
5413 did_insert = pre_edge_insert (edge_list, index_map);
5415 /* In other places with reaching expressions, copy the expression to the
5416 specially allocated pseudo-reg that reaches the redundant expr. */
5417 pre_insert_copies ();
5418 if (did_insert)
5420 commit_edge_insertions ();
5421 changed = 1;
5424 free (index_map);
5425 sbitmap_free (pre_redundant_insns);
5426 return changed;
5429 /* Top level routine to perform one PRE GCSE pass.
5431 Return nonzero if a change was made. */
5433 static int
5434 one_pre_gcse_pass (pass)
5435 int pass;
5437 int changed = 0;
5439 gcse_subst_count = 0;
5440 gcse_create_count = 0;
5442 alloc_hash_table (max_cuid, &expr_hash_table, 0);
5443 add_noreturn_fake_exit_edges ();
5444 if (flag_gcse_lm)
5445 compute_ld_motion_mems ();
5447 compute_hash_table (&expr_hash_table);
5448 trim_ld_motion_mems ();
5449 if (gcse_file)
5450 dump_hash_table (gcse_file, "Expression", &expr_hash_table);
5452 if (expr_hash_table.n_elems > 0)
5454 alloc_pre_mem (last_basic_block, expr_hash_table.n_elems);
5455 compute_pre_data ();
5456 changed |= pre_gcse ();
5457 free_edge_list (edge_list);
5458 free_pre_mem ();
5461 free_ldst_mems ();
5462 remove_fake_edges ();
5463 free_hash_table (&expr_hash_table);
5465 if (gcse_file)
5467 fprintf (gcse_file, "\nPRE GCSE of %s, pass %d: %d bytes needed, ",
5468 current_function_name, pass, bytes_used);
5469 fprintf (gcse_file, "%d substs, %d insns created\n",
5470 gcse_subst_count, gcse_create_count);
5473 return changed;
5476 /* If X contains any LABEL_REF's, add REG_LABEL notes for them to INSN.
5477 If notes are added to an insn which references a CODE_LABEL, the
5478 LABEL_NUSES count is incremented. We have to add REG_LABEL notes,
5479 because the following loop optimization pass requires them. */
5481 /* ??? This is very similar to the loop.c add_label_notes function. We
5482 could probably share code here. */
5484 /* ??? If there was a jump optimization pass after gcse and before loop,
5485 then we would not need to do this here, because jump would add the
5486 necessary REG_LABEL notes. */
5488 static void
5489 add_label_notes (x, insn)
5490 rtx x;
5491 rtx insn;
5493 enum rtx_code code = GET_CODE (x);
5494 int i, j;
5495 const char *fmt;
5497 if (code == LABEL_REF && !LABEL_REF_NONLOCAL_P (x))
5499 /* This code used to ignore labels that referred to dispatch tables to
5500 avoid flow generating (slightly) worse code.
5502 We no longer ignore such label references (see LABEL_REF handling in
5503 mark_jump_label for additional information). */
5505 REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_LABEL, XEXP (x, 0),
5506 REG_NOTES (insn));
5507 if (LABEL_P (XEXP (x, 0)))
5508 LABEL_NUSES (XEXP (x, 0))++;
5509 return;
5512 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
5514 if (fmt[i] == 'e')
5515 add_label_notes (XEXP (x, i), insn);
5516 else if (fmt[i] == 'E')
5517 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
5518 add_label_notes (XVECEXP (x, i, j), insn);
5522 /* Compute transparent outgoing information for each block.
5524 An expression is transparent to an edge unless it is killed by
5525 the edge itself. This can only happen with abnormal control flow,
5526 when the edge is traversed through a call. This happens with
5527 non-local labels and exceptions.
5529 This would not be necessary if we split the edge. While this is
5530 normally impossible for abnormal critical edges, with some effort
5531 it should be possible with exception handling, since we still have
5532 control over which handler should be invoked. But due to increased
5533 EH table sizes, this may not be worthwhile. */
5535 static void
5536 compute_transpout ()
5538 basic_block bb;
5539 unsigned int i;
5540 struct expr *expr;
5542 sbitmap_vector_ones (transpout, last_basic_block);
5544 FOR_EACH_BB (bb)
5546 /* Note that flow inserted a nop at the end of basic blocks that
5547 end in call instructions for reasons other than abnormal
5548 control flow. */
5549 if (GET_CODE (bb->end) != CALL_INSN)
5550 continue;
5552 for (i = 0; i < expr_hash_table.size; i++)
5553 for (expr = expr_hash_table.table[i]; expr ; expr = expr->next_same_hash)
5554 if (GET_CODE (expr->expr) == MEM)
5556 if (GET_CODE (XEXP (expr->expr, 0)) == SYMBOL_REF
5557 && CONSTANT_POOL_ADDRESS_P (XEXP (expr->expr, 0)))
5558 continue;
5560 /* ??? Optimally, we would use interprocedural alias
5561 analysis to determine if this mem is actually killed
5562 by this call. */
5563 RESET_BIT (transpout[bb->index], expr->bitmap_index);
5568 /* Removal of useless null pointer checks */
5570 /* Called via note_stores. X is set by SETTER. If X is a register we must
5571 invalidate nonnull_local and set nonnull_killed. DATA is really a
5572 `null_pointer_info *'.
5574 We ignore hard registers. */
5576 static void
5577 invalidate_nonnull_info (x, setter, data)
5578 rtx x;
5579 rtx setter ATTRIBUTE_UNUSED;
5580 void *data;
5582 unsigned int regno;
5583 struct null_pointer_info *npi = (struct null_pointer_info *) data;
5585 while (GET_CODE (x) == SUBREG)
5586 x = SUBREG_REG (x);
5588 /* Ignore anything that is not a register or is a hard register. */
5589 if (GET_CODE (x) != REG
5590 || REGNO (x) < npi->min_reg
5591 || REGNO (x) >= npi->max_reg)
5592 return;
5594 regno = REGNO (x) - npi->min_reg;
5596 RESET_BIT (npi->nonnull_local[npi->current_block->index], regno);
5597 SET_BIT (npi->nonnull_killed[npi->current_block->index], regno);
5600 /* Do null-pointer check elimination for the registers indicated in
5601 NPI. NONNULL_AVIN and NONNULL_AVOUT are pre-allocated sbitmaps;
5602 they are not our responsibility to free. */
5604 static int
5605 delete_null_pointer_checks_1 (block_reg, nonnull_avin,
5606 nonnull_avout, npi)
5607 unsigned int *block_reg;
5608 sbitmap *nonnull_avin;
5609 sbitmap *nonnull_avout;
5610 struct null_pointer_info *npi;
5612 basic_block bb, current_block;
5613 sbitmap *nonnull_local = npi->nonnull_local;
5614 sbitmap *nonnull_killed = npi->nonnull_killed;
5615 int something_changed = 0;
5617 /* Compute local properties, nonnull and killed. A register will have
5618 the nonnull property if at the end of the current block its value is
5619 known to be nonnull. The killed property indicates that somewhere in
5620 the block any information we had about the register is killed.
5622 Note that a register can have both properties in a single block. That
5623 indicates that it's killed, then later in the block a new value is
5624 computed. */
5625 sbitmap_vector_zero (nonnull_local, last_basic_block);
5626 sbitmap_vector_zero (nonnull_killed, last_basic_block);
5628 FOR_EACH_BB (current_block)
5630 rtx insn, stop_insn;
5632 /* Set the current block for invalidate_nonnull_info. */
5633 npi->current_block = current_block;
5635 /* Scan each insn in the basic block looking for memory references and
5636 register sets. */
5637 stop_insn = NEXT_INSN (current_block->end);
5638 for (insn = current_block->head;
5639 insn != stop_insn;
5640 insn = NEXT_INSN (insn))
5642 rtx set;
5643 rtx reg;
5645 /* Ignore anything that is not a normal insn. */
5646 if (! INSN_P (insn))
5647 continue;
5649 /* Basically ignore anything that is not a simple SET. We do have
5650 to make sure to invalidate nonnull_local and set nonnull_killed
5651 for such insns though. */
5652 set = single_set (insn);
5653 if (!set)
5655 note_stores (PATTERN (insn), invalidate_nonnull_info, npi);
5656 continue;
5659 /* See if we've got a usable memory load. We handle it first
5660 in case it uses its address register as a dest (which kills
5661 the nonnull property). */
5662 if (GET_CODE (SET_SRC (set)) == MEM
5663 && GET_CODE ((reg = XEXP (SET_SRC (set), 0))) == REG
5664 && REGNO (reg) >= npi->min_reg
5665 && REGNO (reg) < npi->max_reg)
5666 SET_BIT (nonnull_local[current_block->index],
5667 REGNO (reg) - npi->min_reg);
5669 /* Now invalidate stuff clobbered by this insn. */
5670 note_stores (PATTERN (insn), invalidate_nonnull_info, npi);
5672 /* And handle stores; we do these last since any sets in INSN cannot
5673 kill the nonnull property if it is derived from a MEM
5674 appearing in a SET_DEST. */
5675 if (GET_CODE (SET_DEST (set)) == MEM
5676 && GET_CODE ((reg = XEXP (SET_DEST (set), 0))) == REG
5677 && REGNO (reg) >= npi->min_reg
5678 && REGNO (reg) < npi->max_reg)
5679 SET_BIT (nonnull_local[current_block->index],
5680 REGNO (reg) - npi->min_reg);
5684 /* Now compute global properties based on the local properties. This
5685 is a classic global availability algorithm.
5686 compute_available (nonnull_local, nonnull_killed,
5687 nonnull_avout, nonnull_avin);
5689 /* Now look at each bb and see if it ends with a compare of a value
5690 against zero. */
5691 FOR_EACH_BB (bb)
5693 rtx last_insn = bb->end;
5694 rtx condition, earliest;
5695 int compare_and_branch;
5697 /* Since MIN_REG is always at least FIRST_PSEUDO_REGISTER, and
5698 since BLOCK_REG[BB] is zero if this block did not end with a
5699 comparison against zero, this condition works. */
5700 if (block_reg[bb->index] < npi->min_reg
5701 || block_reg[bb->index] >= npi->max_reg)
5702 continue;
5704 /* LAST_INSN is a conditional jump. Get its condition. */
5705 condition = get_condition (last_insn, &earliest);
5707 /* If we can't determine the condition then skip. */
5708 if (! condition)
5709 continue;
5711 /* Is the register known to have a nonzero value? */
5712 if (!TEST_BIT (nonnull_avout[bb->index], block_reg[bb->index] - npi->min_reg))
5713 continue;
5715 /* Try to compute whether the compare/branch at the end of the block is one or
5716 two instructions. */
5717 if (earliest == last_insn)
5718 compare_and_branch = 1;
5719 else if (earliest == prev_nonnote_insn (last_insn))
5720 compare_and_branch = 2;
5721 else
5722 continue;
5724 /* We know the register in this comparison is nonnull at exit from
5725 this block. We can optimize this comparison. */
5726 if (GET_CODE (condition) == NE)
5728 rtx new_jump;
5730 new_jump = emit_jump_insn_after (gen_jump (JUMP_LABEL (last_insn)),
5731 last_insn);
5732 JUMP_LABEL (new_jump) = JUMP_LABEL (last_insn);
5733 LABEL_NUSES (JUMP_LABEL (new_jump))++;
5734 emit_barrier_after (new_jump);
5737 something_changed = 1;
5738 delete_insn (last_insn);
5739 if (compare_and_branch == 2)
5740 delete_insn (earliest);
5741 purge_dead_edges (bb);
5743 /* Don't check this block again. (Note that BLOCK_END is
5744 invalid here; we deleted the last instruction in the
5745 block.) */
5746 block_reg[bb->index] = 0;
5749 return something_changed;
5752 /* Find EQ/NE comparisons against zero which can be (indirectly) evaluated
5753 at compile time.
5755 This is conceptually similar to global constant/copy propagation and
5756 classic global CSE (it even uses the same dataflow equations as cprop).
5758 If a register is used as a memory address with the form (mem (reg)), then we
5759 know that REG cannot be zero at that point in the program.  Any instruction
5760 which sets REG "kills" this property.
5762 So, if every path leading to a conditional branch has an available memory
5763 reference of that form, then we know the register cannot have the value
5764 zero at the conditional branch.
5766 So we merely need to compute the local properties and propagate that data
5767 around the cfg, then optimize where possible.
5769 We run this pass two times. Once before CSE, then again after CSE. This
5770 has proven to be the most profitable approach. It is rare for new
5771 optimization opportunities of this nature to appear after the first CSE
5772 pass.
5774 This could probably be integrated with global cprop with a little work. */
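/* As a rough, hedged illustration (the function and variable names below
   are invented for this sketch, not taken from the GCC sources), the pass
   is aimed at user code such as:

       int f (int *p)
       {
         int x = *p;        .. the (mem (reg P)) makes P provably nonnull
         if (p == 0)        .. EQ/NE comparison of P against zero
           return -1;
         return x;
       }

   Every path to the comparison contains the dereference, so the test can
   be resolved at compile time and the dead branch removed.  */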
5777 delete_null_pointer_checks (f)
5778 rtx f ATTRIBUTE_UNUSED;
5780 sbitmap *nonnull_avin, *nonnull_avout;
5781 unsigned int *block_reg;
5782 basic_block bb;
5783 int reg;
5784 int regs_per_pass;
5785 int max_reg;
5786 struct null_pointer_info npi;
5787 int something_changed = 0;
5789 /* If we have only a single block, then there's nothing to do. */
5790 if (n_basic_blocks <= 1)
5791 return 0;
5793 /* Trying to perform global optimizations on flow graphs which have
5794 a high connectivity will take a long time and is unlikely to be
5795 particularly useful.
5797 In normal circumstances a cfg should have about twice as many edges
5798 as blocks. But we do not want to punish small functions which have
5799 a couple of switch statements. So we require a relatively large number
5800 of basic blocks and the ratio of edges to blocks to be high. */
5801 if (n_basic_blocks > 1000 && n_edges / n_basic_blocks >= 20)
5802 return 0;
5804 /* We need four bitmaps, each with a bit for each register in each
5805 basic block. */
5806 max_reg = max_reg_num ();
5807 regs_per_pass = get_bitmap_width (4, last_basic_block, max_reg);
5809 /* Allocate bitmaps to hold local and global properties. */
5810 npi.nonnull_local = sbitmap_vector_alloc (last_basic_block, regs_per_pass);
5811 npi.nonnull_killed = sbitmap_vector_alloc (last_basic_block, regs_per_pass);
5812 nonnull_avin = sbitmap_vector_alloc (last_basic_block, regs_per_pass);
5813 nonnull_avout = sbitmap_vector_alloc (last_basic_block, regs_per_pass);
5815 /* Go through the basic blocks, seeing whether or not each block
5816 ends with a conditional branch whose condition is a comparison
5817 against zero. Record the register compared in BLOCK_REG. */
5818 block_reg = (unsigned int *) xcalloc (last_basic_block, sizeof (int));
5819 FOR_EACH_BB (bb)
5821 rtx last_insn = bb->end;
5822 rtx condition, earliest, reg;
5824 /* We only want conditional branches. */
5825 if (GET_CODE (last_insn) != JUMP_INSN
5826 || !any_condjump_p (last_insn)
5827 || !onlyjump_p (last_insn))
5828 continue;
5830 /* LAST_INSN is a conditional jump. Get its condition. */
5831 condition = get_condition (last_insn, &earliest);
5833 /* If we were unable to get the condition, or it is not an equality
5834 comparison against zero then there's nothing we can do. */
5835 if (!condition
5836 || (GET_CODE (condition) != NE && GET_CODE (condition) != EQ)
5837 || GET_CODE (XEXP (condition, 1)) != CONST_INT
5838 || (XEXP (condition, 1)
5839 != CONST0_RTX (GET_MODE (XEXP (condition, 0)))))
5840 continue;
5842 /* We must be checking a register against zero. */
5843 reg = XEXP (condition, 0);
5844 if (GET_CODE (reg) != REG)
5845 continue;
5847 block_reg[bb->index] = REGNO (reg);
5850 /* Go through the algorithm for each block of registers. */
5851 for (reg = FIRST_PSEUDO_REGISTER; reg < max_reg; reg += regs_per_pass)
5853 npi.min_reg = reg;
5854 npi.max_reg = MIN (reg + regs_per_pass, max_reg);
5855 something_changed |= delete_null_pointer_checks_1 (block_reg,
5856 nonnull_avin,
5857 nonnull_avout,
5858 &npi);
5861 /* Free the table of registers compared at the end of every block. */
5862 free (block_reg);
5864 /* Free bitmaps. */
5865 sbitmap_vector_free (npi.nonnull_local);
5866 sbitmap_vector_free (npi.nonnull_killed);
5867 sbitmap_vector_free (nonnull_avin);
5868 sbitmap_vector_free (nonnull_avout);
5870 return something_changed;
5873 /* Code Hoisting variables and subroutines. */
5875 /* Very busy expressions. */
5876 static sbitmap *hoist_vbein;
5877 static sbitmap *hoist_vbeout;
5879 /* Hoistable expressions. */
5880 static sbitmap *hoist_exprs;
5882 /* Dominator bitmaps. */
5883 dominance_info dominators;
5885 /* ??? We could compute post dominators and run this algorithm in
5886 reverse to perform tail merging; doing so would probably be
5887 more effective than the tail merging code in jump.c.
5889 It's unclear if tail merging could be run in parallel with
5890 code hoisting. It would be nice. */
5892 /* Allocate vars used for code hoisting analysis. */
5894 static void
5895 alloc_code_hoist_mem (n_blocks, n_exprs)
5896 int n_blocks, n_exprs;
5898 antloc = sbitmap_vector_alloc (n_blocks, n_exprs);
5899 transp = sbitmap_vector_alloc (n_blocks, n_exprs);
5900 comp = sbitmap_vector_alloc (n_blocks, n_exprs);
5902 hoist_vbein = sbitmap_vector_alloc (n_blocks, n_exprs);
5903 hoist_vbeout = sbitmap_vector_alloc (n_blocks, n_exprs);
5904 hoist_exprs = sbitmap_vector_alloc (n_blocks, n_exprs);
5905 transpout = sbitmap_vector_alloc (n_blocks, n_exprs);
5908 /* Free vars used for code hoisting analysis. */
5910 static void
5911 free_code_hoist_mem ()
5913 sbitmap_vector_free (antloc);
5914 sbitmap_vector_free (transp);
5915 sbitmap_vector_free (comp);
5917 sbitmap_vector_free (hoist_vbein);
5918 sbitmap_vector_free (hoist_vbeout);
5919 sbitmap_vector_free (hoist_exprs);
5920 sbitmap_vector_free (transpout);
5922 free_dominance_info (dominators);
5925 /* Compute the very busy expressions at entry/exit from each block.
5927 An expression is very busy if all paths from a given point
5928 compute the expression. */
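/* A sketch of the fixed point computed below (my notation, not part of
   the original comments):

     hoist_vbeout[bb] = intersection over all successors S of hoist_vbein[S]
     hoist_vbein[bb]  = antloc[bb] | (hoist_vbeout[bb] & transp[bb])

   The exit block's vbeout stays empty, and the blocks are visited in
   reverse order until no bit changes.  */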
5930 static void
5931 compute_code_hoist_vbeinout ()
5933 int changed, passes;
5934 basic_block bb;
5936 sbitmap_vector_zero (hoist_vbeout, last_basic_block);
5937 sbitmap_vector_zero (hoist_vbein, last_basic_block);
5939 passes = 0;
5940 changed = 1;
5942 while (changed)
5944 changed = 0;
5946 /* We scan the blocks in the reverse order to speed up
5947 the convergence. */
5948 FOR_EACH_BB_REVERSE (bb)
5950 changed |= sbitmap_a_or_b_and_c_cg (hoist_vbein[bb->index], antloc[bb->index],
5951 hoist_vbeout[bb->index], transp[bb->index]);
5952 if (bb->next_bb != EXIT_BLOCK_PTR)
5953 sbitmap_intersection_of_succs (hoist_vbeout[bb->index], hoist_vbein, bb->index);
5956 passes++;
5959 if (gcse_file)
5960 fprintf (gcse_file, "hoisting vbeinout computation: %d passes\n", passes);
5963 /* Top level routine to do the dataflow analysis needed by code hoisting. */
5965 static void
5966 compute_code_hoist_data ()
5968 compute_local_properties (transp, comp, antloc, &expr_hash_table);
5969 compute_transpout ();
5970 compute_code_hoist_vbeinout ();
5971 dominators = calculate_dominance_info (CDI_DOMINATORS);
5972 if (gcse_file)
5973 fprintf (gcse_file, "\n");
5976 /* Determine if the expression identified by EXPR_INDEX would
5977 reach BB unimpaired if it was placed at the end of EXPR_BB.
5979 It's unclear exactly what Muchnick meant by "unimpaired". It seems
5980 to me that the expression must either be computed or transparent in
5981 *every* block in the path(s) from EXPR_BB to BB. Any other definition
5982 would allow the expression to be hoisted out of loops, even if
5983 the expression wasn't a loop invariant.
5985 Contrast this to reachability for PRE where an expression is
5986 considered reachable if *any* path reaches instead of *all*
5987 paths. */
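/* Illustrative only: for a diamond-shaped region

        EXPR_BB
         /    \
        B1     B2
         \    /
          BB

   the walk below demands that the expression survive along *both* paths
   from EXPR_BB to BB before hoisting is allowed; PRE would be content
   with a single surviving path.  */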
5989 static int
5990 hoist_expr_reaches_here_p (expr_bb, expr_index, bb, visited)
5991 basic_block expr_bb;
5992 int expr_index;
5993 basic_block bb;
5994 char *visited;
5996 edge pred;
5997 int visited_allocated_locally = 0;
6000 if (visited == NULL)
6002 visited_allocated_locally = 1;
6003 visited = xcalloc (last_basic_block, 1);
6006 for (pred = bb->pred; pred != NULL; pred = pred->pred_next)
6008 basic_block pred_bb = pred->src;
6010 if (pred->src == ENTRY_BLOCK_PTR)
6011 break;
6012 else if (pred_bb == expr_bb)
6013 continue;
6014 else if (visited[pred_bb->index])
6015 continue;
6017 /* Does this predecessor generate this expression? */
6018 else if (TEST_BIT (comp[pred_bb->index], expr_index))
6019 break;
6020 else if (! TEST_BIT (transp[pred_bb->index], expr_index))
6021 break;
6023 /* Not killed. */
6024 else
6026 visited[pred_bb->index] = 1;
6027 if (! hoist_expr_reaches_here_p (expr_bb, expr_index,
6028 pred_bb, visited))
6029 break;
6032 if (visited_allocated_locally)
6033 free (visited);
6035 return (pred == NULL);
6038 /* Actually perform code hoisting. */
6040 static void
6041 hoist_code ()
6043 basic_block bb, dominated;
6044 basic_block *domby;
6045 unsigned int domby_len;
6046 unsigned int i,j;
6047 struct expr **index_map;
6048 struct expr *expr;
6050 sbitmap_vector_zero (hoist_exprs, last_basic_block);
6052 /* Compute a mapping from expression number (`bitmap_index') to
6053 hash table entry. */
6055 index_map = (struct expr **) xcalloc (expr_hash_table.n_elems, sizeof (struct expr *));
6056 for (i = 0; i < expr_hash_table.size; i++)
6057 for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash)
6058 index_map[expr->bitmap_index] = expr;
6060 /* Walk over each basic block looking for potentially hoistable
6061 expressions; nothing gets hoisted from the entry block. */
6062 FOR_EACH_BB (bb)
6064 int found = 0;
6065 int insn_inserted_p;
6067 domby_len = get_dominated_by (dominators, bb, &domby);
6068 /* Examine each expression that is very busy at the exit of this
6069 block. These are the potentially hoistable expressions. */
6070 for (i = 0; i < hoist_vbeout[bb->index]->n_bits; i++)
6072 int hoistable = 0;
6074 if (TEST_BIT (hoist_vbeout[bb->index], i)
6075 && TEST_BIT (transpout[bb->index], i))
6077 /* We've found a potentially hoistable expression, now
6078 we look at every block BB dominates to see if it
6079 computes the expression. */
6080 for (j = 0; j < domby_len; j++)
6082 dominated = domby[j];
6083 /* Ignore self dominance. */
6084 if (bb == dominated)
6085 continue;
6086 /* We've found a dominated block, now see if it computes
6087 the busy expression and whether or not moving that
6088 expression to the "beginning" of that block is safe. */
6089 if (!TEST_BIT (antloc[dominated->index], i))
6090 continue;
6092 /* Note if the expression would reach the dominated block
6093 unimpaired if it was placed at the end of BB.
6095 Keep track of how many times this expression is hoistable
6096 from a dominated block into BB. */
6097 if (hoist_expr_reaches_here_p (bb, i, dominated, NULL))
6098 hoistable++;
6101 /* If we found more than one hoistable occurrence of this
6102 expression, then note it in the bitmap of expressions to
6103 hoist. It makes no sense to hoist things which are computed
6104 in only one BB, and doing so tends to pessimize register
6105 allocation. One could increase this value to try harder
6106 to avoid any possible code expansion due to register
6107 allocation issues; however experiments have shown that
6108 the vast majority of hoistable expressions are only movable
6109 from two successors, so raising this threshold is likely
6110 to nullify any benefit we get from code hoisting. */
6111 if (hoistable > 1)
6113 SET_BIT (hoist_exprs[bb->index], i);
6114 found = 1;
6118 /* If we found nothing to hoist, then quit now. */
6119 if (! found)
6121 free (domby);
6122 continue;
6125 /* Loop over all the hoistable expressions. */
6126 for (i = 0; i < hoist_exprs[bb->index]->n_bits; i++)
6128 /* We want to insert the expression into BB only once, so
6129 note when we've inserted it. */
6130 insn_inserted_p = 0;
6132 /* These tests should be the same as the tests above. */
6133 if (TEST_BIT (hoist_vbeout[bb->index], i))
6135 /* We've found a potentially hoistable expression, now
6136 we look at every block BB dominates to see if it
6137 computes the expression. */
6138 for (j = 0; j < domby_len; j++)
6140 dominated = domby[j];
6141 /* Ignore self dominance. */
6142 if (bb == dominated)
6143 continue;
6145 /* We've found a dominated block, now see if it computes
6146 the busy expression and whether or not moving that
6147 expression to the "beginning" of that block is safe. */
6148 if (!TEST_BIT (antloc[dominated->index], i))
6149 continue;
6151 /* The expression is computed in the dominated block and
6152 it would be safe to compute it at the start of the
6153 dominated block. Now we have to determine if the
6154 expression would reach the dominated block if it was
6155 placed at the end of BB. */
6156 if (hoist_expr_reaches_here_p (bb, i, dominated, NULL))
6158 struct expr *expr = index_map[i];
6159 struct occr *occr = expr->antic_occr;
6160 rtx insn;
6161 rtx set;
6163 /* Find the right occurrence of this expression. */
6164 while (occr && BLOCK_FOR_INSN (occr->insn) != dominated)
6165 occr = occr->next;
6167 /* Should never happen. */
6168 if (!occr)
6169 abort ();
6171 insn = occr->insn;
6173 set = single_set (insn);
6174 if (! set)
6175 abort ();
6177 /* Create a pseudo-reg to store the result of reaching
6178 expressions into. Get the mode for the new pseudo
6179 from the mode of the original destination pseudo. */
6180 if (expr->reaching_reg == NULL)
6181 expr->reaching_reg
6182 = gen_reg_rtx (GET_MODE (SET_DEST (set)));
6184 gcse_emit_move_after (expr->reaching_reg, SET_DEST (set), insn);
6185 delete_insn (insn);
6186 occr->deleted_p = 1;
6187 if (!insn_inserted_p)
6189 insert_insn_end_bb (index_map[i], bb, 0);
6190 insn_inserted_p = 1;
6196 free (domby);
6199 free (index_map);
6202 /* Top level routine to perform one code hoisting (aka unification) pass.
6204 Return nonzero if a change was made. */
6206 static int
6207 one_code_hoisting_pass ()
6209 int changed = 0;
6211 alloc_hash_table (max_cuid, &expr_hash_table, 0);
6212 compute_hash_table (&expr_hash_table);
6213 if (gcse_file)
6214 dump_hash_table (gcse_file, "Code Hoisting Expressions", &expr_hash_table);
6216 if (expr_hash_table.n_elems > 0)
6218 alloc_code_hoist_mem (last_basic_block, expr_hash_table.n_elems);
6219 compute_code_hoist_data ();
6220 hoist_code ();
6221 free_code_hoist_mem ();
6224 free_hash_table (&expr_hash_table);
6226 return changed;
6229 /* Here we provide the things required to do store motion towards
6230 the exit. In order for this to be effective, gcse also needed to
6231 be taught how to move a load when it is killed only by a store to itself.
6233 int i;
6234 float a[10];
6236 void foo(float scale)
6238 for (i=0; i<10; i++)
6239 a[i] *= scale;
6242 'i' is both loaded and stored to in the loop. Normally, gcse cannot move
6243 the load out since it's live around the loop, and stored at the bottom
6244 of the loop.
6246 The 'Load Motion' referred to and implemented in this file is
6247 an enhancement to gcse which, when using edge-based lcm, recognizes
6248 this situation and allows gcse to move the load out of the loop.
6250 Once gcse has hoisted the load, store motion can then push this
6251 load towards the exit, and we end up with no loads or stores of 'i'
6252 in the loop. */
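/* Schematically (illustrative only, using invented register names), the
   intended end result for the example above is:

       reg = 0;                  .. 'i' now lives in a pseudo
       for (; reg < 10; reg++)
         a[reg] *= scale;        .. no loads or stores of 'i' in the loop
       i = reg;                  .. a single store pushed towards the exit

   so neither a load nor a store of 'i' remains inside the loop.  */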
6254 /* This will search the ldst list for a matching expression. If it
6255 doesn't find one, we create one and initialize it. */
6257 static struct ls_expr *
6258 ldst_entry (x)
6259 rtx x;
6261 struct ls_expr * ptr;
6263 for (ptr = first_ls_expr(); ptr != NULL; ptr = next_ls_expr (ptr))
6264 if (expr_equiv_p (ptr->pattern, x))
6265 break;
6267 if (!ptr)
6269 ptr = (struct ls_expr *) xmalloc (sizeof (struct ls_expr));
6271 ptr->next = pre_ldst_mems;
6272 ptr->expr = NULL;
6273 ptr->pattern = x;
6274 ptr->loads = NULL_RTX;
6275 ptr->stores = NULL_RTX;
6276 ptr->reaching_reg = NULL_RTX;
6277 ptr->invalid = 0;
6278 ptr->index = 0;
6279 ptr->hash_index = 0;
6280 pre_ldst_mems = ptr;
6283 return ptr;
6286 /* Free up an individual ldst entry. */
6288 static void
6289 free_ldst_entry (ptr)
6290 struct ls_expr * ptr;
6292 free_INSN_LIST_list (& ptr->loads);
6293 free_INSN_LIST_list (& ptr->stores);
6295 free (ptr);
6298 /* Free up all memory associated with the ldst list. */
6300 static void
6301 free_ldst_mems ()
6303 while (pre_ldst_mems)
6305 struct ls_expr * tmp = pre_ldst_mems;
6307 pre_ldst_mems = pre_ldst_mems->next;
6309 free_ldst_entry (tmp);
6312 pre_ldst_mems = NULL;
6315 /* Dump debugging info about the ldst list. */
6317 static void
6318 print_ldst_list (file)
6319 FILE * file;
6321 struct ls_expr * ptr;
6323 fprintf (file, "LDST list: \n");
6325 for (ptr = first_ls_expr(); ptr != NULL; ptr = next_ls_expr (ptr))
6327 fprintf (file, " Pattern (%3d): ", ptr->index);
6329 print_rtl (file, ptr->pattern);
6331 fprintf (file, "\n Loads : ");
6333 if (ptr->loads)
6334 print_rtl (file, ptr->loads);
6335 else
6336 fprintf (file, "(nil)");
6338 fprintf (file, "\n Stores : ");
6340 if (ptr->stores)
6341 print_rtl (file, ptr->stores);
6342 else
6343 fprintf (file, "(nil)");
6345 fprintf (file, "\n\n");
6348 fprintf (file, "\n");
6351 /* Return the ldst list entry whose pattern matches X and is not invalid, or NULL. */
6353 static struct ls_expr *
6354 find_rtx_in_ldst (x)
6355 rtx x;
6357 struct ls_expr * ptr;
6359 for (ptr = pre_ldst_mems; ptr != NULL; ptr = ptr->next)
6360 if (expr_equiv_p (ptr->pattern, x) && ! ptr->invalid)
6361 return ptr;
6363 return NULL;
6366 /* Assign each element of the list of mems a monotonically increasing value. */
6368 static int
6369 enumerate_ldsts ()
6371 struct ls_expr * ptr;
6372 int n = 0;
6374 for (ptr = pre_ldst_mems; ptr != NULL; ptr = ptr->next)
6375 ptr->index = n++;
6377 return n;
6380 /* Return first item in the list. */
6382 static inline struct ls_expr *
6383 first_ls_expr ()
6385 return pre_ldst_mems;
6388 /* Return the next item in the list after the specified one. */
6390 static inline struct ls_expr *
6391 next_ls_expr (ptr)
6392 struct ls_expr * ptr;
6394 return ptr->next;
6397 /* Load Motion for loads which only kill themselves. */
6399 /* Return true if x is a simple MEM operation, with no registers or
6400 side effects. These are the types of loads we consider for the
6401 ld_motion list; otherwise we let the usual aliasing take care of it. */
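/* For example (illustrative only): a reference like
   (mem:SI (symbol_ref:SI ("i"))) qualifies, since its address cannot vary;
   (mem:SI (reg:SI 58)), a volatile MEM, or a BLKmode MEM does not, and is
   left to the ordinary aliasing machinery.  */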
6403 static int
6404 simple_mem (x)
6405 rtx x;
6407 if (GET_CODE (x) != MEM)
6408 return 0;
6410 if (MEM_VOLATILE_P (x))
6411 return 0;
6413 if (GET_MODE (x) == BLKmode)
6414 return 0;
6416 if (!rtx_varies_p (XEXP (x, 0), 0))
6417 return 1;
6419 return 0;
6422 /* Make sure there isn't a buried reference in this pattern anywhere.
6423 If there is, invalidate the entry for it since we're not capable
6424 of fixing it up just yet. We have to be sure we know about ALL
6425 loads since the aliasing code will allow all entries in the
6426 ld_motion list to not-alias themselves. If we miss a load, we will get
6427 the wrong value since gcse might common it and we won't know to
6428 fix it up. */
6430 static void
6431 invalidate_any_buried_refs (x)
6432 rtx x;
6434 const char * fmt;
6435 int i, j;
6436 struct ls_expr * ptr;
6438 /* Invalidate it in the list. */
6439 if (GET_CODE (x) == MEM && simple_mem (x))
6441 ptr = ldst_entry (x);
6442 ptr->invalid = 1;
6445 /* Recursively process the insn. */
6446 fmt = GET_RTX_FORMAT (GET_CODE (x));
6448 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
6450 if (fmt[i] == 'e')
6451 invalidate_any_buried_refs (XEXP (x, i));
6452 else if (fmt[i] == 'E')
6453 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
6454 invalidate_any_buried_refs (XVECEXP (x, i, j));
6458 /* Find all the 'simple' MEMs which are used in LOADs and STORES. Simple
6459 being defined as MEM loads and stores to symbols, with no
6460 side effects and no registers in the expression. If there are any
6461 uses/defs which don't match these criteria, the entry is invalidated and
6462 trimmed out later. */
6464 static void
6465 compute_ld_motion_mems ()
6467 struct ls_expr * ptr;
6468 basic_block bb;
6469 rtx insn;
6471 pre_ldst_mems = NULL;
6473 FOR_EACH_BB (bb)
6475 for (insn = bb->head;
6476 insn && insn != NEXT_INSN (bb->end);
6477 insn = NEXT_INSN (insn))
6479 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
6481 if (GET_CODE (PATTERN (insn)) == SET)
6483 rtx src = SET_SRC (PATTERN (insn));
6484 rtx dest = SET_DEST (PATTERN (insn));
6486 /* Check for a simple LOAD... */
6487 if (GET_CODE (src) == MEM && simple_mem (src))
6489 ptr = ldst_entry (src);
6490 if (GET_CODE (dest) == REG)
6491 ptr->loads = alloc_INSN_LIST (insn, ptr->loads);
6492 else
6493 ptr->invalid = 1;
6495 else
6497 /* Make sure there isn't a buried load somewhere. */
6498 invalidate_any_buried_refs (src);
6501 /* Check for stores. Don't worry about aliased ones; they
6502 will block any movement we might do later. We only care
6503 about this exact pattern since those are the only
6504 circumstances in which we will ignore the aliasing info. */
6505 if (GET_CODE (dest) == MEM && simple_mem (dest))
6507 ptr = ldst_entry (dest);
6509 if (GET_CODE (src) != MEM
6510 && GET_CODE (src) != ASM_OPERANDS)
6511 ptr->stores = alloc_INSN_LIST (insn, ptr->stores);
6512 else
6513 ptr->invalid = 1;
6516 else
6517 invalidate_any_buried_refs (PATTERN (insn));
6523 /* Remove any references that have been either invalidated or are not in the
6524 expression list for pre gcse. */
6526 static void
6527 trim_ld_motion_mems ()
6529 struct ls_expr * last = NULL;
6530 struct ls_expr * ptr = first_ls_expr ();
6532 while (ptr != NULL)
6534 int del = ptr->invalid;
6535 struct expr * expr = NULL;
6537 /* Delete if entry has been made invalid. */
6538 if (!del)
6540 unsigned int i;
6542 del = 1;
6543 /* Delete if we cannot find this mem in the expression list. */
6544 for (i = 0; i < expr_hash_table.size && del; i++)
6546 for (expr = expr_hash_table.table[i];
6547 expr != NULL;
6548 expr = expr->next_same_hash)
6549 if (expr_equiv_p (expr->expr, ptr->pattern))
6551 del = 0;
6552 break;
6557 if (del)
6559 if (last != NULL)
6561 last->next = ptr->next;
6562 free_ldst_entry (ptr);
6563 ptr = last->next;
6565 else
6567 pre_ldst_mems = pre_ldst_mems->next;
6568 free_ldst_entry (ptr);
6569 ptr = pre_ldst_mems;
6572 else
6574 /* Set the expression field if we are keeping it. */
6575 last = ptr;
6576 ptr->expr = expr;
6577 ptr = ptr->next;
6581 /* Show the world what we've found. */
6582 if (gcse_file && pre_ldst_mems != NULL)
6583 print_ldst_list (gcse_file);
6586 /* This routine will take an expression which we are replacing with
6587 a reaching register, and update any stores that are needed if
6588 that expression is in the ld_motion list. Stores are updated by
6589 copying their SRC to the reaching register, and then storing
6590 the reaching register into the store location. This keeps the
6591 correct value in the reaching register for the loads. */
6593 static void
6594 update_ld_motion_stores (expr)
6595 struct expr * expr;
6597 struct ls_expr * mem_ptr;
6599 if ((mem_ptr = find_rtx_in_ldst (expr->expr)))
6601 /* We can try to find just the REACHED stores, but it shouldn't
6602 matter to set the reaching reg everywhere... some might be
6603 dead and should be eliminated later. */
6605 /* We replace SET mem = expr with
6606 SET reg = expr
6607 SET mem = reg , where reg is the
6608 reaching reg used in the load. */
6609 rtx list = mem_ptr->stores;
6611 for ( ; list != NULL_RTX; list = XEXP (list, 1))
6613 rtx insn = XEXP (list, 0);
6614 rtx pat = PATTERN (insn);
6615 rtx src = SET_SRC (pat);
6616 rtx reg = expr->reaching_reg;
6617 rtx copy, new;
6619 /* If we've already copied it, continue. */
6620 if (expr->reaching_reg == src)
6621 continue;
6623 if (gcse_file)
6625 fprintf (gcse_file, "PRE: store updated with reaching reg ");
6626 print_rtl (gcse_file, expr->reaching_reg);
6627 fprintf (gcse_file, ":\n ");
6628 print_inline_rtx (gcse_file, insn, 8);
6629 fprintf (gcse_file, "\n");
6632 copy = gen_move_insn ( reg, SET_SRC (pat));
6633 new = emit_insn_before (copy, insn);
6634 record_one_set (REGNO (reg), new);
6635 SET_SRC (pat) = reg;
6637 /* un-recognize this pattern since it's probably different now. */
6638 INSN_CODE (insn) = -1;
6639 gcse_create_count++;
6644 /* Store motion code. */
6646 /* This is used to communicate the target bitvector we want to use in the
6647 reg_set_info routine when called via the note_stores mechanism. */
6648 static sbitmap * regvec;
6650 /* Used in computing the reverse edge graph bit vectors. */
6651 static sbitmap * st_antloc;
6653 /* Global holding the number of store expressions we are dealing with. */
6654 static int num_stores;
6656 /* Check to see if we need to mark a register set. Called from note_stores. */
6658 static void
6659 reg_set_info (dest, setter, data)
6660 rtx dest, setter ATTRIBUTE_UNUSED;
6661 void * data ATTRIBUTE_UNUSED;
6663 if (GET_CODE (dest) == SUBREG)
6664 dest = SUBREG_REG (dest);
6666 if (GET_CODE (dest) == REG)
6667 SET_BIT (*regvec, REGNO (dest));
6670 /* Return nonzero if the register operands of expression X are killed
6671 anywhere in basic block BB. */
6673 static int
6674 store_ops_ok (x, bb)
6675 rtx x;
6676 basic_block bb;
6678 int i;
6679 enum rtx_code code;
6680 const char * fmt;
6682 /* Repeat is used to turn tail-recursion into iteration. */
6683 repeat:
6685 if (x == 0)
6686 return 1;
6688 code = GET_CODE (x);
6689 switch (code)
6691 case REG:
6692 /* If a reg has changed after us in this
6693 block, the operand has been killed. */
6694 return TEST_BIT (reg_set_in_block[bb->index], REGNO (x));
6696 case MEM:
6697 x = XEXP (x, 0);
6698 goto repeat;
6700 case PRE_DEC:
6701 case PRE_INC:
6702 case POST_DEC:
6703 case POST_INC:
6704 return 0;
6706 case PC:
6707 case CC0: /*FIXME*/
6708 case CONST:
6709 case CONST_INT:
6710 case CONST_DOUBLE:
6711 case CONST_VECTOR:
6712 case SYMBOL_REF:
6713 case LABEL_REF:
6714 case ADDR_VEC:
6715 case ADDR_DIFF_VEC:
6716 return 1;
6718 default:
6719 break;
6722 i = GET_RTX_LENGTH (code) - 1;
6723 fmt = GET_RTX_FORMAT (code);
6725 for (; i >= 0; i--)
6727 if (fmt[i] == 'e')
6729 rtx tem = XEXP (x, i);
6731 /* If we are about to do the last recursive call
6732 needed at this level, change it into iteration.
6733 This function is called enough to be worth it. */
6734 if (i == 0)
6736 x = tem;
6737 goto repeat;
6740 if (! store_ops_ok (tem, bb))
6741 return 0;
6743 else if (fmt[i] == 'E')
6745 int j;
6747 for (j = 0; j < XVECLEN (x, i); j++)
6749 if (! store_ops_ok (XVECEXP (x, i, j), bb))
6750 return 0;
6755 return 1;
6758 /* Determine whether INSN is a MEM store pattern that we will consider moving. */
6760 static void
6761 find_moveable_store (insn)
6762 rtx insn;
6764 struct ls_expr * ptr;
6765 rtx dest = PATTERN (insn);
6767 if (GET_CODE (dest) != SET
6768 || GET_CODE (SET_SRC (dest)) == ASM_OPERANDS)
6769 return;
6771 dest = SET_DEST (dest);
6773 if (GET_CODE (dest) != MEM || MEM_VOLATILE_P (dest)
6774 || GET_MODE (dest) == BLKmode)
6775 return;
6777 if (GET_CODE (XEXP (dest, 0)) != SYMBOL_REF)
6778 return;
6780 if (rtx_varies_p (XEXP (dest, 0), 0))
6781 return;
6783 ptr = ldst_entry (dest);
6784 ptr->stores = alloc_INSN_LIST (insn, ptr->stores);
6787 /* Build the table of candidate stores for store motion, recording along
6788 the way which registers are set in each basic block. */
6790 static int
6791 compute_store_table ()
6793 int ret;
6794 basic_block bb;
6795 unsigned regno;
6796 rtx insn, pat;
6798 max_gcse_regno = max_reg_num ();
6800 reg_set_in_block = (sbitmap *) sbitmap_vector_alloc (last_basic_block,
6801 max_gcse_regno);
6802 sbitmap_vector_zero (reg_set_in_block, last_basic_block);
6803 pre_ldst_mems = 0;
6805 /* Find all the stores we care about. */
6806 FOR_EACH_BB (bb)
6808 regvec = & (reg_set_in_block[bb->index]);
6809 for (insn = bb->end;
6810 insn && insn != PREV_INSN (bb->end);
6811 insn = PREV_INSN (insn))
6813 /* Ignore anything that is not a normal insn. */
6814 if (! INSN_P (insn))
6815 continue;
6817 if (GET_CODE (insn) == CALL_INSN)
6819 bool clobbers_all = false;
6820 #ifdef NON_SAVING_SETJMP
6821 if (NON_SAVING_SETJMP
6822 && find_reg_note (insn, REG_SETJMP, NULL_RTX))
6823 clobbers_all = true;
6824 #endif
6826 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
6827 if (clobbers_all
6828 || TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
6829 SET_BIT (reg_set_in_block[bb->index], regno);
6832 pat = PATTERN (insn);
6833 note_stores (pat, reg_set_info, NULL);
6835 /* Now that we've marked regs, look for stores. */
6836 if (GET_CODE (pat) == SET)
6837 find_moveable_store (insn);
6841 ret = enumerate_ldsts ();
6843 if (gcse_file)
6845 fprintf (gcse_file, "Store Motion Expressions.\n");
6846 print_ldst_list (gcse_file);
6849 return ret;
6852 /* Check to see if the load X is aliased with STORE_PATTERN. */
6854 static int
6855 load_kills_store (x, store_pattern)
6856 rtx x, store_pattern;
6858 if (true_dependence (x, GET_MODE (x), store_pattern, rtx_addr_varies_p))
6859 return 1;
6860 return 0;
6863 /* Go through the entire insn X, looking for any loads which might alias
6864 STORE_PATTERN. Return 1 if found. */
6866 static int
6867 find_loads (x, store_pattern)
6868 rtx x, store_pattern;
6870 const char * fmt;
6871 int i, j;
6872 int ret = 0;
6874 if (!x)
6875 return 0;
6877 if (GET_CODE (x) == SET)
6878 x = SET_SRC (x);
6880 if (GET_CODE (x) == MEM)
6882 if (load_kills_store (x, store_pattern))
6883 return 1;
6886 /* Recursively process the insn. */
6887 fmt = GET_RTX_FORMAT (GET_CODE (x));
6889 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0 && !ret; i--)
6891 if (fmt[i] == 'e')
6892 ret |= find_loads (XEXP (x, i), store_pattern);
6893 else if (fmt[i] == 'E')
6894 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
6895 ret |= find_loads (XVECEXP (x, i, j), store_pattern);
6897 return ret;
6900 /* Check if INSN kills the store pattern X (is aliased with it).
6901 Return 1 if it does. */
6903 static int
6904 store_killed_in_insn (x, insn)
6905 rtx x, insn;
6907 if (GET_RTX_CLASS (GET_CODE (insn)) != 'i')
6908 return 0;
6910 if (GET_CODE (insn) == CALL_INSN)
6912 /* A normal or pure call might read from pattern,
6913 but a const call will not. */
6914 return ! CONST_OR_PURE_CALL_P (insn) || pure_call_p (insn);
6917 if (GET_CODE (PATTERN (insn)) == SET)
6919 rtx pat = PATTERN (insn);
6920 /* Check for memory stores to aliased objects. */
6921 if (GET_CODE (SET_DEST (pat)) == MEM && !expr_equiv_p (SET_DEST (pat), x))
6922 /* Pretend it's a load and check for aliasing. */
6923 if (find_loads (SET_DEST (pat), x))
6924 return 1;
6925 return find_loads (SET_SRC (pat), x);
6927 else
6928 return find_loads (PATTERN (insn), x);
6931 /* Returns 1 if the expression X is loaded or clobbered on or after INSN
6932 within basic block BB. */
6934 static int
6935 store_killed_after (x, insn, bb)
6936 rtx x, insn;
6937 basic_block bb;
6939 rtx last = bb->end;
6941 if (insn == last)
6942 return 0;
6944 /* Check if the register operands of the store are OK in this block.
6945 Note that if registers are changed ANYWHERE in the block, we'll
6946 decide we can't move it, regardless of whether it changed above
6947 or below the store. This could be improved by checking the register
6948 operands while looking for aliasing in each insn. */
6949 if (!store_ops_ok (XEXP (x, 0), bb))
6950 return 1;
6952 for ( ; insn && insn != NEXT_INSN (last); insn = NEXT_INSN (insn))
6953 if (store_killed_in_insn (x, insn))
6954 return 1;
6956 return 0;
6959 /* Returns 1 if the expression X is loaded or clobbered on or before INSN
6960 within basic block BB. */
6961 static int
6962 store_killed_before (x, insn, bb)
6963 rtx x, insn;
6964 basic_block bb;
6966 rtx first = bb->head;
6968 if (insn == first)
6969 return store_killed_in_insn (x, insn);
6971 /* Check if the register operands of the store are OK in this block.
6972 Note that if registers are changed ANYWHERE in the block, we'll
6973 decide we can't move it, regardless of whether it changed above
6974 or below the store. This could be improved by checking the register
6975 operands while looking for aliasing in each insn. */
6976 if (!store_ops_ok (XEXP (x, 0), bb))
6977 return 1;
6979 for ( ; insn && insn != PREV_INSN (first); insn = PREV_INSN (insn))
6980 if (store_killed_in_insn (x, insn))
6981 return 1;
6983 return 0;
6986 #define ANTIC_STORE_LIST(x) ((x)->loads)
6987 #define AVAIL_STORE_LIST(x) ((x)->stores)
6989 /* Given the table of available store insns at the end of blocks,
6990 determine which ones are not killed by aliasing, and generate
6991 the appropriate vectors for gen and killed. */
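/* Sketch of the vectors built here (my summary of the code below):

     ae_gen[bb]    - stores still available at the end of BB
                     (the store is not killed later in its block)
     st_antloc[bb] - stores anticipatable at the start of BB
                     (the store is not killed earlier in its block)
     ae_kill[bb]   - the block as a whole kills the store
     transp[bb]    - the block is transparent for the store

   These feed pre_edge_rev_lcm, i.e. LCM over the reversed flowgraph.  */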
6992 static void
6993 build_store_vectors ()
6995 basic_block bb, b;
6996 rtx insn, st;
6997 struct ls_expr * ptr;
6999 /* Build the gen_vector. This is any store in the table which is not killed
7000 by aliasing later in its block. */
7001 ae_gen = (sbitmap *) sbitmap_vector_alloc (last_basic_block, num_stores);
7002 sbitmap_vector_zero (ae_gen, last_basic_block);
7004 st_antloc = (sbitmap *) sbitmap_vector_alloc (last_basic_block, num_stores);
7005 sbitmap_vector_zero (st_antloc, last_basic_block);
7007 for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
7009 /* Put all the stores into either the antic list, or the avail list,
7010 or both. */
7011 rtx store_list = ptr->stores;
7012 ptr->stores = NULL_RTX;
7014 for (st = store_list; st != NULL; st = XEXP (st, 1))
7016 insn = XEXP (st, 0);
7017 bb = BLOCK_FOR_INSN (insn);
7019 if (!store_killed_after (ptr->pattern, insn, bb))
7021 /* If we've already seen an available expression in this block,
7022 we can delete the one we saw already (it occurs earlier in
7023 the block) and replace it with this one. We'll copy the
7024 old SRC expression to an unused register in case there
7025 are any side effects. */
7026 if (TEST_BIT (ae_gen[bb->index], ptr->index))
7028 /* Find previous store. */
7029 rtx st;
7030 for (st = AVAIL_STORE_LIST (ptr); st ; st = XEXP (st, 1))
7031 if (BLOCK_FOR_INSN (XEXP (st, 0)) == bb)
7032 break;
7033 if (st)
7035 rtx r = gen_reg_rtx (GET_MODE (ptr->pattern));
7036 if (gcse_file)
7037 fprintf (gcse_file, "Removing redundant store:\n");
7038 replace_store_insn (r, XEXP (st, 0), bb);
7039 XEXP (st, 0) = insn;
7040 continue;
7043 SET_BIT (ae_gen[bb->index], ptr->index);
7044 AVAIL_STORE_LIST (ptr) = alloc_INSN_LIST (insn,
7045 AVAIL_STORE_LIST (ptr));
7048 if (!store_killed_before (ptr->pattern, insn, bb))
7050 SET_BIT (st_antloc[BLOCK_NUM (insn)], ptr->index);
7051 ANTIC_STORE_LIST (ptr) = alloc_INSN_LIST (insn,
7052 ANTIC_STORE_LIST (ptr));
7056 /* Free the original list of store insns. */
7057 free_INSN_LIST_list (&store_list);
7060 ae_kill = (sbitmap *) sbitmap_vector_alloc (last_basic_block, num_stores);
7061 sbitmap_vector_zero (ae_kill, last_basic_block);
7063 transp = (sbitmap *) sbitmap_vector_alloc (last_basic_block, num_stores);
7064 sbitmap_vector_zero (transp, last_basic_block);
7066 for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
7067 FOR_EACH_BB (b)
7069 if (store_killed_after (ptr->pattern, b->head, b))
7071 /* The anticipatable expression is not killed if it's gen'd. */
7073 We leave this check out for now. If we have a code sequence
7074 in a block which looks like:
7075 ST MEMa = x
7076 L y = MEMa
7077 ST MEMa = z
7078 We should flag this as having an ANTIC expression, NOT
7079 transparent, NOT killed, and AVAIL.
7080 Unfortunately, since we haven't re-written all loads to
7081 use the reaching reg, we'll end up doing an incorrect
7082 Load in the middle here if we push the store down. It happens in
7083 gcc.c-torture/execute/960311-1.c with -O3
7084 If we always kill it in this case, we'll sometimes do
7085 unnecessary work, but it shouldn't actually hurt anything.
7086 if (!TEST_BIT (ae_gen[b], ptr->index)). */
7087 SET_BIT (ae_kill[b->index], ptr->index);
7089 else
7090 SET_BIT (transp[b->index], ptr->index);
7093 /* Any block with no exits calls some non-returning function, so
7094 we'd better mark the store killed here, or we might not store to
7095 it at all. If we knew it was abort, we wouldn't have to store,
7096 but we don't know that for sure. */
7097 if (gcse_file)
7099 fprintf (gcse_file, "ST_avail and ST_antic (shown under loads..)\n");
7100 print_ldst_list (gcse_file);
7101 dump_sbitmap_vector (gcse_file, "st_antloc", "", st_antloc, last_basic_block);
7102 dump_sbitmap_vector (gcse_file, "st_kill", "", ae_kill, last_basic_block);
7103 dump_sbitmap_vector (gcse_file, "Transpt", "", transp, last_basic_block);
7104 dump_sbitmap_vector (gcse_file, "st_avloc", "", ae_gen, last_basic_block);
7108 /* Insert an instruction at the beginning of a basic block, and update
7109 the BLOCK_HEAD if needed. */
7111 static void
7112 insert_insn_start_bb (insn, bb)
7113 rtx insn;
7114 basic_block bb;
7116 /* Insert at start of successor block. */
7117 rtx prev = PREV_INSN (bb->head);
7118 rtx before = bb->head;
7119 while (before != 0)
7121 if (GET_CODE (before) != CODE_LABEL
7122 && (GET_CODE (before) != NOTE
7123 || NOTE_LINE_NUMBER (before) != NOTE_INSN_BASIC_BLOCK))
7124 break;
7125 prev = before;
7126 if (prev == bb->end)
7127 break;
7128 before = NEXT_INSN (before);
7131 insn = emit_insn_after (insn, prev);
7133 if (gcse_file)
7135 fprintf (gcse_file, "STORE_MOTION insert store at start of BB %d:\n",
7136 bb->index);
7137 print_inline_rtx (gcse_file, insn, 6);
7138 fprintf (gcse_file, "\n");
7142 /* This routine will insert a store on an edge. EXPR is the ldst entry for
7143 the memory reference, and E is the edge to insert it on. Returns nonzero
7144 if an edge insertion was performed. */
7146 static int
7147 insert_store (expr, e)
7148 struct ls_expr * expr;
7149 edge e;
7151 rtx reg, insn;
7152 basic_block bb;
7153 edge tmp;
7155 /* We did all the deletes before this insert, so if we didn't delete a
7156 store, then we haven't set the reaching reg yet either. */
7157 if (expr->reaching_reg == NULL_RTX)
7158 return 0;
7160 reg = expr->reaching_reg;
7161 insn = gen_move_insn (expr->pattern, reg);
7163 /* If we are inserting this expression on ALL predecessor edges of a BB,
7164 insert it at the start of the BB, and reset the insert bits on the other
7165 edges so we don't try to insert it there as well. */
7166 bb = e->dest;
7167 for (tmp = e->dest->pred; tmp ; tmp = tmp->pred_next)
7169 int index = EDGE_INDEX (edge_list, tmp->src, tmp->dest);
7170 if (index == EDGE_INDEX_NO_EDGE)
7171 abort ();
7172 if (! TEST_BIT (pre_insert_map[index], expr->index))
7173 break;
7176 /* If tmp is NULL, we found an insertion on every edge, blank the
7177 insertion vector for these edges, and insert at the start of the BB. */
7178 if (!tmp && bb != EXIT_BLOCK_PTR)
7180 for (tmp = e->dest->pred; tmp ; tmp = tmp->pred_next)
7182 int index = EDGE_INDEX (edge_list, tmp->src, tmp->dest);
7183 RESET_BIT (pre_insert_map[index], expr->index);
7185 insert_insn_start_bb (insn, bb);
7186 return 0;
7189 /* We can't insert on this edge, so we'll insert at the head of the
7190 successor's block. See Morgan, sec 10.5. */
7191 if ((e->flags & EDGE_ABNORMAL) == EDGE_ABNORMAL)
7193 insert_insn_start_bb (insn, bb);
7194 return 0;
7197 insert_insn_on_edge (insn, e);
7199 if (gcse_file)
7201 fprintf (gcse_file, "STORE_MOTION insert insn on edge (%d, %d):\n",
7202 e->src->index, e->dest->index);
7203 print_inline_rtx (gcse_file, insn, 6);
7204 fprintf (gcse_file, "\n");
7207 return 1;
7210 /* This routine will replace a store with a SET to a specified register. */
7212 static void
7213 replace_store_insn (reg, del, bb)
7214 rtx reg, del;
7215 basic_block bb;
7217 rtx insn;
7219 insn = gen_move_insn (reg, SET_SRC (PATTERN (del)));
7220 insn = emit_insn_after (insn, del);
7222 if (gcse_file)
7224 fprintf (gcse_file,
7225 "STORE_MOTION delete insn in BB %d:\n ", bb->index);
7226 print_inline_rtx (gcse_file, del, 6);
7227 fprintf (gcse_file, "\nSTORE MOTION replaced with insn:\n ");
7228 print_inline_rtx (gcse_file, insn, 6);
7229 fprintf (gcse_file, "\n");
7232 delete_insn (del);
7236 /* Delete a store, but copy the value that would have been stored into
7237 the reaching_reg for later storing. */
7239 static void
7240 delete_store (expr, bb)
7241 struct ls_expr * expr;
7242 basic_block bb;
7244 rtx reg, i, del;
7246 if (expr->reaching_reg == NULL_RTX)
7247 expr->reaching_reg = gen_reg_rtx (GET_MODE (expr->pattern));
7250 /* If there is more than 1 store, the earlier ones will be dead,
7251 but it doesn't hurt to replace them here. */
7252 reg = expr->reaching_reg;
7254 for (i = AVAIL_STORE_LIST (expr); i; i = XEXP (i, 1))
7256 del = XEXP (i, 0);
7257 if (BLOCK_FOR_INSN (del) == bb)
7259 /* We know there is only one since we deleted redundant
7260 ones during the available computation. */
7261 replace_store_insn (reg, del, bb);
7262 break;
7267 /* Free memory used by store motion. */
7269 static void
7270 free_store_memory ()
7272 free_ldst_mems ();
7274 if (ae_gen)
7275 sbitmap_vector_free (ae_gen);
7276 if (ae_kill)
7277 sbitmap_vector_free (ae_kill);
7278 if (transp)
7279 sbitmap_vector_free (transp);
7280 if (st_antloc)
7281 sbitmap_vector_free (st_antloc);
7282 if (pre_insert_map)
7283 sbitmap_vector_free (pre_insert_map);
7284 if (pre_delete_map)
7285 sbitmap_vector_free (pre_delete_map);
7286 if (reg_set_in_block)
7287 sbitmap_vector_free (reg_set_in_block);
7289 ae_gen = ae_kill = transp = st_antloc = NULL;
7290 pre_insert_map = pre_delete_map = reg_set_in_block = NULL;
7293 /* Perform store motion. Much like gcse, except we move expressions the
7294 other way by looking at the flowgraph in reverse. */
7296 static void
7297 store_motion ()
7299 basic_block bb;
7300 int x;
7301 struct ls_expr * ptr;
7302 int update_flow = 0;
7304 if (gcse_file)
7306 fprintf (gcse_file, "before store motion\n");
7307 print_rtl (gcse_file, get_insns ());
7311 init_alias_analysis ();
7313 /* Find all the stores that are live to the end of their block. */
7314 num_stores = compute_store_table ();
7315 if (num_stores == 0)
7317 sbitmap_vector_free (reg_set_in_block);
7318 end_alias_analysis ();
7319 return;
7322 /* Now compute what's actually available to move. */
7323 add_noreturn_fake_exit_edges ();
7324 build_store_vectors ();
7326 edge_list = pre_edge_rev_lcm (gcse_file, num_stores, transp, ae_gen,
7327 st_antloc, ae_kill, &pre_insert_map,
7328 &pre_delete_map);
7330 /* Now we want to insert the new stores which are going to be needed. */
7331 for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
7333 FOR_EACH_BB (bb)
7334 if (TEST_BIT (pre_delete_map[bb->index], ptr->index))
7335 delete_store (ptr, bb);
7337 for (x = 0; x < NUM_EDGES (edge_list); x++)
7338 if (TEST_BIT (pre_insert_map[x], ptr->index))
7339 update_flow |= insert_store (ptr, INDEX_EDGE (edge_list, x));
7342 if (update_flow)
7343 commit_edge_insertions ();
7345 free_store_memory ();
7346 free_edge_list (edge_list);
7347 remove_fake_edges ();
7348 end_alias_analysis ();
7351 #include "gt-gcse.h"