1 /* Global common subexpression elimination/Partial redundancy elimination
2 and global constant/copy propagation for GNU compiler.
3 Copyright (C) 1997, 1998, 1999, 2000, 2001, 2002, 2003
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 2, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the Free
20 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
21 02111-1307, USA. */
23 /* TODO
24 - reordering of memory allocation and freeing to be more space efficient
25 - do rough calc of how many regs are needed in each block, and a rough
26 calc of how many regs are available in each class and use that to
27 throttle back the code in cases where RTX_COST is minimal.
28 - a store to the same address as a load does not kill the load if the
29 source of the store is also the destination of the load. Handling this
30 allows more load motion, particularly out of loops.
31 - ability to realloc sbitmap vectors would allow one initial computation
32 of reg_set_in_block with only subsequent additions, rather than
33 recomputing it for each pass
37 /* References searched while implementing this.
39 Compilers Principles, Techniques and Tools
40 Aho, Sethi, Ullman
41 Addison-Wesley, 1988
43 Global Optimization by Suppression of Partial Redundancies
44 E. Morel, C. Renvoise
45      Communications of the ACM, Vol. 22, Num. 2, Feb. 1979
47 A Portable Machine-Independent Global Optimizer - Design and Measurements
48 Frederick Chow
49 Stanford Ph.D. thesis, Dec. 1983
51 A Fast Algorithm for Code Movement Optimization
52 D.M. Dhamdhere
53 SIGPLAN Notices, Vol. 23, Num. 10, Oct. 1988
55 A Solution to a Problem with Morel and Renvoise's
56 Global Optimization by Suppression of Partial Redundancies
57 K-H Drechsler, M.P. Stadel
58 ACM TOPLAS, Vol. 10, Num. 4, Oct. 1988
60 Practical Adaptation of the Global Optimization
61 Algorithm of Morel and Renvoise
62 D.M. Dhamdhere
63 ACM TOPLAS, Vol. 13, Num. 2. Apr. 1991
65 Efficiently Computing Static Single Assignment Form and the Control
66 Dependence Graph
67 R. Cytron, J. Ferrante, B.K. Rosen, M.N. Wegman, and F.K. Zadeck
68 ACM TOPLAS, Vol. 13, Num. 4, Oct. 1991
70 Lazy Code Motion
71 J. Knoop, O. Ruthing, B. Steffen
72 ACM SIGPLAN Notices Vol. 27, Num. 7, Jul. 1992, '92 Conference on PLDI
74 What's In a Region? Or Computing Control Dependence Regions in Near-Linear
75 Time for Reducible Flow Control
76 Thomas Ball
77 ACM Letters on Programming Languages and Systems,
78 Vol. 2, Num. 1-4, Mar-Dec 1993
80 An Efficient Representation for Sparse Sets
81 Preston Briggs, Linda Torczon
82 ACM Letters on Programming Languages and Systems,
83 Vol. 2, Num. 1-4, Mar-Dec 1993
85 A Variation of Knoop, Ruthing, and Steffen's Lazy Code Motion
86 K-H Drechsler, M.P. Stadel
87 ACM SIGPLAN Notices, Vol. 28, Num. 5, May 1993
89 Partial Dead Code Elimination
90 J. Knoop, O. Ruthing, B. Steffen
91 ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994
93 Effective Partial Redundancy Elimination
94 P. Briggs, K.D. Cooper
95 ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994
97 The Program Structure Tree: Computing Control Regions in Linear Time
98 R. Johnson, D. Pearson, K. Pingali
99 ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994
101 Optimal Code Motion: Theory and Practice
102 J. Knoop, O. Ruthing, B. Steffen
103 ACM TOPLAS, Vol. 16, Num. 4, Jul. 1994
105 The power of assignment motion
106 J. Knoop, O. Ruthing, B. Steffen
107 ACM SIGPLAN Notices Vol. 30, Num. 6, Jun. 1995, '95 Conference on PLDI
109 Global code motion / global value numbering
110 C. Click
111 ACM SIGPLAN Notices Vol. 30, Num. 6, Jun. 1995, '95 Conference on PLDI
113 Value Driven Redundancy Elimination
114 L.T. Simpson
115 Rice University Ph.D. thesis, Apr. 1996
117 Value Numbering
118 L.T. Simpson
119 Massively Scalar Compiler Project, Rice University, Sep. 1996
121 High Performance Compilers for Parallel Computing
122 Michael Wolfe
123 Addison-Wesley, 1996
125 Advanced Compiler Design and Implementation
126 Steven Muchnick
127 Morgan Kaufmann, 1997
129 Building an Optimizing Compiler
130 Robert Morgan
131 Digital Press, 1998
133 People wishing to speed up the code here should read:
134 Elimination Algorithms for Data Flow Analysis
135 B.G. Ryder, M.C. Paull
136 ACM Computing Surveys, Vol. 18, Num. 3, Sep. 1986
138 How to Analyze Large Programs Efficiently and Informatively
139 D.M. Dhamdhere, B.K. Rosen, F.K. Zadeck
140 ACM SIGPLAN Notices Vol. 27, Num. 7, Jul. 1992, '92 Conference on PLDI
142 People wishing to do something different can find various possibilities
143 in the above papers and elsewhere.
146 #include "config.h"
147 #include "system.h"
148 #include "coretypes.h"
149 #include "tm.h"
150 #include "toplev.h"
152 #include "rtl.h"
153 #include "tm_p.h"
154 #include "regs.h"
155 #include "hard-reg-set.h"
156 #include "flags.h"
157 #include "real.h"
158 #include "insn-config.h"
159 #include "recog.h"
160 #include "basic-block.h"
161 #include "output.h"
162 #include "function.h"
163 #include "expr.h"
164 #include "except.h"
165 #include "ggc.h"
166 #include "params.h"
167 #include "cselib.h"
169 #include "obstack.h"
171 /* Propagate flow information through back edges and thus enable PRE's
172 moving loop invariant calculations out of loops.
174 Originally this tended to create worse overall code, but several
175 improvements during the development of PRE seem to have made following
176 back edges generally a win.
178 Note much of the loop invariant code motion done here would normally
179 be done by loop.c, which has more heuristics for when to move invariants
180 out of loops. At some point we might need to move some of those
181 heuristics into gcse.c. */
183 /* We support GCSE via Partial Redundancy Elimination. PRE optimizations
184 are a superset of those done by GCSE.
186 We perform the following steps:
188 1) Compute basic block information.
190 2) Compute table of places where registers are set.
192 3) Perform copy/constant propagation.
194 4) Perform global cse.
196 5) Perform another pass of copy/constant propagation.
198 Two passes of copy/constant propagation are done because the first one
199 enables more GCSE and the second one helps to clean up the copies that
200 GCSE creates. This is needed more for PRE than for Classic because Classic
201 GCSE will try to use an existing register containing the common
202 subexpression rather than create a new one. This is harder to do for PRE
203 because of the code motion (which Classic GCSE doesn't do).
205 Expressions we are interested in GCSE-ing are of the form
206 (set (pseudo-reg) (expression)).
207 Function want_to_gcse_p says what these are.
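   For instance, (set (reg:SI 100) (plus:SI (reg:SI 101) (reg:SI 102))) is a
   typical candidate (an illustrative example, with arbitrary pseudo-register
   numbers), whereas a set whose source is a bare register or a constant is
   rejected by want_to_gcse_p.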
209 PRE handles moving invariant expressions out of loops (by treating them as
210 partially redundant).
212 Eventually it would be nice to replace cse.c/gcse.c with SSA (static single
213 assignment) based GVN (global value numbering). L. T. Simpson's paper
214 (Rice University) on value numbering is a useful reference for this.
216 **********************
218 We used to support multiple passes but there are diminishing returns in
219 doing so. The first pass usually makes 90% of the changes that are doable.
220 A second pass can make a few more changes made possible by the first pass.
221 Experiments show any further passes don't make enough changes to justify
222 the expense.
224 A study of spec92 using an unlimited number of passes:
225 [1 pass] = 1208 substitutions, [2] = 577, [3] = 202, [4] = 192, [5] = 83,
226 [6] = 34, [7] = 17, [8] = 9, [9] = 4, [10] = 4, [11] = 2,
227 [12] = 2, [13] = 1, [15] = 1, [16] = 2, [41] = 1
229 It was found doing copy propagation between each pass enables further
230 substitutions.
232 PRE is quite expensive in complicated functions because the DFA can take
233    a while to converge.  Hence we only perform one pass.  The parameter max-gcse-passes can
234 be modified if one wants to experiment.
236 **********************
238 The steps for PRE are:
240 1) Build the hash table of expressions we wish to GCSE (expr_hash_table).
242 2) Perform the data flow analysis for PRE.
244 3) Delete the redundant instructions
246 4) Insert the required copies [if any] that make the partially
247 redundant instructions fully redundant.
249 5) For other reaching expressions, insert an instruction to copy the value
250 to a newly created pseudo that will reach the redundant instruction.
252 The deletion is done first so that when we do insertions we
253    know which pseudo reg to use; a simplified sketch of these steps follows this comment.
255 Various papers have argued that PRE DFA is expensive (O(n^2)) and others
256 argue it is not. The number of iterations for the algorithm to converge
257 is typically 2-4 so I don't view it as that expensive (relatively speaking).
259 PRE GCSE depends heavily on the second CSE pass to clean up the copies
260 we create. To make an expression reach the place where it's redundant,
261 the result of the expression is copied to a new register, and the redundant
262 expression is deleted by replacing it with this new register. Classic GCSE
263 doesn't have this problem as much as it computes the reaching defs of
264 each register in each block and thus can try to use an existing register.
266 **********************
268    A fair bit of simplicity is gained by writing small functions for simple
269    tasks, even when the function is only called in one place. This may
270    measurably slow things down [or may not] by creating more function call
271    overhead than is necessary. The source is laid out so that it's trivial
272    to make the affected functions inline, so that one can measure what speed
273    up, if any, can be achieved; later, when things settle, things can
274    be rearranged.
276 Help stamp out big monolithic functions! */
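/* Editorial illustration, not part of the original gcse.c: a simplified
   sketch of the PRE steps listed in the comment above, written in terms of
   the helpers declared later in this file.  The real driver is
   one_pre_gcse_pass / pre_gcse below; this sketch omits allocation of the
   dataflow bitmaps, dump output, edge-based insertion and all bookkeeping,
   so it is kept inside #if 0.  */
#if 0
static int
pre_steps_sketch ()
{
  int changed;

  /* Step 1: build the hash table of candidate expressions.  */
  alloc_hash_table (max_cuid, &expr_hash_table, 0);
  compute_hash_table (&expr_hash_table);

  /* Step 2: the PRE data flow analysis.  */
  compute_pre_data ();

  /* Step 3: delete redundant computations first, so that the insertion
     steps know which reaching register each expression should use.  */
  changed = pre_delete ();

  /* Steps 4 and 5: insert the copies that make the remaining partially
     redundant computations fully redundant.  */
  pre_insert_copies ();

  free_hash_table (&expr_hash_table);
  return changed;
}
#endif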
278 /* GCSE global vars. */
280 /* -dG dump file. */
281 static FILE *gcse_file;
283 /* Note whether or not we should run jump optimization after gcse. We
284 want to do this for two cases.
286 * If we changed any jumps via cprop.
288 * If we added any labels via edge splitting. */
290 static int run_jump_opt_after_gcse;
292 /* Bitmaps are normally not included in debugging dumps.
293 However it's useful to be able to print them from GDB.
294 We could create special functions for this, but it's simpler to
295 just allow passing stderr to the dump_foo fns. Since stderr can
296 be a macro, we store a copy here. */
297 static FILE *debug_stderr;
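/* Editorial note: for example, the expression hash table can be printed
   from the debugger with something like
       (gdb) call dump_hash_table (debug_stderr, "expr", &expr_hash_table)
   which is why a plain FILE * copy of stderr is kept here.  */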
299 /* An obstack for our working variables. */
300 static struct obstack gcse_obstack;
302 struct reg_use {rtx reg_rtx; };
304 /* Hash table of expressions. */
306 struct expr
308 /* The expression (SET_SRC for expressions, PATTERN for assignments). */
309 rtx expr;
310 /* Index in the available expression bitmaps. */
311 int bitmap_index;
312 /* Next entry with the same hash. */
313 struct expr *next_same_hash;
314 /* List of anticipatable occurrences in basic blocks in the function.
315 An "anticipatable occurrence" is one that is the first occurrence in the
316 basic block, the operands are not modified in the basic block prior
317 to the occurrence and the output is not used between the start of
318 the block and the occurrence. */
319 struct occr *antic_occr;
320   /* List of available occurrences in basic blocks in the function.
321 An "available occurrence" is one that is the last occurrence in the
322 basic block and the operands are not modified by following statements in
323 the basic block [including this insn]. */
324 struct occr *avail_occr;
325 /* Non-null if the computation is PRE redundant.
326 The value is the newly created pseudo-reg to record a copy of the
327 expression in all the places that reach the redundant copy. */
328 rtx reaching_reg;
331 /* Occurrence of an expression.
332 There is one per basic block. If a pattern appears more than once the
333 last appearance is used [or first for anticipatable expressions]. */
335 struct occr
337 /* Next occurrence of this expression. */
338 struct occr *next;
339 /* The insn that computes the expression. */
340 rtx insn;
341 /* Nonzero if this [anticipatable] occurrence has been deleted. */
342 char deleted_p;
343 /* Nonzero if this [available] occurrence has been copied to
344 reaching_reg. */
345 /* ??? This is mutually exclusive with deleted_p, so they could share
346 the same byte. */
347 char copied_p;
350 /* Expression and copy propagation hash tables.
351 Each hash table is an array of buckets.
352 ??? It is known that if it were an array of entries, structure elements
353 `next_same_hash' and `bitmap_index' wouldn't be necessary. However, it is
354 not clear whether in the final analysis a sufficient amount of memory would
355 be saved as the size of the available expression bitmaps would be larger
356 [one could build a mapping table without holes afterwards though].
357 Someday I'll perform the computation and figure it out. */
359 struct hash_table
361 /* The table itself.
362      This is an array of `size' elements.  */
363 struct expr **table;
365 /* Size of the hash table, in elements. */
366 unsigned int size;
368 /* Number of hash table elements. */
369 unsigned int n_elems;
372   /* Whether the table is the expression or the copy propagation one.  */
372 int set_p;
375 /* Expression hash table. */
376 static struct hash_table expr_hash_table;
378 /* Copy propagation hash table. */
379 static struct hash_table set_hash_table;
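/* Editorial sketch, not part of the original source: roughly how a bucket
   chain in one of these tables is searched (essentially what lookup_expr,
   declared below, does).  Kept inside #if 0 because the prototypes it uses
   only appear further down.  */
#if 0
static struct expr *
lookup_sketch (pat, table)
     rtx pat;
     struct hash_table *table;
{
  int do_not_record;
  unsigned int hash = hash_expr (pat, GET_MODE (pat), &do_not_record,
				 table->size);
  struct expr *expr;

  for (expr = table->table[hash]; expr != NULL; expr = expr->next_same_hash)
    if (expr_equiv_p (expr->expr, pat))
      return expr;
  return NULL;
}
#endif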
381 /* Mapping of uids to cuids.
382 Only real insns get cuids. */
383 static int *uid_cuid;
385 /* Highest UID in UID_CUID. */
386 static int max_uid;
388 /* Get the cuid of an insn. */
389 #ifdef ENABLE_CHECKING
390 #define INSN_CUID(INSN) (INSN_UID (INSN) > max_uid ? (abort (), 0) : uid_cuid[INSN_UID (INSN)])
391 #else
392 #define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])
393 #endif
395 /* Number of cuids. */
396 static int max_cuid;
398 /* Mapping of cuids to insns. */
399 static rtx *cuid_insn;
401 /* Get insn from cuid. */
402 #define CUID_INSN(CUID) (cuid_insn[CUID])
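/* Editorial illustration, not part of the original file: once
   alloc_gcse_mem has filled in uid_cuid and cuid_insn, the two macros
   above round-trip any real insn.  Kept inside #if 0 since it is only
   meant as a sketch.  */
#if 0
static rtx
cuid_round_trip_sketch (insn)
     rtx insn;
{
  int cuid = INSN_CUID (insn);   /* dense, gap-free index of a real insn */
  return CUID_INSN (cuid);       /* maps back to the same insn */
}
#endif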
404 /* Maximum register number in function prior to doing gcse + 1.
405 Registers created during this pass have regno >= max_gcse_regno.
406 This is named with "gcse" to not collide with global of same name. */
407 static unsigned int max_gcse_regno;
409 /* Table of registers that are modified.
411 For each register, each element is a list of places where the pseudo-reg
412 is set.
414 For simplicity, GCSE is done on sets of pseudo-regs only. PRE GCSE only
415 requires knowledge of which blocks kill which regs [and thus could use
416 a bitmap instead of the lists `reg_set_table' uses].
418    `reg_set_table' could be turned into an array of bitmaps (num-bbs x
419 num-regs) [however perhaps it may be useful to keep the data as is]. One
420 advantage of recording things this way is that `reg_set_table' is fairly
421 sparse with respect to pseudo regs but for hard regs could be fairly dense
422 [relatively speaking]. And recording sets of pseudo-regs in lists speeds
423 up functions like compute_transp since in the case of pseudo-regs we only
424 need to iterate over the number of times a pseudo-reg is set, not over the
425 number of basic blocks [clearly there is a bit of a slow down in the cases
426 where a pseudo is set more than once in a block, however it is believed
427 that the net effect is to speed things up]. This isn't done for hard-regs
428 because recording call-clobbered hard-regs in `reg_set_table' at each
429 function call can consume a fair bit of memory, and iterating over
430 hard-regs stored this way in compute_transp will be more expensive. */
432 typedef struct reg_set
434 /* The next setting of this register. */
435 struct reg_set *next;
436 /* The insn where it was set. */
437 rtx insn;
438 } reg_set;
440 static reg_set **reg_set_table;
442 /* Size of `reg_set_table'.
443 The table starts out at max_gcse_regno + slop, and is enlarged as
444 necessary. */
445 static int reg_set_table_size;
447 /* Amount to grow `reg_set_table' by when it's full. */
448 #define REG_SET_TABLE_SLOP 100
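/* Editorial sketch, not part of the original source: the per-pseudo list
   above is walked like this; compute_transp does essentially the same walk
   to visit every insn that sets REGNO.  Kept inside #if 0 as it is only an
   illustration.  */
#if 0
static int
count_reg_sets_sketch (regno)
     unsigned int regno;
{
  int n = 0;
  reg_set *r;

  for (r = reg_set_table[regno]; r != NULL; r = r->next)
    n++;
  return n;
}
#endif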
450 /* This is a list of expressions which are MEMs and will be used by load
451 or store motion.
452 Load motion tracks MEMs which aren't killed by
453    anything except themselves (i.e., loads and stores to a single location).
454    We can then allow movement of these MEM refs with a little special
455    allowance (all stores copy the same value to the reaching reg used
456 for the loads). This means all values used to store into memory must have
457 no side effects so we can re-issue the setter value.
458 Store Motion uses this structure as an expression table to track stores
459 which look interesting, and might be moveable towards the exit block. */
461 struct ls_expr
463 struct expr * expr; /* Gcse expression reference for LM. */
464 rtx pattern; /* Pattern of this mem. */
465 rtx pattern_regs; /* List of registers mentioned by the mem. */
466 rtx loads; /* INSN list of loads seen. */
467 rtx stores; /* INSN list of stores seen. */
468 struct ls_expr * next; /* Next in the list. */
469 int invalid; /* Invalid for some reason. */
470 int index; /* If it maps to a bitmap index. */
471 int hash_index; /* Index when in a hash table. */
472 rtx reaching_reg; /* Register to use when re-writing. */
475 /* Array of implicit set patterns indexed by basic block index. */
476 static rtx *implicit_sets;
478 /* Head of the list of load/store memory refs. */
479 static struct ls_expr * pre_ldst_mems = NULL;
481 /* Bitmap containing one bit for each register in the program.
482 Used when performing GCSE to track which registers have been set since
483 the start of the basic block. */
484 static regset reg_set_bitmap;
486 /* For each block, a bitmap of registers set in the block.
487 This is used by expr_killed_p and compute_transp.
488 It is computed during hash table computation and not by compute_sets
489 as it includes registers added since the last pass (or between cprop and
490 gcse) and it's currently not easy to realloc sbitmap vectors. */
491 static sbitmap *reg_set_in_block;
493 /* Array, indexed by basic block number for a list of insns which modify
494 memory within that block. */
495 static rtx * modify_mem_list;
496 bitmap modify_mem_list_set;
498 /* This array parallels modify_mem_list, but is kept canonicalized. */
499 static rtx * canon_modify_mem_list;
500 bitmap canon_modify_mem_list_set;
501 /* Various variables for statistics gathering. */
503 /* Memory used in a pass.
504 This isn't intended to be absolutely precise. Its intent is only
505 to keep an eye on memory usage. */
506 static int bytes_used;
508 /* GCSE substitutions made. */
509 static int gcse_subst_count;
510 /* Number of copy instructions created. */
511 static int gcse_create_count;
512 /* Number of constants propagated. */
513 static int const_prop_count;
514 /* Number of copies propagated.  */
515 static int copy_prop_count;
517 /* These variables are used by classic GCSE.
518 Normally they'd be defined a bit later, but `rd_gen' needs to
519 be declared sooner. */
521 /* Each block has a bitmap of each type.
522    The length of each block's bitmap is:
524 max_cuid - for reaching definitions
525 n_exprs - for available expressions
527    Thus we view the bitmaps as two-dimensional arrays, i.e.
528 rd_kill[block_num][cuid_num]
529 ae_kill[block_num][expr_num] */
531 /* For reaching defs.  */
532 static sbitmap *rd_kill, *rd_gen, *reaching_defs, *rd_out;
534 /* For available exprs.  */
535 static sbitmap *ae_kill, *ae_gen, *ae_in, *ae_out;
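/* Editorial note: with this layout, asking whether expression E is killed
   in block BB is simply TEST_BIT (ae_kill[BB->index], E->bitmap_index),
   and the reaching-def bitmaps are indexed the same way, using INSN_CUID
   of the defining insn.  */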
537 /* Objects of this type are passed around by the null-pointer check
538 removal routines. */
539 struct null_pointer_info
541 /* The basic block being processed. */
542 basic_block current_block;
543 /* The first register to be handled in this pass. */
544 unsigned int min_reg;
545 /* One greater than the last register to be handled in this pass. */
546 unsigned int max_reg;
547 sbitmap *nonnull_local;
548 sbitmap *nonnull_killed;
551 static void compute_can_copy PARAMS ((void));
552 static char *gmalloc PARAMS ((unsigned int));
553 static char *grealloc PARAMS ((char *, unsigned int));
554 static char *gcse_alloc PARAMS ((unsigned long));
555 static void alloc_gcse_mem PARAMS ((rtx));
556 static void free_gcse_mem PARAMS ((void));
557 static void alloc_reg_set_mem PARAMS ((int));
558 static void free_reg_set_mem PARAMS ((void));
559 static int get_bitmap_width PARAMS ((int, int, int));
560 static void record_one_set PARAMS ((int, rtx));
561 static void record_set_info PARAMS ((rtx, rtx, void *));
562 static void compute_sets PARAMS ((rtx));
563 static void hash_scan_insn PARAMS ((rtx, struct hash_table *, int));
564 static void hash_scan_set PARAMS ((rtx, rtx, struct hash_table *));
565 static void hash_scan_clobber PARAMS ((rtx, rtx, struct hash_table *));
566 static void hash_scan_call PARAMS ((rtx, rtx, struct hash_table *));
567 static int want_to_gcse_p PARAMS ((rtx));
568 static bool gcse_constant_p PARAMS ((rtx));
569 static int oprs_unchanged_p PARAMS ((rtx, rtx, int));
570 static int oprs_anticipatable_p PARAMS ((rtx, rtx));
571 static int oprs_available_p PARAMS ((rtx, rtx));
572 static void insert_expr_in_table PARAMS ((rtx, enum machine_mode, rtx,
573 int, int, struct hash_table *));
574 static void insert_set_in_table PARAMS ((rtx, rtx, struct hash_table *));
575 static unsigned int hash_expr PARAMS ((rtx, enum machine_mode, int *, int));
576 static unsigned int hash_expr_1 PARAMS ((rtx, enum machine_mode, int *));
577 static unsigned int hash_string_1 PARAMS ((const char *));
578 static unsigned int hash_set PARAMS ((int, int));
579 static int expr_equiv_p PARAMS ((rtx, rtx));
580 static void record_last_reg_set_info PARAMS ((rtx, int));
581 static void record_last_mem_set_info PARAMS ((rtx));
582 static void record_last_set_info PARAMS ((rtx, rtx, void *));
583 static void compute_hash_table PARAMS ((struct hash_table *));
584 static void alloc_hash_table PARAMS ((int, struct hash_table *, int));
585 static void free_hash_table PARAMS ((struct hash_table *));
586 static void compute_hash_table_work PARAMS ((struct hash_table *));
587 static void dump_hash_table PARAMS ((FILE *, const char *,
588 struct hash_table *));
589 static struct expr *lookup_expr PARAMS ((rtx, struct hash_table *));
590 static struct expr *lookup_set PARAMS ((unsigned int, struct hash_table *));
591 static struct expr *next_set PARAMS ((unsigned int, struct expr *));
592 static void reset_opr_set_tables PARAMS ((void));
593 static int oprs_not_set_p PARAMS ((rtx, rtx));
594 static void mark_call PARAMS ((rtx));
595 static void mark_set PARAMS ((rtx, rtx));
596 static void mark_clobber PARAMS ((rtx, rtx));
597 static void mark_oprs_set PARAMS ((rtx));
598 static void alloc_cprop_mem PARAMS ((int, int));
599 static void free_cprop_mem PARAMS ((void));
600 static void compute_transp PARAMS ((rtx, int, sbitmap *, int));
601 static void compute_transpout PARAMS ((void));
602 static void compute_local_properties PARAMS ((sbitmap *, sbitmap *, sbitmap *,
603 struct hash_table *));
604 static void compute_cprop_data PARAMS ((void));
605 static void find_used_regs PARAMS ((rtx *, void *));
606 static int try_replace_reg PARAMS ((rtx, rtx, rtx));
607 static struct expr *find_avail_set PARAMS ((int, rtx));
608 static int cprop_jump PARAMS ((basic_block, rtx, rtx, rtx, rtx));
609 static void mems_conflict_for_gcse_p PARAMS ((rtx, rtx, void *));
610 static int load_killed_in_block_p PARAMS ((basic_block, int, rtx, int));
611 static void canon_list_insert PARAMS ((rtx, rtx, void *));
612 static int cprop_insn PARAMS ((rtx, int));
613 static int cprop PARAMS ((int));
614 static rtx fis_get_condition PARAMS ((rtx));
615 static void find_implicit_sets PARAMS ((void));
616 static int one_cprop_pass PARAMS ((int, int, int));
617 static bool constprop_register PARAMS ((rtx, rtx, rtx, int));
618 static struct expr *find_bypass_set PARAMS ((int, int));
619 static bool reg_killed_on_edge PARAMS ((rtx, edge));
620 static int bypass_block PARAMS ((basic_block, rtx, rtx));
621 static int bypass_conditional_jumps PARAMS ((void));
622 static void alloc_pre_mem PARAMS ((int, int));
623 static void free_pre_mem PARAMS ((void));
624 static void compute_pre_data PARAMS ((void));
625 static int pre_expr_reaches_here_p PARAMS ((basic_block, struct expr *,
626 basic_block));
627 static void insert_insn_end_bb PARAMS ((struct expr *, basic_block, int));
628 static void pre_insert_copy_insn PARAMS ((struct expr *, rtx));
629 static void pre_insert_copies PARAMS ((void));
630 static int pre_delete PARAMS ((void));
631 static int pre_gcse PARAMS ((void));
632 static int one_pre_gcse_pass PARAMS ((int));
633 static void add_label_notes PARAMS ((rtx, rtx));
634 static void alloc_code_hoist_mem PARAMS ((int, int));
635 static void free_code_hoist_mem PARAMS ((void));
636 static void compute_code_hoist_vbeinout PARAMS ((void));
637 static void compute_code_hoist_data PARAMS ((void));
638 static int hoist_expr_reaches_here_p PARAMS ((basic_block, int, basic_block,
639 char *));
640 static void hoist_code PARAMS ((void));
641 static int one_code_hoisting_pass PARAMS ((void));
642 static void alloc_rd_mem PARAMS ((int, int));
643 static void free_rd_mem PARAMS ((void));
644 static void handle_rd_kill_set PARAMS ((rtx, int, basic_block));
645 static void compute_kill_rd PARAMS ((void));
646 static void compute_rd PARAMS ((void));
647 static void alloc_avail_expr_mem PARAMS ((int, int));
648 static void free_avail_expr_mem PARAMS ((void));
649 static void compute_ae_gen PARAMS ((struct hash_table *));
650 static int expr_killed_p PARAMS ((rtx, basic_block));
651 static void compute_ae_kill PARAMS ((sbitmap *, sbitmap *, struct hash_table *));
652 static int expr_reaches_here_p PARAMS ((struct occr *, struct expr *,
653 basic_block, int));
654 static rtx computing_insn PARAMS ((struct expr *, rtx));
655 static int def_reaches_here_p PARAMS ((rtx, rtx));
656 static int can_disregard_other_sets PARAMS ((struct reg_set **, rtx, int));
657 static int handle_avail_expr PARAMS ((rtx, struct expr *));
658 static int classic_gcse PARAMS ((void));
659 static int one_classic_gcse_pass PARAMS ((int));
660 static void invalidate_nonnull_info PARAMS ((rtx, rtx, void *));
661 static int delete_null_pointer_checks_1 PARAMS ((unsigned int *,
662 sbitmap *, sbitmap *,
663 struct null_pointer_info *));
664 static rtx process_insert_insn PARAMS ((struct expr *));
665 static int pre_edge_insert PARAMS ((struct edge_list *, struct expr **));
666 static int expr_reaches_here_p_work PARAMS ((struct occr *, struct expr *,
667 basic_block, int, char *));
668 static int pre_expr_reaches_here_p_work PARAMS ((basic_block, struct expr *,
669 basic_block, char *));
670 static struct ls_expr * ldst_entry PARAMS ((rtx));
671 static void free_ldst_entry PARAMS ((struct ls_expr *));
672 static void free_ldst_mems PARAMS ((void));
673 static void print_ldst_list PARAMS ((FILE *));
674 static struct ls_expr * find_rtx_in_ldst PARAMS ((rtx));
675 static int enumerate_ldsts PARAMS ((void));
676 static inline struct ls_expr * first_ls_expr PARAMS ((void));
677 static inline struct ls_expr * next_ls_expr PARAMS ((struct ls_expr *));
678 static int simple_mem PARAMS ((rtx));
679 static void invalidate_any_buried_refs PARAMS ((rtx));
680 static void compute_ld_motion_mems PARAMS ((void));
681 static void trim_ld_motion_mems PARAMS ((void));
682 static void update_ld_motion_stores PARAMS ((struct expr *));
683 static void reg_set_info PARAMS ((rtx, rtx, void *));
684 static bool store_ops_ok PARAMS ((rtx, int *));
685 static rtx extract_mentioned_regs PARAMS ((rtx));
686 static rtx extract_mentioned_regs_helper PARAMS ((rtx, rtx));
687 static void find_moveable_store PARAMS ((rtx, int *, int *));
688 static int compute_store_table PARAMS ((void));
689 static bool load_kills_store PARAMS ((rtx, rtx));
690 static bool find_loads PARAMS ((rtx, rtx));
691 static bool store_killed_in_insn PARAMS ((rtx, rtx, rtx));
692 static bool store_killed_after PARAMS ((rtx, rtx, rtx, basic_block,
693 int *, rtx *));
694 static bool store_killed_before PARAMS ((rtx, rtx, rtx, basic_block,
695 int *));
696 static void build_store_vectors PARAMS ((void));
697 static void insert_insn_start_bb PARAMS ((rtx, basic_block));
698 static int insert_store PARAMS ((struct ls_expr *, edge));
699 static void replace_store_insn PARAMS ((rtx, rtx, basic_block));
700 static void delete_store PARAMS ((struct ls_expr *,
701 basic_block));
702 static void free_store_memory PARAMS ((void));
703 static void store_motion PARAMS ((void));
704 static void free_insn_expr_list_list PARAMS ((rtx *));
705 static void clear_modify_mem_tables PARAMS ((void));
706 static void free_modify_mem_tables PARAMS ((void));
707 static rtx gcse_emit_move_after PARAMS ((rtx, rtx, rtx));
708 static void local_cprop_find_used_regs PARAMS ((rtx *, void *));
709 static bool do_local_cprop PARAMS ((rtx, rtx, int, rtx*));
710 static bool adjust_libcall_notes PARAMS ((rtx, rtx, rtx, rtx*));
711 static void local_cprop_pass PARAMS ((int));
713 /* Entry point for global common subexpression elimination.
714 F is the first instruction in the function. */
717 gcse_main (f, file)
718 rtx f;
719 FILE *file;
721 int changed, pass;
722 /* Bytes used at start of pass. */
723 int initial_bytes_used;
724 /* Maximum number of bytes used by a pass. */
725 int max_pass_bytes;
726 /* Point to release obstack data from for each pass. */
727 char *gcse_obstack_bottom;
729 /* We do not construct an accurate cfg in functions which call
730 setjmp, so just punt to be safe. */
731 if (current_function_calls_setjmp)
732 return 0;
734 /* Assume that we do not need to run jump optimizations after gcse. */
735 run_jump_opt_after_gcse = 0;
737 /* For calling dump_foo fns from gdb. */
738 debug_stderr = stderr;
739 gcse_file = file;
741 /* Identify the basic block information for this function, including
742 successors and predecessors. */
743 max_gcse_regno = max_reg_num ();
745 if (file)
746 dump_flow_info (file);
748 /* Return if there's nothing to do. */
749 if (n_basic_blocks <= 1)
750 return 0;
752 /* Trying to perform global optimizations on flow graphs which have
753 a high connectivity will take a long time and is unlikely to be
754 particularly useful.
756 In normal circumstances a cfg should have about twice as many edges
757 as blocks. But we do not want to punish small functions which have
758 a couple switch statements. So we require a relatively large number
759 of basic blocks and the ratio of edges to blocks to be high. */
760 if (n_basic_blocks > 1000 && n_edges / n_basic_blocks >= 20)
762 if (warn_disabled_optimization)
763 warning ("GCSE disabled: %d > 1000 basic blocks and %d >= 20 edges/basic block",
764 n_basic_blocks, n_edges / n_basic_blocks);
765 return 0;
768 /* If allocating memory for the cprop bitmap would take up too much
769 storage it's better just to disable the optimization. */
770 if ((n_basic_blocks
771 * SBITMAP_SET_SIZE (max_gcse_regno)
772 * sizeof (SBITMAP_ELT_TYPE)) > MAX_GCSE_MEMORY)
774 if (warn_disabled_optimization)
775 warning ("GCSE disabled: %d basic blocks and %d registers",
776 n_basic_blocks, max_gcse_regno);
778 return 0;
781 gcc_obstack_init (&gcse_obstack);
782 bytes_used = 0;
784 /* We need alias. */
785 init_alias_analysis ();
786 /* Record where pseudo-registers are set. This data is kept accurate
787 during each pass. ??? We could also record hard-reg information here
788 [since it's unchanging], however it is currently done during hash table
789 computation.
791 It may be tempting to compute MEM set information here too, but MEM sets
792 will be subject to code motion one day and thus we need to compute
793 information about memory sets when we build the hash tables. */
795 alloc_reg_set_mem (max_gcse_regno);
796 compute_sets (f);
798 pass = 0;
799 initial_bytes_used = bytes_used;
800 max_pass_bytes = 0;
801 gcse_obstack_bottom = gcse_alloc (1);
802 changed = 1;
803 while (changed && pass < MAX_GCSE_PASSES)
805 changed = 0;
806 if (file)
807 fprintf (file, "GCSE pass %d\n\n", pass + 1);
809 /* Initialize bytes_used to the space for the pred/succ lists,
810 and the reg_set_table data. */
811 bytes_used = initial_bytes_used;
813 /* Each pass may create new registers, so recalculate each time. */
814 max_gcse_regno = max_reg_num ();
816 alloc_gcse_mem (f);
818 /* Don't allow constant propagation to modify jumps
819 during this pass. */
820 changed = one_cprop_pass (pass + 1, 0, 0);
822 if (optimize_size)
823 changed |= one_classic_gcse_pass (pass + 1);
824 else
826 changed |= one_pre_gcse_pass (pass + 1);
827 /* We may have just created new basic blocks. Release and
828 recompute various things which are sized on the number of
829 basic blocks. */
830 if (changed)
832 free_modify_mem_tables ();
833 modify_mem_list
834 = (rtx *) gmalloc (last_basic_block * sizeof (rtx));
835 canon_modify_mem_list
836 = (rtx *) gmalloc (last_basic_block * sizeof (rtx));
837 memset ((char *) modify_mem_list, 0, last_basic_block * sizeof (rtx));
838 memset ((char *) canon_modify_mem_list, 0, last_basic_block * sizeof (rtx));
840 free_reg_set_mem ();
841 alloc_reg_set_mem (max_reg_num ());
842 compute_sets (f);
843 run_jump_opt_after_gcse = 1;
846 if (max_pass_bytes < bytes_used)
847 max_pass_bytes = bytes_used;
849 /* Free up memory, then reallocate for code hoisting. We can
850 not re-use the existing allocated memory because the tables
851 will not have info for the insns or registers created by
852 partial redundancy elimination. */
853 free_gcse_mem ();
855       /* It does not make sense to run code hoisting unless we are optimizing
856 for code size -- it rarely makes programs faster, and can make
857 them bigger if we did partial redundancy elimination (when optimizing
858 for space, we use a classic gcse algorithm instead of partial
859 redundancy algorithms). */
860 if (optimize_size)
862 max_gcse_regno = max_reg_num ();
863 alloc_gcse_mem (f);
864 changed |= one_code_hoisting_pass ();
865 free_gcse_mem ();
867 if (max_pass_bytes < bytes_used)
868 max_pass_bytes = bytes_used;
871 if (file)
873 fprintf (file, "\n");
874 fflush (file);
877 obstack_free (&gcse_obstack, gcse_obstack_bottom);
878 pass++;
881 /* Do one last pass of copy propagation, including cprop into
882 conditional jumps. */
884 max_gcse_regno = max_reg_num ();
885 alloc_gcse_mem (f);
886 /* This time, go ahead and allow cprop to alter jumps. */
887 one_cprop_pass (pass + 1, 1, 0);
888 free_gcse_mem ();
890 if (file)
892 fprintf (file, "GCSE of %s: %d basic blocks, ",
893 current_function_name, n_basic_blocks);
894 fprintf (file, "%d pass%s, %d bytes\n\n",
895 pass, pass > 1 ? "es" : "", max_pass_bytes);
898 obstack_free (&gcse_obstack, NULL);
899 free_reg_set_mem ();
900 /* We are finished with alias. */
901 end_alias_analysis ();
902 allocate_reg_info (max_reg_num (), FALSE, FALSE);
904 if (!optimize_size && flag_gcse_sm)
905 store_motion ();
907 /* Record where pseudo-registers are set. */
908 return run_jump_opt_after_gcse;
911 /* Misc. utilities. */
913 /* Nonzero for each mode that supports (set (reg) (reg)).
914 This is trivially true for integer and floating point values.
915 It may or may not be true for condition codes. */
916 static char can_copy[(int) NUM_MACHINE_MODES];
918 /* Compute which modes support reg/reg copy operations. */
920 static void
921 compute_can_copy ()
923 int i;
924 #ifndef AVOID_CCMODE_COPIES
925 rtx reg, insn;
926 #endif
927 memset (can_copy, 0, NUM_MACHINE_MODES);
929 start_sequence ();
930 for (i = 0; i < NUM_MACHINE_MODES; i++)
931 if (GET_MODE_CLASS (i) == MODE_CC)
933 #ifdef AVOID_CCMODE_COPIES
934 can_copy[i] = 0;
935 #else
936 reg = gen_rtx_REG ((enum machine_mode) i, LAST_VIRTUAL_REGISTER + 1);
937 insn = emit_insn (gen_rtx_SET (VOIDmode, reg, reg));
938 if (recog (PATTERN (insn), insn, NULL) >= 0)
939 can_copy[i] = 1;
940 #endif
942 else
943 can_copy[i] = 1;
945 end_sequence ();
948 /* Returns whether the mode supports reg/reg copy operations. */
950 bool
951 can_copy_p (mode)
952 enum machine_mode mode;
954 static bool can_copy_init_p = false;
956 if (! can_copy_init_p)
958 compute_can_copy ();
959 can_copy_init_p = true;
962 return can_copy[mode] != 0;
965 /* Cover function to xmalloc to record bytes allocated. */
967 static char *
968 gmalloc (size)
969 unsigned int size;
971 bytes_used += size;
972 return xmalloc (size);
975 /* Cover function to xrealloc.
976 We don't record the additional size since we don't know it.
977 It won't affect memory usage stats much anyway. */
979 static char *
980 grealloc (ptr, size)
981 char *ptr;
982 unsigned int size;
984 return xrealloc (ptr, size);
987 /* Cover function to obstack_alloc. */
989 static char *
990 gcse_alloc (size)
991 unsigned long size;
993 bytes_used += size;
994 return (char *) obstack_alloc (&gcse_obstack, size);
997 /* Allocate memory for the cuid mapping array,
998 and reg/memory set tracking tables.
1000 This is called at the start of each pass. */
1002 static void
1003 alloc_gcse_mem (f)
1004 rtx f;
1006 int i, n;
1007 rtx insn;
1009 /* Find the largest UID and create a mapping from UIDs to CUIDs.
1010 CUIDs are like UIDs except they increase monotonically, have no gaps,
1011 and only apply to real insns. */
1013 max_uid = get_max_uid ();
1014 n = (max_uid + 1) * sizeof (int);
1015 uid_cuid = (int *) gmalloc (n);
1016 memset ((char *) uid_cuid, 0, n);
1017 for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
1019 if (INSN_P (insn))
1020 uid_cuid[INSN_UID (insn)] = i++;
1021 else
1022 uid_cuid[INSN_UID (insn)] = i;
1025 /* Create a table mapping cuids to insns. */
1027 max_cuid = i;
1028 n = (max_cuid + 1) * sizeof (rtx);
1029 cuid_insn = (rtx *) gmalloc (n);
1030 memset ((char *) cuid_insn, 0, n);
1031 for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
1032 if (INSN_P (insn))
1033 CUID_INSN (i++) = insn;
1035 /* Allocate vars to track sets of regs. */
1036 reg_set_bitmap = BITMAP_XMALLOC ();
1038 /* Allocate vars to track sets of regs, memory per block. */
1039 reg_set_in_block = (sbitmap *) sbitmap_vector_alloc (last_basic_block,
1040 max_gcse_regno);
1041 /* Allocate array to keep a list of insns which modify memory in each
1042 basic block. */
1043 modify_mem_list = (rtx *) gmalloc (last_basic_block * sizeof (rtx));
1044 canon_modify_mem_list = (rtx *) gmalloc (last_basic_block * sizeof (rtx));
1045 memset ((char *) modify_mem_list, 0, last_basic_block * sizeof (rtx));
1046 memset ((char *) canon_modify_mem_list, 0, last_basic_block * sizeof (rtx));
1047 modify_mem_list_set = BITMAP_XMALLOC ();
1048 canon_modify_mem_list_set = BITMAP_XMALLOC ();
1051 /* Free memory allocated by alloc_gcse_mem. */
1053 static void
1054 free_gcse_mem ()
1056 free (uid_cuid);
1057 free (cuid_insn);
1059 BITMAP_XFREE (reg_set_bitmap);
1061 sbitmap_vector_free (reg_set_in_block);
1062 free_modify_mem_tables ();
1063 BITMAP_XFREE (modify_mem_list_set);
1064 BITMAP_XFREE (canon_modify_mem_list_set);
1067 /* Many of the global optimization algorithms work by solving dataflow
1068 equations for various expressions. Initially, some local value is
1069 computed for each expression in each block. Then, the values across the
1070 various blocks are combined (by following flow graph edges) to arrive at
1071 global values. Conceptually, each set of equations is independent. We
1072 may therefore solve all the equations in parallel, solve them one at a
1073 time, or pick any intermediate approach.
1075 When you're going to need N two-dimensional bitmaps, each X (say, the
1076 number of blocks) by Y (say, the number of expressions), call this
1077 function. It's not important what X and Y represent; only that Y
1078 correspond to the things that can be done in parallel. This function will
1079 return an appropriate chunking factor C; you should solve C sets of
1080 equations in parallel. By going through this function, we can easily
1081 trade space against time; by solving fewer equations in parallel we use
1082 less space. */
1084 static int
1085 get_bitmap_width (n, x, y)
1086 int n;
1087 int x;
1088 int y;
1090 /* It's not really worth figuring out *exactly* how much memory will
1091 be used by a particular choice. The important thing is to get
1092 something approximately right. */
1093 size_t max_bitmap_memory = 10 * 1024 * 1024;
1095 /* The number of bytes we'd use for a single column of minimum
1096 width. */
1097 size_t column_size = n * x * sizeof (SBITMAP_ELT_TYPE);
1099 /* Often, it's reasonable just to solve all the equations in
1100 parallel. */
1101 if (column_size * SBITMAP_SET_SIZE (y) <= max_bitmap_memory)
1102 return y;
1104 /* Otherwise, pick the largest width we can, without going over the
1105 limit. */
1106 return SBITMAP_ELT_BITS * ((max_bitmap_memory + column_size - 1)
1107 / column_size);
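  /* Editorial worked example with made-up sizes: with n = 2, x = 1000
     blocks and an 8-byte SBITMAP_ELT_TYPE, column_size is 16000 bytes;
     for y = 5000 expressions SBITMAP_SET_SIZE (y) is about 80 words, so
     the product is roughly 1.3 MB, well under the 10 MB cap, and we
     simply return y, i.e. solve all the equations in parallel.  */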
1110 /* Compute the local properties of each recorded expression.
1112 Local properties are those that are defined by the block, irrespective of
1113 other blocks.
1115 An expression is transparent in a block if its operands are not modified
1116 in the block.
1118 An expression is computed (locally available) in a block if it is computed
1119    at least once and the expression would contain the same value if the
1120 computation was moved to the end of the block.
1122 An expression is locally anticipatable in a block if it is computed at
1123    least once and the expression would contain the same value if the computation
1124 was moved to the beginning of the block.
1126 We call this routine for cprop, pre and code hoisting. They all compute
1127 basically the same information and thus can easily share this code.
1129 TRANSP, COMP, and ANTLOC are destination sbitmaps for recording local
1130 properties. If NULL, then it is not necessary to compute or record that
1131 particular property.
1133    TABLE controls which hash table to look at.  If it is the set hash table,
1134    TRANSP is additionally computed as ~TRANSP, since this is really cprop's
1135    ABSALTERED.  */
1137 static void
1138 compute_local_properties (transp, comp, antloc, table)
1139 sbitmap *transp;
1140 sbitmap *comp;
1141 sbitmap *antloc;
1142 struct hash_table *table;
1144 unsigned int i;
1146 /* Initialize any bitmaps that were passed in. */
1147 if (transp)
1149 if (table->set_p)
1150 sbitmap_vector_zero (transp, last_basic_block);
1151 else
1152 sbitmap_vector_ones (transp, last_basic_block);
1155 if (comp)
1156 sbitmap_vector_zero (comp, last_basic_block);
1157 if (antloc)
1158 sbitmap_vector_zero (antloc, last_basic_block);
1160 for (i = 0; i < table->size; i++)
1162 struct expr *expr;
1164 for (expr = table->table[i]; expr != NULL; expr = expr->next_same_hash)
1166 int indx = expr->bitmap_index;
1167 struct occr *occr;
1169 /* The expression is transparent in this block if it is not killed.
1170 We start by assuming all are transparent [none are killed], and
1171 then reset the bits for those that are. */
1172 if (transp)
1173 compute_transp (expr->expr, indx, transp, table->set_p);
1175 /* The occurrences recorded in antic_occr are exactly those that
1176 we want to set to nonzero in ANTLOC. */
1177 if (antloc)
1178 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
1180 SET_BIT (antloc[BLOCK_NUM (occr->insn)], indx);
1182 /* While we're scanning the table, this is a good place to
1183 initialize this. */
1184 occr->deleted_p = 0;
1187 /* The occurrences recorded in avail_occr are exactly those that
1188 we want to set to nonzero in COMP. */
1189 if (comp)
1190 for (occr = expr->avail_occr; occr != NULL; occr = occr->next)
1192 SET_BIT (comp[BLOCK_NUM (occr->insn)], indx);
1194 /* While we're scanning the table, this is a good place to
1195 initialize this. */
1196 occr->copied_p = 0;
1199 /* While we're scanning the table, this is a good place to
1200 initialize this. */
1201 expr->reaching_reg = 0;
1206 /* Register set information.
1208 `reg_set_table' records where each register is set or otherwise
1209 modified. */
1211 static struct obstack reg_set_obstack;
1213 static void
1214 alloc_reg_set_mem (n_regs)
1215 int n_regs;
1217 unsigned int n;
1219 reg_set_table_size = n_regs + REG_SET_TABLE_SLOP;
1220 n = reg_set_table_size * sizeof (struct reg_set *);
1221 reg_set_table = (struct reg_set **) gmalloc (n);
1222 memset ((char *) reg_set_table, 0, n);
1224 gcc_obstack_init (&reg_set_obstack);
1227 static void
1228 free_reg_set_mem ()
1230 free (reg_set_table);
1231 obstack_free (&reg_set_obstack, NULL);
1234 /* Record REGNO in the reg_set table. */
1236 static void
1237 record_one_set (regno, insn)
1238 int regno;
1239 rtx insn;
1241 /* Allocate a new reg_set element and link it onto the list. */
1242 struct reg_set *new_reg_info;
1244 /* If the table isn't big enough, enlarge it. */
1245 if (regno >= reg_set_table_size)
1247 int new_size = regno + REG_SET_TABLE_SLOP;
1249 reg_set_table
1250 = (struct reg_set **) grealloc ((char *) reg_set_table,
1251 new_size * sizeof (struct reg_set *));
1252 memset ((char *) (reg_set_table + reg_set_table_size), 0,
1253 (new_size - reg_set_table_size) * sizeof (struct reg_set *));
1254 reg_set_table_size = new_size;
1257 new_reg_info = (struct reg_set *) obstack_alloc (&reg_set_obstack,
1258 sizeof (struct reg_set));
1259 bytes_used += sizeof (struct reg_set);
1260 new_reg_info->insn = insn;
1261 new_reg_info->next = reg_set_table[regno];
1262 reg_set_table[regno] = new_reg_info;
1265 /* Called from compute_sets via note_stores to handle one SET or CLOBBER in
1266 an insn. The DATA is really the instruction in which the SET is
1267 occurring. */
1269 static void
1270 record_set_info (dest, setter, data)
1271 rtx dest, setter ATTRIBUTE_UNUSED;
1272 void *data;
1274 rtx record_set_insn = (rtx) data;
1276 if (GET_CODE (dest) == REG && REGNO (dest) >= FIRST_PSEUDO_REGISTER)
1277 record_one_set (REGNO (dest), record_set_insn);
1280 /* Scan the function and record each set of each pseudo-register.
1282 This is called once, at the start of the gcse pass. See the comments for
1283 `reg_set_table' for further documentation. */
1285 static void
1286 compute_sets (f)
1287 rtx f;
1289 rtx insn;
1291 for (insn = f; insn != 0; insn = NEXT_INSN (insn))
1292 if (INSN_P (insn))
1293 note_stores (PATTERN (insn), record_set_info, insn);
1296 /* Hash table support. */
1298 struct reg_avail_info
1300 basic_block last_bb;
1301 int first_set;
1302 int last_set;
1305 static struct reg_avail_info *reg_avail_info;
1306 static basic_block current_bb;
1309 /* See whether X, the source of a set, is something we want to consider for
1310 GCSE. */
1312 static GTY(()) rtx test_insn;
1313 static int
1314 want_to_gcse_p (x)
1315 rtx x;
1317 int num_clobbers = 0;
1318 int icode;
1320 switch (GET_CODE (x))
1322 case REG:
1323 case SUBREG:
1324 case CONST_INT:
1325 case CONST_DOUBLE:
1326 case CONST_VECTOR:
1327 case CALL:
1328 case CONSTANT_P_RTX:
1329 return 0;
1331 default:
1332 break;
1335 /* If this is a valid operand, we are OK. If it's VOIDmode, we aren't. */
1336 if (general_operand (x, GET_MODE (x)))
1337 return 1;
1338 else if (GET_MODE (x) == VOIDmode)
1339 return 0;
1341 /* Otherwise, check if we can make a valid insn from it. First initialize
1342 our test insn if we haven't already. */
1343 if (test_insn == 0)
1345 test_insn
1346 = make_insn_raw (gen_rtx_SET (VOIDmode,
1347 gen_rtx_REG (word_mode,
1348 FIRST_PSEUDO_REGISTER * 2),
1349 const0_rtx));
1350 NEXT_INSN (test_insn) = PREV_INSN (test_insn) = 0;
1353 /* Now make an insn like the one we would make when GCSE'ing and see if
1354 valid. */
1355 PUT_MODE (SET_DEST (PATTERN (test_insn)), GET_MODE (x));
1356 SET_SRC (PATTERN (test_insn)) = x;
1357 return ((icode = recog (PATTERN (test_insn), test_insn, &num_clobbers)) >= 0
1358 && (num_clobbers == 0 || ! added_clobbers_hard_reg_p (icode)));
1361 /* Return nonzero if the operands of expression X are unchanged from the
1362 start of INSN's basic block up to but not including INSN (if AVAIL_P == 0),
1363 or from INSN to the end of INSN's basic block (if AVAIL_P != 0). */
1365 static int
1366 oprs_unchanged_p (x, insn, avail_p)
1367 rtx x, insn;
1368 int avail_p;
1370 int i, j;
1371 enum rtx_code code;
1372 const char *fmt;
1374 if (x == 0)
1375 return 1;
1377 code = GET_CODE (x);
1378 switch (code)
1380 case REG:
1382 struct reg_avail_info *info = &reg_avail_info[REGNO (x)];
1384 if (info->last_bb != current_bb)
1385 return 1;
1386 if (avail_p)
1387 return info->last_set < INSN_CUID (insn);
1388 else
1389 return info->first_set >= INSN_CUID (insn);
1392 case MEM:
1393 if (load_killed_in_block_p (current_bb, INSN_CUID (insn),
1394 x, avail_p))
1395 return 0;
1396 else
1397 return oprs_unchanged_p (XEXP (x, 0), insn, avail_p);
1399 case PRE_DEC:
1400 case PRE_INC:
1401 case POST_DEC:
1402 case POST_INC:
1403 case PRE_MODIFY:
1404 case POST_MODIFY:
1405 return 0;
1407 case PC:
1408 case CC0: /*FIXME*/
1409 case CONST:
1410 case CONST_INT:
1411 case CONST_DOUBLE:
1412 case CONST_VECTOR:
1413 case SYMBOL_REF:
1414 case LABEL_REF:
1415 case ADDR_VEC:
1416 case ADDR_DIFF_VEC:
1417 return 1;
1419 default:
1420 break;
1423 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
1425 if (fmt[i] == 'e')
1427 /* If we are about to do the last recursive call needed at this
1428 level, change it into iteration. This function is called enough
1429 to be worth it. */
1430 if (i == 0)
1431 return oprs_unchanged_p (XEXP (x, i), insn, avail_p);
1433 else if (! oprs_unchanged_p (XEXP (x, i), insn, avail_p))
1434 return 0;
1436 else if (fmt[i] == 'E')
1437 for (j = 0; j < XVECLEN (x, i); j++)
1438 if (! oprs_unchanged_p (XVECEXP (x, i, j), insn, avail_p))
1439 return 0;
1442 return 1;
1445 /* Used for communication between mems_conflict_for_gcse_p and
1446 load_killed_in_block_p. Nonzero if mems_conflict_for_gcse_p finds a
1447 conflict between two memory references. */
1448 static int gcse_mems_conflict_p;
1450 /* Used for communication between mems_conflict_for_gcse_p and
1451    load_killed_in_block_p.  The memory reference of a load instruction;
1452    mems_conflict_for_gcse_p will check whether a memory store conflicts
1453    with this memory load.  */
1454 static rtx gcse_mem_operand;
1456 /* DEST is the output of an instruction. If it is a memory reference, and
1457 possibly conflicts with the load found in gcse_mem_operand, then set
1458 gcse_mems_conflict_p to a nonzero value. */
1460 static void
1461 mems_conflict_for_gcse_p (dest, setter, data)
1462 rtx dest, setter ATTRIBUTE_UNUSED;
1463 void *data ATTRIBUTE_UNUSED;
1465 while (GET_CODE (dest) == SUBREG
1466 || GET_CODE (dest) == ZERO_EXTRACT
1467 || GET_CODE (dest) == SIGN_EXTRACT
1468 || GET_CODE (dest) == STRICT_LOW_PART)
1469 dest = XEXP (dest, 0);
1471 /* If DEST is not a MEM, then it will not conflict with the load. Note
1472 that function calls are assumed to clobber memory, but are handled
1473 elsewhere. */
1474 if (GET_CODE (dest) != MEM)
1475 return;
1477 /* If we are setting a MEM in our list of specially recognized MEMs,
1478 don't mark as killed this time. */
1480 if (expr_equiv_p (dest, gcse_mem_operand) && pre_ldst_mems != NULL)
1482 if (!find_rtx_in_ldst (dest))
1483 gcse_mems_conflict_p = 1;
1484 return;
1487 if (true_dependence (dest, GET_MODE (dest), gcse_mem_operand,
1488 rtx_addr_varies_p))
1489 gcse_mems_conflict_p = 1;
1492 /* Return nonzero if the expression in X (a memory reference) is killed
1493 in block BB before or after the insn with the CUID in UID_LIMIT.
1494 AVAIL_P is nonzero for kills after UID_LIMIT, and zero for kills
1495 before UID_LIMIT.
1497 To check the entire block, set UID_LIMIT to max_uid + 1 and
1498 AVAIL_P to 0. */
1500 static int
1501 load_killed_in_block_p (bb, uid_limit, x, avail_p)
1502 basic_block bb;
1503 int uid_limit;
1504 rtx x;
1505 int avail_p;
1507 rtx list_entry = modify_mem_list[bb->index];
1508 while (list_entry)
1510 rtx setter;
1511 /* Ignore entries in the list that do not apply. */
1512 if ((avail_p
1513 && INSN_CUID (XEXP (list_entry, 0)) < uid_limit)
1514 || (! avail_p
1515 && INSN_CUID (XEXP (list_entry, 0)) > uid_limit))
1517 list_entry = XEXP (list_entry, 1);
1518 continue;
1521 setter = XEXP (list_entry, 0);
1523 /* If SETTER is a call everything is clobbered. Note that calls
1524 to pure functions are never put on the list, so we need not
1525 worry about them. */
1526 if (GET_CODE (setter) == CALL_INSN)
1527 return 1;
1529 /* SETTER must be an INSN of some kind that sets memory. Call
1530 note_stores to examine each hunk of memory that is modified.
1532 The note_stores interface is pretty limited, so we have to
1533 communicate via global variables. Yuk. */
1534 gcse_mem_operand = x;
1535 gcse_mems_conflict_p = 0;
1536 note_stores (PATTERN (setter), mems_conflict_for_gcse_p, NULL);
1537 if (gcse_mems_conflict_p)
1538 return 1;
1539 list_entry = XEXP (list_entry, 1);
1541 return 0;
1544 /* Return nonzero if the operands of expression X are unchanged from
1545 the start of INSN's basic block up to but not including INSN. */
1547 static int
1548 oprs_anticipatable_p (x, insn)
1549 rtx x, insn;
1551 return oprs_unchanged_p (x, insn, 0);
1554 /* Return nonzero if the operands of expression X are unchanged from
1555 INSN to the end of INSN's basic block. */
1557 static int
1558 oprs_available_p (x, insn)
1559 rtx x, insn;
1561 return oprs_unchanged_p (x, insn, 1);
1564 /* Hash expression X.
1566 MODE is only used if X is a CONST_INT. DO_NOT_RECORD_P is a boolean
1567 indicating if a volatile operand is found or if the expression contains
1568 something we don't want to insert in the table.
1570 ??? One might want to merge this with canon_hash. Later. */
1572 static unsigned int
1573 hash_expr (x, mode, do_not_record_p, hash_table_size)
1574 rtx x;
1575 enum machine_mode mode;
1576 int *do_not_record_p;
1577 int hash_table_size;
1579 unsigned int hash;
1581 *do_not_record_p = 0;
1583 hash = hash_expr_1 (x, mode, do_not_record_p);
1584 return hash % hash_table_size;
1587 /* Hash a string. Just add its bytes up. */
1589 static inline unsigned
1590 hash_string_1 (ps)
1591 const char *ps;
1593 unsigned hash = 0;
1594 const unsigned char *p = (const unsigned char *) ps;
1596 if (p)
1597 while (*p)
1598 hash += *p++;
1600 return hash;
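  /* Editorial example: hash_string_1 ("gcse") is just
     'g' + 'c' + 's' + 'e' = 103 + 99 + 115 + 101 = 418.  */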
1603 /* Subroutine of hash_expr to do the actual work. */
1605 static unsigned int
1606 hash_expr_1 (x, mode, do_not_record_p)
1607 rtx x;
1608 enum machine_mode mode;
1609 int *do_not_record_p;
1611 int i, j;
1612 unsigned hash = 0;
1613 enum rtx_code code;
1614 const char *fmt;
1616 /* Used to turn recursion into iteration. We can't rely on GCC's
1617 tail-recursion elimination since we need to keep accumulating values
1618 in HASH. */
1620 if (x == 0)
1621 return hash;
1623 repeat:
1624 code = GET_CODE (x);
1625 switch (code)
1627 case REG:
1628 hash += ((unsigned int) REG << 7) + REGNO (x);
1629 return hash;
1631 case CONST_INT:
1632 hash += (((unsigned int) CONST_INT << 7) + (unsigned int) mode
1633 + (unsigned int) INTVAL (x));
1634 return hash;
1636 case CONST_DOUBLE:
1637 /* This is like the general case, except that it only counts
1638 the integers representing the constant. */
1639 hash += (unsigned int) code + (unsigned int) GET_MODE (x);
1640 if (GET_MODE (x) != VOIDmode)
1641 for (i = 2; i < GET_RTX_LENGTH (CONST_DOUBLE); i++)
1642 hash += (unsigned int) XWINT (x, i);
1643 else
1644 hash += ((unsigned int) CONST_DOUBLE_LOW (x)
1645 + (unsigned int) CONST_DOUBLE_HIGH (x));
1646 return hash;
1648 case CONST_VECTOR:
1650 int units;
1651 rtx elt;
1653 units = CONST_VECTOR_NUNITS (x);
1655 for (i = 0; i < units; ++i)
1657 elt = CONST_VECTOR_ELT (x, i);
1658 hash += hash_expr_1 (elt, GET_MODE (elt), do_not_record_p);
1661 return hash;
1664 /* Assume there is only one rtx object for any given label. */
1665 case LABEL_REF:
1666 /* We don't hash on the address of the CODE_LABEL to avoid bootstrap
1667 differences and differences between each stage's debugging dumps. */
1668 hash += (((unsigned int) LABEL_REF << 7)
1669 + CODE_LABEL_NUMBER (XEXP (x, 0)));
1670 return hash;
1672 case SYMBOL_REF:
1674 /* Don't hash on the symbol's address to avoid bootstrap differences.
1675 Different hash values may cause expressions to be recorded in
1676 different orders and thus different registers to be used in the
1677 final assembler. This also avoids differences in the dump files
1678 between various stages. */
1679 unsigned int h = 0;
1680 const unsigned char *p = (const unsigned char *) XSTR (x, 0);
1682 while (*p)
1683 h += (h << 7) + *p++; /* ??? revisit */
1685 hash += ((unsigned int) SYMBOL_REF << 7) + h;
1686 return hash;
1689 case MEM:
1690 if (MEM_VOLATILE_P (x))
1692 *do_not_record_p = 1;
1693 return 0;
1696 hash += (unsigned int) MEM;
1697 /* We used to use the alias set for hashing, but this is not good, since
1698 the alias set may differ between the -fprofile-arcs and -fbranch-probabilities
1699 compilations, causing the profiles to fail to match. */
1700 x = XEXP (x, 0);
1701 goto repeat;
1703 case PRE_DEC:
1704 case PRE_INC:
1705 case POST_DEC:
1706 case POST_INC:
1707 case PC:
1708 case CC0:
1709 case CALL:
1710 case UNSPEC_VOLATILE:
1711 *do_not_record_p = 1;
1712 return 0;
1714 case ASM_OPERANDS:
1715 if (MEM_VOLATILE_P (x))
1717 *do_not_record_p = 1;
1718 return 0;
1720 else
1722 /* We don't want to take the filename and line into account. */
1723 hash += (unsigned) code + (unsigned) GET_MODE (x)
1724 + hash_string_1 (ASM_OPERANDS_TEMPLATE (x))
1725 + hash_string_1 (ASM_OPERANDS_OUTPUT_CONSTRAINT (x))
1726 + (unsigned) ASM_OPERANDS_OUTPUT_IDX (x);
1728 if (ASM_OPERANDS_INPUT_LENGTH (x))
1730 for (i = 1; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
1732 hash += (hash_expr_1 (ASM_OPERANDS_INPUT (x, i),
1733 GET_MODE (ASM_OPERANDS_INPUT (x, i)),
1734 do_not_record_p)
1735 + hash_string_1 (ASM_OPERANDS_INPUT_CONSTRAINT
1736 (x, i)));
1739 hash += hash_string_1 (ASM_OPERANDS_INPUT_CONSTRAINT (x, 0));
1740 x = ASM_OPERANDS_INPUT (x, 0);
1741 mode = GET_MODE (x);
1742 goto repeat;
1744 return hash;
1747 default:
1748 break;
1751 hash += (unsigned) code + (unsigned) GET_MODE (x);
1752 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
1754 if (fmt[i] == 'e')
1756 /* If we are about to do the last recursive call
1757 needed at this level, change it into iteration.
1758 This function is called enough to be worth it. */
1759 if (i == 0)
1761 x = XEXP (x, i);
1762 goto repeat;
1765 hash += hash_expr_1 (XEXP (x, i), 0, do_not_record_p);
1766 if (*do_not_record_p)
1767 return 0;
1770 else if (fmt[i] == 'E')
1771 for (j = 0; j < XVECLEN (x, i); j++)
1773 hash += hash_expr_1 (XVECEXP (x, i, j), 0, do_not_record_p);
1774 if (*do_not_record_p)
1775 return 0;
1778 else if (fmt[i] == 's')
1779 hash += hash_string_1 (XSTR (x, i));
1780 else if (fmt[i] == 'i')
1781 hash += (unsigned int) XINT (x, i);
1782 else
1783 abort ();
1786 return hash;
1789 /* Hash a set of register REGNO.
1791 Sets are hashed on the register that is set. This simplifies the PRE copy
1792 propagation code.
1794 ??? May need to make things more elaborate. Later, as necessary. */
1796 static unsigned int
1797 hash_set (regno, hash_table_size)
1798 int regno;
1799 int hash_table_size;
1801 unsigned int hash;
1803 hash = regno;
1804 return hash % hash_table_size;
1807 /* Return nonzero if exp1 is equivalent to exp2.
1808 ??? Borrowed from cse.c. Might want to remerge with cse.c. Later. */
1810 static int
1811 expr_equiv_p (x, y)
1812 rtx x, y;
1814 int i, j;
1815 enum rtx_code code;
1816 const char *fmt;
1818 if (x == y)
1819 return 1;
1821 if (x == 0 || y == 0)
1822 return x == y;
1824 code = GET_CODE (x);
1825 if (code != GET_CODE (y))
1826 return 0;
1828 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
1829 if (GET_MODE (x) != GET_MODE (y))
1830 return 0;
1832 switch (code)
1834 case PC:
1835 case CC0:
1836 return x == y;
1838 case CONST_INT:
1839 return INTVAL (x) == INTVAL (y);
1841 case LABEL_REF:
1842 return XEXP (x, 0) == XEXP (y, 0);
1844 case SYMBOL_REF:
1845 return XSTR (x, 0) == XSTR (y, 0);
1847 case REG:
1848 return REGNO (x) == REGNO (y);
1850 case MEM:
1851 /* Can't merge two expressions in different alias sets, since we can
1852 decide that the expression is transparent in a block when it isn't,
1853 due to it being set with a different alias set. */
1854 if (MEM_ALIAS_SET (x) != MEM_ALIAS_SET (y))
1855 return 0;
1856 break;
1858 /* For commutative operations, check both orders. */
1859 case PLUS:
1860 case MULT:
1861 case AND:
1862 case IOR:
1863 case XOR:
1864 case NE:
1865 case EQ:
1866 return ((expr_equiv_p (XEXP (x, 0), XEXP (y, 0))
1867 && expr_equiv_p (XEXP (x, 1), XEXP (y, 1)))
1868 || (expr_equiv_p (XEXP (x, 0), XEXP (y, 1))
1869 && expr_equiv_p (XEXP (x, 1), XEXP (y, 0))));
1871 case ASM_OPERANDS:
1872 /* We don't use the generic code below because we want to
1873 disregard filename and line numbers. */
1875 /* A volatile asm isn't equivalent to any other. */
1876 if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
1877 return 0;
1879 if (GET_MODE (x) != GET_MODE (y)
1880 || strcmp (ASM_OPERANDS_TEMPLATE (x), ASM_OPERANDS_TEMPLATE (y))
1881 || strcmp (ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
1882 ASM_OPERANDS_OUTPUT_CONSTRAINT (y))
1883 || ASM_OPERANDS_OUTPUT_IDX (x) != ASM_OPERANDS_OUTPUT_IDX (y)
1884 || ASM_OPERANDS_INPUT_LENGTH (x) != ASM_OPERANDS_INPUT_LENGTH (y))
1885 return 0;
1887 if (ASM_OPERANDS_INPUT_LENGTH (x))
1889 for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
1890 if (! expr_equiv_p (ASM_OPERANDS_INPUT (x, i),
1891 ASM_OPERANDS_INPUT (y, i))
1892 || strcmp (ASM_OPERANDS_INPUT_CONSTRAINT (x, i),
1893 ASM_OPERANDS_INPUT_CONSTRAINT (y, i)))
1894 return 0;
1897 return 1;
1899 default:
1900 break;
1903 /* Compare the elements. If any pair of corresponding elements
1904 fail to match, return 0 for the whole thing. */
1906 fmt = GET_RTX_FORMAT (code);
1907 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1909 switch (fmt[i])
1911 case 'e':
1912 if (! expr_equiv_p (XEXP (x, i), XEXP (y, i)))
1913 return 0;
1914 break;
1916 case 'E':
1917 if (XVECLEN (x, i) != XVECLEN (y, i))
1918 return 0;
1919 for (j = 0; j < XVECLEN (x, i); j++)
1920 if (! expr_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j)))
1921 return 0;
1922 break;
1924 case 's':
1925 if (strcmp (XSTR (x, i), XSTR (y, i)))
1926 return 0;
1927 break;
1929 case 'i':
1930 if (XINT (x, i) != XINT (y, i))
1931 return 0;
1932 break;
1934 case 'w':
1935 if (XWINT (x, i) != XWINT (y, i))
1936 return 0;
1937 break;
1939 case '0':
1940 break;
1942 default:
1943 abort ();
1947 return 1;
1950 /* Insert expression X in INSN in the hash TABLE.
1951 If it is already present, record it as the last occurrence in INSN's
1952 basic block.
1954 MODE is the mode of the value X is being stored into.
1955 It is only used if X is a CONST_INT.
1957 ANTIC_P is nonzero if X is an anticipatable expression.
1958 AVAIL_P is nonzero if X is an available expression. */
1960 static void
1961 insert_expr_in_table (x, mode, insn, antic_p, avail_p, table)
1962 rtx x;
1963 enum machine_mode mode;
1964 rtx insn;
1965 int antic_p, avail_p;
1966 struct hash_table *table;
1968 int found, do_not_record_p;
1969 unsigned int hash;
1970 struct expr *cur_expr, *last_expr = NULL;
1971 struct occr *antic_occr, *avail_occr;
1972 struct occr *last_occr = NULL;
1974 hash = hash_expr (x, mode, &do_not_record_p, table->size);
1976 /* Do not insert expression in table if it contains volatile operands,
1977 or if hash_expr determines the expression is something we don't want
1978 to or can't handle. */
1979 if (do_not_record_p)
1980 return;
1982 cur_expr = table->table[hash];
1983 found = 0;
1985 while (cur_expr && 0 == (found = expr_equiv_p (cur_expr->expr, x)))
1987 /* If the expression isn't found, save a pointer to the end of
1988 the list. */
1989 last_expr = cur_expr;
1990 cur_expr = cur_expr->next_same_hash;
1993 if (! found)
1995 cur_expr = (struct expr *) gcse_alloc (sizeof (struct expr));
1996 bytes_used += sizeof (struct expr);
1997 if (table->table[hash] == NULL)
1998 /* This is the first pattern that hashed to this index. */
1999 table->table[hash] = cur_expr;
2000 else
2001 /* Add EXPR to end of this hash chain. */
2002 last_expr->next_same_hash = cur_expr;
2004 /* Set the fields of the expr element. */
2005 cur_expr->expr = x;
2006 cur_expr->bitmap_index = table->n_elems++;
2007 cur_expr->next_same_hash = NULL;
2008 cur_expr->antic_occr = NULL;
2009 cur_expr->avail_occr = NULL;
2012 /* Now record the occurrence(s). */
2013 if (antic_p)
2015 antic_occr = cur_expr->antic_occr;
2017 /* Search for another occurrence in the same basic block. */
2018 while (antic_occr && BLOCK_NUM (antic_occr->insn) != BLOCK_NUM (insn))
2020 /* If an occurrence isn't found, save a pointer to the end of
2021 the list. */
2022 last_occr = antic_occr;
2023 antic_occr = antic_occr->next;
2026 if (antic_occr)
2027 /* Found another instance of the expression in the same basic block.
2028 Prefer the currently recorded one. We want the first one in the
2029 block and the block is scanned from start to end. */
2030 ; /* nothing to do */
2031 else
2033 /* First occurrence of this expression in this basic block. */
2034 antic_occr = (struct occr *) gcse_alloc (sizeof (struct occr));
2035 bytes_used += sizeof (struct occr);
2036 /* First occurrence of this expression in any block? */
2037 if (cur_expr->antic_occr == NULL)
2038 cur_expr->antic_occr = antic_occr;
2039 else
2040 last_occr->next = antic_occr;
2042 antic_occr->insn = insn;
2043 antic_occr->next = NULL;
2047 if (avail_p)
2049 avail_occr = cur_expr->avail_occr;
2051 /* Search for another occurrence in the same basic block. */
2052 while (avail_occr && BLOCK_NUM (avail_occr->insn) != BLOCK_NUM (insn))
2054 /* If an occurrence isn't found, save a pointer to the end of
2055 the list. */
2056 last_occr = avail_occr;
2057 avail_occr = avail_occr->next;
2060 if (avail_occr)
2061 /* Found another instance of the expression in the same basic block.
2062 Prefer this occurrence to the currently recorded one. We want
2063 the last one in the block and the block is scanned from start
2064 to end. */
2065 avail_occr->insn = insn;
2066 else
2068 /* First occurrence of this expression in this basic block. */
2069 avail_occr = (struct occr *) gcse_alloc (sizeof (struct occr));
2070 bytes_used += sizeof (struct occr);
2072 /* First occurrence of this expression in any block? */
2073 if (cur_expr->avail_occr == NULL)
2074 cur_expr->avail_occr = avail_occr;
2075 else
2076 last_occr->next = avail_occr;
2078 avail_occr->insn = insn;
2079 avail_occr->next = NULL;
2084 /* Insert pattern X in INSN in the hash table.
2085 X is a SET of a reg to either another reg or a constant.
2086 If it is already present, record it as the last occurrence in INSN's
2087 basic block. */
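/* Unlike insert_expr_in_table, only an availability occurrence list
   (avail_occr) is maintained here; sets have no anticipatable
   occurrence (antic_occr) handling.  */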
2089 static void
2090 insert_set_in_table (x, insn, table)
2091 rtx x;
2092 rtx insn;
2093 struct hash_table *table;
2095 int found;
2096 unsigned int hash;
2097 struct expr *cur_expr, *last_expr = NULL;
2098 struct occr *cur_occr, *last_occr = NULL;
2100 if (GET_CODE (x) != SET
2101 || GET_CODE (SET_DEST (x)) != REG)
2102 abort ();
2104 hash = hash_set (REGNO (SET_DEST (x)), table->size);
2106 cur_expr = table->table[hash];
2107 found = 0;
2109 while (cur_expr && 0 == (found = expr_equiv_p (cur_expr->expr, x)))
2111 /* If the expression isn't found, save a pointer to the end of
2112 the list. */
2113 last_expr = cur_expr;
2114 cur_expr = cur_expr->next_same_hash;
2117 if (! found)
2119 cur_expr = (struct expr *) gcse_alloc (sizeof (struct expr));
2120 bytes_used += sizeof (struct expr);
2121 if (table->table[hash] == NULL)
2122 /* This is the first pattern that hashed to this index. */
2123 table->table[hash] = cur_expr;
2124 else
2125 /* Add EXPR to end of this hash chain. */
2126 last_expr->next_same_hash = cur_expr;
2128 /* Set the fields of the expr element.
2129 We must copy X because it can be modified when copy propagation is
2130 performed on its operands. */
2131 cur_expr->expr = copy_rtx (x);
2132 cur_expr->bitmap_index = table->n_elems++;
2133 cur_expr->next_same_hash = NULL;
2134 cur_expr->antic_occr = NULL;
2135 cur_expr->avail_occr = NULL;
2138 /* Now record the occurrence. */
2139 cur_occr = cur_expr->avail_occr;
2141 /* Search for another occurrence in the same basic block. */
2142 while (cur_occr && BLOCK_NUM (cur_occr->insn) != BLOCK_NUM (insn))
2144 /* If an occurrence isn't found, save a pointer to the end of
2145 the list. */
2146 last_occr = cur_occr;
2147 cur_occr = cur_occr->next;
2150 if (cur_occr)
2151 /* Found another instance of the expression in the same basic block.
2152 Prefer this occurrence to the currently recorded one. We want the
2153 last one in the block and the block is scanned from start to end. */
2154 cur_occr->insn = insn;
2155 else
2157 /* First occurrence of this expression in this basic block. */
2158 cur_occr = (struct occr *) gcse_alloc (sizeof (struct occr));
2159 bytes_used += sizeof (struct occr);
2161 /* First occurrence of this expression in any block? */
2162 if (cur_expr->avail_occr == NULL)
2163 cur_expr->avail_occr = cur_occr;
2164 else
2165 last_occr->next = cur_occr;
2167 cur_occr->insn = insn;
2168 cur_occr->next = NULL;
2172 /* Determine whether the rtx X should be treated as a constant for
2173 the purposes of GCSE's constant propagation. */
2175 static bool
2176 gcse_constant_p (x)
2177 rtx x;
2179 /* Consider a COMPARE of two integers constant. */
2180 if (GET_CODE (x) == COMPARE
2181 && GET_CODE (XEXP (x, 0)) == CONST_INT
2182 && GET_CODE (XEXP (x, 1)) == CONST_INT)
2183 return true;
2185 if (GET_CODE (x) == CONSTANT_P_RTX)
2186 return false;
2188 return CONSTANT_P (x);
2191 /* Scan pattern PAT of INSN and add an entry to the hash TABLE (set or
2192 expression one). */
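/* For the expression table only SET_SRC of the pattern is entered, via
   insert_expr_in_table; for the set table the whole (set (reg) src)
   pattern is entered, via insert_set_in_table.  */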
2194 static void
2195 hash_scan_set (pat, insn, table)
2196 rtx pat, insn;
2197 struct hash_table *table;
2199 rtx src = SET_SRC (pat);
2200 rtx dest = SET_DEST (pat);
2201 rtx note;
2203 if (GET_CODE (src) == CALL)
2204 hash_scan_call (src, insn, table);
2206 else if (GET_CODE (dest) == REG)
2208 unsigned int regno = REGNO (dest);
2209 rtx tmp;
2211 /* If this is a single set and we are doing constant propagation,
2212 see if a REG_NOTE shows this equivalent to a constant. */
2213 if (table->set_p && (note = find_reg_equal_equiv_note (insn)) != 0
2214 && gcse_constant_p (XEXP (note, 0)))
2215 src = XEXP (note, 0), pat = gen_rtx_SET (VOIDmode, dest, src);
2217 /* Only record sets of pseudo-regs in the hash table. */
2218 if (! table->set_p
2219 && regno >= FIRST_PSEUDO_REGISTER
2220 /* Don't GCSE something if we can't do a reg/reg copy. */
2221 && can_copy_p (GET_MODE (dest))
2222 /* GCSE commonly inserts instructions after the insn. We can't
2223 do that easily for EH_REGION notes, so disable GCSE on these
2224 for now. */
2225 && !find_reg_note (insn, REG_EH_REGION, NULL_RTX)
2226 /* Is SET_SRC something we want to gcse? */
2227 && want_to_gcse_p (src)
2228 /* Don't CSE a nop. */
2229 && ! set_noop_p (pat)
2230 /* Don't GCSE if it has an attached REG_EQUIV note.
2231 At this point only function parameters should have
2232 REG_EQUIV notes, and if the argument slot is used somewhere
2233 explicitly, it means the address of the parameter has been taken,
2234 so we should not extend the lifetime of the pseudo. */
2235 && ((note = find_reg_note (insn, REG_EQUIV, NULL_RTX)) == 0
2236 || GET_CODE (XEXP (note, 0)) != MEM))
2238 /* An expression is not anticipatable if its operands are
2239 modified before this insn or if this is not the only SET in
2240 this insn. */
2241 int antic_p = oprs_anticipatable_p (src, insn) && single_set (insn);
2242 /* An expression is not available if its operands are
2243 subsequently modified, including this insn. It's also not
2244 available if this is a branch, because we can't insert
2245 a set after the branch. */
2246 int avail_p = (oprs_available_p (src, insn)
2247 && ! JUMP_P (insn));
2249 insert_expr_in_table (src, GET_MODE (dest), insn, antic_p, avail_p, table);
2252 /* Record sets for constant/copy propagation. */
2253 else if (table->set_p
2254 && regno >= FIRST_PSEUDO_REGISTER
2255 && ((GET_CODE (src) == REG
2256 && REGNO (src) >= FIRST_PSEUDO_REGISTER
2257 && can_copy_p (GET_MODE (dest))
2258 && REGNO (src) != regno)
2259 || gcse_constant_p (src))
2260 /* A copy is not available if its src or dest is subsequently
2261 modified. Here we want to search from INSN+1 on, but
2262 oprs_available_p searches from INSN on. */
2263 && (insn == BLOCK_END (BLOCK_NUM (insn))
2264 || ((tmp = next_nonnote_insn (insn)) != NULL_RTX
2265 && oprs_available_p (pat, tmp))))
2266 insert_set_in_table (pat, insn, table);
2270 static void
2271 hash_scan_clobber (x, insn, table)
2272 rtx x ATTRIBUTE_UNUSED, insn ATTRIBUTE_UNUSED;
2273 struct hash_table *table ATTRIBUTE_UNUSED;
2275 /* Currently nothing to do. */
2278 static void
2279 hash_scan_call (x, insn, table)
2280 rtx x ATTRIBUTE_UNUSED, insn ATTRIBUTE_UNUSED;
2281 struct hash_table *table ATTRIBUTE_UNUSED;
2283 /* Currently nothing to do. */
2286 /* Process INSN and add hash table entries as appropriate.
2288 Only available expressions that set a single pseudo-reg are recorded.
2290 Single sets in a PARALLEL could be handled, but it's an extra complication
2291 that isn't dealt with right now. The trick is handling the CLOBBERs that
2292 are also in the PARALLEL. Later.
2294 If SET_P is nonzero, this is for the assignment hash table,
2295 otherwise it is for the expression hash table.
2296 If IN_LIBCALL_BLOCK is nonzero, we are in a libcall block and should
2297 not record any expressions. */
2299 static void
2300 hash_scan_insn (insn, table, in_libcall_block)
2301 rtx insn;
2302 struct hash_table *table;
2303 int in_libcall_block;
2305 rtx pat = PATTERN (insn);
2306 int i;
2308 if (in_libcall_block)
2309 return;
2311 /* Pick out the sets of INSN and for other forms of instructions record
2312 what's been modified. */
2314 if (GET_CODE (pat) == SET)
2315 hash_scan_set (pat, insn, table);
2316 else if (GET_CODE (pat) == PARALLEL)
2317 for (i = 0; i < XVECLEN (pat, 0); i++)
2319 rtx x = XVECEXP (pat, 0, i);
2321 if (GET_CODE (x) == SET)
2322 hash_scan_set (x, insn, table);
2323 else if (GET_CODE (x) == CLOBBER)
2324 hash_scan_clobber (x, insn, table);
2325 else if (GET_CODE (x) == CALL)
2326 hash_scan_call (x, insn, table);
2329 else if (GET_CODE (pat) == CLOBBER)
2330 hash_scan_clobber (pat, insn, table);
2331 else if (GET_CODE (pat) == CALL)
2332 hash_scan_call (pat, insn, table);
2335 static void
2336 dump_hash_table (file, name, table)
2337 FILE *file;
2338 const char *name;
2339 struct hash_table *table;
2341 int i;
2342 /* Flattened out table, so it's printed in proper order. */
2343 struct expr **flat_table;
2344 unsigned int *hash_val;
2345 struct expr *expr;
2347 flat_table
2348 = (struct expr **) xcalloc (table->n_elems, sizeof (struct expr *));
2349 hash_val = (unsigned int *) xmalloc (table->n_elems * sizeof (unsigned int));
2351 for (i = 0; i < (int) table->size; i++)
2352 for (expr = table->table[i]; expr != NULL; expr = expr->next_same_hash)
2354 flat_table[expr->bitmap_index] = expr;
2355 hash_val[expr->bitmap_index] = i;
2358 fprintf (file, "%s hash table (%d buckets, %d entries)\n",
2359 name, table->size, table->n_elems);
2361 for (i = 0; i < (int) table->n_elems; i++)
2362 if (flat_table[i] != 0)
2364 expr = flat_table[i];
2365 fprintf (file, "Index %d (hash value %d)\n ",
2366 expr->bitmap_index, hash_val[i]);
2367 print_rtl (file, expr->expr);
2368 fprintf (file, "\n");
2371 fprintf (file, "\n");
2373 free (flat_table);
2374 free (hash_val);
2377 /* Record register first/last/block set information for REGNO in INSN.
2379 first_set records the first place in the block where the register
2380 is set and is used to compute "anticipatability".
2382 last_set records the last place in the block where the register
2383 is set and is used to compute "availability".
2385 last_bb records the block for which first_set and last_set are
2386 valid, as a quick test to invalidate them.
2388 reg_set_in_block records whether the register is set in the block
2389 and is used to compute "transparency". */
2391 static void
2392 record_last_reg_set_info (insn, regno)
2393 rtx insn;
2394 int regno;
2396 struct reg_avail_info *info = &reg_avail_info[regno];
2397 int cuid = INSN_CUID (insn);
2399 info->last_set = cuid;
2400 if (info->last_bb != current_bb)
2402 info->last_bb = current_bb;
2403 info->first_set = cuid;
2404 SET_BIT (reg_set_in_block[current_bb->index], regno);
2409 /* Record all of the canonicalized MEMs of record_last_mem_set_info's insn.
2410 Note we store a pair of elements in the list, so they have to be
2411 taken off pairwise. */
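/* Concretely, each recorded store pushes two EXPR_LIST nodes onto
   canon_modify_mem_list[bb], so the list head reads
     dest, canonicalized dest_addr, <older entries> ...
   and consumers such as compute_transp pop the elements two at a time.  */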
2413 static void
2414 canon_list_insert (dest, unused1, v_insn)
2415 rtx dest ATTRIBUTE_UNUSED;
2416 rtx unused1 ATTRIBUTE_UNUSED;
2417 void * v_insn;
2419 rtx dest_addr, insn;
2420 int bb;
2422 while (GET_CODE (dest) == SUBREG
2423 || GET_CODE (dest) == ZERO_EXTRACT
2424 || GET_CODE (dest) == SIGN_EXTRACT
2425 || GET_CODE (dest) == STRICT_LOW_PART)
2426 dest = XEXP (dest, 0);
2428 /* If DEST is not a MEM, then it will not conflict with a load. Note
2429 that function calls are assumed to clobber memory, but are handled
2430 elsewhere. */
2432 if (GET_CODE (dest) != MEM)
2433 return;
2435 dest_addr = get_addr (XEXP (dest, 0));
2436 dest_addr = canon_rtx (dest_addr);
2437 insn = (rtx) v_insn;
2438 bb = BLOCK_NUM (insn);
2440 canon_modify_mem_list[bb] =
2441 alloc_EXPR_LIST (VOIDmode, dest_addr, canon_modify_mem_list[bb]);
2442 canon_modify_mem_list[bb] =
2443 alloc_EXPR_LIST (VOIDmode, dest, canon_modify_mem_list[bb]);
2444 bitmap_set_bit (canon_modify_mem_list_set, bb);
2447 /* Record memory modification information for INSN. We do not actually care
2448 about the memory location(s) that are set, or even how they are set (consider
2449 a CALL_INSN). We merely need to record which insns modify memory. */
2451 static void
2452 record_last_mem_set_info (insn)
2453 rtx insn;
2455 int bb = BLOCK_NUM (insn);
2457 /* load_killed_in_block_p will handle the case of calls clobbering
2458 everything. */
2459 modify_mem_list[bb] = alloc_INSN_LIST (insn, modify_mem_list[bb]);
2460 bitmap_set_bit (modify_mem_list_set, bb);
2462 if (GET_CODE (insn) == CALL_INSN)
2464 /* Note that traversals of this list (other than for freeing)
2465 will break after encountering a CALL_INSN. So, there's no
2466 need to insert a pair of items, as canon_list_insert does. */
2467 canon_modify_mem_list[bb] =
2468 alloc_INSN_LIST (insn, canon_modify_mem_list[bb]);
2469 bitmap_set_bit (canon_modify_mem_list_set, bb);
2471 else
2472 note_stores (PATTERN (insn), canon_list_insert, (void*) insn);
2475 /* Called from compute_hash_table via note_stores to handle one
2476 SET or CLOBBER in an insn. DATA is really the instruction in which
2477 the SET is taking place. */
2479 static void
2480 record_last_set_info (dest, setter, data)
2481 rtx dest, setter ATTRIBUTE_UNUSED;
2482 void *data;
2484 rtx last_set_insn = (rtx) data;
2486 if (GET_CODE (dest) == SUBREG)
2487 dest = SUBREG_REG (dest);
2489 if (GET_CODE (dest) == REG)
2490 record_last_reg_set_info (last_set_insn, REGNO (dest));
2491 else if (GET_CODE (dest) == MEM
2492 /* Ignore pushes, they clobber nothing. */
2493 && ! push_operand (dest, GET_MODE (dest)))
2494 record_last_mem_set_info (last_set_insn);
2497 /* Top level function to create an expression or assignment hash table.
2499 Expression entries are placed in the hash table if
2500 - they are of the form (set (pseudo-reg) src),
2501 - src is something we want to perform GCSE on,
2502 - none of the operands are subsequently modified in the block
2504 Assignment entries are placed in the hash table if
2505 - they are of the form (set (pseudo-reg) src),
2506 - src is something we want to perform const/copy propagation on,
2507 - none of the operands or target are subsequently modified in the block
2509 Currently src must be a pseudo-reg or a const_int.
2511 TABLE is the table computed. */
2513 static void
2514 compute_hash_table_work (table)
2515 struct hash_table *table;
2517 unsigned int i;
2519 /* While we compute the hash table we also compute a bit array of which
2520 registers are set in which blocks.
2521 ??? This isn't needed during const/copy propagation, but it's cheap to
2522 compute. Later. */
2523 sbitmap_vector_zero (reg_set_in_block, last_basic_block);
2525 /* Re-cache any INSN_LIST nodes we have allocated. */
2526 clear_modify_mem_tables ();
2527 /* Some working arrays used to track first and last set in each block. */
2528 reg_avail_info = (struct reg_avail_info*)
2529 gmalloc (max_gcse_regno * sizeof (struct reg_avail_info));
2531 for (i = 0; i < max_gcse_regno; ++i)
2532 reg_avail_info[i].last_bb = NULL;
2534 FOR_EACH_BB (current_bb)
2536 rtx insn;
2537 unsigned int regno;
2538 int in_libcall_block;
2540 /* First pass over the instructions records information used to
2541 determine when registers and memory are first and last set.
2542 ??? hard-reg reg_set_in_block computation
2543 could be moved to compute_sets since they currently don't change. */
2545 for (insn = current_bb->head;
2546 insn && insn != NEXT_INSN (current_bb->end);
2547 insn = NEXT_INSN (insn))
2549 if (! INSN_P (insn))
2550 continue;
2552 if (GET_CODE (insn) == CALL_INSN)
2554 bool clobbers_all = false;
2555 #ifdef NON_SAVING_SETJMP
2556 if (NON_SAVING_SETJMP
2557 && find_reg_note (insn, REG_SETJMP, NULL_RTX))
2558 clobbers_all = true;
2559 #endif
2561 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2562 if (clobbers_all
2563 || TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
2564 record_last_reg_set_info (insn, regno);
2566 mark_call (insn);
2569 note_stores (PATTERN (insn), record_last_set_info, insn);
2572 /* Insert implicit sets in the hash table. */
2573 if (table->set_p
2574 && implicit_sets[current_bb->index] != NULL_RTX)
2575 hash_scan_set (implicit_sets[current_bb->index],
2576 current_bb->head, table);
2578 /* The next pass builds the hash table. */
2580 for (insn = current_bb->head, in_libcall_block = 0;
2581 insn && insn != NEXT_INSN (current_bb->end);
2582 insn = NEXT_INSN (insn))
2583 if (INSN_P (insn))
2585 if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
2586 in_libcall_block = 1;
2587 else if (table->set_p && find_reg_note (insn, REG_RETVAL, NULL_RTX))
2588 in_libcall_block = 0;
2589 hash_scan_insn (insn, table, in_libcall_block);
2590 if (!table->set_p && find_reg_note (insn, REG_RETVAL, NULL_RTX))
2591 in_libcall_block = 0;
2595 free (reg_avail_info);
2596 reg_avail_info = NULL;
2599 /* Allocate space for the set/expr hash TABLE.
2600 N_INSNS is the number of instructions in the function.
2601 It is used to determine the number of buckets to use.
2602 SET_P determines whether set or expression table will
2603 be created. */
2605 static void
2606 alloc_hash_table (n_insns, table, set_p)
2607 int n_insns;
2608 struct hash_table *table;
2609 int set_p;
2611 int n;
2613 table->size = n_insns / 4;
2614 if (table->size < 11)
2615 table->size = 11;
2617 /* Attempt to maintain efficient use of hash table.
2618 Making it an odd number is simplest for now.
2619 ??? Later take some measurements. */
2620 table->size |= 1;
2621 n = table->size * sizeof (struct expr *);
2622 table->table = (struct expr **) gmalloc (n);
2623 table->set_p = set_p;
2626 /* Free things allocated by alloc_hash_table. */
2628 static void
2629 free_hash_table (table)
2630 struct hash_table *table;
2632 free (table->table);
2635 /* Compute the hash TABLE for doing copy/const propagation or
2636 expression hash table. */
2638 static void
2639 compute_hash_table (table)
2640 struct hash_table *table;
2642 /* Initialize count of number of entries in hash table. */
2643 table->n_elems = 0;
2644 memset ((char *) table->table, 0,
2645 table->size * sizeof (struct expr *));
2647 compute_hash_table_work (table);
2650 /* Expression tracking support. */
2652 /* Lookup pattern PAT in the expression TABLE.
2653 The result is a pointer to the table entry, or NULL if not found. */
2655 static struct expr *
2656 lookup_expr (pat, table)
2657 rtx pat;
2658 struct hash_table *table;
2660 int do_not_record_p;
2661 unsigned int hash = hash_expr (pat, GET_MODE (pat), &do_not_record_p,
2662 table->size);
2663 struct expr *expr;
2665 if (do_not_record_p)
2666 return NULL;
2668 expr = table->table[hash];
2670 while (expr && ! expr_equiv_p (expr->expr, pat))
2671 expr = expr->next_same_hash;
2673 return expr;
2676 /* Lookup REGNO in the set TABLE. The result is a pointer to the
2677 table entry, or NULL if not found. */
2679 static struct expr *
2680 lookup_set (regno, table)
2681 unsigned int regno;
2682 struct hash_table *table;
2684 unsigned int hash = hash_set (regno, table->size);
2685 struct expr *expr;
2687 expr = table->table[hash];
2689 while (expr && REGNO (SET_DEST (expr->expr)) != regno)
2690 expr = expr->next_same_hash;
2692 return expr;
2695 /* Return the next entry for REGNO in list EXPR. */
2697 static struct expr *
2698 next_set (regno, expr)
2699 unsigned int regno;
2700 struct expr *expr;
2703 expr = expr->next_same_hash;
2704 while (expr && REGNO (SET_DEST (expr->expr)) != regno);
2706 return expr;
2709 /* Like free_INSN_LIST_list or free_EXPR_LIST_list, except that the node
2710 types may be mixed. */
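/* canon_modify_mem_list is such a mixed list: calls are recorded with
   alloc_INSN_LIST in record_last_mem_set_info, while stores are recorded
   with alloc_EXPR_LIST in canon_list_insert.  */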
2712 static void
2713 free_insn_expr_list_list (listp)
2714 rtx *listp;
2716 rtx list, next;
2718 for (list = *listp; list ; list = next)
2720 next = XEXP (list, 1);
2721 if (GET_CODE (list) == EXPR_LIST)
2722 free_EXPR_LIST_node (list);
2723 else
2724 free_INSN_LIST_node (list);
2727 *listp = NULL;
2730 /* Clear canon_modify_mem_list and modify_mem_list tables. */
2731 static void
2732 clear_modify_mem_tables ()
2734 int i;
2736 EXECUTE_IF_SET_IN_BITMAP
2737 (modify_mem_list_set, 0, i, free_INSN_LIST_list (modify_mem_list + i));
2738 bitmap_clear (modify_mem_list_set);
2740 EXECUTE_IF_SET_IN_BITMAP
2741 (canon_modify_mem_list_set, 0, i,
2742 free_insn_expr_list_list (canon_modify_mem_list + i));
2743 bitmap_clear (canon_modify_mem_list_set);
2746 /* Release memory used by modify_mem_list_set and canon_modify_mem_list_set. */
2748 static void
2749 free_modify_mem_tables ()
2751 clear_modify_mem_tables ();
2752 free (modify_mem_list);
2753 free (canon_modify_mem_list);
2754 modify_mem_list = 0;
2755 canon_modify_mem_list = 0;
2758 /* Reset tables used to keep track of what's still available [since the
2759 start of the block]. */
2761 static void
2762 reset_opr_set_tables ()
2764 /* Maintain a bitmap of which regs have been set since beginning of
2765 the block. */
2766 CLEAR_REG_SET (reg_set_bitmap);
2768 /* Also keep a record of the last instruction to modify memory.
2769 For now this is very trivial: we only record whether any memory
2770 location has been modified. */
2771 clear_modify_mem_tables ();
2774 /* Return nonzero if the operands of X are not set before INSN in
2775 INSN's basic block. */
2777 static int
2778 oprs_not_set_p (x, insn)
2779 rtx x, insn;
2781 int i, j;
2782 enum rtx_code code;
2783 const char *fmt;
2785 if (x == 0)
2786 return 1;
2788 code = GET_CODE (x);
2789 switch (code)
2791 case PC:
2792 case CC0:
2793 case CONST:
2794 case CONST_INT:
2795 case CONST_DOUBLE:
2796 case CONST_VECTOR:
2797 case SYMBOL_REF:
2798 case LABEL_REF:
2799 case ADDR_VEC:
2800 case ADDR_DIFF_VEC:
2801 return 1;
2803 case MEM:
2804 if (load_killed_in_block_p (BLOCK_FOR_INSN (insn),
2805 INSN_CUID (insn), x, 0))
2806 return 0;
2807 else
2808 return oprs_not_set_p (XEXP (x, 0), insn);
2810 case REG:
2811 return ! REGNO_REG_SET_P (reg_set_bitmap, REGNO (x));
2813 default:
2814 break;
2817 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
2819 if (fmt[i] == 'e')
2821 /* If we are about to do the last recursive call
2822 needed at this level, change it into iteration.
2823 This function is called enough to be worth it. */
2824 if (i == 0)
2825 return oprs_not_set_p (XEXP (x, i), insn);
2827 if (! oprs_not_set_p (XEXP (x, i), insn))
2828 return 0;
2830 else if (fmt[i] == 'E')
2831 for (j = 0; j < XVECLEN (x, i); j++)
2832 if (! oprs_not_set_p (XVECEXP (x, i, j), insn))
2833 return 0;
2836 return 1;
2839 /* Mark things set by a CALL. */
2841 static void
2842 mark_call (insn)
2843 rtx insn;
2845 if (! CONST_OR_PURE_CALL_P (insn))
2846 record_last_mem_set_info (insn);
2849 /* Mark things set by a SET. */
2851 static void
2852 mark_set (pat, insn)
2853 rtx pat, insn;
2855 rtx dest = SET_DEST (pat);
2857 while (GET_CODE (dest) == SUBREG
2858 || GET_CODE (dest) == ZERO_EXTRACT
2859 || GET_CODE (dest) == SIGN_EXTRACT
2860 || GET_CODE (dest) == STRICT_LOW_PART)
2861 dest = XEXP (dest, 0);
2863 if (GET_CODE (dest) == REG)
2864 SET_REGNO_REG_SET (reg_set_bitmap, REGNO (dest));
2865 else if (GET_CODE (dest) == MEM)
2866 record_last_mem_set_info (insn);
2868 if (GET_CODE (SET_SRC (pat)) == CALL)
2869 mark_call (insn);
2872 /* Record things set by a CLOBBER. */
2874 static void
2875 mark_clobber (pat, insn)
2876 rtx pat, insn;
2878 rtx clob = XEXP (pat, 0);
2880 while (GET_CODE (clob) == SUBREG || GET_CODE (clob) == STRICT_LOW_PART)
2881 clob = XEXP (clob, 0);
2883 if (GET_CODE (clob) == REG)
2884 SET_REGNO_REG_SET (reg_set_bitmap, REGNO (clob));
2885 else
2886 record_last_mem_set_info (insn);
2889 /* Record things set by INSN.
2890 This data is used by oprs_not_set_p. */
2892 static void
2893 mark_oprs_set (insn)
2894 rtx insn;
2896 rtx pat = PATTERN (insn);
2897 int i;
2899 if (GET_CODE (pat) == SET)
2900 mark_set (pat, insn);
2901 else if (GET_CODE (pat) == PARALLEL)
2902 for (i = 0; i < XVECLEN (pat, 0); i++)
2904 rtx x = XVECEXP (pat, 0, i);
2906 if (GET_CODE (x) == SET)
2907 mark_set (x, insn);
2908 else if (GET_CODE (x) == CLOBBER)
2909 mark_clobber (x, insn);
2910 else if (GET_CODE (x) == CALL)
2911 mark_call (insn);
2914 else if (GET_CODE (pat) == CLOBBER)
2915 mark_clobber (pat, insn);
2916 else if (GET_CODE (pat) == CALL)
2917 mark_call (insn);
2921 /* Classic GCSE reaching definition support. */
2923 /* Allocate reaching def variables. */
2925 static void
2926 alloc_rd_mem (n_blocks, n_insns)
2927 int n_blocks, n_insns;
2929 rd_kill = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_insns);
2930 sbitmap_vector_zero (rd_kill, n_blocks);
2932 rd_gen = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_insns);
2933 sbitmap_vector_zero (rd_gen, n_blocks);
2935 reaching_defs = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_insns);
2936 sbitmap_vector_zero (reaching_defs, n_blocks);
2938 rd_out = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_insns);
2939 sbitmap_vector_zero (rd_out, n_blocks);
2942 /* Free reaching def variables. */
2944 static void
2945 free_rd_mem ()
2947 sbitmap_vector_free (rd_kill);
2948 sbitmap_vector_free (rd_gen);
2949 sbitmap_vector_free (reaching_defs);
2950 sbitmap_vector_free (rd_out);
2953 /* Add INSN to the kills of BB. REGNO, set in BB, is killed by INSN. */
2955 static void
2956 handle_rd_kill_set (insn, regno, bb)
2957 rtx insn;
2958 int regno;
2959 basic_block bb;
2961 struct reg_set *this_reg;
2963 for (this_reg = reg_set_table[regno]; this_reg; this_reg = this_reg ->next)
2964 if (BLOCK_NUM (this_reg->insn) != BLOCK_NUM (insn))
2965 SET_BIT (rd_kill[bb->index], INSN_CUID (this_reg->insn));
2968 /* Compute the set of kill's for reaching definitions. */
2970 static void
2971 compute_kill_rd ()
2973 int cuid;
2974 unsigned int regno;
2975 int i;
2976 basic_block bb;
2978 /* For each block
2979 For each set bit in `gen' of the block (i.e. each insn which
2980 generates a definition in the block)
2981 Call the reg set by the insn corresponding to that bit regx
2982 Look at the linked list starting at reg_set_table[regx]
2983 For each setting of regx in the linked list, which is not in
2984 this block
2985 Set the bit in `kill' corresponding to that insn. */
2986 FOR_EACH_BB (bb)
2987 for (cuid = 0; cuid < max_cuid; cuid++)
2988 if (TEST_BIT (rd_gen[bb->index], cuid))
2990 rtx insn = CUID_INSN (cuid);
2991 rtx pat = PATTERN (insn);
2993 if (GET_CODE (insn) == CALL_INSN)
2995 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2996 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
2997 handle_rd_kill_set (insn, regno, bb);
3000 if (GET_CODE (pat) == PARALLEL)
3002 for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
3004 enum rtx_code code = GET_CODE (XVECEXP (pat, 0, i));
3006 if ((code == SET || code == CLOBBER)
3007 && GET_CODE (XEXP (XVECEXP (pat, 0, i), 0)) == REG)
3008 handle_rd_kill_set (insn,
3009 REGNO (XEXP (XVECEXP (pat, 0, i), 0)),
3010 bb);
3013 else if (GET_CODE (pat) == SET && GET_CODE (SET_DEST (pat)) == REG)
3014 /* Each setting of this register outside of this block
3015 must be marked in the set of kills in this block. */
3016 handle_rd_kill_set (insn, REGNO (SET_DEST (pat)), bb);
3020 /* Compute the reaching definitions as in
3021 Compilers Principles, Techniques, and Tools. Aho, Sethi, Ullman,
3022 Chapter 10. It is the same algorithm as used for computing available
3023 expressions but applied to the gens and kills of reaching definitions. */
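/* In dataflow terms, the loop below iterates

     reaching_defs[bb] = union of rd_out[pred] over all predecessors of bb
     rd_out[bb]        = rd_gen[bb] U (reaching_defs[bb] - rd_kill[bb])

   until no rd_out bitmap changes.  */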
3025 static void
3026 compute_rd ()
3028 int changed, passes;
3029 basic_block bb;
3031 FOR_EACH_BB (bb)
3032 sbitmap_copy (rd_out[bb->index] /*dst*/, rd_gen[bb->index] /*src*/);
3034 passes = 0;
3035 changed = 1;
3036 while (changed)
3038 changed = 0;
3039 FOR_EACH_BB (bb)
3041 sbitmap_union_of_preds (reaching_defs[bb->index], rd_out, bb->index);
3042 changed |= sbitmap_union_of_diff_cg (rd_out[bb->index], rd_gen[bb->index],
3043 reaching_defs[bb->index], rd_kill[bb->index]);
3045 passes++;
3048 if (gcse_file)
3049 fprintf (gcse_file, "reaching def computation: %d passes\n", passes);
3052 /* Classic GCSE available expression support. */
3054 /* Allocate memory for available expression computation. */
3056 static void
3057 alloc_avail_expr_mem (n_blocks, n_exprs)
3058 int n_blocks, n_exprs;
3060 ae_kill = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_exprs);
3061 sbitmap_vector_zero (ae_kill, n_blocks);
3063 ae_gen = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_exprs);
3064 sbitmap_vector_zero (ae_gen, n_blocks);
3066 ae_in = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_exprs);
3067 sbitmap_vector_zero (ae_in, n_blocks);
3069 ae_out = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_exprs);
3070 sbitmap_vector_zero (ae_out, n_blocks);
3073 static void
3074 free_avail_expr_mem ()
3076 sbitmap_vector_free (ae_kill);
3077 sbitmap_vector_free (ae_gen);
3078 sbitmap_vector_free (ae_in);
3079 sbitmap_vector_free (ae_out);
3082 /* Compute the set of available expressions generated in each basic block. */
3084 static void
3085 compute_ae_gen (expr_hash_table)
3086 struct hash_table *expr_hash_table;
3088 unsigned int i;
3089 struct expr *expr;
3090 struct occr *occr;
3092 /* For each recorded occurrence of each expression, set ae_gen[bb][expr].
3093 This is all we have to do because an expression is not recorded if it
3094 is not available, and the only expressions we want to work with are the
3095 ones that are recorded. */
3096 for (i = 0; i < expr_hash_table->size; i++)
3097 for (expr = expr_hash_table->table[i]; expr != 0; expr = expr->next_same_hash)
3098 for (occr = expr->avail_occr; occr != 0; occr = occr->next)
3099 SET_BIT (ae_gen[BLOCK_NUM (occr->insn)], expr->bitmap_index);
3102 /* Return nonzero if expression X is killed in BB. */
3104 static int
3105 expr_killed_p (x, bb)
3106 rtx x;
3107 basic_block bb;
3109 int i, j;
3110 enum rtx_code code;
3111 const char *fmt;
3113 if (x == 0)
3114 return 1;
3116 code = GET_CODE (x);
3117 switch (code)
3119 case REG:
3120 return TEST_BIT (reg_set_in_block[bb->index], REGNO (x));
3122 case MEM:
3123 if (load_killed_in_block_p (bb, get_max_uid () + 1, x, 0))
3124 return 1;
3125 else
3126 return expr_killed_p (XEXP (x, 0), bb);
3128 case PC:
3129 case CC0: /*FIXME*/
3130 case CONST:
3131 case CONST_INT:
3132 case CONST_DOUBLE:
3133 case CONST_VECTOR:
3134 case SYMBOL_REF:
3135 case LABEL_REF:
3136 case ADDR_VEC:
3137 case ADDR_DIFF_VEC:
3138 return 0;
3140 default:
3141 break;
3144 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
3146 if (fmt[i] == 'e')
3148 /* If we are about to do the last recursive call
3149 needed at this level, change it into iteration.
3150 This function is called enough to be worth it. */
3151 if (i == 0)
3152 return expr_killed_p (XEXP (x, i), bb);
3153 else if (expr_killed_p (XEXP (x, i), bb))
3154 return 1;
3156 else if (fmt[i] == 'E')
3157 for (j = 0; j < XVECLEN (x, i); j++)
3158 if (expr_killed_p (XVECEXP (x, i, j), bb))
3159 return 1;
3162 return 0;
3165 /* Compute the set of available expressions killed in each basic block. */
3167 static void
3168 compute_ae_kill (ae_gen, ae_kill, expr_hash_table)
3169 sbitmap *ae_gen, *ae_kill;
3170 struct hash_table *expr_hash_table;
3172 basic_block bb;
3173 unsigned int i;
3174 struct expr *expr;
3176 FOR_EACH_BB (bb)
3177 for (i = 0; i < expr_hash_table->size; i++)
3178 for (expr = expr_hash_table->table[i]; expr; expr = expr->next_same_hash)
3180 /* Skip EXPR if generated in this block. */
3181 if (TEST_BIT (ae_gen[bb->index], expr->bitmap_index))
3182 continue;
3184 if (expr_killed_p (expr->expr, bb))
3185 SET_BIT (ae_kill[bb->index], expr->bitmap_index);
3189 /* Actually perform the Classic GCSE optimizations. */
3191 /* Return nonzero if occurrence OCCR of expression EXPR reaches block BB.
3193 CHECK_SELF_LOOP is nonzero if we should consider a block reaching itself
3194 as a positive reach. We want to do this when there are two computations
3195 of the expression in the block.
3197 VISITED is a pointer to a working buffer for tracking which BB's have
3198 been visited. It is NULL for the top-level call.
3200 We treat reaching expressions that go through blocks containing the same
3201 reaching expression as "not reaching". E.g. if EXPR is generated in blocks
3202 2 and 3, INSN is in block 4, and 2->3->4, we treat the expression in block
3203 2 as not reaching. The intent is to improve the probability of finding
3204 only one reaching expression and to reduce register lifetimes by picking
3205 the closest such expression. */
3207 static int
3208 expr_reaches_here_p_work (occr, expr, bb, check_self_loop, visited)
3209 struct occr *occr;
3210 struct expr *expr;
3211 basic_block bb;
3212 int check_self_loop;
3213 char *visited;
3215 edge pred;
3217 for (pred = bb->pred; pred != NULL; pred = pred->pred_next)
3219 basic_block pred_bb = pred->src;
3221 if (visited[pred_bb->index])
3222 /* This predecessor has already been visited. Nothing to do. */
3224 else if (pred_bb == bb)
3226 /* BB loops on itself. */
3227 if (check_self_loop
3228 && TEST_BIT (ae_gen[pred_bb->index], expr->bitmap_index)
3229 && BLOCK_NUM (occr->insn) == pred_bb->index)
3230 return 1;
3232 visited[pred_bb->index] = 1;
3235 /* Ignore this predecessor if it kills the expression. */
3236 else if (TEST_BIT (ae_kill[pred_bb->index], expr->bitmap_index))
3237 visited[pred_bb->index] = 1;
3239 /* Does this predecessor generate this expression? */
3240 else if (TEST_BIT (ae_gen[pred_bb->index], expr->bitmap_index))
3242 /* Is this the occurrence we're looking for?
3243 Note that there's only one generating occurrence per block
3244 so we just need to check the block number. */
3245 if (BLOCK_NUM (occr->insn) == pred_bb->index)
3246 return 1;
3248 visited[pred_bb->index] = 1;
3251 /* Neither gen nor kill. */
3252 else
3254 visited[pred_bb->index] = 1;
3255 if (expr_reaches_here_p_work (occr, expr, pred_bb, check_self_loop,
3256 visited))
3258 return 1;
3262 /* All paths have been checked. */
3263 return 0;
3266 /* This wrapper for expr_reaches_here_p_work() is to ensure that any
3267 memory allocated for that function is returned. */
3269 static int
3270 expr_reaches_here_p (occr, expr, bb, check_self_loop)
3271 struct occr *occr;
3272 struct expr *expr;
3273 basic_block bb;
3274 int check_self_loop;
3276 int rval;
3277 char *visited = (char *) xcalloc (last_basic_block, 1);
3279 rval = expr_reaches_here_p_work (occr, expr, bb, check_self_loop, visited);
3281 free (visited);
3282 return rval;
3285 /* Return the instruction that computes EXPR that reaches INSN's basic block.
3286 If there is more than one such instruction, return NULL.
3288 Called only by handle_avail_expr. */
3290 static rtx
3291 computing_insn (expr, insn)
3292 struct expr *expr;
3293 rtx insn;
3295 basic_block bb = BLOCK_FOR_INSN (insn);
3297 if (expr->avail_occr->next == NULL)
3299 if (BLOCK_FOR_INSN (expr->avail_occr->insn) == bb)
3300 /* The available expression is actually itself
3301 (i.e. a loop in the flow graph) so do nothing. */
3302 return NULL;
3304 /* (FIXME) Case where we found a pattern that was created by
3305 a substitution that took place. */
3306 return expr->avail_occr->insn;
3308 else
3310 /* Pattern is computed more than once.
3311 Search backwards from this insn to see how many of these
3312 computations actually reach this insn. */
3313 struct occr *occr;
3314 rtx insn_computes_expr = NULL;
3315 int can_reach = 0;
3317 for (occr = expr->avail_occr; occr != NULL; occr = occr->next)
3319 if (BLOCK_FOR_INSN (occr->insn) == bb)
3321 /* The expression is generated in this block.
3322 The only time we care about this is when the expression
3323 is generated later in the block [and thus there's a loop].
3324 We let the normal cse pass handle the other cases. */
3325 if (INSN_CUID (insn) < INSN_CUID (occr->insn)
3326 && expr_reaches_here_p (occr, expr, bb, 1))
3328 can_reach++;
3329 if (can_reach > 1)
3330 return NULL;
3332 insn_computes_expr = occr->insn;
3335 else if (expr_reaches_here_p (occr, expr, bb, 0))
3337 can_reach++;
3338 if (can_reach > 1)
3339 return NULL;
3341 insn_computes_expr = occr->insn;
3345 if (insn_computes_expr == NULL)
3346 abort ();
3348 return insn_computes_expr;
3352 /* Return nonzero if the definition in DEF_INSN can reach INSN.
3353 Only called by can_disregard_other_sets. */
3355 static int
3356 def_reaches_here_p (insn, def_insn)
3357 rtx insn, def_insn;
3359 rtx reg;
3361 if (TEST_BIT (reaching_defs[BLOCK_NUM (insn)], INSN_CUID (def_insn)))
3362 return 1;
3364 if (BLOCK_NUM (insn) == BLOCK_NUM (def_insn))
3366 if (INSN_CUID (def_insn) < INSN_CUID (insn))
3368 if (GET_CODE (PATTERN (def_insn)) == PARALLEL)
3369 return 1;
3370 else if (GET_CODE (PATTERN (def_insn)) == CLOBBER)
3371 reg = XEXP (PATTERN (def_insn), 0);
3372 else if (GET_CODE (PATTERN (def_insn)) == SET)
3373 reg = SET_DEST (PATTERN (def_insn));
3374 else
3375 abort ();
3377 return ! reg_set_between_p (reg, NEXT_INSN (def_insn), insn);
3379 else
3380 return 0;
3383 return 0;
3386 /* Return nonzero if *ADDR_THIS_REG can only have one value at INSN. The
3387 value returned is the number of definitions that reach INSN. Returning a
3388 value of zero means that [maybe] more than one definition reaches INSN and
3389 the caller can't perform whatever optimization it is trying to do; i.e. it is
3390 always safe to return zero. */
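/* As a side effect, whenever a nonzero count is returned, *ADDR_THIS_REG
   is left pointing at the last reaching definition that was examined.  */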
3392 static int
3393 can_disregard_other_sets (addr_this_reg, insn, for_combine)
3394 struct reg_set **addr_this_reg;
3395 rtx insn;
3396 int for_combine;
3398 int number_of_reaching_defs = 0;
3399 struct reg_set *this_reg;
3401 for (this_reg = *addr_this_reg; this_reg != 0; this_reg = this_reg->next)
3402 if (def_reaches_here_p (insn, this_reg->insn))
3404 number_of_reaching_defs++;
3405 /* Ignore parallels for now. */
3406 if (GET_CODE (PATTERN (this_reg->insn)) == PARALLEL)
3407 return 0;
3409 if (!for_combine
3410 && (GET_CODE (PATTERN (this_reg->insn)) == CLOBBER
3411 || ! rtx_equal_p (SET_SRC (PATTERN (this_reg->insn)),
3412 SET_SRC (PATTERN (insn)))))
3413 /* A setting of the reg to a different value reaches INSN. */
3414 return 0;
3416 if (number_of_reaching_defs > 1)
3418 /* If in this setting the value the register is being set to is
3419 equal to the previous value the register was set to, and this
3420 setting reaches the insn we are trying to do the substitution
3421 on, then we are OK. */
3422 if (GET_CODE (PATTERN (this_reg->insn)) == CLOBBER)
3423 return 0;
3424 else if (! rtx_equal_p (SET_SRC (PATTERN (this_reg->insn)),
3425 SET_SRC (PATTERN (insn))))
3426 return 0;
3429 *addr_this_reg = this_reg;
3432 return number_of_reaching_defs;
3435 /* Expression computed by insn is available and the substitution is legal,
3436 so try to perform the substitution.
3438 The result is nonzero if any changes were made. */
3440 static int
3441 handle_avail_expr (insn, expr)
3442 rtx insn;
3443 struct expr *expr;
3445 rtx pat, insn_computes_expr, expr_set;
3446 rtx to;
3447 struct reg_set *this_reg;
3448 int found_setting, use_src;
3449 int changed = 0;
3451 /* We only handle the case where one computation of the expression
3452 reaches this instruction. */
3453 insn_computes_expr = computing_insn (expr, insn);
3454 if (insn_computes_expr == NULL)
3455 return 0;
3456 expr_set = single_set (insn_computes_expr);
3457 if (!expr_set)
3458 abort ();
3460 found_setting = 0;
3461 use_src = 0;
3463 /* At this point we know only one computation of EXPR outside of this
3464 block reaches this insn. Now try to find a register that the
3465 expression is computed into. */
3466 if (GET_CODE (SET_SRC (expr_set)) == REG)
3468 /* This is the case when the available expression that reaches
3469 here has already been handled as an available expression. */
3470 unsigned int regnum_for_replacing
3471 = REGNO (SET_SRC (expr_set));
3473 /* If the register was created by GCSE we can't use `reg_set_table',
3474 however we know it's set only once. */
3475 if (regnum_for_replacing >= max_gcse_regno
3476 /* If the register the expression is computed into is set only once,
3477 or only one set reaches this insn, we can use it. */
3478 || (((this_reg = reg_set_table[regnum_for_replacing]),
3479 this_reg->next == NULL)
3480 || can_disregard_other_sets (&this_reg, insn, 0)))
3482 use_src = 1;
3483 found_setting = 1;
3487 if (!found_setting)
3489 unsigned int regnum_for_replacing
3490 = REGNO (SET_DEST (expr_set));
3492 /* This shouldn't happen. */
3493 if (regnum_for_replacing >= max_gcse_regno)
3494 abort ();
3496 this_reg = reg_set_table[regnum_for_replacing];
3498 /* If the register the expression is computed into is set only once,
3499 or only one set reaches this insn, use it. */
3500 if (this_reg->next == NULL
3501 || can_disregard_other_sets (&this_reg, insn, 0))
3502 found_setting = 1;
3505 if (found_setting)
3507 pat = PATTERN (insn);
3508 if (use_src)
3509 to = SET_SRC (expr_set);
3510 else
3511 to = SET_DEST (expr_set);
3512 changed = validate_change (insn, &SET_SRC (pat), to, 0);
3514 /* We should be able to ignore the return code from validate_change but
3515 to play it safe we check. */
3516 if (changed)
3518 gcse_subst_count++;
3519 if (gcse_file != NULL)
3521 fprintf (gcse_file, "GCSE: Replacing the source in insn %d with",
3522 INSN_UID (insn));
3523 fprintf (gcse_file, " reg %d %s insn %d\n",
3524 REGNO (to), use_src ? "from" : "set in",
3525 INSN_UID (insn_computes_expr));
3530 /* The register that the expr is computed into is set more than once. */
3531 else if (1 /*expensive_op(this_pattern->op) && do_expensive_gcse)*/)
3533 /* Insert an insn after INSNX that copies the reg set in INSNX
3534 into a new pseudo register; call this new register REGN.
3535 From INSNB until the end of the basic block, or until REGB is set,
3536 replace all uses of REGB with REGN. */
3537 rtx new_insn;
3539 to = gen_reg_rtx (GET_MODE (SET_DEST (expr_set)));
3541 /* Generate the new insn. */
3542 /* ??? If the change fails, we return 0, even though we created
3543 an insn. I think this is ok. */
3544 new_insn
3545 = emit_insn_after (gen_rtx_SET (VOIDmode, to,
3546 SET_DEST (expr_set)),
3547 insn_computes_expr);
3549 /* Keep register set table up to date. */
3550 record_one_set (REGNO (to), new_insn);
3552 gcse_create_count++;
3553 if (gcse_file != NULL)
3555 fprintf (gcse_file, "GCSE: Creating insn %d to copy value of reg %d",
3556 INSN_UID (NEXT_INSN (insn_computes_expr)),
3557 REGNO (SET_SRC (PATTERN (NEXT_INSN (insn_computes_expr)))));
3558 fprintf (gcse_file, ", computed in insn %d,\n",
3559 INSN_UID (insn_computes_expr));
3560 fprintf (gcse_file, " into newly allocated reg %d\n",
3561 REGNO (to));
3564 pat = PATTERN (insn);
3566 /* Do register replacement for INSN. */
3567 changed = validate_change (insn, &SET_SRC (pat),
3568 SET_DEST (PATTERN
3569 (NEXT_INSN (insn_computes_expr))),
3572 /* We should be able to ignore the return code from validate_change but
3573 to play it safe we check. */
3574 if (changed)
3576 gcse_subst_count++;
3577 if (gcse_file != NULL)
3579 fprintf (gcse_file,
3580 "GCSE: Replacing the source in insn %d with reg %d ",
3581 INSN_UID (insn),
3582 REGNO (SET_DEST (PATTERN (NEXT_INSN
3583 (insn_computes_expr)))));
3584 fprintf (gcse_file, "set in insn %d\n",
3585 INSN_UID (insn_computes_expr));
3590 return changed;
3593 /* Perform classic GCSE. This is called by one_classic_gcse_pass after all
3594 the dataflow analysis has been done.
3596 The result is nonzero if a change was made. */
3598 static int
3599 classic_gcse ()
3601 int changed;
3602 rtx insn;
3603 basic_block bb;
3605 /* Note we start at block 1. */
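/* (Presumably the first real block is skipped because, barring a back
   edge to it, its only predecessor is the entry block, so nothing can be
   available at its start.)  */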
3607 if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
3608 return 0;
3610 changed = 0;
3611 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb->next_bb, EXIT_BLOCK_PTR, next_bb)
3613 /* Reset tables used to keep track of what's still valid [since the
3614 start of the block]. */
3615 reset_opr_set_tables ();
3617 for (insn = bb->head;
3618 insn != NULL && insn != NEXT_INSN (bb->end);
3619 insn = NEXT_INSN (insn))
3621 /* Is insn of form (set (pseudo-reg) ...)? */
3622 if (GET_CODE (insn) == INSN
3623 && GET_CODE (PATTERN (insn)) == SET
3624 && GET_CODE (SET_DEST (PATTERN (insn))) == REG
3625 && REGNO (SET_DEST (PATTERN (insn))) >= FIRST_PSEUDO_REGISTER)
3627 rtx pat = PATTERN (insn);
3628 rtx src = SET_SRC (pat);
3629 struct expr *expr;
3631 if (want_to_gcse_p (src)
3632 /* Is the expression recorded? */
3633 && ((expr = lookup_expr (src, &expr_hash_table)) != NULL)
3634 /* Is the expression available [at the start of the
3635 block]? */
3636 && TEST_BIT (ae_in[bb->index], expr->bitmap_index)
3637 /* Are the operands unchanged since the start of the
3638 block? */
3639 && oprs_not_set_p (src, insn))
3640 changed |= handle_avail_expr (insn, expr);
3643 /* Keep track of everything modified by this insn. */
3644 /* ??? Need to be careful w.r.t. mods done to INSN. */
3645 if (INSN_P (insn))
3646 mark_oprs_set (insn);
3650 return changed;
3653 /* Top level routine to perform one classic GCSE pass.
3655 Return nonzero if a change was made. */
3657 static int
3658 one_classic_gcse_pass (pass)
3659 int pass;
3661 int changed = 0;
3663 gcse_subst_count = 0;
3664 gcse_create_count = 0;
3666 alloc_hash_table (max_cuid, &expr_hash_table, 0);
3667 alloc_rd_mem (last_basic_block, max_cuid);
3668 compute_hash_table (&expr_hash_table);
3669 if (gcse_file)
3670 dump_hash_table (gcse_file, "Expression", &expr_hash_table);
3672 if (expr_hash_table.n_elems > 0)
3674 compute_kill_rd ();
3675 compute_rd ();
3676 alloc_avail_expr_mem (last_basic_block, expr_hash_table.n_elems);
3677 compute_ae_gen (&expr_hash_table);
3678 compute_ae_kill (ae_gen, ae_kill, &expr_hash_table);
3679 compute_available (ae_gen, ae_kill, ae_out, ae_in);
3680 changed = classic_gcse ();
3681 free_avail_expr_mem ();
3684 free_rd_mem ();
3685 free_hash_table (&expr_hash_table);
3687 if (gcse_file)
3689 fprintf (gcse_file, "\n");
3690 fprintf (gcse_file, "GCSE of %s, pass %d: %d bytes needed, %d substs,",
3691 current_function_name, pass, bytes_used, gcse_subst_count);
3692 fprintf (gcse_file, "%d insns created\n", gcse_create_count);
3695 return changed;
3698 /* Compute copy/constant propagation working variables. */
3700 /* Local properties of assignments. */
3701 static sbitmap *cprop_pavloc;
3702 static sbitmap *cprop_absaltered;
3704 /* Global properties of assignments (computed from the local properties). */
3705 static sbitmap *cprop_avin;
3706 static sbitmap *cprop_avout;
3708 /* Allocate vars used for copy/const propagation. N_BLOCKS is the number of
3709 basic blocks. N_SETS is the number of sets. */
3711 static void
3712 alloc_cprop_mem (n_blocks, n_sets)
3713 int n_blocks, n_sets;
3715 cprop_pavloc = sbitmap_vector_alloc (n_blocks, n_sets);
3716 cprop_absaltered = sbitmap_vector_alloc (n_blocks, n_sets);
3718 cprop_avin = sbitmap_vector_alloc (n_blocks, n_sets);
3719 cprop_avout = sbitmap_vector_alloc (n_blocks, n_sets);
3722 /* Free vars used by copy/const propagation. */
3724 static void
3725 free_cprop_mem ()
3727 sbitmap_vector_free (cprop_pavloc);
3728 sbitmap_vector_free (cprop_absaltered);
3729 sbitmap_vector_free (cprop_avin);
3730 sbitmap_vector_free (cprop_avout);
3733 /* For each block, compute whether X is transparent. X is either an
3734 expression or an assignment [though we don't care which, for this context
3735 an assignment is treated as an expression]. For each block where an
3736 element of X is modified, set (SET_P == 1) or reset (SET_P == 0) the INDX
3737 bit in BMAP. */
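/* "Transparent" here means that no operand of X (no register it uses and
   no memory location it reads) can be modified in the block.  */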
3739 static void
3740 compute_transp (x, indx, bmap, set_p)
3741 rtx x;
3742 int indx;
3743 sbitmap *bmap;
3744 int set_p;
3746 int i, j;
3747 basic_block bb;
3748 enum rtx_code code;
3749 reg_set *r;
3750 const char *fmt;
3752 /* repeat is used to turn tail-recursion into iteration since GCC
3753 can't do it when there's no return value. */
3754 repeat:
3756 if (x == 0)
3757 return;
3759 code = GET_CODE (x);
3760 switch (code)
3762 case REG:
3763 if (set_p)
3765 if (REGNO (x) < FIRST_PSEUDO_REGISTER)
3767 FOR_EACH_BB (bb)
3768 if (TEST_BIT (reg_set_in_block[bb->index], REGNO (x)))
3769 SET_BIT (bmap[bb->index], indx);
3771 else
3773 for (r = reg_set_table[REGNO (x)]; r != NULL; r = r->next)
3774 SET_BIT (bmap[BLOCK_NUM (r->insn)], indx);
3777 else
3779 if (REGNO (x) < FIRST_PSEUDO_REGISTER)
3781 FOR_EACH_BB (bb)
3782 if (TEST_BIT (reg_set_in_block[bb->index], REGNO (x)))
3783 RESET_BIT (bmap[bb->index], indx);
3785 else
3787 for (r = reg_set_table[REGNO (x)]; r != NULL; r = r->next)
3788 RESET_BIT (bmap[BLOCK_NUM (r->insn)], indx);
3792 return;
3794 case MEM:
3795 FOR_EACH_BB (bb)
3797 rtx list_entry = canon_modify_mem_list[bb->index];
3799 while (list_entry)
3801 rtx dest, dest_addr;
3803 if (GET_CODE (XEXP (list_entry, 0)) == CALL_INSN)
3805 if (set_p)
3806 SET_BIT (bmap[bb->index], indx);
3807 else
3808 RESET_BIT (bmap[bb->index], indx);
3809 break;
3811 /* LIST_ENTRY must be an INSN of some kind that sets memory.
3812 Examine each hunk of memory that is modified. */
3814 dest = XEXP (list_entry, 0);
3815 list_entry = XEXP (list_entry, 1);
3816 dest_addr = XEXP (list_entry, 0);
3818 if (canon_true_dependence (dest, GET_MODE (dest), dest_addr,
3819 x, rtx_addr_varies_p))
3821 if (set_p)
3822 SET_BIT (bmap[bb->index], indx);
3823 else
3824 RESET_BIT (bmap[bb->index], indx);
3825 break;
3827 list_entry = XEXP (list_entry, 1);
3831 x = XEXP (x, 0);
3832 goto repeat;
3834 case PC:
3835 case CC0: /*FIXME*/
3836 case CONST:
3837 case CONST_INT:
3838 case CONST_DOUBLE:
3839 case CONST_VECTOR:
3840 case SYMBOL_REF:
3841 case LABEL_REF:
3842 case ADDR_VEC:
3843 case ADDR_DIFF_VEC:
3844 return;
3846 default:
3847 break;
3850 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
3852 if (fmt[i] == 'e')
3854 /* If we are about to do the last recursive call
3855 needed at this level, change it into iteration.
3856 This function is called enough to be worth it. */
3857 if (i == 0)
3859 x = XEXP (x, i);
3860 goto repeat;
3863 compute_transp (XEXP (x, i), indx, bmap, set_p);
3865 else if (fmt[i] == 'E')
3866 for (j = 0; j < XVECLEN (x, i); j++)
3867 compute_transp (XVECEXP (x, i, j), indx, bmap, set_p);
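/* Illustrative sketch only, not part of the pass: how the per-block
   transparency that compute_transp records can be pictured with plain
   bit masks.  The register numbers and block data below are invented
   for the example; the real pass uses sbitmaps indexed by expression
   (INDX) rather than one word per block.  */
#if 0
#include <stdio.h>

int
main (void)
{
  /* One word per basic block: bit N is set if (reg N) is modified
     somewhere in that block (cf. reg_set_in_block).  */
  unsigned int modified_in_block[3] = { 0x02, 0x00, 0x08 };
  /* Registers used by the expression, e.g. (plus (reg 1) (reg 3)).  */
  unsigned int expr_regs = (1u << 1) | (1u << 3);
  int bb;

  for (bb = 0; bb < 3; bb++)
    printf ("block %d: expression is %stransparent\n", bb,
	    (modified_in_block[bb] & expr_regs) ? "not " : "");
  return 0;
}
#endif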
3871 /* Top level routine to do the dataflow analysis needed by copy/const
3872 propagation. */
3874 static void
3875 compute_cprop_data ()
3877 compute_local_properties (cprop_absaltered, cprop_pavloc, NULL, &set_hash_table);
3878 compute_available (cprop_pavloc, cprop_absaltered,
3879 cprop_avout, cprop_avin);
3882 /* Copy/constant propagation. */
3884 /* Maximum number of register uses in an insn that we handle. */
3885 #define MAX_USES 8
3887 /* Table of uses found in an insn.
3888 Allocated statically to avoid alloc/free complexity and overhead. */
3889 static struct reg_use reg_use_table[MAX_USES];
3891 /* Index into `reg_use_table' while building it. */
3892 static int reg_use_count;
3894 /* Set up a list of register numbers used in INSN. The found uses are stored
3895 in `reg_use_table'. `reg_use_count' is initialized to zero before entry,
3896 and contains the number of uses in the table upon exit.
3898 ??? If a register appears multiple times we will record it multiple times.
3899 This doesn't hurt anything but it will slow things down. */
3901 static void
3902 find_used_regs (xptr, data)
3903 rtx *xptr;
3904 void *data ATTRIBUTE_UNUSED;
3906 int i, j;
3907 enum rtx_code code;
3908 const char *fmt;
3909 rtx x = *xptr;
3911 /* repeat is used to turn tail-recursion into iteration since GCC
3912 can't do it when there's no return value. */
3913 repeat:
3914 if (x == 0)
3915 return;
3917 code = GET_CODE (x);
3918 if (REG_P (x))
3920 if (reg_use_count == MAX_USES)
3921 return;
3923 reg_use_table[reg_use_count].reg_rtx = x;
3924 reg_use_count++;
3927 /* Recursively scan the operands of this expression. */
3929 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
3931 if (fmt[i] == 'e')
3933 /* If we are about to do the last recursive call
3934 needed at this level, change it into iteration.
3935 This function is called enough to be worth it. */
3936 if (i == 0)
3938 x = XEXP (x, 0);
3939 goto repeat;
3942 find_used_regs (&XEXP (x, i), data);
3944 else if (fmt[i] == 'E')
3945 for (j = 0; j < XVECLEN (x, i); j++)
3946 find_used_regs (&XVECEXP (x, i, j), data);
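/* Illustrative sketch only, not part of the pass: the "repeat" trick
   used above (and in compute_transp) to turn the last recursive call
   into iteration.  The toy tree type below is invented; the point is
   that the final child is handled by updating the local variable and
   jumping back instead of recursing.  */
#if 0
#include <stdio.h>

struct node { int value; struct node *left, *right; };

static void
visit (struct node *n)
{
 repeat:
  if (n == 0)
    return;
  printf ("%d\n", n->value);
  visit (n->left);		/* Genuine recursion for all but the last child.  */
  n = n->right;			/* Last child: iterate instead of recursing.  */
  goto repeat;
}

int
main (void)
{
  struct node b = { 2, 0, 0 }, a = { 1, &b, 0 };
  visit (&a);
  return 0;
}
#endif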
3950 /* Try to replace all non-SET_DEST occurrences of FROM in INSN with TO.
3951 Returns nonzero if successful. */
3953 static int
3954 try_replace_reg (from, to, insn)
3955 rtx from, to, insn;
3957 rtx note = find_reg_equal_equiv_note (insn);
3958 rtx src = 0;
3959 int success = 0;
3960 rtx set = single_set (insn);
3962 validate_replace_src_group (from, to, insn);
3963 if (num_changes_pending () && apply_change_group ())
3964 success = 1;
3966 /* Try to simplify SET_SRC if we have substituted a constant. */
3967 if (success && set && CONSTANT_P (to))
3969 src = simplify_rtx (SET_SRC (set));
3971 if (src)
3972 validate_change (insn, &SET_SRC (set), src, 0);
3975 if (!success && set && reg_mentioned_p (from, SET_SRC (set)))
3977 /* If the above failed and this is a single set, try to simplify the source of
3978 the set given our substitution. We could perhaps try this for multiple
3979 SETs, but it probably won't buy us anything. */
3980 src = simplify_replace_rtx (SET_SRC (set), from, to);
3982 if (!rtx_equal_p (src, SET_SRC (set))
3983 && validate_change (insn, &SET_SRC (set), src, 0))
3984 success = 1;
3986 /* If we've failed to do replacement, have a single SET, and don't already
3987 have a note, add a REG_EQUAL note to not lose information. */
3988 if (!success && note == 0 && set != 0)
3989 note = set_unique_reg_note (insn, REG_EQUAL, copy_rtx (src));
3992 /* If there is already a NOTE, update the expression in it with our
3993 replacement. */
3994 else if (note != 0)
3995 XEXP (note, 0) = simplify_replace_rtx (XEXP (note, 0), from, to);
3997 /* A REG_EQUAL note may get simplified into a plain register.
3998 We don't allow that; remove the note. This code ought
3999 not to be reached, because the code above ought to synthesize a
4000 reg-reg move, but be on the safe side. */
4001 if (note && REG_P (XEXP (note, 0)))
4002 remove_note (insn, note);
4004 return success;
4007 /* Find a set of REGNOs that are available on entry to INSN's block. Returns
4008 NULL if no such set is found. */
4010 static struct expr *
4011 find_avail_set (regno, insn)
4012 int regno;
4013 rtx insn;
4015 /* SET1 contains the last set found that can be returned to the caller for
4016 use in a substitution. */
4017 struct expr *set1 = 0;
4019 /* Loops are not possible here. To get a loop we would need two sets
4020 available at the start of the block containing INSN, i.e. we would
4021 need two sets like this available at the start of the block:
4023 (set (reg X) (reg Y))
4024 (set (reg Y) (reg X))
4026 This cannot happen since the set of (reg Y) would have killed the
4027 set of (reg X), making it unavailable at the start of this block. */
4028 while (1)
4030 rtx src;
4031 struct expr *set = lookup_set (regno, &set_hash_table);
4033 /* Find a set that is available at the start of the block
4034 which contains INSN. */
4035 while (set)
4037 if (TEST_BIT (cprop_avin[BLOCK_NUM (insn)], set->bitmap_index))
4038 break;
4039 set = next_set (regno, set);
4042 /* If no available set was found we've reached the end of the
4043 (possibly empty) copy chain. */
4044 if (set == 0)
4045 break;
4047 if (GET_CODE (set->expr) != SET)
4048 abort ();
4050 src = SET_SRC (set->expr);
4052 /* We know the set is available.
4053 Now check that SRC is ANTLOC (i.e. none of the source operands
4054 have changed since the start of the block).
4056 If the source operand changed, we may still use it for the next
4057 iteration of this loop, but we may not use it for substitutions. */
4059 if (gcse_constant_p (src) || oprs_not_set_p (src, insn))
4060 set1 = set;
4062 /* If the source of the set is anything except a register, then
4063 we have reached the end of the copy chain. */
4064 if (GET_CODE (src) != REG)
4065 break;
4067 /* Follow the copy chain, i.e. start another iteration of the loop
4068 and see if we have an available copy into SRC. */
4069 regno = REGNO (src);
4072 /* SET1 holds the last set that was available and anticipatable at
4073 INSN. */
4074 return set1;
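/* Illustrative sketch only, not part of the pass: following a copy
   chain as described above.  copy_of[] is an invented table mapping a
   register to the register it was copied from (or -1), standing in for
   the SET hash table lookups; the loop stops at the first non-register
   source, just as find_avail_set stops when SET_SRC is not a REG.  */
#if 0
#include <stdio.h>

int
main (void)
{
  /* reg 5 = reg 4, reg 4 = reg 2, reg 2 = <something else>.  */
  int copy_of[8] = { -1, -1, -1, -1, 2, 4, -1, -1 };
  int regno = 5;

  while (copy_of[regno] != -1)
    {
      printf ("reg %d is a copy of reg %d\n", regno, copy_of[regno]);
      regno = copy_of[regno];
    }
  printf ("chain ends at reg %d\n", regno);
  return 0;
}
#endif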
4077 /* Subroutine of cprop_insn that tries to propagate constants into
4078 JUMP_INSNS. JUMP must be a conditional jump. If SETCC is non-NULL
4079 it is the instruction that immediately precedes JUMP, and must be a
4080 single SET of a register. FROM is what we will try to replace,
4081 SRC is the constant we will try to substitute for it. Returns nonzero
4082 if a change was made. */
4084 static int
4085 cprop_jump (bb, setcc, jump, from, src)
4086 basic_block bb;
4087 rtx setcc;
4088 rtx jump;
4089 rtx from;
4090 rtx src;
4092 rtx new, new_set;
4093 rtx set = pc_set (jump);
4095 /* First substitute the SETCC condition into the SET_SRC of the JUMP,
4096 then substitute the constant being propagated into this expanded JUMP. */
4097 if (setcc != NULL
4098 && !modified_between_p (from, setcc, jump)
4099 && !modified_between_p (src, setcc, jump))
4101 rtx setcc_set = single_set (setcc);
4102 new_set = simplify_replace_rtx (SET_SRC (set),
4103 SET_DEST (setcc_set),
4104 SET_SRC (setcc_set));
4106 else
4107 new_set = set;
4109 new = simplify_replace_rtx (new_set, from, src);
4111 /* If no simplification can be made, then try the next
4112 register. */
4113 if (rtx_equal_p (new, new_set) || rtx_equal_p (new, SET_SRC (set)))
4114 return 0;
4116 /* If this is now a no-op delete it, otherwise this must be a valid insn. */
4117 if (new == pc_rtx)
4118 delete_insn (jump);
4119 else
4121 /* Ensure the value computed inside the jump insn is equivalent
4122 to the one computed by setcc. */
4123 if (setcc
4124 && modified_in_p (new, setcc))
4125 return 0;
4126 if (! validate_change (jump, &SET_SRC (set), new, 0))
4127 return 0;
4129 /* If this has turned into an unconditional jump,
4130 then put a barrier after it so that the unreachable
4131 code will be deleted. */
4132 if (GET_CODE (SET_SRC (set)) == LABEL_REF)
4133 emit_barrier_after (jump);
4136 #ifdef HAVE_cc0
4137 /* Delete the cc0 setter. */
4138 if (setcc != NULL && CC0_P (SET_DEST (single_set (setcc))))
4139 delete_insn (setcc);
4140 #endif
4142 run_jump_opt_after_gcse = 1;
4144 const_prop_count++;
4145 if (gcse_file != NULL)
4147 fprintf (gcse_file,
4148 "CONST-PROP: Replacing reg %d in jump_insn %d with constant ",
4149 REGNO (from), INSN_UID (jump));
4150 print_rtl (gcse_file, src);
4151 fprintf (gcse_file, "\n");
4153 purge_dead_edges (bb);
4155 return 1;
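/* Illustrative sketch only, not part of the pass: what constant
   propagation into a conditional jump accomplishes.  Once the
   condition operand is known to be a constant, the comparison folds
   and the jump becomes either unconditional or a no-op (cf. the pc_rtx
   and LABEL_REF cases in cprop_jump above).  The values are invented.  */
#if 0
#include <stdio.h>

int
main (void)
{
  int cond_reg = 2;		/* Propagated constant for the condition register.  */

  if (cond_reg == 2)		/* Folds to "always taken": jump becomes unconditional.  */
    printf ("taken branch\n");
  else
    printf ("unreachable\n");	/* Dead code, removed once the barrier is emitted.  */
  return 0;
}
#endif

/* Try to propagate constant or copy TO into the uses of FROM in INSN;
   if ALTER_JUMPS, also handle a conditional jump that follows INSN or
   is INSN itself.  Returns nonzero if a change was made. */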
4158 static bool
4159 constprop_register (insn, from, to, alter_jumps)
4160 rtx insn;
4161 rtx from;
4162 rtx to;
4163 int alter_jumps;
4165 rtx sset;
4167 /* Check for reg or cc0 setting instructions followed by
4168 conditional branch instructions first. */
4169 if (alter_jumps
4170 && (sset = single_set (insn)) != NULL
4171 && NEXT_INSN (insn)
4172 && any_condjump_p (NEXT_INSN (insn)) && onlyjump_p (NEXT_INSN (insn)))
4174 rtx dest = SET_DEST (sset);
4175 if ((REG_P (dest) || CC0_P (dest))
4176 && cprop_jump (BLOCK_FOR_INSN (insn), insn, NEXT_INSN (insn), from, to))
4177 return 1;
4180 /* Handle normal insns next. */
4181 if (GET_CODE (insn) == INSN
4182 && try_replace_reg (from, to, insn))
4183 return 1;
4185 /* Try to propagate a CONST_INT into a conditional jump.
4186 We're pretty specific about what we will handle in this
4187 code, we can extend this as necessary over time.
4189 Right now the insn in question must look like
4190 (set (pc) (if_then_else ...)) */
4191 else if (alter_jumps && any_condjump_p (insn) && onlyjump_p (insn))
4192 return cprop_jump (BLOCK_FOR_INSN (insn), NULL, insn, from, to);
4193 return 0;
4196 /* Perform constant and copy propagation on INSN.
4197 The result is nonzero if a change was made. */
4199 static int
4200 cprop_insn (insn, alter_jumps)
4201 rtx insn;
4202 int alter_jumps;
4204 struct reg_use *reg_used;
4205 int changed = 0;
4206 rtx note;
4208 if (!INSN_P (insn))
4209 return 0;
4211 reg_use_count = 0;
4212 note_uses (&PATTERN (insn), find_used_regs, NULL);
4214 note = find_reg_equal_equiv_note (insn);
4216 /* We may win even when propagating constants into notes. */
4217 if (note)
4218 find_used_regs (&XEXP (note, 0), NULL);
4220 for (reg_used = &reg_use_table[0]; reg_use_count > 0;
4221 reg_used++, reg_use_count--)
4223 unsigned int regno = REGNO (reg_used->reg_rtx);
4224 rtx pat, src;
4225 struct expr *set;
4227 /* Ignore registers created by GCSE.
4228 We do this because ... */
4229 if (regno >= max_gcse_regno)
4230 continue;
4232 /* If the register has already been set in this block, there's
4233 nothing we can do. */
4234 if (! oprs_not_set_p (reg_used->reg_rtx, insn))
4235 continue;
4237 /* Find an assignment that sets reg_used and is available
4238 at the start of the block. */
4239 set = find_avail_set (regno, insn);
4240 if (! set)
4241 continue;
4243 pat = set->expr;
4244 /* ??? We might be able to handle PARALLELs. Later. */
4245 if (GET_CODE (pat) != SET)
4246 abort ();
4248 src = SET_SRC (pat);
4250 /* Constant propagation. */
4251 if (gcse_constant_p (src))
4253 if (constprop_register (insn, reg_used->reg_rtx, src, alter_jumps))
4255 changed = 1;
4256 const_prop_count++;
4257 if (gcse_file != NULL)
4259 fprintf (gcse_file, "GLOBAL CONST-PROP: Replacing reg %d in ", regno);
4260 fprintf (gcse_file, "insn %d with constant ", INSN_UID (insn));
4261 print_rtl (gcse_file, src);
4262 fprintf (gcse_file, "\n");
4266 else if (GET_CODE (src) == REG
4267 && REGNO (src) >= FIRST_PSEUDO_REGISTER
4268 && REGNO (src) != regno)
4270 if (try_replace_reg (reg_used->reg_rtx, src, insn))
4272 changed = 1;
4273 copy_prop_count++;
4274 if (gcse_file != NULL)
4276 fprintf (gcse_file, "GLOBAL COPY-PROP: Replacing reg %d in insn %d",
4277 regno, INSN_UID (insn));
4278 fprintf (gcse_file, " with reg %d\n", REGNO (src));
4281 /* The original insn setting reg_used may or may not now be
4282 deletable. We leave the deletion to flow. */
4283 /* FIXME: If it turns out that the insn isn't deletable,
4284 then we may have unnecessarily extended register lifetimes
4285 and made things worse. */
4290 return changed;
4293 /* Like find_used_regs, but avoid recording uses that appear in
4294 input-output contexts such as zero_extract or pre_dec. This
4295 restricts the cases we consider to those for which local cprop
4296 can legitimately make replacements. */
4298 static void
4299 local_cprop_find_used_regs (xptr, data)
4300 rtx *xptr;
4301 void *data;
4303 rtx x = *xptr;
4305 if (x == 0)
4306 return;
4308 switch (GET_CODE (x))
4310 case ZERO_EXTRACT:
4311 case SIGN_EXTRACT:
4312 case STRICT_LOW_PART:
4313 return;
4315 case PRE_DEC:
4316 case PRE_INC:
4317 case POST_DEC:
4318 case POST_INC:
4319 case PRE_MODIFY:
4320 case POST_MODIFY:
4321 /* Can only legitimately appear this early in the context of
4322 stack pushes for function arguments, but handle all of the
4323 codes nonetheless. */
4324 return;
4326 case SUBREG:
4327 /* Setting a subreg of a register larger than word_mode leaves
4328 the non-written words unchanged. */
4329 if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) > BITS_PER_WORD)
4330 return;
4331 break;
4333 default:
4334 break;
4337 find_used_regs (xptr, data);
4340 /* LIBCALL_SP is a zero-terminated array of insns at the end of a libcall;
4341 their REG_EQUAL notes need updating. */
4343 static bool
4344 do_local_cprop (x, insn, alter_jumps, libcall_sp)
4345 rtx x;
4346 rtx insn;
4347 int alter_jumps;
4348 rtx *libcall_sp;
4350 rtx newreg = NULL, newcnst = NULL;
4352 /* Rule out USE instructions and ASM statements as we don't want to
4353 change the hard registers mentioned. */
4354 if (GET_CODE (x) == REG
4355 && (REGNO (x) >= FIRST_PSEUDO_REGISTER
4356 || (GET_CODE (PATTERN (insn)) != USE
4357 && asm_noperands (PATTERN (insn)) < 0)))
4359 cselib_val *val = cselib_lookup (x, GET_MODE (x), 0);
4360 struct elt_loc_list *l;
4362 if (!val)
4363 return false;
4364 for (l = val->locs; l; l = l->next)
4366 rtx this_rtx = l->loc;
4367 rtx note;
4369 if (l->in_libcall)
4370 continue;
4372 if (gcse_constant_p (this_rtx))
4373 newcnst = this_rtx;
4374 if (REG_P (this_rtx) && REGNO (this_rtx) >= FIRST_PSEUDO_REGISTER
4375 /* Don't copy propagate if the pseudo has an attached REG_EQUIV note.
4376 At this point only function parameters should have
4377 REG_EQUIV notes, and if the argument slot is used somewhere
4378 explicitly, it means the address of the parameter has been taken,
4379 so we should not extend the lifetime of the pseudo. */
4380 && (!(note = find_reg_note (l->setting_insn, REG_EQUIV, NULL_RTX))
4381 || GET_CODE (XEXP (note, 0)) != MEM))
4382 newreg = this_rtx;
4384 if (newcnst && constprop_register (insn, x, newcnst, alter_jumps))
4386 /* If we find a case where we can't fix the retval REG_EQUAL notes to
4387 match the new register, we either have to abandon this replacement,
4388 fix delete_trivially_dead_insns to preserve the setting insn,
4389 or make it delete the REG_EQUAL note, and fix up all passes that
4390 require the REG_EQUAL note there. */
4391 if (!adjust_libcall_notes (x, newcnst, insn, libcall_sp))
4392 abort ();
4393 if (gcse_file != NULL)
4395 fprintf (gcse_file, "LOCAL CONST-PROP: Replacing reg %d in ",
4396 REGNO (x));
4397 fprintf (gcse_file, "insn %d with constant ",
4398 INSN_UID (insn));
4399 print_rtl (gcse_file, newcnst);
4400 fprintf (gcse_file, "\n");
4402 const_prop_count++;
4403 return true;
4405 else if (newreg && newreg != x && try_replace_reg (x, newreg, insn))
4407 adjust_libcall_notes (x, newreg, insn, libcall_sp);
4408 if (gcse_file != NULL)
4410 fprintf (gcse_file,
4411 "LOCAL COPY-PROP: Replacing reg %d in insn %d",
4412 REGNO (x), INSN_UID (insn));
4413 fprintf (gcse_file, " with reg %d\n", REGNO (newreg));
4415 copy_prop_count++;
4416 return true;
4419 return false;
4422 /* LIBCALL_SP is a zero-terminated array of insns at the end of a libcall;
4423 their REG_EQUAL notes need updating to reflect that OLDREG has been
4424 replaced with NEWVAL in INSN. Return true if all substitutions could
4425 be made. */
4426 static bool
4427 adjust_libcall_notes (oldreg, newval, insn, libcall_sp)
4428 rtx oldreg, newval, insn, *libcall_sp;
4430 rtx end;
4432 while ((end = *libcall_sp++))
4434 rtx note = find_reg_equal_equiv_note (end);
4436 if (! note)
4437 continue;
4439 if (REG_P (newval))
4441 if (reg_set_between_p (newval, PREV_INSN (insn), end))
4445 note = find_reg_equal_equiv_note (end);
4446 if (! note)
4447 continue;
4448 if (reg_mentioned_p (newval, XEXP (note, 0)))
4449 return false;
4451 while ((end = *libcall_sp++));
4452 return true;
4455 XEXP (note, 0) = replace_rtx (XEXP (note, 0), oldreg, newval);
4456 insn = end;
4458 return true;
4461 #define MAX_NESTED_LIBCALLS 9
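/* Do a local pass of copy/constant propagation over every insn in the
   function, using cselib to follow values. ALTER_JUMPS is nonzero if
   constants may be propagated into conditional jumps. */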
4463 static void
4464 local_cprop_pass (alter_jumps)
4465 int alter_jumps;
4467 rtx insn;
4468 struct reg_use *reg_used;
4469 rtx libcall_stack[MAX_NESTED_LIBCALLS + 1], *libcall_sp;
4470 bool changed = false;
4472 cselib_init ();
4473 libcall_sp = &libcall_stack[MAX_NESTED_LIBCALLS];
4474 *libcall_sp = 0;
4475 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4477 if (INSN_P (insn))
4479 rtx note = find_reg_note (insn, REG_LIBCALL, NULL_RTX);
4481 if (note)
4483 if (libcall_sp == libcall_stack)
4484 abort ();
4485 *--libcall_sp = XEXP (note, 0);
4487 note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
4488 if (note)
4489 libcall_sp++;
4490 note = find_reg_equal_equiv_note (insn);
4493 reg_use_count = 0;
4494 note_uses (&PATTERN (insn), local_cprop_find_used_regs, NULL);
4495 if (note)
4496 local_cprop_find_used_regs (&XEXP (note, 0), NULL);
4498 for (reg_used = &reg_use_table[0]; reg_use_count > 0;
4499 reg_used++, reg_use_count--)
4500 if (do_local_cprop (reg_used->reg_rtx, insn, alter_jumps,
4501 libcall_sp))
4503 changed = true;
4504 break;
4507 while (reg_use_count);
4509 cselib_process_insn (insn);
4511 cselib_finish ();
4512 /* Global analysis may get into infinite loops for unreachable blocks. */
4513 if (changed && alter_jumps)
4515 delete_unreachable_blocks ();
4516 free_reg_set_mem ();
4517 alloc_reg_set_mem (max_reg_num ());
4518 compute_sets (get_insns ());
4522 /* Forward propagate copies. This includes copies and constants. Return
4523 nonzero if a change was made. */
4525 static int
4526 cprop (alter_jumps)
4527 int alter_jumps;
4529 int changed;
4530 basic_block bb;
4531 rtx insn;
4533 /* Note we start at block 1. */
4534 if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
4536 if (gcse_file != NULL)
4537 fprintf (gcse_file, "\n");
4538 return 0;
4541 changed = 0;
4542 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb->next_bb, EXIT_BLOCK_PTR, next_bb)
4544 /* Reset tables used to keep track of what's still valid [since the
4545 start of the block]. */
4546 reset_opr_set_tables ();
4548 for (insn = bb->head;
4549 insn != NULL && insn != NEXT_INSN (bb->end);
4550 insn = NEXT_INSN (insn))
4551 if (INSN_P (insn))
4553 changed |= cprop_insn (insn, alter_jumps);
4555 /* Keep track of everything modified by this insn. */
4556 /* ??? Need to be careful w.r.t. mods done to INSN. Don't
4557 call mark_oprs_set if we turned the insn into a NOTE. */
4558 if (GET_CODE (insn) != NOTE)
4559 mark_oprs_set (insn);
4563 if (gcse_file != NULL)
4564 fprintf (gcse_file, "\n");
4566 return changed;
4569 /* Similar to get_condition, only the resulting condition must be
4570 valid at JUMP, instead of at EARLIEST.
4572 This differs from noce_get_condition in ifcvt.c in that we prefer not to
4573 settle for the condition variable in the jump instruction being integral.
4574 We prefer to be able to record the value of a user variable, rather than
4575 the value of a temporary used in a condition. This could be solved by
4576 recording the value of *every* register scanned by canonicalize_condition,
4577 but this would require some code reorganization. */
4579 static rtx
4580 fis_get_condition (jump)
4581 rtx jump;
4583 rtx cond, set, tmp, insn, earliest;
4584 bool reverse;
4586 if (! any_condjump_p (jump))
4587 return NULL_RTX;
4589 set = pc_set (jump);
4590 cond = XEXP (SET_SRC (set), 0);
4592 /* If this branches to JUMP_LABEL when the condition is false,
4593 reverse the condition. */
4594 reverse = (GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
4595 && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (jump));
4597 /* Use canonicalize_condition to do the dirty work of manipulating
4598 MODE_CC values and COMPARE rtx codes. */
4599 tmp = canonicalize_condition (jump, cond, reverse, &earliest, NULL_RTX);
4600 if (!tmp)
4601 return NULL_RTX;
4603 /* Verify that the given condition is valid at JUMP by virtue of not
4604 having been modified since EARLIEST. */
4605 for (insn = earliest; insn != jump; insn = NEXT_INSN (insn))
4606 if (INSN_P (insn) && modified_in_p (tmp, insn))
4607 break;
4608 if (insn == jump)
4609 return tmp;
4611 /* The condition was modified. See if we can get a partial result
4612 that doesn't follow all the reversals. Perhaps combine can fold
4613 them together later. */
4614 tmp = XEXP (tmp, 0);
4615 if (!REG_P (tmp) || GET_MODE_CLASS (GET_MODE (tmp)) != MODE_INT)
4616 return NULL_RTX;
4617 tmp = canonicalize_condition (jump, cond, reverse, &earliest, tmp);
4618 if (!tmp)
4619 return NULL_RTX;
4621 /* For sanity's sake, re-validate the new result. */
4622 for (insn = earliest; insn != jump; insn = NEXT_INSN (insn))
4623 if (INSN_P (insn) && modified_in_p (tmp, insn))
4624 return NULL_RTX;
4626 return tmp;
4629 /* Find the implicit sets of a function. An "implicit set" is a constraint
4630 on the value of a variable, implied by a conditional jump. For example,
4631 following "if (x == 2)", the then branch may be optimized as though the
4632 conditional performed an "explicit set", in this example, "x = 2". This
4633 function records the set patterns that are implicit at the start of each
4634 basic block. */
4636 static void
4637 find_implicit_sets ()
4639 basic_block bb, dest;
4640 unsigned int count;
4641 rtx cond, new;
4643 count = 0;
4644 FOR_EACH_BB (bb)
4645 /* Check for more than one successor. */
4646 if (bb->succ && bb->succ->succ_next)
4648 cond = fis_get_condition (bb->end);
4650 if (cond
4651 && (GET_CODE (cond) == EQ || GET_CODE (cond) == NE)
4652 && GET_CODE (XEXP (cond, 0)) == REG
4653 && REGNO (XEXP (cond, 0)) >= FIRST_PSEUDO_REGISTER
4654 && gcse_constant_p (XEXP (cond, 1)))
4656 dest = GET_CODE (cond) == EQ ? BRANCH_EDGE (bb)->dest
4657 : FALLTHRU_EDGE (bb)->dest;
4659 if (dest && ! dest->pred->pred_next
4660 && dest != EXIT_BLOCK_PTR)
4662 new = gen_rtx_SET (VOIDmode, XEXP (cond, 0),
4663 XEXP (cond, 1));
4664 implicit_sets[dest->index] = new;
4665 if (gcse_file)
4667 fprintf(gcse_file, "Implicit set of reg %d in ",
4668 REGNO (XEXP (cond, 0)));
4669 fprintf(gcse_file, "basic block %d\n", dest->index);
4671 count++;
4676 if (gcse_file)
4677 fprintf (gcse_file, "Found %d implicit sets\n", count);
4680 /* Perform one copy/constant propagation pass.
4681 PASS is the pass count. If CPROP_JUMPS is true, perform constant
4682 propagation into conditional jumps. If BYPASS_JUMPS is true,
4683 perform conditional jump bypassing optimizations. */
4685 static int
4686 one_cprop_pass (pass, cprop_jumps, bypass_jumps)
4687 int pass;
4688 int cprop_jumps;
4689 int bypass_jumps;
4691 int changed = 0;
4693 const_prop_count = 0;
4694 copy_prop_count = 0;
4696 local_cprop_pass (cprop_jumps);
4698 /* Determine implicit sets. */
4699 implicit_sets = (rtx *) xcalloc (last_basic_block, sizeof (rtx));
4700 find_implicit_sets ();
4702 alloc_hash_table (max_cuid, &set_hash_table, 1);
4703 compute_hash_table (&set_hash_table);
4705 /* Free implicit_sets before peak usage. */
4706 free (implicit_sets);
4707 implicit_sets = NULL;
4709 if (gcse_file)
4710 dump_hash_table (gcse_file, "SET", &set_hash_table);
4711 if (set_hash_table.n_elems > 0)
4713 alloc_cprop_mem (last_basic_block, set_hash_table.n_elems);
4714 compute_cprop_data ();
4715 changed = cprop (cprop_jumps);
4716 if (bypass_jumps)
4717 changed |= bypass_conditional_jumps ();
4718 free_cprop_mem ();
4721 free_hash_table (&set_hash_table);
4723 if (gcse_file)
4725 fprintf (gcse_file, "CPROP of %s, pass %d: %d bytes needed, ",
4726 current_function_name, pass, bytes_used);
4727 fprintf (gcse_file, "%d const props, %d copy props\n\n",
4728 const_prop_count, copy_prop_count);
4730 /* Global analysis may get into infinite loops for unreachable blocks. */
4731 if (changed && cprop_jumps)
4732 delete_unreachable_blocks ();
4734 return changed;
4737 /* Bypass conditional jumps. */
4739 /* The value of last_basic_block at the beginning of the jump_bypass
4740 pass. The use of redirect_edge_and_branch_force may introduce new
4741 basic blocks, but the data flow analysis is only valid for basic
4742 block indices less than bypass_last_basic_block. */
4744 static int bypass_last_basic_block;
4746 /* Find a set of REGNO to a constant that is available at the end of basic
4747 block BB. Returns NULL if no such set is found. Based heavily upon
4748 find_avail_set. */
4750 static struct expr *
4751 find_bypass_set (regno, bb)
4752 int regno;
4753 int bb;
4755 struct expr *result = 0;
4757 for (;;)
4759 rtx src;
4760 struct expr *set = lookup_set (regno, &set_hash_table);
4762 while (set)
4764 if (TEST_BIT (cprop_avout[bb], set->bitmap_index))
4765 break;
4766 set = next_set (regno, set);
4769 if (set == 0)
4770 break;
4772 if (GET_CODE (set->expr) != SET)
4773 abort ();
4775 src = SET_SRC (set->expr);
4776 if (gcse_constant_p (src))
4777 result = set;
4779 if (GET_CODE (src) != REG)
4780 break;
4782 regno = REGNO (src);
4784 return result;
4788 /* Subroutine of bypass_block that checks whether a pseudo is killed by
4789 any of the instructions inserted on an edge. Jump bypassing places
4790 condition code setters on CFG edges using insert_insn_on_edge. This
4791 function is required to check that our data flow analysis is still
4792 valid prior to commit_edge_insertions. */
4794 static bool
4795 reg_killed_on_edge (reg, e)
4796 rtx reg;
4797 edge e;
4799 rtx insn;
4801 for (insn = e->insns; insn; insn = NEXT_INSN (insn))
4802 if (INSN_P (insn) && reg_set_p (reg, insn))
4803 return true;
4805 return false;
4808 /* Subroutine of bypass_conditional_jumps that attempts to bypass the given
4809 basic block BB which has more than one predecessor. If not NULL, SETCC
4810 is the first instruction of BB, which is immediately followed by JUMP_INSN
4811 JUMP. Otherwise, SETCC is NULL, and JUMP is the first insn of BB.
4812 Returns nonzero if a change was made.
4814 During the jump bypassing pass, we may place copies of SETCC instructions
4815 on CFG edges. The following routine must be careful to pay attention to
4816 these inserted insns when performing its transformations. */
4818 static int
4819 bypass_block (bb, setcc, jump)
4820 basic_block bb;
4821 rtx setcc, jump;
4823 rtx insn, note;
4824 edge e, enext, edest;
4825 int i, change;
4826 int may_be_loop_header;
4828 insn = (setcc != NULL) ? setcc : jump;
4830 /* Determine set of register uses in INSN. */
4831 reg_use_count = 0;
4832 note_uses (&PATTERN (insn), find_used_regs, NULL);
4833 note = find_reg_equal_equiv_note (insn);
4834 if (note)
4835 find_used_regs (&XEXP (note, 0), NULL);
4837 may_be_loop_header = false;
4838 for (e = bb->pred; e; e = e->pred_next)
4839 if (e->flags & EDGE_DFS_BACK)
4841 may_be_loop_header = true;
4842 break;
4845 change = 0;
4846 for (e = bb->pred; e; e = enext)
4848 enext = e->pred_next;
4849 if (e->flags & EDGE_COMPLEX)
4850 continue;
4852 /* We can't redirect edges from new basic blocks. */
4853 if (e->src->index >= bypass_last_basic_block)
4854 continue;
4856 /* The irreducible loops created by redirecting edges entering the
4857 loop from outside would decrease the effectiveness of some of the
4858 following optimizations, so prevent this. */
4859 if (may_be_loop_header
4860 && !(e->flags & EDGE_DFS_BACK))
4861 continue;
4863 for (i = 0; i < reg_use_count; i++)
4865 struct reg_use *reg_used = &reg_use_table[i];
4866 unsigned int regno = REGNO (reg_used->reg_rtx);
4867 basic_block dest, old_dest;
4868 struct expr *set;
4869 rtx src, new;
4871 if (regno >= max_gcse_regno)
4872 continue;
4874 set = find_bypass_set (regno, e->src->index);
4876 if (! set)
4877 continue;
4879 /* Check that the data flow is still valid after edge insertions. */
4880 if (e->insns && reg_killed_on_edge (reg_used->reg_rtx, e))
4881 continue;
4883 src = SET_SRC (pc_set (jump));
4885 if (setcc != NULL)
4886 src = simplify_replace_rtx (src,
4887 SET_DEST (PATTERN (setcc)),
4888 SET_SRC (PATTERN (setcc)));
4890 new = simplify_replace_rtx (src, reg_used->reg_rtx,
4891 SET_SRC (set->expr));
4893 /* Jump bypassing may have already placed instructions on
4894 edges of the CFG. We can't bypass an outgoing edge that
4895 has instructions associated with it, as these insns won't
4896 get executed if the incoming edge is redirected. */
4898 if (new == pc_rtx)
4900 edest = FALLTHRU_EDGE (bb);
4901 dest = edest->insns ? NULL : edest->dest;
4903 else if (GET_CODE (new) == LABEL_REF)
4905 dest = BLOCK_FOR_INSN (XEXP (new, 0));
4906 /* Don't bypass edges containing instructions. */
4907 for (edest = bb->succ; edest; edest = edest->succ_next)
4908 if (edest->dest == dest && edest->insns)
4910 dest = NULL;
4911 break;
4914 else
4915 dest = NULL;
4917 old_dest = e->dest;
4918 if (dest != NULL
4919 && dest != old_dest
4920 && dest != EXIT_BLOCK_PTR)
4922 redirect_edge_and_branch_force (e, dest);
4924 /* Copy the register setter to the redirected edge.
4925 Don't copy CC0 setters, as CC0 is dead after jump. */
4926 if (setcc)
4928 rtx pat = PATTERN (setcc);
4929 if (!CC0_P (SET_DEST (pat)))
4930 insert_insn_on_edge (copy_insn (pat), e);
4933 if (gcse_file != NULL)
4935 fprintf (gcse_file, "JUMP-BYPASS: Proved reg %d in jump_insn %d equals constant ",
4936 regno, INSN_UID (jump));
4937 print_rtl (gcse_file, SET_SRC (set->expr));
4938 fprintf (gcse_file, "\nBypass edge from %d->%d to %d\n",
4939 e->src->index, old_dest->index, dest->index);
4941 change = 1;
4942 break;
4946 return change;
4949 /* Find basic blocks with more than one predecessor that only contain a
4950 single conditional jump. If the result of the comparison is known at
4951 compile-time from any incoming edge, redirect that edge to the
4952 appropriate target. Returns nonzero if a change was made.
4954 This function is now mis-named, because we also handle indirect jumps. */
4956 static int
4957 bypass_conditional_jumps ()
4959 basic_block bb;
4960 int changed;
4961 rtx setcc;
4962 rtx insn;
4963 rtx dest;
4965 /* Note we start at block 1. */
4966 if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
4967 return 0;
4969 bypass_last_basic_block = last_basic_block;
4970 mark_dfs_back_edges ();
4972 changed = 0;
4973 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb->next_bb,
4974 EXIT_BLOCK_PTR, next_bb)
4976 /* Check for more than one predecessor. */
4977 if (bb->pred && bb->pred->pred_next)
4979 setcc = NULL_RTX;
4980 for (insn = bb->head;
4981 insn != NULL && insn != NEXT_INSN (bb->end);
4982 insn = NEXT_INSN (insn))
4983 if (GET_CODE (insn) == INSN)
4985 if (setcc)
4986 break;
4987 if (GET_CODE (PATTERN (insn)) != SET)
4988 break;
4990 dest = SET_DEST (PATTERN (insn));
4991 if (REG_P (dest) || CC0_P (dest))
4992 setcc = insn;
4993 else
4994 break;
4996 else if (GET_CODE (insn) == JUMP_INSN)
4998 if ((any_condjump_p (insn) || computed_jump_p (insn))
4999 && onlyjump_p (insn))
5000 changed |= bypass_block (bb, setcc, insn);
5001 break;
5003 else if (INSN_P (insn))
5004 break;
5008 /* If we bypassed any register setting insns, we inserted a
5009 copy on the redirected edge. These need to be committed. */
5010 if (changed)
5011 commit_edge_insertions ();
5013 return changed;
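/* Illustrative sketch only, not part of the pass: the effect of jump
   bypassing at source level.  If one predecessor is known to set the
   tested register to a constant, the edge from that predecessor can be
   redirected straight to the appropriate branch target, skipping the
   comparison.  The code below is an invented before/after shape, not
   the pass itself.  */
#if 0
#include <stdio.h>

int
main (int argc, char **argv)
{
  int x;

  if (argc > 1)
    x = 2;			/* Predecessor where x is known to be 2.  */
  else
    x = argc;			/* Predecessor where x is unknown.  */

  /* Block with a single conditional jump; the edge from the "x = 2"
     predecessor can be redirected directly to the "then" block.  */
  if (x == 2)
    printf ("then\n");
  else
    printf ("else\n");
  return 0;
}
#endif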
5016 /* Compute PRE+LCM working variables. */
5018 /* Local properties of expressions. */
5019 /* Nonzero for expressions that are transparent in the block. */
5020 static sbitmap *transp;
5022 /* Nonzero for expressions that are transparent at the end of the block.
5023 This is only zero for expressions killed by an abnormal critical edge
5024 created by a call. */
5025 static sbitmap *transpout;
5027 /* Nonzero for expressions that are computed (available) in the block. */
5028 static sbitmap *comp;
5030 /* Nonzero for expressions that are locally anticipatable in the block. */
5031 static sbitmap *antloc;
5033 /* Nonzero for expressions where this block is an optimal computation
5034 point. */
5035 static sbitmap *pre_optimal;
5037 /* Nonzero for expressions which are redundant in a particular block. */
5038 static sbitmap *pre_redundant;
5040 /* Nonzero for expressions which should be inserted on a specific edge. */
5041 static sbitmap *pre_insert_map;
5043 /* Nonzero for expressions which should be deleted in a specific block. */
5044 static sbitmap *pre_delete_map;
5046 /* Contains the edge_list returned by pre_edge_lcm. */
5047 static struct edge_list *edge_list;
5049 /* Redundant insns. */
5050 static sbitmap pre_redundant_insns;
5052 /* Allocate vars used for PRE analysis. */
5054 static void
5055 alloc_pre_mem (n_blocks, n_exprs)
5056 int n_blocks, n_exprs;
5058 transp = sbitmap_vector_alloc (n_blocks, n_exprs);
5059 comp = sbitmap_vector_alloc (n_blocks, n_exprs);
5060 antloc = sbitmap_vector_alloc (n_blocks, n_exprs);
5062 pre_optimal = NULL;
5063 pre_redundant = NULL;
5064 pre_insert_map = NULL;
5065 pre_delete_map = NULL;
5066 ae_in = NULL;
5067 ae_out = NULL;
5068 ae_kill = sbitmap_vector_alloc (n_blocks, n_exprs);
5070 /* pre_insert and pre_delete are allocated later. */
5073 /* Free vars used for PRE analysis. */
5075 static void
5076 free_pre_mem ()
5078 sbitmap_vector_free (transp);
5079 sbitmap_vector_free (comp);
5081 /* ANTLOC and AE_KILL are freed just after pre_lcm finishes. */
5083 if (pre_optimal)
5084 sbitmap_vector_free (pre_optimal);
5085 if (pre_redundant)
5086 sbitmap_vector_free (pre_redundant);
5087 if (pre_insert_map)
5088 sbitmap_vector_free (pre_insert_map);
5089 if (pre_delete_map)
5090 sbitmap_vector_free (pre_delete_map);
5091 if (ae_in)
5092 sbitmap_vector_free (ae_in);
5093 if (ae_out)
5094 sbitmap_vector_free (ae_out);
5096 transp = comp = NULL;
5097 pre_optimal = pre_redundant = pre_insert_map = pre_delete_map = NULL;
5098 ae_in = ae_out = NULL;
5101 /* Top level routine to do the dataflow analysis needed by PRE. */
5103 static void
5104 compute_pre_data ()
5106 sbitmap trapping_expr;
5107 basic_block bb;
5108 unsigned int ui;
5110 compute_local_properties (transp, comp, antloc, &expr_hash_table);
5111 sbitmap_vector_zero (ae_kill, last_basic_block);
5113 /* Collect expressions which might trap. */
5114 trapping_expr = sbitmap_alloc (expr_hash_table.n_elems);
5115 sbitmap_zero (trapping_expr);
5116 for (ui = 0; ui < expr_hash_table.size; ui++)
5118 struct expr *e;
5119 for (e = expr_hash_table.table[ui]; e != NULL; e = e->next_same_hash)
5120 if (may_trap_p (e->expr))
5121 SET_BIT (trapping_expr, e->bitmap_index);
5124 /* Compute ae_kill for each basic block using:
5126 ~(TRANSP | COMP)
5128 This is significantly faster than compute_ae_kill. */
5130 FOR_EACH_BB (bb)
5132 edge e;
5134 /* If the current block is the destination of an abnormal edge, we
5135 kill all trapping expressions because we won't be able to properly
5136 place the instruction on the edge. So make them neither
5137 anticipatable nor transparent. This is fairly conservative. */
5138 for (e = bb->pred; e ; e = e->pred_next)
5139 if (e->flags & EDGE_ABNORMAL)
5141 sbitmap_difference (antloc[bb->index], antloc[bb->index], trapping_expr);
5142 sbitmap_difference (transp[bb->index], transp[bb->index], trapping_expr);
5143 break;
5146 sbitmap_a_or_b (ae_kill[bb->index], transp[bb->index], comp[bb->index]);
5147 sbitmap_not (ae_kill[bb->index], ae_kill[bb->index]);
5150 edge_list = pre_edge_lcm (gcse_file, expr_hash_table.n_elems, transp, comp, antloc,
5151 ae_kill, &pre_insert_map, &pre_delete_map);
5152 sbitmap_vector_free (antloc);
5153 antloc = NULL;
5154 sbitmap_vector_free (ae_kill);
5155 ae_kill = NULL;
5156 sbitmap_free (trapping_expr);
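/* Illustrative sketch only, not part of the pass: the AE_KILL
   computation above, ~(TRANSP | COMP), done on plain machine words
   instead of sbitmaps.  Bit I stands for expression I; the values are
   invented.  */
#if 0
#include <stdio.h>

int
main (void)
{
  unsigned int transp = 0x5;	/* Expressions 0 and 2 are transparent.  */
  unsigned int comp   = 0x2;	/* Expression 1 is computed in the block.  */
  unsigned int ae_kill = ~(transp | comp);

  printf ("ae_kill = 0x%x\n", ae_kill & 0x7);	/* 0x0: nothing killed here.  */
  return 0;
}
#endif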
5159 /* PRE utilities */
5161 /* Return nonzero if an occurrence of expression EXPR in OCCR_BB would reach
5162 block BB.
5164 VISITED is a pointer to a working buffer for tracking which BB's have
5165 been visited. It is NULL for the top-level call.
5167 We treat reaching expressions that go through blocks containing the same
5168 reaching expression as "not reaching". E.g. if EXPR is generated in blocks
5169 2 and 3, INSN is in block 4, and 2->3->4, we treat the expression in block
5170 2 as not reaching. The intent is to improve the probability of finding
5171 only one reaching expression and to reduce register lifetimes by picking
5172 the closest such expression. */
5174 static int
5175 pre_expr_reaches_here_p_work (occr_bb, expr, bb, visited)
5176 basic_block occr_bb;
5177 struct expr *expr;
5178 basic_block bb;
5179 char *visited;
5181 edge pred;
5183 for (pred = bb->pred; pred != NULL; pred = pred->pred_next)
5185 basic_block pred_bb = pred->src;
5187 if (pred->src == ENTRY_BLOCK_PTR
5188 /* Has this predecessor already been visited? */
5189 || visited[pred_bb->index])
5190 ;/* Nothing to do. */
5192 /* Does this predecessor generate this expression? */
5193 else if (TEST_BIT (comp[pred_bb->index], expr->bitmap_index))
5195 /* Is this the occurrence we're looking for?
5196 Note that there's only one generating occurrence per block
5197 so we just need to check the block number. */
5198 if (occr_bb == pred_bb)
5199 return 1;
5201 visited[pred_bb->index] = 1;
5203 /* Ignore this predecessor if it kills the expression. */
5204 else if (! TEST_BIT (transp[pred_bb->index], expr->bitmap_index))
5205 visited[pred_bb->index] = 1;
5207 /* Neither gen nor kill. */
5208 else
5210 visited[pred_bb->index] = 1;
5211 if (pre_expr_reaches_here_p_work (occr_bb, expr, pred_bb, visited))
5212 return 1;
5216 /* All paths have been checked. */
5217 return 0;
5220 /* The wrapper for pre_expr_reaches_here_work that ensures that any
5221 memory allocated for that function is returned. */
5223 static int
5224 pre_expr_reaches_here_p (occr_bb, expr, bb)
5225 basic_block occr_bb;
5226 struct expr *expr;
5227 basic_block bb;
5229 int rval;
5230 char *visited = (char *) xcalloc (last_basic_block, 1);
5232 rval = pre_expr_reaches_here_p_work (occr_bb, expr, bb, visited);
5234 free (visited);
5235 return rval;
5239 /* Given an expr, generate RTL which we can insert at the end of a BB,
5240 or on an edge. Set the block number of any insns generated to
5241 the value of BB. */
5243 static rtx
5244 process_insert_insn (expr)
5245 struct expr *expr;
5247 rtx reg = expr->reaching_reg;
5248 rtx exp = copy_rtx (expr->expr);
5249 rtx pat;
5251 start_sequence ();
5253 /* If the expression is something that's an operand, like a constant,
5254 just copy it to a register. */
5255 if (general_operand (exp, GET_MODE (reg)))
5256 emit_move_insn (reg, exp);
5258 /* Otherwise, make a new insn to compute this expression and make sure the
5259 insn will be recognized (this also adds any needed CLOBBERs). Copy the
5260 expression to make sure we don't have any sharing issues. */
5261 else if (insn_invalid_p (emit_insn (gen_rtx_SET (VOIDmode, reg, exp))))
5262 abort ();
5264 pat = get_insns ();
5265 end_sequence ();
5267 return pat;
5270 /* Add EXPR to the end of basic block BB.
5272 This is used by both the PRE and code hoisting.
5274 For PRE, we want to verify that the expr is either transparent
5275 or locally anticipatable in the target block. This check makes
5276 no sense for code hoisting. */
5278 static void
5279 insert_insn_end_bb (expr, bb, pre)
5280 struct expr *expr;
5281 basic_block bb;
5282 int pre;
5284 rtx insn = bb->end;
5285 rtx new_insn;
5286 rtx reg = expr->reaching_reg;
5287 int regno = REGNO (reg);
5288 rtx pat, pat_end;
5290 pat = process_insert_insn (expr);
5291 if (pat == NULL_RTX || ! INSN_P (pat))
5292 abort ();
5294 pat_end = pat;
5295 while (NEXT_INSN (pat_end) != NULL_RTX)
5296 pat_end = NEXT_INSN (pat_end);
5298 /* If the last insn is a jump, insert EXPR in front [taking care to
5299 handle cc0, etc. properly]. Similarly we need to take care with trapping
5300 instructions in the presence of non-call exceptions. */
5302 if (GET_CODE (insn) == JUMP_INSN
5303 || (GET_CODE (insn) == INSN
5304 && (bb->succ->succ_next || (bb->succ->flags & EDGE_ABNORMAL))))
5306 #ifdef HAVE_cc0
5307 rtx note;
5308 #endif
5309 /* It should always be the case that we can put these instructions
5310 anywhere in the basic block when performing PRE optimizations.
5311 Check this. */
5312 if (GET_CODE (insn) == INSN && pre
5313 && !TEST_BIT (antloc[bb->index], expr->bitmap_index)
5314 && !TEST_BIT (transp[bb->index], expr->bitmap_index))
5315 abort ();
5317 /* If this is a jump table, then we can't insert stuff here. Since
5318 we know the previous real insn must be the tablejump, we insert
5319 the new instruction just before the tablejump. */
5320 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
5321 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
5322 insn = prev_real_insn (insn);
5324 #ifdef HAVE_cc0
5325 /* FIXME: 'twould be nice to call prev_cc0_setter here but it aborts
5326 if cc0 isn't set. */
5327 note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
5328 if (note)
5329 insn = XEXP (note, 0);
5330 else
5332 rtx maybe_cc0_setter = prev_nonnote_insn (insn);
5333 if (maybe_cc0_setter
5334 && INSN_P (maybe_cc0_setter)
5335 && sets_cc0_p (PATTERN (maybe_cc0_setter)))
5336 insn = maybe_cc0_setter;
5338 #endif
5339 /* FIXME: What if something in cc0/jump uses value set in new insn? */
5340 new_insn = emit_insn_before (pat, insn);
5343 /* Likewise if the last insn is a call, as will happen in the presence
5344 of exception handling. */
5345 else if (GET_CODE (insn) == CALL_INSN
5346 && (bb->succ->succ_next || (bb->succ->flags & EDGE_ABNORMAL)))
5348 /* Keeping in mind SMALL_REGISTER_CLASSES and parameters in registers,
5349 we search backward and place the instructions before the first
5350 parameter is loaded. Do this for everyone for consistency and a
5351 presumption that we'll get better code elsewhere as well.
5353 It should always be the case that we can put these instructions
5354 anywhere in the basic block when performing PRE optimizations.
5355 Check this. */
5357 if (pre
5358 && !TEST_BIT (antloc[bb->index], expr->bitmap_index)
5359 && !TEST_BIT (transp[bb->index], expr->bitmap_index))
5360 abort ();
5362 /* Since different machines initialize their parameter registers
5363 in different orders, assume nothing. Collect the set of all
5364 parameter registers. */
5365 insn = find_first_parameter_load (insn, bb->head);
5367 /* If we found all the parameter loads, then we want to insert
5368 before the first parameter load.
5370 If we did not find all the parameter loads, then we might have
5371 stopped on the head of the block, which could be a CODE_LABEL.
5372 If we inserted before the CODE_LABEL, then we would be putting
5373 the insn in the wrong basic block. In that case, put the insn
5374 after the CODE_LABEL. Also, respect NOTE_INSN_BASIC_BLOCK. */
5375 while (GET_CODE (insn) == CODE_LABEL
5376 || NOTE_INSN_BASIC_BLOCK_P (insn))
5377 insn = NEXT_INSN (insn);
5379 new_insn = emit_insn_before (pat, insn);
5381 else
5382 new_insn = emit_insn_after (pat, insn);
5384 while (1)
5386 if (INSN_P (pat))
5388 add_label_notes (PATTERN (pat), new_insn);
5389 note_stores (PATTERN (pat), record_set_info, pat);
5391 if (pat == pat_end)
5392 break;
5393 pat = NEXT_INSN (pat);
5396 gcse_create_count++;
5398 if (gcse_file)
5400 fprintf (gcse_file, "PRE/HOIST: end of bb %d, insn %d, ",
5401 bb->index, INSN_UID (new_insn));
5402 fprintf (gcse_file, "copying expression %d to reg %d\n",
5403 expr->bitmap_index, regno);
5407 /* Insert partially redundant expressions on edges in the CFG to make
5408 the expressions fully redundant. */
5410 static int
5411 pre_edge_insert (edge_list, index_map)
5412 struct edge_list *edge_list;
5413 struct expr **index_map;
5415 int e, i, j, num_edges, set_size, did_insert = 0;
5416 sbitmap *inserted;
5418 /* Where PRE_INSERT_MAP is nonzero, we add the expression on that edge
5419 if it reaches any of the deleted expressions. */
5421 set_size = pre_insert_map[0]->size;
5422 num_edges = NUM_EDGES (edge_list);
5423 inserted = sbitmap_vector_alloc (num_edges, expr_hash_table.n_elems);
5424 sbitmap_vector_zero (inserted, num_edges);
5426 for (e = 0; e < num_edges; e++)
5428 int indx;
5429 basic_block bb = INDEX_EDGE_PRED_BB (edge_list, e);
5431 for (i = indx = 0; i < set_size; i++, indx += SBITMAP_ELT_BITS)
5433 SBITMAP_ELT_TYPE insert = pre_insert_map[e]->elms[i];
5435 for (j = indx; insert && j < (int) expr_hash_table.n_elems; j++, insert >>= 1)
5436 if ((insert & 1) != 0 && index_map[j]->reaching_reg != NULL_RTX)
5438 struct expr *expr = index_map[j];
5439 struct occr *occr;
5441 /* Now look at each deleted occurrence of this expression. */
5442 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
5444 if (! occr->deleted_p)
5445 continue;
5447 /* Insert this expression on this edge if it would
5448 reach the deleted occurrence in BB. */
5449 if (!TEST_BIT (inserted[e], j))
5451 rtx insn;
5452 edge eg = INDEX_EDGE (edge_list, e);
5454 /* We can't insert anything on an abnormal and
5455 critical edge, so we insert the insn at the end of
5456 the previous block. There are several alternatives
5457 detailed in Morgan's book, p. 277 (sec. 10.5), for
5458 handling this situation. This one is easiest for
5459 now. */
5461 if ((eg->flags & EDGE_ABNORMAL) == EDGE_ABNORMAL)
5462 insert_insn_end_bb (index_map[j], bb, 0);
5463 else
5465 insn = process_insert_insn (index_map[j]);
5466 insert_insn_on_edge (insn, eg);
5469 if (gcse_file)
5471 fprintf (gcse_file, "PRE/HOIST: edge (%d,%d), ",
5472 bb->index,
5473 INDEX_EDGE_SUCC_BB (edge_list, e)->index);
5474 fprintf (gcse_file, "copy expression %d\n",
5475 expr->bitmap_index);
5478 update_ld_motion_stores (expr);
5479 SET_BIT (inserted[e], j);
5480 did_insert = 1;
5481 gcse_create_count++;
5488 sbitmap_vector_free (inserted);
5489 return did_insert;
5492 /* Copy the result of INSN to REG. INDX is the expression number. */
5494 static void
5495 pre_insert_copy_insn (expr, insn)
5496 struct expr *expr;
5497 rtx insn;
5499 rtx reg = expr->reaching_reg;
5500 int regno = REGNO (reg);
5501 int indx = expr->bitmap_index;
5502 rtx set = single_set (insn);
5503 rtx new_insn;
5505 if (!set)
5506 abort ();
5508 new_insn = emit_insn_after (gen_move_insn (reg, copy_rtx (SET_DEST (set))), insn);
5510 /* Keep register set table up to date. */
5511 record_one_set (regno, new_insn);
5513 gcse_create_count++;
5515 if (gcse_file)
5516 fprintf (gcse_file,
5517 "PRE: bb %d, insn %d, copy expression %d in insn %d to reg %d\n",
5518 BLOCK_NUM (insn), INSN_UID (new_insn), indx,
5519 INSN_UID (insn), regno);
5520 update_ld_motion_stores (expr);
5523 /* Copy available expressions that reach the redundant expression
5524 to `reaching_reg'. */
5526 static void
5527 pre_insert_copies ()
5529 unsigned int i;
5530 struct expr *expr;
5531 struct occr *occr;
5532 struct occr *avail;
5534 /* For each available expression in the table, copy the result to
5535 `reaching_reg' if the expression reaches a deleted one.
5537 ??? The current algorithm is rather brute force.
5538 Need to do some profiling. */
5540 for (i = 0; i < expr_hash_table.size; i++)
5541 for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash)
5543 /* If the basic block isn't reachable, PPOUT will be TRUE. However,
5544 we don't want to insert a copy here because the expression may not
5545 really be redundant. So only insert an insn if the expression was
5546 deleted. This test also avoids further processing if the
5547 expression wasn't deleted anywhere. */
5548 if (expr->reaching_reg == NULL)
5549 continue;
5551 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
5553 if (! occr->deleted_p)
5554 continue;
5556 for (avail = expr->avail_occr; avail != NULL; avail = avail->next)
5558 rtx insn = avail->insn;
5560 /* No need to handle this one if handled already. */
5561 if (avail->copied_p)
5562 continue;
5564 /* Don't handle this one if it's a redundant one. */
5565 if (TEST_BIT (pre_redundant_insns, INSN_CUID (insn)))
5566 continue;
5568 /* Or if the expression doesn't reach the deleted one. */
5569 if (! pre_expr_reaches_here_p (BLOCK_FOR_INSN (avail->insn),
5570 expr,
5571 BLOCK_FOR_INSN (occr->insn)))
5572 continue;
5574 /* Copy the result of avail to reaching_reg. */
5575 pre_insert_copy_insn (expr, insn);
5576 avail->copied_p = 1;
5582 /* Emit move from SRC to DEST noting the equivalence with expression computed
5583 in INSN. */
5584 static rtx
5585 gcse_emit_move_after (src, dest, insn)
5586 rtx src, dest, insn;
5588 rtx new;
5589 rtx set = single_set (insn), set2;
5590 rtx note;
5591 rtx eqv;
5593 /* This should never fail since we're creating a reg->reg copy
5594 we've verified to be valid. */
5596 new = emit_insn_after (gen_move_insn (dest, src), insn);
5598 /* Note the equivalence for local CSE pass. */
5599 set2 = single_set (new);
5600 if (!set2 || !rtx_equal_p (SET_DEST (set2), dest))
5601 return new;
5602 if ((note = find_reg_equal_equiv_note (insn)))
5603 eqv = XEXP (note, 0);
5604 else
5605 eqv = SET_SRC (set);
5607 set_unique_reg_note (new, REG_EQUAL, copy_insn_1 (eqv));
5609 return new;
5612 /* Delete redundant computations.
5613 Deletion is done by changing the insn to copy the `reaching_reg' of
5614 the expression into the result of the SET. It is left to later passes
5615 (cprop, cse2, flow, combine, regmove) to propagate the copy or eliminate it.
5617 Returns nonzero if a change is made. */
5619 static int
5620 pre_delete ()
5622 unsigned int i;
5623 int changed;
5624 struct expr *expr;
5625 struct occr *occr;
5627 changed = 0;
5628 for (i = 0; i < expr_hash_table.size; i++)
5629 for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash)
5631 int indx = expr->bitmap_index;
5633 /* We only need to search antic_occr since we require
5634 ANTLOC != 0. */
5636 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
5638 rtx insn = occr->insn;
5639 rtx set;
5640 basic_block bb = BLOCK_FOR_INSN (insn);
5642 if (TEST_BIT (pre_delete_map[bb->index], indx))
5644 set = single_set (insn);
5645 if (! set)
5646 abort ();
5648 /* Create a pseudo-reg to store the result of reaching
5649 expressions into. Get the mode for the new pseudo from
5650 the mode of the original destination pseudo. */
5651 if (expr->reaching_reg == NULL)
5652 expr->reaching_reg
5653 = gen_reg_rtx (GET_MODE (SET_DEST (set)));
5655 gcse_emit_move_after (expr->reaching_reg, SET_DEST (set), insn);
5656 delete_insn (insn);
5657 occr->deleted_p = 1;
5658 SET_BIT (pre_redundant_insns, INSN_CUID (insn));
5659 changed = 1;
5660 gcse_subst_count++;
5662 if (gcse_file)
5664 fprintf (gcse_file,
5665 "PRE: redundant insn %d (expression %d) in ",
5666 INSN_UID (insn), indx);
5667 fprintf (gcse_file, "bb %d, reaching reg is %d\n",
5668 bb->index, REGNO (expr->reaching_reg));
5674 return changed;
5677 /* Perform GCSE optimizations using PRE.
5678 This is called by one_pre_gcse_pass after all the dataflow analysis
5679 has been done.
5681 This is based on the original Morel-Renvoise paper, Fred Chow's thesis, and
5682 lazy code motion from Knoop, Ruthing and Steffen as described in Advanced
5683 Compiler Design and Implementation.
5685 ??? A new pseudo reg is created to hold the reaching expression. The nice
5686 thing about the classical approach is that it would try to use an existing
5687 reg. If the register can't be adequately optimized [i.e. we introduce
5688 reload problems], one could add a pass here to propagate the new register
5689 through the block.
5691 ??? We don't handle single sets in PARALLELs because we're [currently] not
5692 able to copy the rest of the parallel when we insert copies to create full
5693 redundancies from partial redundancies. However, there's no reason why we
5694 can't handle PARALLELs in the cases where there are no partial
5695 redundancies. */
5697 static int
5698 pre_gcse ()
5700 unsigned int i;
5701 int did_insert, changed;
5702 struct expr **index_map;
5703 struct expr *expr;
5705 /* Compute a mapping from expression number (`bitmap_index') to
5706 hash table entry. */
5708 index_map = (struct expr **) xcalloc (expr_hash_table.n_elems, sizeof (struct expr *));
5709 for (i = 0; i < expr_hash_table.size; i++)
5710 for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash)
5711 index_map[expr->bitmap_index] = expr;
5713 /* Reset bitmap used to track which insns are redundant. */
5714 pre_redundant_insns = sbitmap_alloc (max_cuid);
5715 sbitmap_zero (pre_redundant_insns);
5717 /* Delete the redundant insns first so that
5718 - we know what register to use for the new insns and for the other
5719 ones with reaching expressions
5720 - we know which insns are redundant when we go to create copies */
5722 changed = pre_delete ();
5724 did_insert = pre_edge_insert (edge_list, index_map);
5726 /* In other places with reaching expressions, copy the expression to the
5727 specially allocated pseudo-reg that reaches the redundant expr. */
5728 pre_insert_copies ();
5729 if (did_insert)
5731 commit_edge_insertions ();
5732 changed = 1;
5735 free (index_map);
5736 sbitmap_free (pre_redundant_insns);
5737 return changed;
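/* Illustrative sketch only, not part of the pass: what the PRE steps
   above (pre_delete, pre_edge_insert, pre_insert_copies) accomplish at
   source level.  The expression a + b is computed on one path and not
   the other (partially redundant); inserting the computation on the
   other path and keeping the result in a new temporary makes the later
   computation fully redundant, so it becomes a copy.  All names and
   values below are invented.  */
#if 0
#include <stdio.h>

int
main (int argc, char **argv)
{
  int a = argc, b = argc + 1, t, x = 0, y;

  if (argc > 1)
    x = t = a + b;		/* Path where a + b was already computed; keep it in t.  */
  else
    t = a + b;			/* Insertion on the other path (done on the edge).  */

  y = t;			/* Originally y = a + b; now a redundant copy.  */
  printf ("%d %d\n", x, y);
  return 0;
}
#endif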
5740 /* Top level routine to perform one PRE GCSE pass.
5742 Return nonzero if a change was made. */
5744 static int
5745 one_pre_gcse_pass (pass)
5746 int pass;
5748 int changed = 0;
5750 gcse_subst_count = 0;
5751 gcse_create_count = 0;
5753 alloc_hash_table (max_cuid, &expr_hash_table, 0);
5754 add_noreturn_fake_exit_edges ();
5755 if (flag_gcse_lm)
5756 compute_ld_motion_mems ();
5758 compute_hash_table (&expr_hash_table);
5759 trim_ld_motion_mems ();
5760 if (gcse_file)
5761 dump_hash_table (gcse_file, "Expression", &expr_hash_table);
5763 if (expr_hash_table.n_elems > 0)
5765 alloc_pre_mem (last_basic_block, expr_hash_table.n_elems);
5766 compute_pre_data ();
5767 changed |= pre_gcse ();
5768 free_edge_list (edge_list);
5769 free_pre_mem ();
5772 free_ldst_mems ();
5773 remove_fake_edges ();
5774 free_hash_table (&expr_hash_table);
5776 if (gcse_file)
5778 fprintf (gcse_file, "\nPRE GCSE of %s, pass %d: %d bytes needed, ",
5779 current_function_name, pass, bytes_used);
5780 fprintf (gcse_file, "%d substs, %d insns created\n",
5781 gcse_subst_count, gcse_create_count);
5784 return changed;
5787 /* If X contains any LABEL_REF's, add REG_LABEL notes for them to INSN.
5788 If notes are added to an insn which references a CODE_LABEL, the
5789 LABEL_NUSES count is incremented. We have to add REG_LABEL notes,
5790 because the following loop optimization pass requires them. */
5792 /* ??? This is very similar to the loop.c add_label_notes function. We
5793 could probably share code here. */
5795 /* ??? If there was a jump optimization pass after gcse and before loop,
5796 then we would not need to do this here, because jump would add the
5797 necessary REG_LABEL notes. */
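/* For illustration (a hypothetical insn, not a new requirement): if INSN's
   pattern contains (label_ref (code_label 23)), an
   (insn_list:REG_LABEL (code_label 23) ...) entry is prepended to
   REG_NOTES (insn) and LABEL_NUSES of that code_label is incremented.  */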
5799 static void
5800 add_label_notes (x, insn)
5801 rtx x;
5802 rtx insn;
5804 enum rtx_code code = GET_CODE (x);
5805 int i, j;
5806 const char *fmt;
5808 if (code == LABEL_REF && !LABEL_REF_NONLOCAL_P (x))
5810 /* This code used to ignore labels that referred to dispatch tables to
5811 avoid flow generating (slightly) worse code.
5813 We no longer ignore such label references (see LABEL_REF handling in
5814 mark_jump_label for additional information). */
5816 REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_LABEL, XEXP (x, 0),
5817 REG_NOTES (insn));
5818 if (LABEL_P (XEXP (x, 0)))
5819 LABEL_NUSES (XEXP (x, 0))++;
5820 return;
5823 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
5825 if (fmt[i] == 'e')
5826 add_label_notes (XEXP (x, i), insn);
5827 else if (fmt[i] == 'E')
5828 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
5829 add_label_notes (XVECEXP (x, i, j), insn);
5833 /* Compute transparent outgoing information for each block.
5835 An expression is transparent to an edge unless it is killed by
5836 the edge itself. This can only happen with abnormal control flow,
5837 when the edge is traversed through a call. This happens with
5838 non-local labels and exceptions.
5840 This would not be necessary if we split the edge. While this is
5841 normally impossible for abnormal critical edges, with some effort
5842 it should be possible with exception handling, since we still have
5843 control over which handler should be invoked. But due to increased
5844 EH table sizes, this may not be worthwhile. */
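/* For illustration (the typical case, not additional logic): in a block
   ending in a CALL_INSN, a hash table expression such as (mem (reg 117))
   has its TRANSPOUT bit cleared, since the call may modify the memory it
   refers to; a constant pool reference, i.e. a MEM whose address is a
   SYMBOL_REF with CONSTANT_POOL_ADDRESS_P set, is left transparent because
   a call cannot change it.  */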
5846 static void
5847 compute_transpout ()
5849 basic_block bb;
5850 unsigned int i;
5851 struct expr *expr;
5853 sbitmap_vector_ones (transpout, last_basic_block);
5855 FOR_EACH_BB (bb)
5857 /* Note that flow inserted a nop at the end of basic blocks that
5858 end in call instructions for reasons other than abnormal
5859 control flow. */
5860 if (GET_CODE (bb->end) != CALL_INSN)
5861 continue;
5863 for (i = 0; i < expr_hash_table.size; i++)
5864 for (expr = expr_hash_table.table[i]; expr ; expr = expr->next_same_hash)
5865 if (GET_CODE (expr->expr) == MEM)
5867 if (GET_CODE (XEXP (expr->expr, 0)) == SYMBOL_REF
5868 && CONSTANT_POOL_ADDRESS_P (XEXP (expr->expr, 0)))
5869 continue;
5871 /* ??? Optimally, we would use interprocedural alias
5872 analysis to determine if this mem is actually killed
5873 by this call. */
5874 RESET_BIT (transpout[bb->index], expr->bitmap_index);
5879 /* Removal of useless null pointer checks */
5881 /* Called via note_stores. X is set by SETTER. If X is a register we must
5882 invalidate nonnull_local and set nonnull_killed. DATA is really a
5883 `null_pointer_info *'.
5885 We ignore hard registers. */
5887 static void
5888 invalidate_nonnull_info (x, setter, data)
5889 rtx x;
5890 rtx setter ATTRIBUTE_UNUSED;
5891 void *data;
5893 unsigned int regno;
5894 struct null_pointer_info *npi = (struct null_pointer_info *) data;
5896 while (GET_CODE (x) == SUBREG)
5897 x = SUBREG_REG (x);
5899 /* Ignore anything that is not a register or is a hard register. */
5900 if (GET_CODE (x) != REG
5901 || REGNO (x) < npi->min_reg
5902 || REGNO (x) >= npi->max_reg)
5903 return;
5905 regno = REGNO (x) - npi->min_reg;
5907 RESET_BIT (npi->nonnull_local[npi->current_block->index], regno);
5908 SET_BIT (npi->nonnull_killed[npi->current_block->index], regno);
5911 /* Do null-pointer check elimination for the registers indicated in
5912 NPI. NONNULL_AVIN and NONNULL_AVOUT are pre-allocated sbitmaps;
5913 they are not our responsibility to free. */
5915 static int
5916 delete_null_pointer_checks_1 (block_reg, nonnull_avin,
5917 nonnull_avout, npi)
5918 unsigned int *block_reg;
5919 sbitmap *nonnull_avin;
5920 sbitmap *nonnull_avout;
5921 struct null_pointer_info *npi;
5923 basic_block bb, current_block;
5924 sbitmap *nonnull_local = npi->nonnull_local;
5925 sbitmap *nonnull_killed = npi->nonnull_killed;
5926 int something_changed = 0;
5928 /* Compute local properties, nonnull and killed. A register will have
5929 the nonnull property if at the end of the current block its value is
5930 known to be nonnull. The killed property indicates that somewhere in
5931 the block any information we had about the register is killed.
5933 Note that a register can have both properties in a single block. That
5934 indicates that it's killed, then later in the block a new value is
5935 computed. */
5936 sbitmap_vector_zero (nonnull_local, last_basic_block);
5937 sbitmap_vector_zero (nonnull_killed, last_basic_block);
5939 FOR_EACH_BB (current_block)
5941 rtx insn, stop_insn;
5943 /* Set the current block for invalidate_nonnull_info. */
5944 npi->current_block = current_block;
5946 /* Scan each insn in the basic block looking for memory references and
5947 register sets. */
5948 stop_insn = NEXT_INSN (current_block->end);
5949 for (insn = current_block->head;
5950 insn != stop_insn;
5951 insn = NEXT_INSN (insn))
5953 rtx set;
5954 rtx reg;
5956 /* Ignore anything that is not a normal insn. */
5957 if (! INSN_P (insn))
5958 continue;
5960 /* Basically ignore anything that is not a simple SET. We do have
5961 to make sure to invalidate nonnull_local and set nonnull_killed
5962 for such insns though. */
5963 set = single_set (insn);
5964 if (!set)
5966 note_stores (PATTERN (insn), invalidate_nonnull_info, npi);
5967 continue;
5970 /* See if we've got a usable memory load. We handle it first
5971 in case it uses its address register as a dest (which kills
5972 the nonnull property). */
5973 if (GET_CODE (SET_SRC (set)) == MEM
5974 && GET_CODE ((reg = XEXP (SET_SRC (set), 0))) == REG
5975 && REGNO (reg) >= npi->min_reg
5976 && REGNO (reg) < npi->max_reg)
5977 SET_BIT (nonnull_local[current_block->index],
5978 REGNO (reg) - npi->min_reg);
5980 /* Now invalidate stuff clobbered by this insn. */
5981 note_stores (PATTERN (insn), invalidate_nonnull_info, npi);
5983 /* And handle stores; we do these last since any sets in INSN can
5984 not kill the nonnull property if it is derived from a MEM
5985 appearing in a SET_DEST. */
5986 if (GET_CODE (SET_DEST (set)) == MEM
5987 && GET_CODE ((reg = XEXP (SET_DEST (set), 0))) == REG
5988 && REGNO (reg) >= npi->min_reg
5989 && REGNO (reg) < npi->max_reg)
5990 SET_BIT (nonnull_local[current_block->index],
5991 REGNO (reg) - npi->min_reg);
5995 /* Now compute global properties based on the local properties. This
5996 is a classic global availability algorithm. */
5997 compute_available (nonnull_local, nonnull_killed,
5998 nonnull_avout, nonnull_avin);
6000 /* Now look at each bb and see if it ends with a compare of a value
6001 against zero. */
6002 FOR_EACH_BB (bb)
6004 rtx last_insn = bb->end;
6005 rtx condition, earliest;
6006 int compare_and_branch;
6008 /* Since MIN_REG is always at least FIRST_PSEUDO_REGISTER, and
6009 since BLOCK_REG[BB] is zero if this block did not end with a
6010 comparison against zero, this condition works. */
6011 if (block_reg[bb->index] < npi->min_reg
6012 || block_reg[bb->index] >= npi->max_reg)
6013 continue;
6015 /* LAST_INSN is a conditional jump. Get its condition. */
6016 condition = get_condition (last_insn, &earliest);
6018 /* If we can't determine the condition then skip. */
6019 if (! condition)
6020 continue;
6022 /* Is the register known to have a nonzero value? */
6023 if (!TEST_BIT (nonnull_avout[bb->index], block_reg[bb->index] - npi->min_reg))
6024 continue;
6026 /* Try to compute whether the compare/branch at the loop end is one or
6027 two instructions. */
6028 if (earliest == last_insn)
6029 compare_and_branch = 1;
6030 else if (earliest == prev_nonnote_insn (last_insn))
6031 compare_and_branch = 2;
6032 else
6033 continue;
6035 /* We know the register in this comparison is nonnull at exit from
6036 this block. We can optimize this comparison. */
6037 if (GET_CODE (condition) == NE)
6039 rtx new_jump;
6041 new_jump = emit_jump_insn_after (gen_jump (JUMP_LABEL (last_insn)),
6042 last_insn);
6043 JUMP_LABEL (new_jump) = JUMP_LABEL (last_insn);
6044 LABEL_NUSES (JUMP_LABEL (new_jump))++;
6045 emit_barrier_after (new_jump);
6048 something_changed = 1;
6049 delete_insn (last_insn);
6050 if (compare_and_branch == 2)
6051 delete_insn (earliest);
6052 purge_dead_edges (bb);
6054 /* Don't check this block again. (Note that BLOCK_END is
6055 invalid here; we deleted the last instruction in the
6056 block.) */
6057 block_reg[bb->index] = 0;
6060 return something_changed;
6063 /* Find EQ/NE comparisons against zero which can be (indirectly) evaluated
6064 at compile time.
6066 This is conceptually similar to global constant/copy propagation and
6067 classic global CSE (it even uses the same dataflow equations as cprop).
6069 If a register is used as memory address with the form (mem (reg)), then we
6070 know that REG can not be zero at that point in the program. Any instruction
6071 which sets REG "kills" this property.
6073 So, if every path leading to a conditional branch has an available memory
6074 reference of that form, then we know the register can not have the value
6075 zero at the conditional branch.
6077 So we merely need to compute the local properties and propagate that data
6078 around the cfg, then optimize where possible.
6080 We run this pass two times. Once before CSE, then again after CSE. This
6081 has proven to be the most profitable approach. It is rare for new
6082 optimization opportunities of this nature to appear after the first CSE
6083 pass.
6085 This could probably be integrated with global cprop with a little work. */
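/* A purely illustrative example (hypothetical source, not generated code):

     tmp = *p;          <- (set (reg tmp) (mem (reg p))); P must be nonnull
     ...                   (no assignment to P on any path)
     if (p == 0)        <- this comparison can never be true here
       abort ();

   For an EQ comparison the compare and branch are simply deleted; for an NE
   comparison the conditional jump is replaced by an unconditional one.  */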
6087 int
6088 delete_null_pointer_checks (f)
6089 rtx f ATTRIBUTE_UNUSED;
6091 sbitmap *nonnull_avin, *nonnull_avout;
6092 unsigned int *block_reg;
6093 basic_block bb;
6094 int reg;
6095 int regs_per_pass;
6096 int max_reg;
6097 struct null_pointer_info npi;
6098 int something_changed = 0;
6100 /* If we have only a single block, then there's nothing to do. */
6101 if (n_basic_blocks <= 1)
6102 return 0;
6104 /* Trying to perform global optimizations on flow graphs which have
6105 a high connectivity will take a long time and is unlikely to be
6106 particularly useful.
6108 In normal circumstances a cfg should have about twice as many edges
6109 as blocks. But we do not want to punish small functions which have
6110 a couple switch statements. So we require a relatively large number
6111 of basic blocks and the ratio of edges to blocks to be high. */
6112 if (n_basic_blocks > 1000 && n_edges / n_basic_blocks >= 20)
6113 return 0;
6115 /* We need four bitmaps, each with a bit for each register in each
6116 basic block. */
6117 max_reg = max_reg_num ();
6118 regs_per_pass = get_bitmap_width (4, last_basic_block, max_reg);
6120 /* Allocate bitmaps to hold local and global properties. */
6121 npi.nonnull_local = sbitmap_vector_alloc (last_basic_block, regs_per_pass);
6122 npi.nonnull_killed = sbitmap_vector_alloc (last_basic_block, regs_per_pass);
6123 nonnull_avin = sbitmap_vector_alloc (last_basic_block, regs_per_pass);
6124 nonnull_avout = sbitmap_vector_alloc (last_basic_block, regs_per_pass);
6126 /* Go through the basic blocks, seeing whether or not each block
6127 ends with a conditional branch whose condition is a comparison
6128 against zero. Record the register compared in BLOCK_REG. */
6129 block_reg = (unsigned int *) xcalloc (last_basic_block, sizeof (int));
6130 FOR_EACH_BB (bb)
6132 rtx last_insn = bb->end;
6133 rtx condition, earliest, reg;
6135 /* We only want conditional branches. */
6136 if (GET_CODE (last_insn) != JUMP_INSN
6137 || !any_condjump_p (last_insn)
6138 || !onlyjump_p (last_insn))
6139 continue;
6141 /* LAST_INSN is a conditional jump. Get its condition. */
6142 condition = get_condition (last_insn, &earliest);
6144 /* If we were unable to get the condition, or it is not an equality
6145 comparison against zero then there's nothing we can do. */
6146 if (!condition
6147 || (GET_CODE (condition) != NE && GET_CODE (condition) != EQ)
6148 || GET_CODE (XEXP (condition, 1)) != CONST_INT
6149 || (XEXP (condition, 1)
6150 != CONST0_RTX (GET_MODE (XEXP (condition, 0)))))
6151 continue;
6153 /* We must be checking a register against zero. */
6154 reg = XEXP (condition, 0);
6155 if (GET_CODE (reg) != REG)
6156 continue;
6158 block_reg[bb->index] = REGNO (reg);
6161 /* Go through the algorithm for each block of registers. */
6162 for (reg = FIRST_PSEUDO_REGISTER; reg < max_reg; reg += regs_per_pass)
6164 npi.min_reg = reg;
6165 npi.max_reg = MIN (reg + regs_per_pass, max_reg);
6166 something_changed |= delete_null_pointer_checks_1 (block_reg,
6167 nonnull_avin,
6168 nonnull_avout,
6169 &npi);
6172 /* Free the table of registers compared at the end of every block. */
6173 free (block_reg);
6175 /* Free bitmaps. */
6176 sbitmap_vector_free (npi.nonnull_local);
6177 sbitmap_vector_free (npi.nonnull_killed);
6178 sbitmap_vector_free (nonnull_avin);
6179 sbitmap_vector_free (nonnull_avout);
6181 return something_changed;
6184 /* Code Hoisting variables and subroutines. */
6186 /* Very busy expressions. */
6187 static sbitmap *hoist_vbein;
6188 static sbitmap *hoist_vbeout;
6190 /* Hoistable expressions. */
6191 static sbitmap *hoist_exprs;
6193 /* Dominator bitmaps. */
6194 dominance_info dominators;
6196 /* ??? We could compute post dominators and run this algorithm in
6197 reverse to perform tail merging, doing so would probably be
6198 more effective than the tail merging code in jump.c.
6200 It's unclear if tail merging could be run in parallel with
6201 code hoisting. It would be nice. */
6203 /* Allocate vars used for code hoisting analysis. */
6205 static void
6206 alloc_code_hoist_mem (n_blocks, n_exprs)
6207 int n_blocks, n_exprs;
6209 antloc = sbitmap_vector_alloc (n_blocks, n_exprs);
6210 transp = sbitmap_vector_alloc (n_blocks, n_exprs);
6211 comp = sbitmap_vector_alloc (n_blocks, n_exprs);
6213 hoist_vbein = sbitmap_vector_alloc (n_blocks, n_exprs);
6214 hoist_vbeout = sbitmap_vector_alloc (n_blocks, n_exprs);
6215 hoist_exprs = sbitmap_vector_alloc (n_blocks, n_exprs);
6216 transpout = sbitmap_vector_alloc (n_blocks, n_exprs);
6219 /* Free vars used for code hoisting analysis. */
6221 static void
6222 free_code_hoist_mem ()
6224 sbitmap_vector_free (antloc);
6225 sbitmap_vector_free (transp);
6226 sbitmap_vector_free (comp);
6228 sbitmap_vector_free (hoist_vbein);
6229 sbitmap_vector_free (hoist_vbeout);
6230 sbitmap_vector_free (hoist_exprs);
6231 sbitmap_vector_free (transpout);
6233 free_dominance_info (dominators);
6236 /* Compute the very busy expressions at entry/exit from each block.
6238 An expression is very busy if all paths from a given point
6239 compute the expression. */
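/* Sketch of the equations iterated below (an illustration of the code, in
   C-like set notation):

     hoist_vbein[bb]  = antloc[bb] | (hoist_vbeout[bb] & transp[bb])
     hoist_vbeout[bb] = intersection of hoist_vbein over the successors of bb

   The system is solved to a fixed point, visiting blocks in reverse order
   to speed convergence.  */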
6241 static void
6242 compute_code_hoist_vbeinout ()
6244 int changed, passes;
6245 basic_block bb;
6247 sbitmap_vector_zero (hoist_vbeout, last_basic_block);
6248 sbitmap_vector_zero (hoist_vbein, last_basic_block);
6250 passes = 0;
6251 changed = 1;
6253 while (changed)
6255 changed = 0;
6257 /* We scan the blocks in the reverse order to speed up
6258 the convergence. */
6259 FOR_EACH_BB_REVERSE (bb)
6261 changed |= sbitmap_a_or_b_and_c_cg (hoist_vbein[bb->index], antloc[bb->index],
6262 hoist_vbeout[bb->index], transp[bb->index]);
6263 if (bb->next_bb != EXIT_BLOCK_PTR)
6264 sbitmap_intersection_of_succs (hoist_vbeout[bb->index], hoist_vbein, bb->index);
6267 passes++;
6270 if (gcse_file)
6271 fprintf (gcse_file, "hoisting vbeinout computation: %d passes\n", passes);
6274 /* Top level routine to do the dataflow analysis needed by code hoisting. */
6276 static void
6277 compute_code_hoist_data ()
6279 compute_local_properties (transp, comp, antloc, &expr_hash_table);
6280 compute_transpout ();
6281 compute_code_hoist_vbeinout ();
6282 dominators = calculate_dominance_info (CDI_DOMINATORS);
6283 if (gcse_file)
6284 fprintf (gcse_file, "\n");
6287 /* Determine if the expression identified by EXPR_INDEX would
6288 reach BB unimpaired if it was placed at the end of EXPR_BB.
6290 It's unclear exactly what Muchnick meant by "unimpaired". It seems
6291 to me that the expression must either be computed or transparent in
6292 *every* block in the path(s) from EXPR_BB to BB. Any other definition
6293 would allow the expression to be hoisted out of loops, even if
6294 the expression wasn't a loop invariant.
6296 Contrast this to reachability for PRE where an expression is
6297 considered reachable if *any* path reaches instead of *all*
6298 paths. */
6300 static int
6301 hoist_expr_reaches_here_p (expr_bb, expr_index, bb, visited)
6302 basic_block expr_bb;
6303 int expr_index;
6304 basic_block bb;
6305 char *visited;
6307 edge pred;
6308 int visited_allocated_locally = 0;
6311 if (visited == NULL)
6313 visited_allocated_locally = 1;
6314 visited = xcalloc (last_basic_block, 1);
6317 for (pred = bb->pred; pred != NULL; pred = pred->pred_next)
6319 basic_block pred_bb = pred->src;
6321 if (pred->src == ENTRY_BLOCK_PTR)
6322 break;
6323 else if (pred_bb == expr_bb)
6324 continue;
6325 else if (visited[pred_bb->index])
6326 continue;
6328 /* Does this predecessor generate this expression? */
6329 else if (TEST_BIT (comp[pred_bb->index], expr_index))
6330 break;
6331 else if (! TEST_BIT (transp[pred_bb->index], expr_index))
6332 break;
6334 /* Not killed. */
6335 else
6337 visited[pred_bb->index] = 1;
6338 if (! hoist_expr_reaches_here_p (expr_bb, expr_index,
6339 pred_bb, visited))
6340 break;
6343 if (visited_allocated_locally)
6344 free (visited);
6346 return (pred == NULL);
6349 /* Actually perform code hoisting. */
6351 static void
6352 hoist_code ()
6354 basic_block bb, dominated;
6355 basic_block *domby;
6356 unsigned int domby_len;
6357 unsigned int i,j;
6358 struct expr **index_map;
6359 struct expr *expr;
6361 sbitmap_vector_zero (hoist_exprs, last_basic_block);
6363 /* Compute a mapping from expression number (`bitmap_index') to
6364 hash table entry. */
6366 index_map = (struct expr **) xcalloc (expr_hash_table.n_elems, sizeof (struct expr *));
6367 for (i = 0; i < expr_hash_table.size; i++)
6368 for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash)
6369 index_map[expr->bitmap_index] = expr;
6371 /* Walk over each basic block looking for potentially hoistable
6372 expressions; nothing gets hoisted from the entry block.
6373 FOR_EACH_BB (bb)
6375 int found = 0;
6376 int insn_inserted_p;
6378 domby_len = get_dominated_by (dominators, bb, &domby);
6379 /* Examine each expression that is very busy at the exit of this
6380 block. These are the potentially hoistable expressions. */
6381 for (i = 0; i < hoist_vbeout[bb->index]->n_bits; i++)
6383 int hoistable = 0;
6385 if (TEST_BIT (hoist_vbeout[bb->index], i)
6386 && TEST_BIT (transpout[bb->index], i))
6388 /* We've found a potentially hoistable expression, now
6389 we look at every block BB dominates to see if it
6390 computes the expression. */
6391 for (j = 0; j < domby_len; j++)
6393 dominated = domby[j];
6394 /* Ignore self dominance. */
6395 if (bb == dominated)
6396 continue;
6397 /* We've found a dominated block, now see if it computes
6398 the busy expression and whether or not moving that
6399 expression to the "beginning" of that block is safe. */
6400 if (!TEST_BIT (antloc[dominated->index], i))
6401 continue;
6403 /* Note if the expression would reach the dominated block
6404 unimpaired if it was placed at the end of BB.
6406 Keep track of how many times this expression is hoistable
6407 from a dominated block into BB. */
6408 if (hoist_expr_reaches_here_p (bb, i, dominated, NULL))
6409 hoistable++;
6412 /* If we found more than one hoistable occurrence of this
6413 expression, then note it in the bitmap of expressions to
6414 hoist. It makes no sense to hoist things which are computed
6415 in only one BB, and doing so tends to pessimize register
6416 allocation. One could increase this value to try harder
6417 to avoid any possible code expansion due to register
6418 allocation issues; however experiments have shown that
6419 the vast majority of hoistable expressions are only movable
6420 from two successors, so raising this threshold is likely
6421 to nullify any benefit we get from code hoisting. */
6422 if (hoistable > 1)
6424 SET_BIT (hoist_exprs[bb->index], i);
6425 found = 1;
6429 /* If we found nothing to hoist, then quit now. */
6430 if (! found)
6432 free (domby);
6433 continue;
6436 /* Loop over all the hoistable expressions. */
6437 for (i = 0; i < hoist_exprs[bb->index]->n_bits; i++)
6439 /* We want to insert the expression into BB only once, so
6440 note when we've inserted it. */
6441 insn_inserted_p = 0;
6443 /* These tests should be the same as the tests above. */
6444 if (TEST_BIT (hoist_vbeout[bb->index], i))
6446 /* We've found a potentially hoistable expression, now
6447 we look at every block BB dominates to see if it
6448 computes the expression. */
6449 for (j = 0; j < domby_len; j++)
6451 dominated = domby[j];
6452 /* Ignore self dominance. */
6453 if (bb == dominated)
6454 continue;
6456 /* We've found a dominated block, now see if it computes
6457 the busy expression and whether or not moving that
6458 expression to the "beginning" of that block is safe. */
6459 if (!TEST_BIT (antloc[dominated->index], i))
6460 continue;
6462 /* The expression is computed in the dominated block and
6463 it would be safe to compute it at the start of the
6464 dominated block. Now we have to determine if the
6465 expression would reach the dominated block if it was
6466 placed at the end of BB. */
6467 if (hoist_expr_reaches_here_p (bb, i, dominated, NULL))
6469 struct expr *expr = index_map[i];
6470 struct occr *occr = expr->antic_occr;
6471 rtx insn;
6472 rtx set;
6474 /* Find the right occurrence of this expression. */
6475 while (occr && BLOCK_FOR_INSN (occr->insn) != dominated)
6476 occr = occr->next;
6478 /* Should never happen. */
6479 if (!occr)
6480 abort ();
6482 insn = occr->insn;
6484 set = single_set (insn);
6485 if (! set)
6486 abort ();
6488 /* Create a pseudo-reg to store the result of reaching
6489 expressions into. Get the mode for the new pseudo
6490 from the mode of the original destination pseudo. */
6491 if (expr->reaching_reg == NULL)
6492 expr->reaching_reg
6493 = gen_reg_rtx (GET_MODE (SET_DEST (set)));
6495 gcse_emit_move_after (expr->reaching_reg, SET_DEST (set), insn);
6496 delete_insn (insn);
6497 occr->deleted_p = 1;
6498 if (!insn_inserted_p)
6500 insert_insn_end_bb (index_map[i], bb, 0);
6501 insn_inserted_p = 1;
6507 free (domby);
6510 free (index_map);
6513 /* Top level routine to perform one code hoisting (aka unification) pass
6515 Return nonzero if a change was made. */
6517 static int
6518 one_code_hoisting_pass ()
6520 int changed = 0;
6522 alloc_hash_table (max_cuid, &expr_hash_table, 0);
6523 compute_hash_table (&expr_hash_table);
6524 if (gcse_file)
6525 dump_hash_table (gcse_file, "Code Hoisting Expressions", &expr_hash_table);
6527 if (expr_hash_table.n_elems > 0)
6529 alloc_code_hoist_mem (last_basic_block, expr_hash_table.n_elems);
6530 compute_code_hoist_data ();
6531 hoist_code ();
6532 free_code_hoist_mem ();
6535 free_hash_table (&expr_hash_table);
6537 return changed;
6540 /* Here we provide the things required to do store motion towards
6541 the exit. In order for this to be effective, gcse also needed to
6542 be taught how to move a load when it is killed only by a store to itself.
6544 int i;
6545 float a[10];
6547 void foo(float scale)
6549 for (i=0; i<10; i++)
6550 a[i] *= scale;
6553 'i' is both loaded and stored to in the loop. Normally, gcse cannot move
6554 the load out since it's live around the loop, and stored at the bottom
6555 of the loop.
6557 The 'Load Motion' referred to and implemented in this file is
6558 an enhancement to gcse which when using edge based lcm, recognizes
6559 this situation and allows gcse to move the load out of the loop.
6561 Once gcse has hoisted the load, store motion can then push this
6562 load towards the exit, and we end up with no loads or stores of 'i'
6563 in the loop. */
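/* A sketch of the intended result for the example above (illustration only,
   written as source code rather than RTL):

     reg = i;                  <- gcse hoists the load of 'i' out of the loop
     for (; reg < 10; reg++)
       a[reg] *= scale;
     i = reg;                  <- store motion pushes the store to the exit

   leaving no loads or stores of 'i' inside the loop.  */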
6565 /* This will search the ldst list for a matching expression. If it
6566 doesn't find one, we create one and initialize it. */
6568 static struct ls_expr *
6569 ldst_entry (x)
6570 rtx x;
6572 struct ls_expr * ptr;
6574 for (ptr = first_ls_expr(); ptr != NULL; ptr = next_ls_expr (ptr))
6575 if (expr_equiv_p (ptr->pattern, x))
6576 break;
6578 if (!ptr)
6580 ptr = (struct ls_expr *) xmalloc (sizeof (struct ls_expr));
6582 ptr->next = pre_ldst_mems;
6583 ptr->expr = NULL;
6584 ptr->pattern = x;
6585 ptr->pattern_regs = NULL_RTX;
6586 ptr->loads = NULL_RTX;
6587 ptr->stores = NULL_RTX;
6588 ptr->reaching_reg = NULL_RTX;
6589 ptr->invalid = 0;
6590 ptr->index = 0;
6591 ptr->hash_index = 0;
6592 pre_ldst_mems = ptr;
6595 return ptr;
6598 /* Free up an individual ldst entry. */
6600 static void
6601 free_ldst_entry (ptr)
6602 struct ls_expr * ptr;
6604 free_INSN_LIST_list (& ptr->loads);
6605 free_INSN_LIST_list (& ptr->stores);
6607 free (ptr);
6610 /* Free up all memory associated with the ldst list. */
6612 static void
6613 free_ldst_mems ()
6615 while (pre_ldst_mems)
6617 struct ls_expr * tmp = pre_ldst_mems;
6619 pre_ldst_mems = pre_ldst_mems->next;
6621 free_ldst_entry (tmp);
6624 pre_ldst_mems = NULL;
6627 /* Dump debugging info about the ldst list. */
6629 static void
6630 print_ldst_list (file)
6631 FILE * file;
6633 struct ls_expr * ptr;
6635 fprintf (file, "LDST list: \n");
6637 for (ptr = first_ls_expr(); ptr != NULL; ptr = next_ls_expr (ptr))
6639 fprintf (file, " Pattern (%3d): ", ptr->index);
6641 print_rtl (file, ptr->pattern);
6643 fprintf (file, "\n Loads : ");
6645 if (ptr->loads)
6646 print_rtl (file, ptr->loads);
6647 else
6648 fprintf (file, "(nil)");
6650 fprintf (file, "\n Stores : ");
6652 if (ptr->stores)
6653 print_rtl (file, ptr->stores);
6654 else
6655 fprintf (file, "(nil)");
6657 fprintf (file, "\n\n");
6660 fprintf (file, "\n");
6663 /* Return the entry in the list of ldst only expressions that matches X and has not been invalidated, or NULL. */
6665 static struct ls_expr *
6666 find_rtx_in_ldst (x)
6667 rtx x;
6669 struct ls_expr * ptr;
6671 for (ptr = pre_ldst_mems; ptr != NULL; ptr = ptr->next)
6672 if (expr_equiv_p (ptr->pattern, x) && ! ptr->invalid)
6673 return ptr;
6675 return NULL;
6678 /* Assign each element of the list of mems a monotonically increasing value. */
6680 static int
6681 enumerate_ldsts ()
6683 struct ls_expr * ptr;
6684 int n = 0;
6686 for (ptr = pre_ldst_mems; ptr != NULL; ptr = ptr->next)
6687 ptr->index = n++;
6689 return n;
6692 /* Return first item in the list. */
6694 static inline struct ls_expr *
6695 first_ls_expr ()
6697 return pre_ldst_mems;
6700 /* Return the next item in the list after the specified one. */
6702 static inline struct ls_expr *
6703 next_ls_expr (ptr)
6704 struct ls_expr * ptr;
6706 return ptr->next;
6709 /* Load Motion for loads which only kill themselves. */
6711 /* Return true if x is a simple MEM operation, with no registers or
6712 side effects. These are the types of loads we consider for the
6713 ld_motion list; otherwise we let the usual aliasing take care of it. */
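/* For instance (an illustration of the tests below): a load such as
   (mem:SI (symbol_ref "x")) is considered simple, while a volatile MEM,
   a BLKmode MEM, a MEM mentioning the stack pointer, or -- when
   -fnon-call-exceptions is in effect -- a MEM that may trap, is not.  */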
6715 static int
6716 simple_mem (x)
6717 rtx x;
6719 if (GET_CODE (x) != MEM)
6720 return 0;
6722 if (MEM_VOLATILE_P (x))
6723 return 0;
6725 if (GET_MODE (x) == BLKmode)
6726 return 0;
6728 /* If we are handling exceptions, we must be careful with memory references
6729 that may trap. If we are not, the behavior is undefined, so we may just
6730 continue. */
6731 if (flag_non_call_exceptions && may_trap_p (x))
6732 return 0;
6734 if (side_effects_p (x))
6735 return 0;
6737 /* Do not consider function arguments passed on stack. */
6738 if (reg_mentioned_p (stack_pointer_rtx, x))
6739 return 0;
6741 if (flag_float_store && FLOAT_MODE_P (GET_MODE (x)))
6742 return 0;
6744 return 1;
6747 /* Make sure there isn't a buried reference in this pattern anywhere.
6748 If there is, invalidate the entry for it since we're not capable
6749 of fixing it up just yet. We have to be sure we know about ALL
6750 loads since the aliasing code will allow all entries in the
6751 ld_motion list to not-alias itself. If we miss a load, we will get
6752 the wrong value since gcse might common it and we won't know to
6753 fix it up. */
6755 static void
6756 invalidate_any_buried_refs (x)
6757 rtx x;
6759 const char * fmt;
6760 int i, j;
6761 struct ls_expr * ptr;
6763 /* Invalidate it in the list. */
6764 if (GET_CODE (x) == MEM && simple_mem (x))
6766 ptr = ldst_entry (x);
6767 ptr->invalid = 1;
6770 /* Recursively process the insn. */
6771 fmt = GET_RTX_FORMAT (GET_CODE (x));
6773 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
6775 if (fmt[i] == 'e')
6776 invalidate_any_buried_refs (XEXP (x, i));
6777 else if (fmt[i] == 'E')
6778 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
6779 invalidate_any_buried_refs (XVECEXP (x, i, j));
6783 /* Find all the 'simple' MEMs which are used in LOADs and STORES. Simple
6784 being defined as MEM loads and stores to symbols, with no side effects
6785 and no registers in the expression. For a MEM destination, we also
6786 check that the insn is still valid if we replace the destination with a
6787 REG, as is done in update_ld_motion_stores. If there are any uses/defs
6788 which don't match these criteria, they are invalidated and trimmed out
6789 later. */
6791 static void
6792 compute_ld_motion_mems ()
6794 struct ls_expr * ptr;
6795 basic_block bb;
6796 rtx insn;
6798 pre_ldst_mems = NULL;
6800 FOR_EACH_BB (bb)
6802 for (insn = bb->head;
6803 insn && insn != NEXT_INSN (bb->end);
6804 insn = NEXT_INSN (insn))
6806 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
6808 if (GET_CODE (PATTERN (insn)) == SET)
6810 rtx src = SET_SRC (PATTERN (insn));
6811 rtx dest = SET_DEST (PATTERN (insn));
6813 /* Check for a simple LOAD... */
6814 if (GET_CODE (src) == MEM && simple_mem (src))
6816 ptr = ldst_entry (src);
6817 if (GET_CODE (dest) == REG)
6818 ptr->loads = alloc_INSN_LIST (insn, ptr->loads);
6819 else
6820 ptr->invalid = 1;
6822 else
6824 /* Make sure there isn't a buried load somewhere. */
6825 invalidate_any_buried_refs (src);
6828 /* Check for stores. Don't worry about aliased ones; they
6829 will block any movement we might do later. We only care
6830 about this exact pattern since this is the only
6831 circumstance in which we will ignore the aliasing info.
6832 if (GET_CODE (dest) == MEM && simple_mem (dest))
6834 ptr = ldst_entry (dest);
6836 if (GET_CODE (src) != MEM
6837 && GET_CODE (src) != ASM_OPERANDS
6838 /* Check for REG manually since want_to_gcse_p
6839 returns 0 for all REGs. */
6840 && (REG_P (src) || want_to_gcse_p (src)))
6841 ptr->stores = alloc_INSN_LIST (insn, ptr->stores);
6842 else
6843 ptr->invalid = 1;
6846 else
6847 invalidate_any_buried_refs (PATTERN (insn));
6853 /* Remove any references that have been either invalidated or are not in the
6854 expression list for pre gcse. */
6856 static void
6857 trim_ld_motion_mems ()
6859 struct ls_expr * last = NULL;
6860 struct ls_expr * ptr = first_ls_expr ();
6862 while (ptr != NULL)
6864 int del = ptr->invalid;
6865 struct expr * expr = NULL;
6867 /* Delete if entry has been made invalid. */
6868 if (!del)
6870 unsigned int i;
6872 del = 1;
6873 /* Delete if we cannot find this mem in the expression list. */
6874 for (i = 0; i < expr_hash_table.size && del; i++)
6876 for (expr = expr_hash_table.table[i];
6877 expr != NULL;
6878 expr = expr->next_same_hash)
6879 if (expr_equiv_p (expr->expr, ptr->pattern))
6881 del = 0;
6882 break;
6887 if (del)
6889 if (last != NULL)
6891 last->next = ptr->next;
6892 free_ldst_entry (ptr);
6893 ptr = last->next;
6895 else
6897 pre_ldst_mems = pre_ldst_mems->next;
6898 free_ldst_entry (ptr);
6899 ptr = pre_ldst_mems;
6902 else
6904 /* Set the expression field if we are keeping it. */
6905 last = ptr;
6906 ptr->expr = expr;
6907 ptr = ptr->next;
6911 /* Show the world what we've found. */
6912 if (gcse_file && pre_ldst_mems != NULL)
6913 print_ldst_list (gcse_file);
6916 /* This routine will take an expression which we are replacing with
6917 a reaching register, and update any stores that are needed if
6918 that expression is in the ld_motion list. Stores are updated by
6919 copying their SRC to the reaching register, and then storing
6920 the reaching register into the store location. This keeps the
6921 correct value in the reaching register for the loads. */
6923 static void
6924 update_ld_motion_stores (expr)
6925 struct expr * expr;
6927 struct ls_expr * mem_ptr;
6929 if ((mem_ptr = find_rtx_in_ldst (expr->expr)))
6931 /* We can try to find just the REACHED stores, but it shouldn't
6932 matter to set the reaching reg everywhere... some might be
6933 dead and should be eliminated later. */
6935 /* We replace (set mem expr) with (set reg expr) (set mem reg)
6936 where reg is the reaching reg used in the load. We checked in
6937 compute_ld_motion_mems that we can replace (set mem expr) with
6938 (set reg expr) in that insn. */
6939 rtx list = mem_ptr->stores;
6941 for ( ; list != NULL_RTX; list = XEXP (list, 1))
6943 rtx insn = XEXP (list, 0);
6944 rtx pat = PATTERN (insn);
6945 rtx src = SET_SRC (pat);
6946 rtx reg = expr->reaching_reg;
6947 rtx copy, new;
6949 /* If we've already copied it, continue. */
6950 if (expr->reaching_reg == src)
6951 continue;
6953 if (gcse_file)
6955 fprintf (gcse_file, "PRE: store updated with reaching reg ");
6956 print_rtl (gcse_file, expr->reaching_reg);
6957 fprintf (gcse_file, ":\n ");
6958 print_inline_rtx (gcse_file, insn, 8);
6959 fprintf (gcse_file, "\n");
6962 copy = gen_move_insn ( reg, copy_rtx (SET_SRC (pat)));
6963 new = emit_insn_before (copy, insn);
6964 record_one_set (REGNO (reg), new);
6965 SET_SRC (pat) = reg;
6967 /* un-recognize this pattern since it's probably different now. */
6968 INSN_CODE (insn) = -1;
6969 gcse_create_count++;
6974 /* Store motion code. */
6976 #define ANTIC_STORE_LIST(x) ((x)->loads)
6977 #define AVAIL_STORE_LIST(x) ((x)->stores)
6978 #define LAST_AVAIL_CHECK_FAILURE(x) ((x)->reaching_reg)
6980 /* This is used to communicate the target bitvector we want to use in the
6981 reg_set_info routine when called via the note_stores mechanism. */
6982 static int * regvec;
6984 /* And current insn, for the same routine. */
6985 static rtx compute_store_table_current_insn;
6987 /* Used in computing the reverse edge graph bit vectors. */
6988 static sbitmap * st_antloc;
6990 /* Global holding the number of store expressions we are dealing with. */
6991 static int num_stores;
6993 /* Checks to see if we need to mark a register set. Called from note_stores. */
6995 static void
6996 reg_set_info (dest, setter, data)
6997 rtx dest, setter ATTRIBUTE_UNUSED;
6998 void * data ATTRIBUTE_UNUSED;
7000 if (GET_CODE (dest) == SUBREG)
7001 dest = SUBREG_REG (dest);
7003 if (GET_CODE (dest) == REG)
7004 regvec[REGNO (dest)] = INSN_UID (compute_store_table_current_insn);
7007 /* Return zero if some of the registers in list X are killed
7008 due to the registers recorded as set in REGS_SET. */
7010 static bool
7011 store_ops_ok (x, regs_set)
7012 rtx x;
7013 int *regs_set;
7015 rtx reg;
7017 for (; x; x = XEXP (x, 1))
7019 reg = XEXP (x, 0);
7020 if (regs_set[REGNO(reg)])
7021 return false;
7024 return true;
7027 /* Returns a list of registers mentioned in X. */
7028 static rtx
7029 extract_mentioned_regs (x)
7030 rtx x;
7032 return extract_mentioned_regs_helper (x, NULL_RTX);
7035 /* Helper for extract_mentioned_regs; ACCUM is used to accumulate used
7036 registers. */
7037 static rtx
7038 extract_mentioned_regs_helper (x, accum)
7039 rtx x;
7040 rtx accum;
7042 int i;
7043 enum rtx_code code;
7044 const char * fmt;
7046 /* Repeat is used to turn tail-recursion into iteration. */
7047 repeat:
7049 if (x == 0)
7050 return accum;
7052 code = GET_CODE (x);
7053 switch (code)
7055 case REG:
7056 return alloc_EXPR_LIST (0, x, accum);
7058 case MEM:
7059 x = XEXP (x, 0);
7060 goto repeat;
7062 case PRE_DEC:
7063 case PRE_INC:
7064 case POST_DEC:
7065 case POST_INC:
7066 /* We do not run this function with arguments having side effects. */
7067 abort ();
7069 case PC:
7070 case CC0: /*FIXME*/
7071 case CONST:
7072 case CONST_INT:
7073 case CONST_DOUBLE:
7074 case CONST_VECTOR:
7075 case SYMBOL_REF:
7076 case LABEL_REF:
7077 case ADDR_VEC:
7078 case ADDR_DIFF_VEC:
7079 return accum;
7081 default:
7082 break;
7085 i = GET_RTX_LENGTH (code) - 1;
7086 fmt = GET_RTX_FORMAT (code);
7088 for (; i >= 0; i--)
7090 if (fmt[i] == 'e')
7092 rtx tem = XEXP (x, i);
7094 /* If we are about to do the last recursive call
7095 needed at this level, change it into iteration. */
7096 if (i == 0)
7098 x = tem;
7099 goto repeat;
7102 accum = extract_mentioned_regs_helper (tem, accum);
7104 else if (fmt[i] == 'E')
7106 int j;
7108 for (j = 0; j < XVECLEN (x, i); j++)
7109 accum = extract_mentioned_regs_helper (XVECEXP (x, i, j), accum);
7113 return accum;
7116 /* Determine whether INSN is a MEM store pattern that we will consider moving.
7117 REGS_SET_BEFORE is bitmap of registers set before (and including) the
7118 current insn, REGS_SET_AFTER is bitmap of registers set after (and
7119 including) the insn in this basic block. We must be passing through BB from
7120 head to end, as we are using this fact to speed things up.
7122 The results are stored this way:
7124 -- the first anticipatable expression is added into ANTIC_STORE_LIST
7125 -- if the processed expression is not anticipatable, NULL_RTX is added
7126 there instead, so that we can use it as indicator that no further
7127 expression of this type may be anticipatable
7128 -- if the expression is available, it is added as head of AVAIL_STORE_LIST;
7129 consequently, all of them but this head are dead and may be deleted.
7130 -- if the expression is not available, the insn that causes it to fail to be
7131 available is stored in reaching_reg.
7133 Things are complicated a bit by the fact that there may already be stores
7134 to the same MEM from other blocks; also the caller must take care of the
7135 necessary cleanup of the temporary markers after the end of the basic block. */
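/* A small illustration (hypothetical block, not generated code):

     *p = x;       <- anticipatable: no aliasing load or store precedes it
     y = *q;       <- may alias *p, so the first store is not available
     *p = z;       <- available: nothing kills it before the block end

   so for this block ANTIC_STORE_LIST records the first store and
   AVAIL_STORE_LIST records the last one.  */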
7138 static void
7139 find_moveable_store (insn, regs_set_before, regs_set_after)
7140 rtx insn;
7141 int *regs_set_before;
7142 int *regs_set_after;
7144 struct ls_expr * ptr;
7145 rtx dest, set, tmp;
7146 int check_anticipatable, check_available;
7147 basic_block bb = BLOCK_FOR_INSN (insn);
7149 set = single_set (insn);
7150 if (!set)
7151 return;
7153 dest = SET_DEST (set);
7155 if (GET_CODE (dest) != MEM || MEM_VOLATILE_P (dest)
7156 || GET_MODE (dest) == BLKmode)
7157 return;
7159 if (side_effects_p (dest))
7160 return;
7162 /* If we are handling exceptions, we must be careful with memory references
7163 that may trap. If we are not, the behavior is undefined, so we may just
7164 continue. */
7165 if (flag_non_call_exceptions && may_trap_p (dest))
7166 return;
7168 ptr = ldst_entry (dest);
7169 if (!ptr->pattern_regs)
7170 ptr->pattern_regs = extract_mentioned_regs (dest);
7172 /* Do not check for anticipatability if we either found one anticipatable
7173 store already, or tested for one and found out that it was killed. */
7174 check_anticipatable = 0;
7175 if (!ANTIC_STORE_LIST (ptr))
7176 check_anticipatable = 1;
7177 else
7179 tmp = XEXP (ANTIC_STORE_LIST (ptr), 0);
7180 if (tmp != NULL_RTX
7181 && BLOCK_FOR_INSN (tmp) != bb)
7182 check_anticipatable = 1;
7184 if (check_anticipatable)
7186 if (store_killed_before (dest, ptr->pattern_regs, insn, bb, regs_set_before))
7187 tmp = NULL_RTX;
7188 else
7189 tmp = insn;
7190 ANTIC_STORE_LIST (ptr) = alloc_INSN_LIST (tmp,
7191 ANTIC_STORE_LIST (ptr));
7194 /* It is not necessary to check whether the store is available if we did
7195 it successfully before; if we failed before, do not bother to check
7196 until we reach the insn that caused us to fail. */
7197 check_available = 0;
7198 if (!AVAIL_STORE_LIST (ptr))
7199 check_available = 1;
7200 else
7202 tmp = XEXP (AVAIL_STORE_LIST (ptr), 0);
7203 if (BLOCK_FOR_INSN (tmp) != bb)
7204 check_available = 1;
7206 if (check_available)
7208 /* Check whether we have already reached the insn at which the check
7209 failed last time. */
7210 if (LAST_AVAIL_CHECK_FAILURE (ptr))
7212 for (tmp = bb->end;
7213 tmp != insn && tmp != LAST_AVAIL_CHECK_FAILURE (ptr);
7214 tmp = PREV_INSN (tmp))
7215 continue;
7216 if (tmp == insn)
7217 check_available = 0;
7219 else
7220 check_available = store_killed_after (dest, ptr->pattern_regs, insn,
7221 bb, regs_set_after,
7222 &LAST_AVAIL_CHECK_FAILURE (ptr));
7224 if (!check_available)
7225 AVAIL_STORE_LIST (ptr) = alloc_INSN_LIST (insn, AVAIL_STORE_LIST (ptr));
7228 /* Find available and anticipatable stores. */
7230 static int
7231 compute_store_table ()
7233 int ret;
7234 basic_block bb;
7235 unsigned regno;
7236 rtx insn, pat, tmp;
7237 int *last_set_in, *already_set;
7238 struct ls_expr * ptr, **prev_next_ptr_ptr;
7240 max_gcse_regno = max_reg_num ();
7242 reg_set_in_block = (sbitmap *) sbitmap_vector_alloc (last_basic_block,
7243 max_gcse_regno);
7244 sbitmap_vector_zero (reg_set_in_block, last_basic_block);
7245 pre_ldst_mems = 0;
7246 last_set_in = xmalloc (sizeof (int) * max_gcse_regno);
7247 already_set = xmalloc (sizeof (int) * max_gcse_regno);
7249 /* Find all the stores we care about. */
7250 FOR_EACH_BB (bb)
7252 /* First compute the registers set in this block. */
7253 memset (last_set_in, 0, sizeof (int) * max_gcse_regno);
7254 regvec = last_set_in;
7256 for (insn = bb->head;
7257 insn != NEXT_INSN (bb->end);
7258 insn = NEXT_INSN (insn))
7260 if (! INSN_P (insn))
7261 continue;
7263 if (GET_CODE (insn) == CALL_INSN)
7265 bool clobbers_all = false;
7266 #ifdef NON_SAVING_SETJMP
7267 if (NON_SAVING_SETJMP
7268 && find_reg_note (insn, REG_SETJMP, NULL_RTX))
7269 clobbers_all = true;
7270 #endif
7272 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7273 if (clobbers_all
7274 || TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
7275 last_set_in[regno] = INSN_UID (insn);
7278 pat = PATTERN (insn);
7279 compute_store_table_current_insn = insn;
7280 note_stores (pat, reg_set_info, NULL);
7283 /* Record the set registers. */
7284 for (regno = 0; regno < max_gcse_regno; regno++)
7285 if (last_set_in[regno])
7286 SET_BIT (reg_set_in_block[bb->index], regno);
7288 /* Now find the stores. */
7289 memset (already_set, 0, sizeof (int) * max_gcse_regno);
7290 regvec = already_set;
7291 for (insn = bb->head;
7292 insn != NEXT_INSN (bb->end);
7293 insn = NEXT_INSN (insn))
7295 if (! INSN_P (insn))
7296 continue;
7298 if (GET_CODE (insn) == CALL_INSN)
7300 bool clobbers_all = false;
7301 #ifdef NON_SAVING_SETJMP
7302 if (NON_SAVING_SETJMP
7303 && find_reg_note (insn, REG_SETJMP, NULL_RTX))
7304 clobbers_all = true;
7305 #endif
7307 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7308 if (clobbers_all
7309 || TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
7310 already_set[regno] = 1;
7313 pat = PATTERN (insn);
7314 note_stores (pat, reg_set_info, NULL);
7316 /* Now that we've marked regs, look for stores. */
7317 find_moveable_store (insn, already_set, last_set_in);
7319 /* Unmark regs that are no longer set. */
7320 for (regno = 0; regno < max_gcse_regno; regno++)
7321 if (last_set_in[regno] == INSN_UID (insn))
7322 last_set_in[regno] = 0;
7325 /* Clear temporary marks. */
7326 for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
7328 LAST_AVAIL_CHECK_FAILURE(ptr) = NULL_RTX;
7329 if (ANTIC_STORE_LIST (ptr)
7330 && (tmp = XEXP (ANTIC_STORE_LIST (ptr), 0)) == NULL_RTX)
7331 ANTIC_STORE_LIST (ptr) = XEXP (ANTIC_STORE_LIST (ptr), 1);
7335 /* Remove the stores that are not available anywhere, as there will
7336 be no opportunity to optimize them. */
7337 for (ptr = pre_ldst_mems, prev_next_ptr_ptr = &pre_ldst_mems;
7338 ptr != NULL;
7339 ptr = *prev_next_ptr_ptr)
7341 if (!AVAIL_STORE_LIST (ptr))
7343 *prev_next_ptr_ptr = ptr->next;
7344 free_ldst_entry (ptr);
7346 else
7347 prev_next_ptr_ptr = &ptr->next;
7350 ret = enumerate_ldsts ();
7352 if (gcse_file)
7354 fprintf (gcse_file, "ST_avail and ST_antic (shown under loads..)\n");
7355 print_ldst_list (gcse_file);
7358 free (last_set_in);
7359 free (already_set);
7360 return ret;
7363 /* Check to see if the load X is aliased with STORE_PATTERN. */
7365 static bool
7366 load_kills_store (x, store_pattern)
7367 rtx x, store_pattern;
7369 if (true_dependence (x, GET_MODE (x), store_pattern, rtx_addr_varies_p))
7370 return true;
7371 return false;
7374 /* Go through the entire insn X, looking for any loads which might alias
7375 STORE_PATTERN. Return true if found. */
7377 static bool
7378 find_loads (x, store_pattern)
7379 rtx x, store_pattern;
7381 const char * fmt;
7382 int i, j;
7383 int ret = false;
7385 if (!x)
7386 return false;
7388 if (GET_CODE (x) == SET)
7389 x = SET_SRC (x);
7391 if (GET_CODE (x) == MEM)
7393 if (load_kills_store (x, store_pattern))
7394 return true;
7397 /* Recursively process the insn. */
7398 fmt = GET_RTX_FORMAT (GET_CODE (x));
7400 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0 && !ret; i--)
7402 if (fmt[i] == 'e')
7403 ret |= find_loads (XEXP (x, i), store_pattern);
7404 else if (fmt[i] == 'E')
7405 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
7406 ret |= find_loads (XVECEXP (x, i, j), store_pattern);
7408 return ret;
7411 /* Check if INSN kills the store pattern X (is aliased with it).
7412 Return true if it does. */
7414 static bool
7415 store_killed_in_insn (x, x_regs, insn)
7416 rtx x, x_regs, insn;
7418 rtx reg, base;
7420 if (GET_RTX_CLASS (GET_CODE (insn)) != 'i')
7421 return false;
7423 if (GET_CODE (insn) == CALL_INSN)
7425 /* A normal or pure call might read from the pattern,
7426 but a const call will not. */
7427 if (! CONST_OR_PURE_CALL_P (insn) || pure_call_p (insn))
7428 return true;
7430 /* But even a const call reads its parameters. Check whether the
7431 base of any of the registers used in the mem is the stack pointer. */
7432 for (reg = x_regs; reg; reg = XEXP (reg, 1))
7434 base = find_base_term (reg);
7435 if (!base
7436 || (GET_CODE (base) == ADDRESS
7437 && GET_MODE (base) == Pmode
7438 && XEXP (base, 0) == stack_pointer_rtx))
7439 return true;
7442 return false;
7445 if (GET_CODE (PATTERN (insn)) == SET)
7447 rtx pat = PATTERN (insn);
7448 /* Check for memory stores to aliased objects. */
7449 if (GET_CODE (SET_DEST (pat)) == MEM && !expr_equiv_p (SET_DEST (pat), x))
7450 /* Pretend it's a load and check for aliasing. */
7451 if (find_loads (SET_DEST (pat), x))
7452 return true;
7453 return find_loads (SET_SRC (pat), x);
7455 else
7456 return find_loads (PATTERN (insn), x);
7459 /* Returns true if the expression X is loaded or clobbered on or after INSN
7460 within basic block BB. REGS_SET_AFTER is bitmap of registers set in
7461 or after the insn. X_REGS is the list of registers mentioned in X. If the store
7462 is killed, return the last insn that kills it in FAIL_INSN. */
7464 static bool
7465 store_killed_after (x, x_regs, insn, bb, regs_set_after, fail_insn)
7466 rtx x, x_regs, insn;
7467 basic_block bb;
7468 int *regs_set_after;
7469 rtx *fail_insn;
7471 rtx last = bb->end, act;
7473 if (!store_ops_ok (x_regs, regs_set_after))
7475 /* We do not know where it will happen. */
7476 if (fail_insn)
7477 *fail_insn = NULL_RTX;
7478 return true;
7481 /* Scan from the end, so that fail_insn is determined correctly. */
7482 for (act = last; act != PREV_INSN (insn); act = PREV_INSN (act))
7483 if (store_killed_in_insn (x, x_regs, act))
7485 if (fail_insn)
7486 *fail_insn = act;
7487 return true;
7490 return false;
7493 /* Returns true if the expression X is loaded or clobbered on or before INSN
7494 within basic block BB. X_REGS is list of registers mentioned in X.
7495 REGS_SET_BEFORE is bitmap of registers set before or in this insn. */
7496 static bool
7497 store_killed_before (x, x_regs, insn, bb, regs_set_before)
7498 rtx x, x_regs, insn;
7499 basic_block bb;
7500 int *regs_set_before;
7502 rtx first = bb->head;
7504 if (!store_ops_ok (x_regs, regs_set_before))
7505 return true;
7507 for ( ; insn != PREV_INSN (first); insn = PREV_INSN (insn))
7508 if (store_killed_in_insn (x, x_regs, insn))
7509 return true;
7511 return false;
7514 /* Fill in the available, anticipatable, transparent and kill vectors,
7515 based on the lists of available and anticipatable stores. */
7516 static void
7517 build_store_vectors ()
7519 basic_block bb;
7520 int *regs_set_in_block;
7521 rtx insn, st;
7522 struct ls_expr * ptr;
7523 unsigned regno;
7525 /* Build the gen_vector. This is any store in the table which is not killed
7526 by aliasing later in its block. */
7527 ae_gen = (sbitmap *) sbitmap_vector_alloc (last_basic_block, num_stores);
7528 sbitmap_vector_zero (ae_gen, last_basic_block);
7530 st_antloc = (sbitmap *) sbitmap_vector_alloc (last_basic_block, num_stores);
7531 sbitmap_vector_zero (st_antloc, last_basic_block);
7533 for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
7535 for (st = AVAIL_STORE_LIST (ptr); st != NULL; st = XEXP (st, 1))
7537 insn = XEXP (st, 0);
7538 bb = BLOCK_FOR_INSN (insn);
7540 /* If we've already seen an available expression in this block,
7541 we can delete this one (It occurs earlier in the block). We'll
7542 copy the SRC expression to an unused register in case there
7543 are any side effects. */
7544 if (TEST_BIT (ae_gen[bb->index], ptr->index))
7546 rtx r = gen_reg_rtx (GET_MODE (ptr->pattern));
7547 if (gcse_file)
7548 fprintf (gcse_file, "Removing redundant store:\n");
7549 replace_store_insn (r, XEXP (st, 0), bb);
7550 continue;
7552 SET_BIT (ae_gen[bb->index], ptr->index);
7555 for (st = ANTIC_STORE_LIST (ptr); st != NULL; st = XEXP (st, 1))
7557 insn = XEXP (st, 0);
7558 bb = BLOCK_FOR_INSN (insn);
7559 SET_BIT (st_antloc[bb->index], ptr->index);
7563 ae_kill = (sbitmap *) sbitmap_vector_alloc (last_basic_block, num_stores);
7564 sbitmap_vector_zero (ae_kill, last_basic_block);
7566 transp = (sbitmap *) sbitmap_vector_alloc (last_basic_block, num_stores);
7567 sbitmap_vector_zero (transp, last_basic_block);
7568 regs_set_in_block = xmalloc (sizeof (int) * max_gcse_regno);
7570 FOR_EACH_BB (bb)
7572 for (regno = 0; regno < max_gcse_regno; regno++)
7573 regs_set_in_block[regno] = TEST_BIT (reg_set_in_block[bb->index], regno);
7575 for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
7577 if (store_killed_after (ptr->pattern, ptr->pattern_regs, bb->head,
7578 bb, regs_set_in_block, NULL))
7580 /* It should not be necessary to consider the expression
7581 killed if it is both anticipatable and available. */
7582 if (!TEST_BIT (st_antloc[bb->index], ptr->index)
7583 || !TEST_BIT (ae_gen[bb->index], ptr->index))
7584 SET_BIT (ae_kill[bb->index], ptr->index);
7586 else
7587 SET_BIT (transp[bb->index], ptr->index);
7591 free (regs_set_in_block);
7593 if (gcse_file)
7595 dump_sbitmap_vector (gcse_file, "st_antloc", "", st_antloc, last_basic_block);
7596 dump_sbitmap_vector (gcse_file, "st_kill", "", ae_kill, last_basic_block);
7597 dump_sbitmap_vector (gcse_file, "Transpt", "", transp, last_basic_block);
7598 dump_sbitmap_vector (gcse_file, "st_avloc", "", ae_gen, last_basic_block);
7602 /* Insert an instruction at the beginning of a basic block, and update
7603 the BLOCK_HEAD if needed. */
7605 static void
7606 insert_insn_start_bb (insn, bb)
7607 rtx insn;
7608 basic_block bb;
7610 /* Insert at start of successor block. */
7611 rtx prev = PREV_INSN (bb->head);
7612 rtx before = bb->head;
7613 while (before != 0)
7615 if (GET_CODE (before) != CODE_LABEL
7616 && (GET_CODE (before) != NOTE
7617 || NOTE_LINE_NUMBER (before) != NOTE_INSN_BASIC_BLOCK))
7618 break;
7619 prev = before;
7620 if (prev == bb->end)
7621 break;
7622 before = NEXT_INSN (before);
7625 insn = emit_insn_after (insn, prev);
7627 if (gcse_file)
7629 fprintf (gcse_file, "STORE_MOTION insert store at start of BB %d:\n",
7630 bb->index);
7631 print_inline_rtx (gcse_file, insn, 6);
7632 fprintf (gcse_file, "\n");
7636 /* This routine will insert a store on an edge. EXPR is the ldst entry for
7637 the memory reference, and E is the edge to insert it on. Returns nonzero
7638 if an edge insertion was performed. */
7640 static int
7641 insert_store (expr, e)
7642 struct ls_expr * expr;
7643 edge e;
7645 rtx reg, insn;
7646 basic_block bb;
7647 edge tmp;
7649 /* We did all the deletes before this insert, so if we didn't delete a
7650 store, then we haven't set the reaching reg yet either. */
7651 if (expr->reaching_reg == NULL_RTX)
7652 return 0;
7654 reg = expr->reaching_reg;
7655 insn = gen_move_insn (copy_rtx (expr->pattern), reg);
7657 /* If we are inserting this expression on ALL predecessor edges of a BB,
7658 insert it at the start of the BB, and reset the insert bits on the other
7659 edges so we don't try to insert it on the other edges. */
7660 bb = e->dest;
7661 for (tmp = e->dest->pred; tmp ; tmp = tmp->pred_next)
7663 int index = EDGE_INDEX (edge_list, tmp->src, tmp->dest);
7664 if (index == EDGE_INDEX_NO_EDGE)
7665 abort ();
7666 if (! TEST_BIT (pre_insert_map[index], expr->index))
7667 break;
7670 /* If tmp is NULL, we found an insertion on every edge, blank the
7671 insertion vector for these edges, and insert at the start of the BB. */
7672 if (!tmp && bb != EXIT_BLOCK_PTR)
7674 for (tmp = e->dest->pred; tmp ; tmp = tmp->pred_next)
7676 int index = EDGE_INDEX (edge_list, tmp->src, tmp->dest);
7677 RESET_BIT (pre_insert_map[index], expr->index);
7679 insert_insn_start_bb (insn, bb);
7680 return 0;
7683 /* We can't insert on this edge, so we'll insert at the head of the
7684 successor block. See Morgan, sec 10.5. */
7685 if ((e->flags & EDGE_ABNORMAL) == EDGE_ABNORMAL)
7687 insert_insn_start_bb (insn, bb);
7688 return 0;
7691 insert_insn_on_edge (insn, e);
7693 if (gcse_file)
7695 fprintf (gcse_file, "STORE_MOTION insert insn on edge (%d, %d):\n",
7696 e->src->index, e->dest->index);
7697 print_inline_rtx (gcse_file, insn, 6);
7698 fprintf (gcse_file, "\n");
7701 return 1;
7704 /* This routine will replace a store with a SET to a specified register. */
7706 static void
7707 replace_store_insn (reg, del, bb)
7708 rtx reg, del;
7709 basic_block bb;
7711 rtx insn;
7713 insn = gen_move_insn (reg, SET_SRC (single_set (del)));
7714 insn = emit_insn_after (insn, del);
7716 if (gcse_file)
7718 fprintf (gcse_file,
7719 "STORE_MOTION delete insn in BB %d:\n ", bb->index);
7720 print_inline_rtx (gcse_file, del, 6);
7721 fprintf (gcse_file, "\nSTORE MOTION replaced with insn:\n ");
7722 print_inline_rtx (gcse_file, insn, 6);
7723 fprintf (gcse_file, "\n");
7726 delete_insn (del);
7730 /* Delete a store, but copy the value that would have been stored into
7731 the reaching_reg for later storing. */
7733 static void
7734 delete_store (expr, bb)
7735 struct ls_expr * expr;
7736 basic_block bb;
7738 rtx reg, i, del;
7740 if (expr->reaching_reg == NULL_RTX)
7741 expr->reaching_reg = gen_reg_rtx (GET_MODE (expr->pattern));
7743 reg = expr->reaching_reg;
7745 for (i = AVAIL_STORE_LIST (expr); i; i = XEXP (i, 1))
7747 del = XEXP (i, 0);
7748 if (BLOCK_FOR_INSN (del) == bb)
7750 /* We know there is only one since we deleted redundant
7751 ones during the available computation. */
7752 replace_store_insn (reg, del, bb);
7753 break;
7758 /* Free memory used by store motion. */
7760 static void
7761 free_store_memory ()
7763 free_ldst_mems ();
7765 if (ae_gen)
7766 sbitmap_vector_free (ae_gen);
7767 if (ae_kill)
7768 sbitmap_vector_free (ae_kill);
7769 if (transp)
7770 sbitmap_vector_free (transp);
7771 if (st_antloc)
7772 sbitmap_vector_free (st_antloc);
7773 if (pre_insert_map)
7774 sbitmap_vector_free (pre_insert_map);
7775 if (pre_delete_map)
7776 sbitmap_vector_free (pre_delete_map);
7777 if (reg_set_in_block)
7778 sbitmap_vector_free (reg_set_in_block);
7780 ae_gen = ae_kill = transp = st_antloc = NULL;
7781 pre_insert_map = pre_delete_map = reg_set_in_block = NULL;
7784 /* Perform store motion. Much like gcse, except we move expressions the
7785 other way by looking at the flowgraph in reverse. */

static void
store_motion ()
{
  basic_block bb;
  int x;
  struct ls_expr * ptr;
  int update_flow = 0;

  if (gcse_file)
    {
      fprintf (gcse_file, "before store motion\n");
      print_rtl (gcse_file, get_insns ());
    }

  init_alias_analysis ();

  /* Find all the available and anticipatable stores.  */
  num_stores = compute_store_table ();
  if (num_stores == 0)
    {
      sbitmap_vector_free (reg_set_in_block);
      end_alias_analysis ();
      return;
    }

  /* Now compute kill & transp vectors.  */
  build_store_vectors ();
  add_noreturn_fake_exit_edges ();

  edge_list = pre_edge_rev_lcm (gcse_file, num_stores, transp, ae_gen,
                                st_antloc, ae_kill, &pre_insert_map,
                                &pre_delete_map);

  /* Now we want to insert the new stores which are going to be needed.  */
  for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
    {
      FOR_EACH_BB (bb)
        if (TEST_BIT (pre_delete_map[bb->index], ptr->index))
          delete_store (ptr, bb);

      for (x = 0; x < NUM_EDGES (edge_list); x++)
        if (TEST_BIT (pre_insert_map[x], ptr->index))
          update_flow |= insert_store (ptr, INDEX_EDGE (edge_list, x));
    }

  if (update_flow)
    commit_edge_insertions ();

  free_store_memory ();
  free_edge_list (edge_list);
  remove_fake_edges ();
  end_alias_analysis ();
}

/* Entry point for jump bypassing optimization pass.  */
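/* Jump bypassing redirects an incoming edge around a conditional jump whose
   direction is determined by a value that constant/copy propagation proves
   to be known on that edge.  It is driven here by a single one_cprop_pass
   invocation rather than by the full GCSE machinery.  */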

int
bypass_jumps (file)
     FILE *file;
{
  int changed;

  /* We do not construct an accurate cfg in functions which call
     setjmp, so just punt to be safe.  */
  if (current_function_calls_setjmp)
    return 0;

  /* For calling dump_foo fns from gdb.  */
  debug_stderr = stderr;
  gcse_file = file;

  /* Identify the basic block information for this function, including
     successors and predecessors.  */
  max_gcse_regno = max_reg_num ();

  if (file)
    dump_flow_info (file);

  /* Return if there's nothing to do.  */
  if (n_basic_blocks <= 1)
    return 0;

  /* Trying to perform global optimizations on flow graphs which have
     a high connectivity will take a long time and is unlikely to be
     particularly useful.

     In normal circumstances a cfg should have about twice as many edges
     as blocks.  But we do not want to punish small functions which have
     a couple of switch statements.  So we require both a relatively large
     number of basic blocks and a high ratio of edges to blocks.  */
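  /* For instance, a function with 2000 basic blocks and 50000 edges
     (25 edges per block) is skipped, while one with 2000 blocks and 6000
     edges, or one with only 900 blocks, is still processed.  */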
  if (n_basic_blocks > 1000 && n_edges / n_basic_blocks >= 20)
    {
      if (warn_disabled_optimization)
        warning ("BYPASS disabled: %d > 1000 basic blocks and %d >= 20 edges/basic block",
                 n_basic_blocks, n_edges / n_basic_blocks);
      return 0;
    }

  /* If allocating memory for the cprop bitmap would take up too much
     storage it's better just to disable the optimization.  */
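  /* Illustrative sizing, assuming 64-bit sbitmap elements: with 10000
     pseudo registers each per-block bitmap needs ceil (10000 / 64) = 157
     elements (about 1.2 kB), so 5000 basic blocks account for roughly 6 MB
     of bitmap storage.  */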
  if ((n_basic_blocks
       * SBITMAP_SET_SIZE (max_gcse_regno)
       * sizeof (SBITMAP_ELT_TYPE)) > MAX_GCSE_MEMORY)
    {
      if (warn_disabled_optimization)
        warning ("GCSE disabled: %d basic blocks and %d registers",
                 n_basic_blocks, max_gcse_regno);

      return 0;
    }

  gcc_obstack_init (&gcse_obstack);
  bytes_used = 0;

  /* We need alias.  */
  init_alias_analysis ();

  /* Record where pseudo-registers are set.  This data is kept accurate
     during each pass.  ??? We could also record hard-reg information here
     [since it's unchanging], however it is currently done during hash table
     computation.

     It may be tempting to compute MEM set information here too, but MEM sets
     will be subject to code motion one day and thus we need to compute
     information about memory sets when we build the hash tables.  */

  alloc_reg_set_mem (max_gcse_regno);
  compute_sets (get_insns ());

  max_gcse_regno = max_reg_num ();
  alloc_gcse_mem (get_insns ());
  changed = one_cprop_pass (1, 1, 1);
  free_gcse_mem ();

  if (file)
    {
      fprintf (file, "BYPASS of %s: %d basic blocks, ",
               current_function_name, n_basic_blocks);
      fprintf (file, "%d bytes\n\n", bytes_used);
    }

  obstack_free (&gcse_obstack, NULL);
  free_reg_set_mem ();

  /* We are finished with alias.  */
  end_alias_analysis ();
  allocate_reg_info (max_reg_num (), FALSE, FALSE);

  return changed;
}

#include "gt-gcse.h"