1 /* Global common subexpression elimination/Partial redundancy elimination
2 and global constant/copy propagation for GNU compiler.
3 Copyright (C) 1997, 1998, 1999, 2000, 2001, 2002, 2003
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 2, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the Free
20 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
21 02111-1307, USA. */
23 /* TODO
24 - reordering of memory allocation and freeing to be more space efficient
25 - do rough calc of how many regs are needed in each block, and a rough
26 calc of how many regs are available in each class and use that to
27 throttle back the code in cases where RTX_COST is minimal.
28 - a store to the same address as a load does not kill the load if the
29 source of the store is also the destination of the load. Handling this
30 allows more load motion, particularly out of loops.
31 - ability to realloc sbitmap vectors would allow one initial computation
32 of reg_set_in_block with only subsequent additions, rather than
33 recomputing it for each pass  */
37 /* References searched while implementing this.
39 Compilers Principles, Techniques and Tools
40 Aho, Sethi, Ullman
41 Addison-Wesley, 1988
43 Global Optimization by Suppression of Partial Redundancies
44 E. Morel, C. Renvoise
45 Communications of the ACM, Vol. 22, Num. 2, Feb. 1979
47 A Portable Machine-Independent Global Optimizer - Design and Measurements
48 Frederick Chow
49 Stanford Ph.D. thesis, Dec. 1983
51 A Fast Algorithm for Code Movement Optimization
52 D.M. Dhamdhere
53 SIGPLAN Notices, Vol. 23, Num. 10, Oct. 1988
55 A Solution to a Problem with Morel and Renvoise's
56 Global Optimization by Suppression of Partial Redundancies
57 K-H Drechsler, M.P. Stadel
58 ACM TOPLAS, Vol. 10, Num. 4, Oct. 1988
60 Practical Adaptation of the Global Optimization
61 Algorithm of Morel and Renvoise
62 D.M. Dhamdhere
63 ACM TOPLAS, Vol. 13, Num. 2. Apr. 1991
65 Efficiently Computing Static Single Assignment Form and the Control
66 Dependence Graph
67 R. Cytron, J. Ferrante, B.K. Rosen, M.N. Wegman, and F.K. Zadeck
68 ACM TOPLAS, Vol. 13, Num. 4, Oct. 1991
70 Lazy Code Motion
71 J. Knoop, O. Ruthing, B. Steffen
72 ACM SIGPLAN Notices Vol. 27, Num. 7, Jul. 1992, '92 Conference on PLDI
74 What's In a Region? Or Computing Control Dependence Regions in Near-Linear
75 Time for Reducible Flow Control
76 Thomas Ball
77 ACM Letters on Programming Languages and Systems,
78 Vol. 2, Num. 1-4, Mar-Dec 1993
80 An Efficient Representation for Sparse Sets
81 Preston Briggs, Linda Torczon
82 ACM Letters on Programming Languages and Systems,
83 Vol. 2, Num. 1-4, Mar-Dec 1993
85 A Variation of Knoop, Ruthing, and Steffen's Lazy Code Motion
86 K-H Drechsler, M.P. Stadel
87 ACM SIGPLAN Notices, Vol. 28, Num. 5, May 1993
89 Partial Dead Code Elimination
90 J. Knoop, O. Ruthing, B. Steffen
91 ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994
93 Effective Partial Redundancy Elimination
94 P. Briggs, K.D. Cooper
95 ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994
97 The Program Structure Tree: Computing Control Regions in Linear Time
98 R. Johnson, D. Pearson, K. Pingali
99 ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994
101 Optimal Code Motion: Theory and Practice
102 J. Knoop, O. Ruthing, B. Steffen
103 ACM TOPLAS, Vol. 16, Num. 4, Jul. 1994
105 The power of assignment motion
106 J. Knoop, O. Ruthing, B. Steffen
107 ACM SIGPLAN Notices Vol. 30, Num. 6, Jun. 1995, '95 Conference on PLDI
109 Global code motion / global value numbering
110 C. Click
111 ACM SIGPLAN Notices Vol. 30, Num. 6, Jun. 1995, '95 Conference on PLDI
113 Value Driven Redundancy Elimination
114 L.T. Simpson
115 Rice University Ph.D. thesis, Apr. 1996
117 Value Numbering
118 L.T. Simpson
119 Massively Scalar Compiler Project, Rice University, Sep. 1996
121 High Performance Compilers for Parallel Computing
122 Michael Wolfe
123 Addison-Wesley, 1996
125 Advanced Compiler Design and Implementation
126 Steven Muchnick
127 Morgan Kaufmann, 1997
129 Building an Optimizing Compiler
130 Robert Morgan
131 Digital Press, 1998
133 People wishing to speed up the code here should read:
134 Elimination Algorithms for Data Flow Analysis
135 B.G. Ryder, M.C. Paull
136 ACM Computing Surveys, Vol. 18, Num. 3, Sep. 1986
138 How to Analyze Large Programs Efficiently and Informatively
139 D.M. Dhamdhere, B.K. Rosen, F.K. Zadeck
140 ACM SIGPLAN Notices Vol. 27, Num. 7, Jul. 1992, '92 Conference on PLDI
142 People wishing to do something different can find various possibilities
143 in the above papers and elsewhere.  */
146 #include "config.h"
147 #include "system.h"
148 #include "coretypes.h"
149 #include "tm.h"
150 #include "toplev.h"
152 #include "rtl.h"
153 #include "tm_p.h"
154 #include "regs.h"
155 #include "hard-reg-set.h"
156 #include "flags.h"
157 #include "real.h"
158 #include "insn-config.h"
159 #include "recog.h"
160 #include "basic-block.h"
161 #include "output.h"
162 #include "function.h"
163 #include "expr.h"
164 #include "except.h"
165 #include "ggc.h"
166 #include "params.h"
167 #include "cselib.h"
169 #include "obstack.h"
171 /* Propagate flow information through back edges and thus enable PRE's
172 moving loop invariant calculations out of loops.
174 Originally this tended to create worse overall code, but several
175 improvements during the development of PRE seem to have made following
176 back edges generally a win.
178 Note much of the loop invariant code motion done here would normally
179 be done by loop.c, which has more heuristics for when to move invariants
180 out of loops. At some point we might need to move some of those
181 heuristics into gcse.c. */
183 /* We support GCSE via Partial Redundancy Elimination. PRE optimizations
184 are a superset of those done by GCSE.
186 We perform the following steps:
188 1) Compute basic block information.
190 2) Compute table of places where registers are set.
192 3) Perform copy/constant propagation.
194 4) Perform global cse.
196 5) Perform another pass of copy/constant propagation.
198 Two passes of copy/constant propagation are done because the first one
199 enables more GCSE and the second one helps to clean up the copies that
200 GCSE creates. This is needed more for PRE than for Classic because Classic
201 GCSE will try to use an existing register containing the common
202 subexpression rather than create a new one. This is harder to do for PRE
203 because of the code motion (which Classic GCSE doesn't do).
205 Expressions we are interested in GCSE-ing are of the form
206 (set (pseudo-reg) (expression)).
207 Function want_to_gcse_p says what these are.
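For example [an illustrative insn, not taken from a real dump],

(set (reg:SI 100) (plus:SI (reg:SI 101) (reg:SI 102)))

is a candidate: the PLUS is the expression recorded in the hash table.
A set whose source is a bare REG, SUBREG or constant is rejected, since
there is nothing to be gained by GCSE-ing it.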
209 PRE handles moving invariant expressions out of loops (by treating them as
210 partially redundant).
212 Eventually it would be nice to replace cse.c/gcse.c with SSA (static single
213 assignment) based GVN (global value numbering). L. T. Simpson's paper
214 (Rice University) on value numbering is a useful reference for this.
216 **********************
218 We used to support multiple passes but there are diminishing returns in
219 doing so. The first pass usually makes 90% of the changes that are doable.
220 A second pass can make a few more changes made possible by the first pass.
221 Experiments show any further passes don't make enough changes to justify
222 the expense.
224 A study of spec92 using an unlimited number of passes:
225 [1 pass] = 1208 substitutions, [2] = 577, [3] = 202, [4] = 192, [5] = 83,
226 [6] = 34, [7] = 17, [8] = 9, [9] = 4, [10] = 4, [11] = 2,
227 [12] = 2, [13] = 1, [15] = 1, [16] = 2, [41] = 1
229 It was found doing copy propagation between each pass enables further
230 substitutions.
232 PRE is quite expensive in complicated functions because the DFA can take
233 a while to converge. Hence we only perform one pass. The parameter max-gcse-passes can
234 be modified if one wants to experiment.
236 **********************
238 The steps for PRE are:
240 1) Build the hash table of expressions we wish to GCSE (expr_hash_table).
242 2) Perform the data flow analysis for PRE.
244 3) Delete the redundant instructions
246 4) Insert the required copies [if any] that make the partially
247 redundant instructions fully redundant.
249 5) For other reaching expressions, insert an instruction to copy the value
250 to a newly created pseudo that will reach the redundant instruction.
252 The deletion is done first so that when we do insertions we
253 know which pseudo reg to use.
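To illustrate [a hypothetical CFG], suppose (plus a b) is computed in one
arm of a diamond and again after the join point. The second computation
is partially redundant: it is deleted, a computation of (plus a b) is
inserted in the other arm, and both results are copied to a new pseudo
which the insn after the join uses instead.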
255 Various papers have argued that PRE DFA is expensive (O(n^2)) and others
256 argue it is not. The number of iterations for the algorithm to converge
257 is typically 2-4 so I don't view it as that expensive (relatively speaking).
259 PRE GCSE depends heavily on the second CSE pass to clean up the copies
260 we create. To make an expression reach the place where it's redundant,
261 the result of the expression is copied to a new register, and the redundant
262 expression is deleted by replacing it with this new register. Classic GCSE
263 doesn't have this problem as much as it computes the reaching defs of
264 each register in each block and thus can try to use an existing register.
266 **********************
268 A fair bit of simplicity is gained by creating small functions for simple
269 tasks, even when the function is only called in one place. This may
270 measurably slow things down [or may not] by creating more function call
271 overhead than is necessary. The source is laid out so that it's trivial
272 to make the affected functions inline so that one can measure what speed
273 up, if any, can be achieved, and maybe later, when things settle, things
274 can be rearranged.
276 Help stamp out big monolithic functions! */
278 /* GCSE global vars. */
280 /* -dG dump file. */
281 static FILE *gcse_file;
283 /* Note whether or not we should run jump optimization after gcse. We
284 want to do this for two cases.
286 * If we changed any jumps via cprop.
288 * If we added any labels via edge splitting. */
290 static int run_jump_opt_after_gcse;
292 /* Bitmaps are normally not included in debugging dumps.
293 However it's useful to be able to print them from GDB.
294 We could create special functions for this, but it's simpler to
295 just allow passing stderr to the dump_foo fns. Since stderr can
296 be a macro, we store a copy here. */
297 static FILE *debug_stderr;
299 /* An obstack for our working variables. */
300 static struct obstack gcse_obstack;
302 struct reg_use {rtx reg_rtx; };
304 /* Hash table of expressions. */
306 struct expr
307 {
308 /* The expression (SET_SRC for expressions, PATTERN for assignments). */
309 rtx expr;
310 /* Index in the available expression bitmaps. */
311 int bitmap_index;
312 /* Next entry with the same hash. */
313 struct expr *next_same_hash;
314 /* List of anticipatable occurrences in basic blocks in the function.
315 An "anticipatable occurrence" is one that is the first occurrence in the
316 basic block, the operands are not modified in the basic block prior
317 to the occurrence and the output is not used between the start of
318 the block and the occurrence. */
319 struct occr *antic_occr;
320 /* List of available occurrences in basic blocks in the function.
321 An "available occurrence" is one that is the last occurrence in the
322 basic block and the operands are not modified by following statements in
323 the basic block [including this insn]. */
324 struct occr *avail_occr;
325 /* Non-null if the computation is PRE redundant.
326 The value is the newly created pseudo-reg to record a copy of the
327 expression in all the places that reach the redundant copy. */
328 rtx reaching_reg;
329 };
331 /* Occurrence of an expression.
332 There is one per basic block. If a pattern appears more than once the
333 last appearance is used [or first for anticipatable expressions]. */
335 struct occr
336 {
337 /* Next occurrence of this expression. */
338 struct occr *next;
339 /* The insn that computes the expression. */
340 rtx insn;
341 /* Nonzero if this [anticipatable] occurrence has been deleted. */
342 char deleted_p;
343 /* Nonzero if this [available] occurrence has been copied to
344 reaching_reg. */
345 /* ??? This is mutually exclusive with deleted_p, so they could share
346 the same byte. */
347 char copied_p;
348 };
350 /* Expression and copy propagation hash tables.
351 Each hash table is an array of buckets.
352 ??? It is known that if it were an array of entries, structure elements
353 `next_same_hash' and `bitmap_index' wouldn't be necessary. However, it is
354 not clear whether in the final analysis a sufficient amount of memory would
355 be saved as the size of the available expression bitmaps would be larger
356 [one could build a mapping table without holes afterwards though].
357 Someday I'll perform the computation and figure it out. */
359 struct hash_table
360 {
361 /* The table itself.
362 This is an array of `expr_hash_table_size' elements. */
363 struct expr **table;
365 /* Size of the hash table, in elements. */
366 unsigned int size;
368 /* Number of hash table elements. */
369 unsigned int n_elems;
371 /* Whether the table is the expression or the copy propagation one. */
372 int set_p;
373 };
375 /* Expression hash table. */
376 static struct hash_table expr_hash_table;
378 /* Copy propagation hash table. */
379 static struct hash_table set_hash_table;
381 /* Mapping of uids to cuids.
382 Only real insns get cuids. */
383 static int *uid_cuid;
385 /* Highest UID in UID_CUID. */
386 static int max_uid;
388 /* Get the cuid of an insn. */
389 #ifdef ENABLE_CHECKING
390 #define INSN_CUID(INSN) (INSN_UID (INSN) > max_uid ? (abort (), 0) : uid_cuid[INSN_UID (INSN)])
391 #else
392 #define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])
393 #endif
395 /* Number of cuids. */
396 static int max_cuid;
398 /* Mapping of cuids to insns. */
399 static rtx *cuid_insn;
401 /* Get insn from cuid. */
402 #define CUID_INSN(CUID) (cuid_insn[CUID])
404 /* Maximum register number in function prior to doing gcse + 1.
405 Registers created during this pass have regno >= max_gcse_regno.
406 This is named with "gcse" to not collide with global of same name. */
407 static unsigned int max_gcse_regno;
409 /* Table of registers that are modified.
411 For each register, each element is a list of places where the pseudo-reg
412 is set.
414 For simplicity, GCSE is done on sets of pseudo-regs only. PRE GCSE only
415 requires knowledge of which blocks kill which regs [and thus could use
416 a bitmap instead of the lists `reg_set_table' uses].
418 `reg_set_table' could be turned into an array of bitmaps (num-bbs x
419 num-regs) [however perhaps it may be useful to keep the data as is]. One
420 advantage of recording things this way is that `reg_set_table' is fairly
421 sparse with respect to pseudo regs but for hard regs could be fairly dense
422 [relatively speaking]. And recording sets of pseudo-regs in lists speeds
423 up functions like compute_transp since in the case of pseudo-regs we only
424 need to iterate over the number of times a pseudo-reg is set, not over the
425 number of basic blocks [clearly there is a bit of a slow down in the cases
426 where a pseudo is set more than once in a block, however it is believed
427 that the net effect is to speed things up]. This isn't done for hard-regs
428 because recording call-clobbered hard-regs in `reg_set_table' at each
429 function call can consume a fair bit of memory, and iterating over
430 hard-regs stored this way in compute_transp will be more expensive. */
432 typedef struct reg_set
433 {
434 /* The next setting of this register. */
435 struct reg_set *next;
436 /* The insn where it was set. */
437 rtx insn;
438 } reg_set;
440 static reg_set **reg_set_table;
442 /* Size of `reg_set_table'.
443 The table starts out at max_gcse_regno + slop, and is enlarged as
444 necessary. */
445 static int reg_set_table_size;
447 /* Amount to grow `reg_set_table' by when it's full. */
448 #define REG_SET_TABLE_SLOP 100
450 /* This is a list of expressions which are MEMs and will be used by load
451 or store motion.
452 Load motion tracks MEMs which aren't killed by
453 anything except themselves, i.e. loads and stores to a single location.
454 We can then allow movement of these MEM refs with a little special
455 allowance. (all stores copy the same value to the reaching reg used
456 for the loads). This means all values used to store into memory must have
457 no side effects so we can re-issue the setter value.
458 Store Motion uses this structure as an expression table to track stores
459 which look interesting, and might be moveable towards the exit block. */
461 struct ls_expr
462 {
463 struct expr * expr; /* Gcse expression reference for LM. */
464 rtx pattern; /* Pattern of this mem. */
465 rtx pattern_regs; /* List of registers mentioned by the mem. */
466 rtx loads; /* INSN list of loads seen. */
467 rtx stores; /* INSN list of stores seen. */
468 struct ls_expr * next; /* Next in the list. */
469 int invalid; /* Invalid for some reason. */
470 int index; /* The bitmap index this expression maps to, if any. */
471 int hash_index; /* Index when in a hash table. */
472 rtx reaching_reg; /* Register to use when re-writing. */
473 };
475 /* Array of implicit set patterns indexed by basic block index. */
476 static rtx *implicit_sets;
478 /* Head of the list of load/store memory refs. */
479 static struct ls_expr * pre_ldst_mems = NULL;
481 /* Bitmap containing one bit for each register in the program.
482 Used when performing GCSE to track which registers have been set since
483 the start of the basic block. */
484 static regset reg_set_bitmap;
486 /* For each block, a bitmap of registers set in the block.
487 This is used by expr_killed_p and compute_transp.
488 It is computed during hash table computation and not by compute_sets
489 as it includes registers added since the last pass (or between cprop and
490 gcse) and it's currently not easy to realloc sbitmap vectors. */
491 static sbitmap *reg_set_in_block;
493 /* Array, indexed by basic block number for a list of insns which modify
494 memory within that block. */
495 static rtx * modify_mem_list;
496 bitmap modify_mem_list_set;
498 /* This array parallels modify_mem_list, but is kept canonicalized. */
499 static rtx * canon_modify_mem_list;
500 bitmap canon_modify_mem_list_set;
501 /* Various variables for statistics gathering. */
503 /* Memory used in a pass.
504 This isn't intended to be absolutely precise. Its intent is only
505 to keep an eye on memory usage. */
506 static int bytes_used;
508 /* GCSE substitutions made. */
509 static int gcse_subst_count;
510 /* Number of copy instructions created. */
511 static int gcse_create_count;
512 /* Number of constants propagated. */
513 static int const_prop_count;
514 /* Number of copies propagated. */
515 static int copy_prop_count;
517 /* These variables are used by classic GCSE.
518 Normally they'd be defined a bit later, but `rd_gen' needs to
519 be declared sooner. */
521 /* Each block has a bitmap of each type.
522 The length of each block's bitmap is:
524 max_cuid - for reaching definitions
525 n_exprs - for available expressions
527 Thus we view the bitmaps as two-dimensional arrays, i.e.
528 rd_kill[block_num][cuid_num]
529 ae_kill[block_num][expr_num] */
531 /* For reaching defs */
532 static sbitmap *rd_kill, *rd_gen, *reaching_defs, *rd_out;
534 /* For available exprs */
535 static sbitmap *ae_kill, *ae_gen, *ae_in, *ae_out;
537 /* Objects of this type are passed around by the null-pointer check
538 removal routines. */
539 struct null_pointer_info
540 {
541 /* The basic block being processed. */
542 basic_block current_block;
543 /* The first register to be handled in this pass. */
544 unsigned int min_reg;
545 /* One greater than the last register to be handled in this pass. */
546 unsigned int max_reg;
547 sbitmap *nonnull_local;
548 sbitmap *nonnull_killed;
549 };
551 static void compute_can_copy PARAMS ((void));
552 static char *gmalloc PARAMS ((unsigned int));
553 static char *grealloc PARAMS ((char *, unsigned int));
554 static char *gcse_alloc PARAMS ((unsigned long));
555 static void alloc_gcse_mem PARAMS ((rtx));
556 static void free_gcse_mem PARAMS ((void));
557 static void alloc_reg_set_mem PARAMS ((int));
558 static void free_reg_set_mem PARAMS ((void));
559 static int get_bitmap_width PARAMS ((int, int, int));
560 static void record_one_set PARAMS ((int, rtx));
561 static void record_set_info PARAMS ((rtx, rtx, void *));
562 static void compute_sets PARAMS ((rtx));
563 static void hash_scan_insn PARAMS ((rtx, struct hash_table *, int));
564 static void hash_scan_set PARAMS ((rtx, rtx, struct hash_table *));
565 static void hash_scan_clobber PARAMS ((rtx, rtx, struct hash_table *));
566 static void hash_scan_call PARAMS ((rtx, rtx, struct hash_table *));
567 static int want_to_gcse_p PARAMS ((rtx));
568 static bool gcse_constant_p PARAMS ((rtx));
569 static int oprs_unchanged_p PARAMS ((rtx, rtx, int));
570 static int oprs_anticipatable_p PARAMS ((rtx, rtx));
571 static int oprs_available_p PARAMS ((rtx, rtx));
572 static void insert_expr_in_table PARAMS ((rtx, enum machine_mode, rtx,
573 int, int, struct hash_table *));
574 static void insert_set_in_table PARAMS ((rtx, rtx, struct hash_table *));
575 static unsigned int hash_expr PARAMS ((rtx, enum machine_mode, int *, int));
576 static unsigned int hash_expr_1 PARAMS ((rtx, enum machine_mode, int *));
577 static unsigned int hash_string_1 PARAMS ((const char *));
578 static unsigned int hash_set PARAMS ((int, int));
579 static int expr_equiv_p PARAMS ((rtx, rtx));
580 static void record_last_reg_set_info PARAMS ((rtx, int));
581 static void record_last_mem_set_info PARAMS ((rtx));
582 static void record_last_set_info PARAMS ((rtx, rtx, void *));
583 static void compute_hash_table PARAMS ((struct hash_table *));
584 static void alloc_hash_table PARAMS ((int, struct hash_table *, int));
585 static void free_hash_table PARAMS ((struct hash_table *));
586 static void compute_hash_table_work PARAMS ((struct hash_table *));
587 static void dump_hash_table PARAMS ((FILE *, const char *,
588 struct hash_table *));
589 static struct expr *lookup_expr PARAMS ((rtx, struct hash_table *));
590 static struct expr *lookup_set PARAMS ((unsigned int, struct hash_table *));
591 static struct expr *next_set PARAMS ((unsigned int, struct expr *));
592 static void reset_opr_set_tables PARAMS ((void));
593 static int oprs_not_set_p PARAMS ((rtx, rtx));
594 static void mark_call PARAMS ((rtx));
595 static void mark_set PARAMS ((rtx, rtx));
596 static void mark_clobber PARAMS ((rtx, rtx));
597 static void mark_oprs_set PARAMS ((rtx));
598 static void alloc_cprop_mem PARAMS ((int, int));
599 static void free_cprop_mem PARAMS ((void));
600 static void compute_transp PARAMS ((rtx, int, sbitmap *, int));
601 static void compute_transpout PARAMS ((void));
602 static void compute_local_properties PARAMS ((sbitmap *, sbitmap *, sbitmap *,
603 struct hash_table *));
604 static void compute_cprop_data PARAMS ((void));
605 static void find_used_regs PARAMS ((rtx *, void *));
606 static int try_replace_reg PARAMS ((rtx, rtx, rtx));
607 static struct expr *find_avail_set PARAMS ((int, rtx));
608 static int cprop_jump PARAMS ((basic_block, rtx, rtx, rtx, rtx));
609 static void mems_conflict_for_gcse_p PARAMS ((rtx, rtx, void *));
610 static int load_killed_in_block_p PARAMS ((basic_block, int, rtx, int));
611 static void canon_list_insert PARAMS ((rtx, rtx, void *));
612 static int cprop_insn PARAMS ((rtx, int));
613 static int cprop PARAMS ((int));
614 static rtx fis_get_condition PARAMS ((rtx));
615 static void find_implicit_sets PARAMS ((void));
616 static int one_cprop_pass PARAMS ((int, int, int));
617 static bool constprop_register PARAMS ((rtx, rtx, rtx, int));
618 static struct expr *find_bypass_set PARAMS ((int, int));
619 static bool reg_killed_on_edge PARAMS ((rtx, edge));
620 static int bypass_block PARAMS ((basic_block, rtx, rtx));
621 static int bypass_conditional_jumps PARAMS ((void));
622 static void alloc_pre_mem PARAMS ((int, int));
623 static void free_pre_mem PARAMS ((void));
624 static void compute_pre_data PARAMS ((void));
625 static int pre_expr_reaches_here_p PARAMS ((basic_block, struct expr *,
626 basic_block));
627 static void insert_insn_end_bb PARAMS ((struct expr *, basic_block, int));
628 static void pre_insert_copy_insn PARAMS ((struct expr *, rtx));
629 static void pre_insert_copies PARAMS ((void));
630 static int pre_delete PARAMS ((void));
631 static int pre_gcse PARAMS ((void));
632 static int one_pre_gcse_pass PARAMS ((int));
633 static void add_label_notes PARAMS ((rtx, rtx));
634 static void alloc_code_hoist_mem PARAMS ((int, int));
635 static void free_code_hoist_mem PARAMS ((void));
636 static void compute_code_hoist_vbeinout PARAMS ((void));
637 static void compute_code_hoist_data PARAMS ((void));
638 static int hoist_expr_reaches_here_p PARAMS ((basic_block, int, basic_block,
639 char *));
640 static void hoist_code PARAMS ((void));
641 static int one_code_hoisting_pass PARAMS ((void));
642 static void alloc_rd_mem PARAMS ((int, int));
643 static void free_rd_mem PARAMS ((void));
644 static void handle_rd_kill_set PARAMS ((rtx, int, basic_block));
645 static void compute_kill_rd PARAMS ((void));
646 static void compute_rd PARAMS ((void));
647 static void alloc_avail_expr_mem PARAMS ((int, int));
648 static void free_avail_expr_mem PARAMS ((void));
649 static void compute_ae_gen PARAMS ((struct hash_table *));
650 static int expr_killed_p PARAMS ((rtx, basic_block));
651 static void compute_ae_kill PARAMS ((sbitmap *, sbitmap *, struct hash_table *));
652 static int expr_reaches_here_p PARAMS ((struct occr *, struct expr *,
653 basic_block, int));
654 static rtx computing_insn PARAMS ((struct expr *, rtx));
655 static int def_reaches_here_p PARAMS ((rtx, rtx));
656 static int can_disregard_other_sets PARAMS ((struct reg_set **, rtx, int));
657 static int handle_avail_expr PARAMS ((rtx, struct expr *));
658 static int classic_gcse PARAMS ((void));
659 static int one_classic_gcse_pass PARAMS ((int));
660 static void invalidate_nonnull_info PARAMS ((rtx, rtx, void *));
661 static int delete_null_pointer_checks_1 PARAMS ((unsigned int *,
662 sbitmap *, sbitmap *,
663 struct null_pointer_info *));
664 static rtx process_insert_insn PARAMS ((struct expr *));
665 static int pre_edge_insert PARAMS ((struct edge_list *, struct expr **));
666 static int expr_reaches_here_p_work PARAMS ((struct occr *, struct expr *,
667 basic_block, int, char *));
668 static int pre_expr_reaches_here_p_work PARAMS ((basic_block, struct expr *,
669 basic_block, char *));
670 static struct ls_expr * ldst_entry PARAMS ((rtx));
671 static void free_ldst_entry PARAMS ((struct ls_expr *));
672 static void free_ldst_mems PARAMS ((void));
673 static void print_ldst_list PARAMS ((FILE *));
674 static struct ls_expr * find_rtx_in_ldst PARAMS ((rtx));
675 static int enumerate_ldsts PARAMS ((void));
676 static inline struct ls_expr * first_ls_expr PARAMS ((void));
677 static inline struct ls_expr * next_ls_expr PARAMS ((struct ls_expr *));
678 static int simple_mem PARAMS ((rtx));
679 static void invalidate_any_buried_refs PARAMS ((rtx));
680 static void compute_ld_motion_mems PARAMS ((void));
681 static void trim_ld_motion_mems PARAMS ((void));
682 static void update_ld_motion_stores PARAMS ((struct expr *));
683 static void reg_set_info PARAMS ((rtx, rtx, void *));
684 static bool store_ops_ok PARAMS ((rtx, int *));
685 static rtx extract_mentioned_regs PARAMS ((rtx));
686 static rtx extract_mentioned_regs_helper PARAMS ((rtx, rtx));
687 static void find_moveable_store PARAMS ((rtx, int *, int *));
688 static int compute_store_table PARAMS ((void));
689 static bool load_kills_store PARAMS ((rtx, rtx));
690 static bool find_loads PARAMS ((rtx, rtx));
691 static bool store_killed_in_insn PARAMS ((rtx, rtx, rtx));
692 static bool store_killed_after PARAMS ((rtx, rtx, rtx, basic_block,
693 int *, rtx *));
694 static bool store_killed_before PARAMS ((rtx, rtx, rtx, basic_block,
695 int *));
696 static void build_store_vectors PARAMS ((void));
697 static void insert_insn_start_bb PARAMS ((rtx, basic_block));
698 static int insert_store PARAMS ((struct ls_expr *, edge));
699 static void replace_store_insn PARAMS ((rtx, rtx, basic_block));
700 static void delete_store PARAMS ((struct ls_expr *,
701 basic_block));
702 static void free_store_memory PARAMS ((void));
703 static void store_motion PARAMS ((void));
704 static void free_insn_expr_list_list PARAMS ((rtx *));
705 static void clear_modify_mem_tables PARAMS ((void));
706 static void free_modify_mem_tables PARAMS ((void));
707 static rtx gcse_emit_move_after PARAMS ((rtx, rtx, rtx));
708 static void local_cprop_find_used_regs PARAMS ((rtx *, void *));
709 static bool do_local_cprop PARAMS ((rtx, rtx, int, rtx*));
710 static bool adjust_libcall_notes PARAMS ((rtx, rtx, rtx, rtx*));
711 static void local_cprop_pass PARAMS ((int));
713 /* Entry point for global common subexpression elimination.
714 F is the first instruction in the function. */
717 gcse_main (f, file)
718 rtx f;
719 FILE *file;
721 int changed, pass;
722 /* Bytes used at start of pass. */
723 int initial_bytes_used;
724 /* Maximum number of bytes used by a pass. */
725 int max_pass_bytes;
726 /* Point to release obstack data from for each pass. */
727 char *gcse_obstack_bottom;
729 /* We do not construct an accurate cfg in functions which call
730 setjmp, so just punt to be safe. */
731 if (current_function_calls_setjmp)
732 return 0;
734 /* Assume that we do not need to run jump optimizations after gcse. */
735 run_jump_opt_after_gcse = 0;
737 /* For calling dump_foo fns from gdb. */
738 debug_stderr = stderr;
739 gcse_file = file;
741 /* Identify the basic block information for this function, including
742 successors and predecessors. */
743 max_gcse_regno = max_reg_num ();
745 if (file)
746 dump_flow_info (file);
748 /* Return if there's nothing to do. */
749 if (n_basic_blocks <= 1)
750 return 0;
752 /* Trying to perform global optimizations on flow graphs which have
753 a high connectivity will take a long time and is unlikely to be
754 particularly useful.
756 In normal circumstances a cfg should have about twice as many edges
757 as blocks. But we do not want to punish small functions which have
758 a couple switch statements. So we require a relatively large number
759 of basic blocks and the ratio of edges to blocks to be high. */
760 if (n_basic_blocks > 1000 && n_edges / n_basic_blocks >= 20)
762 if (warn_disabled_optimization)
763 warning ("GCSE disabled: %d > 1000 basic blocks and %d >= 20 edges/basic block",
764 n_basic_blocks, n_edges / n_basic_blocks);
765 return 0;
768 /* If allocating memory for the cprop bitmap would take up too much
769 storage it's better just to disable the optimization. */
770 if ((n_basic_blocks
771 * SBITMAP_SET_SIZE (max_gcse_regno)
772 * sizeof (SBITMAP_ELT_TYPE)) > MAX_GCSE_MEMORY)
774 if (warn_disabled_optimization)
775 warning ("GCSE disabled: %d basic blocks and %d registers",
776 n_basic_blocks, max_gcse_regno);
778 return 0;
781 gcc_obstack_init (&gcse_obstack);
782 bytes_used = 0;
784 /* We need alias. */
785 init_alias_analysis ();
786 /* Record where pseudo-registers are set. This data is kept accurate
787 during each pass. ??? We could also record hard-reg information here
788 [since it's unchanging], however it is currently done during hash table
789 computation.
791 It may be tempting to compute MEM set information here too, but MEM sets
792 will be subject to code motion one day and thus we need to compute
793 information about memory sets when we build the hash tables. */
795 alloc_reg_set_mem (max_gcse_regno);
796 compute_sets (f);
798 pass = 0;
799 initial_bytes_used = bytes_used;
800 max_pass_bytes = 0;
801 gcse_obstack_bottom = gcse_alloc (1);
802 changed = 1;
803 while (changed && pass < MAX_GCSE_PASSES)
805 changed = 0;
806 if (file)
807 fprintf (file, "GCSE pass %d\n\n", pass + 1);
809 /* Initialize bytes_used to the space for the pred/succ lists,
810 and the reg_set_table data. */
811 bytes_used = initial_bytes_used;
813 /* Each pass may create new registers, so recalculate each time. */
814 max_gcse_regno = max_reg_num ();
816 alloc_gcse_mem (f);
818 /* Don't allow constant propagation to modify jumps
819 during this pass. */
820 changed = one_cprop_pass (pass + 1, 0, 0);
822 if (optimize_size)
823 changed |= one_classic_gcse_pass (pass + 1);
824 else
826 changed |= one_pre_gcse_pass (pass + 1);
827 /* We may have just created new basic blocks. Release and
828 recompute various things which are sized on the number of
829 basic blocks. */
830 if (changed)
832 free_modify_mem_tables ();
833 modify_mem_list
834 = (rtx *) gmalloc (last_basic_block * sizeof (rtx));
835 canon_modify_mem_list
836 = (rtx *) gmalloc (last_basic_block * sizeof (rtx));
837 memset ((char *) modify_mem_list, 0, last_basic_block * sizeof (rtx));
838 memset ((char *) canon_modify_mem_list, 0, last_basic_block * sizeof (rtx));
840 free_reg_set_mem ();
841 alloc_reg_set_mem (max_reg_num ());
842 compute_sets (f);
843 run_jump_opt_after_gcse = 1;
846 if (max_pass_bytes < bytes_used)
847 max_pass_bytes = bytes_used;
849 /* Free up memory, then reallocate for code hoisting. We can
850 not re-use the existing allocated memory because the tables
851 will not have info for the insns or registers created by
852 partial redundancy elimination. */
853 free_gcse_mem ();
855 /* It does not make sense to run code hoisting unless we are optimizing
856 for code size -- it rarely makes programs faster, and can make
857 them bigger if we did partial redundancy elimination (when optimizing
858 for space, we use a classic gcse algorithm instead of partial
859 redundancy algorithms). */
860 if (optimize_size)
862 max_gcse_regno = max_reg_num ();
863 alloc_gcse_mem (f);
864 changed |= one_code_hoisting_pass ();
865 free_gcse_mem ();
867 if (max_pass_bytes < bytes_used)
868 max_pass_bytes = bytes_used;
871 if (file)
873 fprintf (file, "\n");
874 fflush (file);
877 obstack_free (&gcse_obstack, gcse_obstack_bottom);
878 pass++;
881 /* Do one last pass of copy propagation, including cprop into
882 conditional jumps. */
884 max_gcse_regno = max_reg_num ();
885 alloc_gcse_mem (f);
886 /* This time, go ahead and allow cprop to alter jumps. */
887 one_cprop_pass (pass + 1, 1, 0);
888 free_gcse_mem ();
890 if (file)
892 fprintf (file, "GCSE of %s: %d basic blocks, ",
893 current_function_name, n_basic_blocks);
894 fprintf (file, "%d pass%s, %d bytes\n\n",
895 pass, pass > 1 ? "es" : "", max_pass_bytes);
898 obstack_free (&gcse_obstack, NULL);
899 free_reg_set_mem ();
900 /* We are finished with alias. */
901 end_alias_analysis ();
902 allocate_reg_info (max_reg_num (), FALSE, FALSE);
904 if (!optimize_size && flag_gcse_sm)
905 store_motion ();
907 /* Record where pseudo-registers are set. */
908 return run_jump_opt_after_gcse;
911 /* Misc. utilities. */
913 /* Nonzero for each mode that supports (set (reg) (reg)).
914 This is trivially true for integer and floating point values.
915 It may or may not be true for condition codes. */
916 static char can_copy[(int) NUM_MACHINE_MODES];
918 /* Compute which modes support reg/reg copy operations. */
920 static void
921 compute_can_copy ()
923 int i;
924 #ifndef AVOID_CCMODE_COPIES
925 rtx reg, insn;
926 #endif
927 memset (can_copy, 0, NUM_MACHINE_MODES);
929 start_sequence ();
930 for (i = 0; i < NUM_MACHINE_MODES; i++)
931 if (GET_MODE_CLASS (i) == MODE_CC)
933 #ifdef AVOID_CCMODE_COPIES
934 can_copy[i] = 0;
935 #else
936 reg = gen_rtx_REG ((enum machine_mode) i, LAST_VIRTUAL_REGISTER + 1);
937 insn = emit_insn (gen_rtx_SET (VOIDmode, reg, reg));
938 if (recog (PATTERN (insn), insn, NULL) >= 0)
939 can_copy[i] = 1;
940 #endif
942 else
943 can_copy[i] = 1;
945 end_sequence ();
948 /* Returns whether the mode supports reg/reg copy operations. */
950 bool
951 can_copy_p (mode)
952 enum machine_mode mode;
954 static bool can_copy_init_p = false;
956 if (! can_copy_init_p)
958 compute_can_copy ();
959 can_copy_init_p = true;
962 return can_copy[mode] != 0;
965 /* Cover function to xmalloc to record bytes allocated. */
967 static char *
968 gmalloc (size)
969 unsigned int size;
971 bytes_used += size;
972 return xmalloc (size);
975 /* Cover function to xrealloc.
976 We don't record the additional size since we don't know it.
977 It won't affect memory usage stats much anyway. */
979 static char *
980 grealloc (ptr, size)
981 char *ptr;
982 unsigned int size;
984 return xrealloc (ptr, size);
987 /* Cover function to obstack_alloc. */
989 static char *
990 gcse_alloc (size)
991 unsigned long size;
993 bytes_used += size;
994 return (char *) obstack_alloc (&gcse_obstack, size);
997 /* Allocate memory for the cuid mapping array,
998 and reg/memory set tracking tables.
1000 This is called at the start of each pass. */
1002 static void
1003 alloc_gcse_mem (f)
1004 rtx f;
1006 int i, n;
1007 rtx insn;
1009 /* Find the largest UID and create a mapping from UIDs to CUIDs.
1010 CUIDs are like UIDs except they increase monotonically, have no gaps,
1011 and only apply to real insns. */
1013 max_uid = get_max_uid ();
1014 n = (max_uid + 1) * sizeof (int);
1015 uid_cuid = (int *) gmalloc (n);
1016 memset ((char *) uid_cuid, 0, n);
1017 for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
1019 if (INSN_P (insn))
1020 uid_cuid[INSN_UID (insn)] = i++;
1021 else
1022 uid_cuid[INSN_UID (insn)] = i;
1025 /* Create a table mapping cuids to insns. */
1027 max_cuid = i;
1028 n = (max_cuid + 1) * sizeof (rtx);
1029 cuid_insn = (rtx *) gmalloc (n);
1030 memset ((char *) cuid_insn, 0, n);
1031 for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
1032 if (INSN_P (insn))
1033 CUID_INSN (i++) = insn;
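/* Illustration [the uids are made up]: for the stream NOTE (uid 8),
INSN (uid 3), NOTE (uid 9), INSN (uid 5), the two real insns get cuids
0 and 1, each NOTE records the cuid of the next real insn, and
CUID_INSN is dense even though the uids have gaps. */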
1035 /* Allocate vars to track sets of regs. */
1036 reg_set_bitmap = BITMAP_XMALLOC ();
1038 /* Allocate vars to track sets of regs, memory per block. */
1039 reg_set_in_block = (sbitmap *) sbitmap_vector_alloc (last_basic_block,
1040 max_gcse_regno);
1041 /* Allocate array to keep a list of insns which modify memory in each
1042 basic block. */
1043 modify_mem_list = (rtx *) gmalloc (last_basic_block * sizeof (rtx));
1044 canon_modify_mem_list = (rtx *) gmalloc (last_basic_block * sizeof (rtx));
1045 memset ((char *) modify_mem_list, 0, last_basic_block * sizeof (rtx));
1046 memset ((char *) canon_modify_mem_list, 0, last_basic_block * sizeof (rtx));
1047 modify_mem_list_set = BITMAP_XMALLOC ();
1048 canon_modify_mem_list_set = BITMAP_XMALLOC ();
1051 /* Free memory allocated by alloc_gcse_mem. */
1053 static void
1054 free_gcse_mem ()
1056 free (uid_cuid);
1057 free (cuid_insn);
1059 BITMAP_XFREE (reg_set_bitmap);
1061 sbitmap_vector_free (reg_set_in_block);
1062 free_modify_mem_tables ();
1063 BITMAP_XFREE (modify_mem_list_set);
1064 BITMAP_XFREE (canon_modify_mem_list_set);
1067 /* Many of the global optimization algorithms work by solving dataflow
1068 equations for various expressions. Initially, some local value is
1069 computed for each expression in each block. Then, the values across the
1070 various blocks are combined (by following flow graph edges) to arrive at
1071 global values. Conceptually, each set of equations is independent. We
1072 may therefore solve all the equations in parallel, solve them one at a
1073 time, or pick any intermediate approach.
1075 When you're going to need N two-dimensional bitmaps, each X (say, the
1076 number of blocks) by Y (say, the number of expressions), call this
1077 function. It's not important what X and Y represent; only that Y
1078 correspond to the things that can be done in parallel. This function will
1079 return an appropriate chunking factor C; you should solve C sets of
1080 equations in parallel. By going through this function, we can easily
1081 trade space against time; by solving fewer equations in parallel we use
1082 less space. */
1084 static int
1085 get_bitmap_width (n, x, y)
1086 int n;
1087 int x;
1088 int y;
1090 /* It's not really worth figuring out *exactly* how much memory will
1091 be used by a particular choice. The important thing is to get
1092 something approximately right. */
1093 size_t max_bitmap_memory = 10 * 1024 * 1024;
1095 /* The number of bytes we'd use for a single column of minimum
1096 width. */
1097 size_t column_size = n * x * sizeof (SBITMAP_ELT_TYPE);
1099 /* Often, it's reasonable just to solve all the equations in
1100 parallel. */
1101 if (column_size * SBITMAP_SET_SIZE (y) <= max_bitmap_memory)
1102 return y;
1104 /* Otherwise, pick the largest width we can, without going over the
1105 limit. */
1106 return SBITMAP_ELT_BITS * ((max_bitmap_memory + column_size - 1)
1107 / column_size);
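/* A worked example [numbers hypothetical]: with n = 2 problems, x = 1000
blocks and an 8-byte SBITMAP_ELT_TYPE, column_size is 16000 bytes. For
y = 5000 expressions on a 64-bit host, SBITMAP_SET_SIZE (y) is 79 words,
so column_size * 79 is roughly 1.2MB, well under the 10MB cap, and we
return y: all the equations are solved in parallel. */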
1110 /* Compute the local properties of each recorded expression.
1112 Local properties are those that are defined by the block, irrespective of
1113 other blocks.
1115 An expression is transparent in a block if its operands are not modified
1116 in the block.
1118 An expression is computed (locally available) in a block if it is computed
1119 at least once and the expression would contain the same value if the
1120 computation was moved to the end of the block.
1122 An expression is locally anticipatable in a block if it is computed at
1123 least once and the expression would contain the same value if the computation
1124 was moved to the beginning of the block.
1126 We call this routine for cprop, pre and code hoisting. They all compute
1127 basically the same information and thus can easily share this code.
1129 TRANSP, COMP, and ANTLOC are destination sbitmaps for recording local
1130 properties. If NULL, then it is not necessary to compute or record that
1131 particular property.
1133 TABLE controls which hash table to look at. If it is the set hash table,
1134 additionally TRANSP is computed as ~TRANSP, since this is really cprop's
1135 ABSALTERED. */
1137 static void
1138 compute_local_properties (transp, comp, antloc, table)
1139 sbitmap *transp;
1140 sbitmap *comp;
1141 sbitmap *antloc;
1142 struct hash_table *table;
1144 unsigned int i;
1146 /* Initialize any bitmaps that were passed in. */
1147 if (transp)
1149 if (table->set_p)
1150 sbitmap_vector_zero (transp, last_basic_block);
1151 else
1152 sbitmap_vector_ones (transp, last_basic_block);
1155 if (comp)
1156 sbitmap_vector_zero (comp, last_basic_block);
1157 if (antloc)
1158 sbitmap_vector_zero (antloc, last_basic_block);
1160 for (i = 0; i < table->size; i++)
1162 struct expr *expr;
1164 for (expr = table->table[i]; expr != NULL; expr = expr->next_same_hash)
1166 int indx = expr->bitmap_index;
1167 struct occr *occr;
1169 /* The expression is transparent in this block if it is not killed.
1170 We start by assuming all are transparent [none are killed], and
1171 then reset the bits for those that are. */
1172 if (transp)
1173 compute_transp (expr->expr, indx, transp, table->set_p);
1175 /* The occurrences recorded in antic_occr are exactly those that
1176 we want to set to nonzero in ANTLOC. */
1177 if (antloc)
1178 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
1180 SET_BIT (antloc[BLOCK_NUM (occr->insn)], indx);
1182 /* While we're scanning the table, this is a good place to
1183 initialize this. */
1184 occr->deleted_p = 0;
1187 /* The occurrences recorded in avail_occr are exactly those that
1188 we want to set to nonzero in COMP. */
1189 if (comp)
1190 for (occr = expr->avail_occr; occr != NULL; occr = occr->next)
1192 SET_BIT (comp[BLOCK_NUM (occr->insn)], indx);
1194 /* While we're scanning the table, this is a good place to
1195 initialize this. */
1196 occr->copied_p = 0;
1199 /* While we're scanning the table, this is a good place to
1200 initialize this. */
1201 expr->reaching_reg = 0;
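/* Illustration [hypothetical block]: if a block computes
(plus (reg 1) (reg 2)) once and never modifies reg 1 or reg 2, the
expression's bit stays set in TRANSP and is set in COMP and ANTLOC for
that block. A store to reg 1 after the computation would instead clear
TRANSP and COMP but leave ANTLOC set. */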
1206 /* Register set information.
1208 `reg_set_table' records where each register is set or otherwise
1209 modified. */
1211 static struct obstack reg_set_obstack;
1213 static void
1214 alloc_reg_set_mem (n_regs)
1215 int n_regs;
1217 unsigned int n;
1219 reg_set_table_size = n_regs + REG_SET_TABLE_SLOP;
1220 n = reg_set_table_size * sizeof (struct reg_set *);
1221 reg_set_table = (struct reg_set **) gmalloc (n);
1222 memset ((char *) reg_set_table, 0, n);
1224 gcc_obstack_init (&reg_set_obstack);
1227 static void
1228 free_reg_set_mem ()
1230 free (reg_set_table);
1231 obstack_free (&reg_set_obstack, NULL);
1234 /* Record REGNO in the reg_set table. */
1236 static void
1237 record_one_set (regno, insn)
1238 int regno;
1239 rtx insn;
1241 /* Allocate a new reg_set element and link it onto the list. */
1242 struct reg_set *new_reg_info;
1244 /* If the table isn't big enough, enlarge it. */
1245 if (regno >= reg_set_table_size)
1247 int new_size = regno + REG_SET_TABLE_SLOP;
1249 reg_set_table
1250 = (struct reg_set **) grealloc ((char *) reg_set_table,
1251 new_size * sizeof (struct reg_set *));
1252 memset ((char *) (reg_set_table + reg_set_table_size), 0,
1253 (new_size - reg_set_table_size) * sizeof (struct reg_set *));
1254 reg_set_table_size = new_size;
1257 new_reg_info = (struct reg_set *) obstack_alloc (&reg_set_obstack,
1258 sizeof (struct reg_set));
1259 bytes_used += sizeof (struct reg_set);
1260 new_reg_info->insn = insn;
1261 new_reg_info->next = reg_set_table[regno];
1262 reg_set_table[regno] = new_reg_info;
1265 /* Called from compute_sets via note_stores to handle one SET or CLOBBER in
1266 an insn. The DATA is really the instruction in which the SET is
1267 occurring. */
1269 static void
1270 record_set_info (dest, setter, data)
1271 rtx dest, setter ATTRIBUTE_UNUSED;
1272 void *data;
1274 rtx record_set_insn = (rtx) data;
1276 if (GET_CODE (dest) == REG && REGNO (dest) >= FIRST_PSEUDO_REGISTER)
1277 record_one_set (REGNO (dest), record_set_insn);
1280 /* Scan the function and record each set of each pseudo-register.
1282 This is called once, at the start of the gcse pass. See the comments for
1283 `reg_set_table' for further documentation. */
1285 static void
1286 compute_sets (f)
1287 rtx f;
1289 rtx insn;
1291 for (insn = f; insn != 0; insn = NEXT_INSN (insn))
1292 if (INSN_P (insn))
1293 note_stores (PATTERN (insn), record_set_info, insn);
1296 /* Hash table support. */
1298 struct reg_avail_info
1299 {
1300 basic_block last_bb;
1301 int first_set;
1302 int last_set;
1303 };
1305 static struct reg_avail_info *reg_avail_info;
1306 static basic_block current_bb;
1309 /* See whether X, the source of a set, is something we want to consider for
1310 GCSE. */
1312 static GTY(()) rtx test_insn;
1313 static int
1314 want_to_gcse_p (x)
1315 rtx x;
1317 int num_clobbers = 0;
1318 int icode;
1320 switch (GET_CODE (x))
1322 case REG:
1323 case SUBREG:
1324 case CONST_INT:
1325 case CONST_DOUBLE:
1326 case CONST_VECTOR:
1327 case CALL:
1328 case CONSTANT_P_RTX:
1329 return 0;
1331 default:
1332 break;
1335 /* If this is a valid operand, we are OK. If it's VOIDmode, we aren't. */
1336 if (general_operand (x, GET_MODE (x)))
1337 return 1;
1338 else if (GET_MODE (x) == VOIDmode)
1339 return 0;
1341 /* Otherwise, check if we can make a valid insn from it. First initialize
1342 our test insn if we haven't already. */
1343 if (test_insn == 0)
1345 test_insn
1346 = make_insn_raw (gen_rtx_SET (VOIDmode,
1347 gen_rtx_REG (word_mode,
1348 FIRST_PSEUDO_REGISTER * 2),
1349 const0_rtx));
1350 NEXT_INSN (test_insn) = PREV_INSN (test_insn) = 0;
1353 /* Now make an insn like the one we would make when GCSE'ing and see if
1354 valid. */
1355 PUT_MODE (SET_DEST (PATTERN (test_insn)), GET_MODE (x));
1356 SET_SRC (PATTERN (test_insn)) = x;
1357 return ((icode = recog (PATTERN (test_insn), test_insn, &num_clobbers)) >= 0
1358 && (num_clobbers == 0 || ! added_clobbers_hard_reg_p (icode)));
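/* For instance [illustrative], a PLUS source is never a general_operand,
so want_to_gcse_p splices it into the scratch SET above, gives the
SET_DEST the source's mode, and asks recog whether some insn pattern
matches; a match whose added clobbers, if any, involve no hard registers
means the expression is worth GCSE-ing. */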
1361 /* Return nonzero if the operands of expression X are unchanged from the
1362 start of INSN's basic block up to but not including INSN (if AVAIL_P == 0),
1363 or from INSN to the end of INSN's basic block (if AVAIL_P != 0). */
1365 static int
1366 oprs_unchanged_p (x, insn, avail_p)
1367 rtx x, insn;
1368 int avail_p;
1370 int i, j;
1371 enum rtx_code code;
1372 const char *fmt;
1374 if (x == 0)
1375 return 1;
1377 code = GET_CODE (x);
1378 switch (code)
1380 case REG:
1382 struct reg_avail_info *info = &reg_avail_info[REGNO (x)];
1384 if (info->last_bb != current_bb)
1385 return 1;
1386 if (avail_p)
1387 return info->last_set < INSN_CUID (insn);
1388 else
1389 return info->first_set >= INSN_CUID (insn);
1392 case MEM:
1393 if (load_killed_in_block_p (current_bb, INSN_CUID (insn),
1394 x, avail_p))
1395 return 0;
1396 else
1397 return oprs_unchanged_p (XEXP (x, 0), insn, avail_p);
1399 case PRE_DEC:
1400 case PRE_INC:
1401 case POST_DEC:
1402 case POST_INC:
1403 case PRE_MODIFY:
1404 case POST_MODIFY:
1405 return 0;
1407 case PC:
1408 case CC0: /*FIXME*/
1409 case CONST:
1410 case CONST_INT:
1411 case CONST_DOUBLE:
1412 case CONST_VECTOR:
1413 case SYMBOL_REF:
1414 case LABEL_REF:
1415 case ADDR_VEC:
1416 case ADDR_DIFF_VEC:
1417 return 1;
1419 default:
1420 break;
1423 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
1425 if (fmt[i] == 'e')
1427 /* If we are about to do the last recursive call needed at this
1428 level, change it into iteration. This function is called enough
1429 to be worth it. */
1430 if (i == 0)
1431 return oprs_unchanged_p (XEXP (x, i), insn, avail_p);
1433 else if (! oprs_unchanged_p (XEXP (x, i), insn, avail_p))
1434 return 0;
1436 else if (fmt[i] == 'E')
1437 for (j = 0; j < XVECLEN (x, i); j++)
1438 if (! oprs_unchanged_p (XVECEXP (x, i, j), insn, avail_p))
1439 return 0;
1442 return 1;
1445 /* Used for communication between mems_conflict_for_gcse_p and
1446 load_killed_in_block_p. Nonzero if mems_conflict_for_gcse_p finds a
1447 conflict between two memory references. */
1448 static int gcse_mems_conflict_p;
1450 /* Used for communication between mems_conflict_for_gcse_p and
1451 load_killed_in_block_p. A memory reference for a load instruction,
1452 mems_conflict_for_gcse_p will see if a memory store conflicts with
1453 this memory load. */
1454 static rtx gcse_mem_operand;
1456 /* DEST is the output of an instruction. If it is a memory reference, and
1457 possibly conflicts with the load found in gcse_mem_operand, then set
1458 gcse_mems_conflict_p to a nonzero value. */
1460 static void
1461 mems_conflict_for_gcse_p (dest, setter, data)
1462 rtx dest, setter ATTRIBUTE_UNUSED;
1463 void *data ATTRIBUTE_UNUSED;
1465 while (GET_CODE (dest) == SUBREG
1466 || GET_CODE (dest) == ZERO_EXTRACT
1467 || GET_CODE (dest) == SIGN_EXTRACT
1468 || GET_CODE (dest) == STRICT_LOW_PART)
1469 dest = XEXP (dest, 0);
1471 /* If DEST is not a MEM, then it will not conflict with the load. Note
1472 that function calls are assumed to clobber memory, but are handled
1473 elsewhere. */
1474 if (GET_CODE (dest) != MEM)
1475 return;
1477 /* If we are setting a MEM in our list of specially recognized MEMs,
1478 don't mark as killed this time. */
1480 if (expr_equiv_p (dest, gcse_mem_operand) && pre_ldst_mems != NULL)
1482 if (!find_rtx_in_ldst (dest))
1483 gcse_mems_conflict_p = 1;
1484 return;
1487 if (true_dependence (dest, GET_MODE (dest), gcse_mem_operand,
1488 rtx_addr_varies_p))
1489 gcse_mems_conflict_p = 1;
1492 /* Return nonzero if the expression in X (a memory reference) is killed
1493 in block BB before or after the insn with the CUID in UID_LIMIT.
1494 AVAIL_P is nonzero for kills after UID_LIMIT, and zero for kills
1495 before UID_LIMIT.
1497 To check the entire block, set UID_LIMIT to max_uid + 1 and
1498 AVAIL_P to 0. */
1500 static int
1501 load_killed_in_block_p (bb, uid_limit, x, avail_p)
1502 basic_block bb;
1503 int uid_limit;
1504 rtx x;
1505 int avail_p;
1507 rtx list_entry = modify_mem_list[bb->index];
1508 while (list_entry)
1510 rtx setter;
1511 /* Ignore entries in the list that do not apply. */
1512 if ((avail_p
1513 && INSN_CUID (XEXP (list_entry, 0)) < uid_limit)
1514 || (! avail_p
1515 && INSN_CUID (XEXP (list_entry, 0)) > uid_limit))
1517 list_entry = XEXP (list_entry, 1);
1518 continue;
1521 setter = XEXP (list_entry, 0);
1523 /* If SETTER is a call everything is clobbered. Note that calls
1524 to pure functions are never put on the list, so we need not
1525 worry about them. */
1526 if (GET_CODE (setter) == CALL_INSN)
1527 return 1;
1529 /* SETTER must be an INSN of some kind that sets memory. Call
1530 note_stores to examine each hunk of memory that is modified.
1532 The note_stores interface is pretty limited, so we have to
1533 communicate via global variables. Yuk. */
1534 gcse_mem_operand = x;
1535 gcse_mems_conflict_p = 0;
1536 note_stores (PATTERN (setter), mems_conflict_for_gcse_p, NULL);
1537 if (gcse_mems_conflict_p)
1538 return 1;
1539 list_entry = XEXP (list_entry, 1);
1541 return 0;
1544 /* Return nonzero if the operands of expression X are unchanged from
1545 the start of INSN's basic block up to but not including INSN. */
1547 static int
1548 oprs_anticipatable_p (x, insn)
1549 rtx x, insn;
1551 return oprs_unchanged_p (x, insn, 0);
1554 /* Return nonzero if the operands of expression X are unchanged from
1555 INSN to the end of INSN's basic block. */
1557 static int
1558 oprs_available_p (x, insn)
1559 rtx x, insn;
1561 return oprs_unchanged_p (x, insn, 1);
1564 /* Hash expression X.
1566 MODE is only used if X is a CONST_INT. DO_NOT_RECORD_P is a boolean
1567 indicating if a volatile operand is found or if the expression contains
1568 something we don't want to insert in the table.
1570 ??? One might want to merge this with canon_hash. Later. */
1572 static unsigned int
1573 hash_expr (x, mode, do_not_record_p, hash_table_size)
1574 rtx x;
1575 enum machine_mode mode;
1576 int *do_not_record_p;
1577 int hash_table_size;
1579 unsigned int hash;
1581 *do_not_record_p = 0;
1583 hash = hash_expr_1 (x, mode, do_not_record_p);
1584 return hash % hash_table_size;
1587 /* Hash a string. Just add its bytes up. */
1589 static inline unsigned
1590 hash_string_1 (ps)
1591 const char *ps;
1593 unsigned hash = 0;
1594 const unsigned char *p = (const unsigned char *) ps;
1596 if (p)
1597 while (*p)
1598 hash += *p++;
1600 return hash;
1603 /* Subroutine of hash_expr to do the actual work. */
1605 static unsigned int
1606 hash_expr_1 (x, mode, do_not_record_p)
1607 rtx x;
1608 enum machine_mode mode;
1609 int *do_not_record_p;
1611 int i, j;
1612 unsigned hash = 0;
1613 enum rtx_code code;
1614 const char *fmt;
1616 /* Used to turn recursion into iteration. We can't rely on GCC's
1617 tail-recursion elimination since we need to keep accumulating values
1618 in HASH. */
1620 if (x == 0)
1621 return hash;
1623 repeat:
1624 code = GET_CODE (x);
1625 switch (code)
1627 case REG:
1628 hash += ((unsigned int) REG << 7) + REGNO (x);
1629 return hash;
1631 case CONST_INT:
1632 hash += (((unsigned int) CONST_INT << 7) + (unsigned int) mode
1633 + (unsigned int) INTVAL (x));
1634 return hash;
1636 case CONST_DOUBLE:
1637 /* This is like the general case, except that it only counts
1638 the integers representing the constant. */
1639 hash += (unsigned int) code + (unsigned int) GET_MODE (x);
1640 if (GET_MODE (x) != VOIDmode)
1641 for (i = 2; i < GET_RTX_LENGTH (CONST_DOUBLE); i++)
1642 hash += (unsigned int) XWINT (x, i);
1643 else
1644 hash += ((unsigned int) CONST_DOUBLE_LOW (x)
1645 + (unsigned int) CONST_DOUBLE_HIGH (x));
1646 return hash;
1648 case CONST_VECTOR:
1650 int units;
1651 rtx elt;
1653 units = CONST_VECTOR_NUNITS (x);
1655 for (i = 0; i < units; ++i)
1657 elt = CONST_VECTOR_ELT (x, i);
1658 hash += hash_expr_1 (elt, GET_MODE (elt), do_not_record_p);
1661 return hash;
1664 /* Assume there is only one rtx object for any given label. */
1665 case LABEL_REF:
1666 /* We don't hash on the address of the CODE_LABEL to avoid bootstrap
1667 differences and differences between each stage's debugging dumps. */
1668 hash += (((unsigned int) LABEL_REF << 7)
1669 + CODE_LABEL_NUMBER (XEXP (x, 0)));
1670 return hash;
1672 case SYMBOL_REF:
1674 /* Don't hash on the symbol's address to avoid bootstrap differences.
1675 Different hash values may cause expressions to be recorded in
1676 different orders and thus different registers to be used in the
1677 final assembler. This also avoids differences in the dump files
1678 between various stages. */
1679 unsigned int h = 0;
1680 const unsigned char *p = (const unsigned char *) XSTR (x, 0);
1682 while (*p)
1683 h += (h << 7) + *p++; /* ??? revisit */
1685 hash += ((unsigned int) SYMBOL_REF << 7) + h;
1686 return hash;
1689 case MEM:
1690 if (MEM_VOLATILE_P (x))
1692 *do_not_record_p = 1;
1693 return 0;
1696 hash += (unsigned int) MEM;
1697 /* We used to hash the alias set as well, but that is not a good idea,
1698 since the alias set can differ between -fprofile-arcs and
1699 -fbranch-probabilities compilations, causing the profiles to fail to match. */
1700 x = XEXP (x, 0);
1701 goto repeat;
1703 case PRE_DEC:
1704 case PRE_INC:
1705 case POST_DEC:
1706 case POST_INC:
1707 case PC:
1708 case CC0:
1709 case CALL:
1710 case UNSPEC_VOLATILE:
1711 *do_not_record_p = 1;
1712 return 0;
1714 case ASM_OPERANDS:
1715 if (MEM_VOLATILE_P (x))
1717 *do_not_record_p = 1;
1718 return 0;
1720 else
1722 /* We don't want to take the filename and line into account. */
1723 hash += (unsigned) code + (unsigned) GET_MODE (x)
1724 + hash_string_1 (ASM_OPERANDS_TEMPLATE (x))
1725 + hash_string_1 (ASM_OPERANDS_OUTPUT_CONSTRAINT (x))
1726 + (unsigned) ASM_OPERANDS_OUTPUT_IDX (x);
1728 if (ASM_OPERANDS_INPUT_LENGTH (x))
1730 for (i = 1; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
1732 hash += (hash_expr_1 (ASM_OPERANDS_INPUT (x, i),
1733 GET_MODE (ASM_OPERANDS_INPUT (x, i)),
1734 do_not_record_p)
1735 + hash_string_1 (ASM_OPERANDS_INPUT_CONSTRAINT
1736 (x, i)));
1739 hash += hash_string_1 (ASM_OPERANDS_INPUT_CONSTRAINT (x, 0));
1740 x = ASM_OPERANDS_INPUT (x, 0);
1741 mode = GET_MODE (x);
1742 goto repeat;
1744 return hash;
1747 default:
1748 break;
1751 hash += (unsigned) code + (unsigned) GET_MODE (x);
1752 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
1754 if (fmt[i] == 'e')
1756 /* If we are about to do the last recursive call
1757 needed at this level, change it into iteration.
1758 This function is called enough to be worth it. */
1759 if (i == 0)
1761 x = XEXP (x, i);
1762 goto repeat;
1765 hash += hash_expr_1 (XEXP (x, i), 0, do_not_record_p);
1766 if (*do_not_record_p)
1767 return 0;
1770 else if (fmt[i] == 'E')
1771 for (j = 0; j < XVECLEN (x, i); j++)
1773 hash += hash_expr_1 (XVECEXP (x, i, j), 0, do_not_record_p);
1774 if (*do_not_record_p)
1775 return 0;
1778 else if (fmt[i] == 's')
1779 hash += hash_string_1 (XSTR (x, i));
1780 else if (fmt[i] == 'i')
1781 hash += (unsigned int) XINT (x, i);
1782 else
1783 abort ();
1786 return hash;
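/* For illustration only (not part of the compiler): the `goto repeat'
   idiom used by hash_expr_1 above, shown on a toy binary tree.  All
   operands but the last are handled by true recursion; the last one is
   handled by overwriting the argument and jumping back to the top.  The
   running accumulator is exactly what prevents relying on the
   compiler's own tail-recursion elimination.  The toy_* names are made
   up for the example.  */
#if 0
struct toy_node { int value; struct toy_node *left, *right; };

static int
toy_sum (const struct toy_node *n)
{
  int acc = 0;

 repeat:
  if (n == 0)
    return acc;
  acc += n->value;
  acc += toy_sum (n->left);	/* true recursive call */
  n = n->right;			/* last call turned into iteration */
  goto repeat;
}
#endif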
1789 /* Hash a set of register REGNO.
1791 Sets are hashed on the register that is set. This simplifies the PRE copy
1792 propagation code.
1794 ??? May need to make things more elaborate. Later, as necessary. */
1796 static unsigned int
1797 hash_set (regno, hash_table_size)
1798 int regno;
1799 int hash_table_size;
1801 unsigned int hash;
1803 hash = regno;
1804 return hash % hash_table_size;
1807 /* Return nonzero if exp1 is equivalent to exp2.
1808 ??? Borrowed from cse.c. Might want to remerge with cse.c. Later. */
1810 static int
1811 expr_equiv_p (x, y)
1812 rtx x, y;
1814 int i, j;
1815 enum rtx_code code;
1816 const char *fmt;
1818 if (x == y)
1819 return 1;
1821 if (x == 0 || y == 0)
1822 return x == y;
1824 code = GET_CODE (x);
1825 if (code != GET_CODE (y))
1826 return 0;
1828 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
1829 if (GET_MODE (x) != GET_MODE (y))
1830 return 0;
1832 switch (code)
1834 case PC:
1835 case CC0:
1836 return x == y;
1838 case CONST_INT:
1839 return INTVAL (x) == INTVAL (y);
1841 case LABEL_REF:
1842 return XEXP (x, 0) == XEXP (y, 0);
1844 case SYMBOL_REF:
1845 return XSTR (x, 0) == XSTR (y, 0);
1847 case REG:
1848 return REGNO (x) == REGNO (y);
1850 case MEM:
1851 /* Can't merge two expressions in different alias sets, since we can
1852 decide that the expression is transparent in a block when it isn't,
1853 because it is set using a different alias set. */
1854 if (MEM_ALIAS_SET (x) != MEM_ALIAS_SET (y))
1855 return 0;
1856 break;
1858 /* For commutative operations, check both orders. */
1859 case PLUS:
1860 case MULT:
1861 case AND:
1862 case IOR:
1863 case XOR:
1864 case NE:
1865 case EQ:
1866 return ((expr_equiv_p (XEXP (x, 0), XEXP (y, 0))
1867 && expr_equiv_p (XEXP (x, 1), XEXP (y, 1)))
1868 || (expr_equiv_p (XEXP (x, 0), XEXP (y, 1))
1869 && expr_equiv_p (XEXP (x, 1), XEXP (y, 0))));
1871 case ASM_OPERANDS:
1872 /* We don't use the generic code below because we want to
1873 disregard filename and line numbers. */
1875 /* A volatile asm isn't equivalent to any other. */
1876 if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
1877 return 0;
1879 if (GET_MODE (x) != GET_MODE (y)
1880 || strcmp (ASM_OPERANDS_TEMPLATE (x), ASM_OPERANDS_TEMPLATE (y))
1881 || strcmp (ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
1882 ASM_OPERANDS_OUTPUT_CONSTRAINT (y))
1883 || ASM_OPERANDS_OUTPUT_IDX (x) != ASM_OPERANDS_OUTPUT_IDX (y)
1884 || ASM_OPERANDS_INPUT_LENGTH (x) != ASM_OPERANDS_INPUT_LENGTH (y))
1885 return 0;
1887 if (ASM_OPERANDS_INPUT_LENGTH (x))
1889 for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
1890 if (! expr_equiv_p (ASM_OPERANDS_INPUT (x, i),
1891 ASM_OPERANDS_INPUT (y, i))
1892 || strcmp (ASM_OPERANDS_INPUT_CONSTRAINT (x, i),
1893 ASM_OPERANDS_INPUT_CONSTRAINT (y, i)))
1894 return 0;
1897 return 1;
1899 default:
1900 break;
1903 /* Compare the elements. If any pair of corresponding elements
1904 fail to match, return 0 for the whole thing. */
1906 fmt = GET_RTX_FORMAT (code);
1907 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1909 switch (fmt[i])
1911 case 'e':
1912 if (! expr_equiv_p (XEXP (x, i), XEXP (y, i)))
1913 return 0;
1914 break;
1916 case 'E':
1917 if (XVECLEN (x, i) != XVECLEN (y, i))
1918 return 0;
1919 for (j = 0; j < XVECLEN (x, i); j++)
1920 if (! expr_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j)))
1921 return 0;
1922 break;
1924 case 's':
1925 if (strcmp (XSTR (x, i), XSTR (y, i)))
1926 return 0;
1927 break;
1929 case 'i':
1930 if (XINT (x, i) != XINT (y, i))
1931 return 0;
1932 break;
1934 case 'w':
1935 if (XWINT (x, i) != XWINT (y, i))
1936 return 0;
1937 break;
1939 case '0':
1940 break;
1942 default:
1943 abort ();
1947 return 1;
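/* For illustration only (not part of the compiler): the
   commutative-operator rule from expr_equiv_p above, restated on a toy
   expression type.  For PLUS-like codes the operands are compared in
   both orders, so (plus a b) and (plus b a) are equivalent.  The toy_*
   names are made up for the example.  */
#if 0
enum toy_code { TOY_REG, TOY_PLUS };
struct toy_expr
{
  enum toy_code code;
  int regno;			/* valid for TOY_REG */
  struct toy_expr *op0, *op1;	/* valid for TOY_PLUS */
};

static int
toy_equiv_p (const struct toy_expr *x, const struct toy_expr *y)
{
  if (x == y)
    return 1;
  if (x == 0 || y == 0 || x->code != y->code)
    return 0;
  if (x->code == TOY_REG)
    return x->regno == y->regno;
  /* TOY_PLUS: check both operand orders.  */
  return ((toy_equiv_p (x->op0, y->op0) && toy_equiv_p (x->op1, y->op1))
	  || (toy_equiv_p (x->op0, y->op1) && toy_equiv_p (x->op1, y->op0)));
}
#endif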
1950 /* Insert expression X in INSN in the hash TABLE.
1951 If it is already present, record it as the last occurrence in INSN's
1952 basic block.
1954 MODE is the mode of the value X is being stored into.
1955 It is only used if X is a CONST_INT.
1957 ANTIC_P is nonzero if X is an anticipatable expression.
1958 AVAIL_P is nonzero if X is an available expression. */
1960 static void
1961 insert_expr_in_table (x, mode, insn, antic_p, avail_p, table)
1962 rtx x;
1963 enum machine_mode mode;
1964 rtx insn;
1965 int antic_p, avail_p;
1966 struct hash_table *table;
1968 int found, do_not_record_p;
1969 unsigned int hash;
1970 struct expr *cur_expr, *last_expr = NULL;
1971 struct occr *antic_occr, *avail_occr;
1972 struct occr *last_occr = NULL;
1974 hash = hash_expr (x, mode, &do_not_record_p, table->size);
1976 /* Do not insert the expression in the table if it contains volatile
1977 operands, or if hash_expr determines that the expression is something
1978 we don't want to, or can't, handle. */
1979 if (do_not_record_p)
1980 return;
1982 cur_expr = table->table[hash];
1983 found = 0;
1985 while (cur_expr && 0 == (found = expr_equiv_p (cur_expr->expr, x)))
1987 /* If the expression isn't found, save a pointer to the end of
1988 the list. */
1989 last_expr = cur_expr;
1990 cur_expr = cur_expr->next_same_hash;
1993 if (! found)
1995 cur_expr = (struct expr *) gcse_alloc (sizeof (struct expr));
1996 bytes_used += sizeof (struct expr);
1997 if (table->table[hash] == NULL)
1998 /* This is the first pattern that hashed to this index. */
1999 table->table[hash] = cur_expr;
2000 else
2001 /* Add EXPR to end of this hash chain. */
2002 last_expr->next_same_hash = cur_expr;
2004 /* Set the fields of the expr element. */
2005 cur_expr->expr = x;
2006 cur_expr->bitmap_index = table->n_elems++;
2007 cur_expr->next_same_hash = NULL;
2008 cur_expr->antic_occr = NULL;
2009 cur_expr->avail_occr = NULL;
2012 /* Now record the occurrence(s). */
2013 if (antic_p)
2015 antic_occr = cur_expr->antic_occr;
2017 /* Search for another occurrence in the same basic block. */
2018 while (antic_occr && BLOCK_NUM (antic_occr->insn) != BLOCK_NUM (insn))
2020 /* If an occurrence isn't found, save a pointer to the end of
2021 the list. */
2022 last_occr = antic_occr;
2023 antic_occr = antic_occr->next;
2026 if (antic_occr)
2027 /* Found another instance of the expression in the same basic block.
2028 Prefer the currently recorded one. We want the first one in the
2029 block and the block is scanned from start to end. */
2030 ; /* nothing to do */
2031 else
2033 /* First occurrence of this expression in this basic block. */
2034 antic_occr = (struct occr *) gcse_alloc (sizeof (struct occr));
2035 bytes_used += sizeof (struct occr);
2036 /* First occurrence of this expression in any block? */
2037 if (cur_expr->antic_occr == NULL)
2038 cur_expr->antic_occr = antic_occr;
2039 else
2040 last_occr->next = antic_occr;
2042 antic_occr->insn = insn;
2043 antic_occr->next = NULL;
2047 if (avail_p)
2049 avail_occr = cur_expr->avail_occr;
2051 /* Search for another occurrence in the same basic block. */
2052 while (avail_occr && BLOCK_NUM (avail_occr->insn) != BLOCK_NUM (insn))
2054 /* If an occurrence isn't found, save a pointer to the end of
2055 the list. */
2056 last_occr = avail_occr;
2057 avail_occr = avail_occr->next;
2060 if (avail_occr)
2061 /* Found another instance of the expression in the same basic block.
2062 Prefer this occurrence to the currently recorded one. We want
2063 the last one in the block and the block is scanned from start
2064 to end. */
2065 avail_occr->insn = insn;
2066 else
2068 /* First occurrence of this expression in this basic block. */
2069 avail_occr = (struct occr *) gcse_alloc (sizeof (struct occr));
2070 bytes_used += sizeof (struct occr);
2072 /* First occurrence of this expression in any block? */
2073 if (cur_expr->avail_occr == NULL)
2074 cur_expr->avail_occr = avail_occr;
2075 else
2076 last_occr->next = avail_occr;
2078 avail_occr->insn = insn;
2079 avail_occr->next = NULL;
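/* For illustration only (not part of the compiler): the per-block
   occurrence policy used above.  Scanning a block from start to end, we
   keep the FIRST occurrence for anticipatability and the LAST one for
   availability, which is why the available case overwrites the recorded
   insn while the anticipatable case leaves it alone.  Blocks and insns
   are plain ints here; the toy_* names are made up.  */
#if 0
#include <stdlib.h>

struct toy_occr { int block; int insn; struct toy_occr *next; };

static void
toy_record_occr (struct toy_occr **head, int block, int insn, int want_last)
{
  struct toy_occr *o = *head, *prev = 0;

  while (o && o->block != block)
    {
      prev = o;
      o = o->next;
    }

  if (o)
    {
      if (want_last)
	o->insn = insn;		/* availability: prefer the later insn */
      return;			/* anticipatability: keep the earlier one */
    }

  o = (struct toy_occr *) calloc (1, sizeof *o);
  o->block = block;
  o->insn = insn;
  if (prev)
    prev->next = o;
  else
    *head = o;
}
#endif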
2084 /* Insert pattern X in INSN in the hash table.
2085 X is a SET of a reg to either another reg or a constant.
2086 If it is already present, record it as the last occurrence in INSN's
2087 basic block. */
2089 static void
2090 insert_set_in_table (x, insn, table)
2091 rtx x;
2092 rtx insn;
2093 struct hash_table *table;
2095 int found;
2096 unsigned int hash;
2097 struct expr *cur_expr, *last_expr = NULL;
2098 struct occr *cur_occr, *last_occr = NULL;
2100 if (GET_CODE (x) != SET
2101 || GET_CODE (SET_DEST (x)) != REG)
2102 abort ();
2104 hash = hash_set (REGNO (SET_DEST (x)), table->size);
2106 cur_expr = table->table[hash];
2107 found = 0;
2109 while (cur_expr && 0 == (found = expr_equiv_p (cur_expr->expr, x)))
2111 /* If the expression isn't found, save a pointer to the end of
2112 the list. */
2113 last_expr = cur_expr;
2114 cur_expr = cur_expr->next_same_hash;
2117 if (! found)
2119 cur_expr = (struct expr *) gcse_alloc (sizeof (struct expr));
2120 bytes_used += sizeof (struct expr);
2121 if (table->table[hash] == NULL)
2122 /* This is the first pattern that hashed to this index. */
2123 table->table[hash] = cur_expr;
2124 else
2125 /* Add EXPR to end of this hash chain. */
2126 last_expr->next_same_hash = cur_expr;
2128 /* Set the fields of the expr element.
2129 We must copy X because it can be modified when copy propagation is
2130 performed on its operands. */
2131 cur_expr->expr = copy_rtx (x);
2132 cur_expr->bitmap_index = table->n_elems++;
2133 cur_expr->next_same_hash = NULL;
2134 cur_expr->antic_occr = NULL;
2135 cur_expr->avail_occr = NULL;
2138 /* Now record the occurrence. */
2139 cur_occr = cur_expr->avail_occr;
2141 /* Search for another occurrence in the same basic block. */
2142 while (cur_occr && BLOCK_NUM (cur_occr->insn) != BLOCK_NUM (insn))
2144 /* If an occurrence isn't found, save a pointer to the end of
2145 the list. */
2146 last_occr = cur_occr;
2147 cur_occr = cur_occr->next;
2150 if (cur_occr)
2151 /* Found another instance of the expression in the same basic block.
2152 Prefer this occurrence to the currently recorded one. We want the
2153 last one in the block and the block is scanned from start to end. */
2154 cur_occr->insn = insn;
2155 else
2157 /* First occurrence of this expression in this basic block. */
2158 cur_occr = (struct occr *) gcse_alloc (sizeof (struct occr));
2159 bytes_used += sizeof (struct occr);
2161 /* First occurrence of this expression in any block? */
2162 if (cur_expr->avail_occr == NULL)
2163 cur_expr->avail_occr = cur_occr;
2164 else
2165 last_occr->next = cur_occr;
2167 cur_occr->insn = insn;
2168 cur_occr->next = NULL;
2172 /* Determine whether the rtx X should be treated as a constant for
2173 the purposes of GCSE's constant propagation. */
2175 static bool
2176 gcse_constant_p (x)
2177 rtx x;
2179 /* Consider a COMPARE of two integers constant. */
2180 if (GET_CODE (x) == COMPARE
2181 && GET_CODE (XEXP (x, 0)) == CONST_INT
2182 && GET_CODE (XEXP (x, 1)) == CONST_INT)
2183 return true;
2185 if (GET_CODE (x) == CONSTANT_P_RTX)
2186 return false;
2188 return CONSTANT_P (x);
2191 /* Scan pattern PAT of INSN and add an entry to the hash TABLE (either the
2192 set table or the expression table). */
2194 static void
2195 hash_scan_set (pat, insn, table)
2196 rtx pat, insn;
2197 struct hash_table *table;
2199 rtx src = SET_SRC (pat);
2200 rtx dest = SET_DEST (pat);
2201 rtx note;
2203 if (GET_CODE (src) == CALL)
2204 hash_scan_call (src, insn, table);
2206 else if (GET_CODE (dest) == REG)
2208 unsigned int regno = REGNO (dest);
2209 rtx tmp;
2211 /* If this is a single set and we are doing constant propagation,
2212 see if a REG_NOTE shows this equivalent to a constant. */
2213 if (table->set_p && (note = find_reg_equal_equiv_note (insn)) != 0
2214 && gcse_constant_p (XEXP (note, 0)))
2215 src = XEXP (note, 0), pat = gen_rtx_SET (VOIDmode, dest, src);
2217 /* Only record sets of pseudo-regs in the hash table. */
2218 if (! table->set_p
2219 && regno >= FIRST_PSEUDO_REGISTER
2220 /* Don't GCSE something if we can't do a reg/reg copy. */
2221 && can_copy_p (GET_MODE (dest))
2222 /* GCSE commonly inserts instructions after the insn. We can't
2223 do that easily for insns with EH_REGION notes, so disable
2224 GCSE on these for now. */
2225 && !find_reg_note (insn, REG_EH_REGION, NULL_RTX)
2226 /* Is SET_SRC something we want to gcse? */
2227 && want_to_gcse_p (src)
2228 /* Don't CSE a nop. */
2229 && ! set_noop_p (pat)
2230 /* Don't GCSE if it has an attached REG_EQUIV note.
2231 At this point only function parameters should have
2232 REG_EQUIV notes, and if the argument slot is used somewhere
2233 explicitly, it means the address of the parameter has been
2234 taken, so we should not extend the lifetime of the pseudo. */
2235 && ((note = find_reg_note (insn, REG_EQUIV, NULL_RTX)) == 0
2236 || GET_CODE (XEXP (note, 0)) != MEM))
2238 /* An expression is not anticipatable if its operands are
2239 modified before this insn or if this is not the only SET in
2240 this insn. */
2241 int antic_p = oprs_anticipatable_p (src, insn) && single_set (insn);
2242 /* An expression is not available if its operands are
2243 subsequently modified, including this insn. It's also not
2244 available if this is a branch, because we can't insert
2245 a set after the branch. */
2246 int avail_p = (oprs_available_p (src, insn)
2247 && ! JUMP_P (insn));
2249 insert_expr_in_table (src, GET_MODE (dest), insn, antic_p, avail_p, table);
2252 /* Record sets for constant/copy propagation. */
2253 else if (table->set_p
2254 && regno >= FIRST_PSEUDO_REGISTER
2255 && ((GET_CODE (src) == REG
2256 && REGNO (src) >= FIRST_PSEUDO_REGISTER
2257 && can_copy_p (GET_MODE (dest))
2258 && REGNO (src) != regno)
2259 || gcse_constant_p (src))
2260 /* A copy is not available if its src or dest is subsequently
2261 modified. Here we want to search from INSN+1 on, but
2262 oprs_available_p searches from INSN on. */
2263 && (insn == BLOCK_END (BLOCK_NUM (insn))
2264 || ((tmp = next_nonnote_insn (insn)) != NULL_RTX
2265 && oprs_available_p (pat, tmp))))
2266 insert_set_in_table (pat, insn, table);
2270 static void
2271 hash_scan_clobber (x, insn, table)
2272 rtx x ATTRIBUTE_UNUSED, insn ATTRIBUTE_UNUSED;
2273 struct hash_table *table ATTRIBUTE_UNUSED;
2275 /* Currently nothing to do. */
2278 static void
2279 hash_scan_call (x, insn, table)
2280 rtx x ATTRIBUTE_UNUSED, insn ATTRIBUTE_UNUSED;
2281 struct hash_table *table ATTRIBUTE_UNUSED;
2283 /* Currently nothing to do. */
2286 /* Process INSN and add hash table entries as appropriate.
2288 Only available expressions that set a single pseudo-reg are recorded.
2290 Single sets in a PARALLEL could be handled, but it's an extra complication
2291 that isn't dealt with right now. The trick is handling the CLOBBERs that
2292 are also in the PARALLEL. Later.
2294 If SET_P is nonzero, this is for the assignment hash table,
2295 otherwise it is for the expression hash table.
2296 If IN_LIBCALL_BLOCK is nonzero, we are in a libcall block and should
2297 not record any expressions. */
2299 static void
2300 hash_scan_insn (insn, table, in_libcall_block)
2301 rtx insn;
2302 struct hash_table *table;
2303 int in_libcall_block;
2305 rtx pat = PATTERN (insn);
2306 int i;
2308 if (in_libcall_block)
2309 return;
2311 /* Pick out the sets of INSN and for other forms of instructions record
2312 what's been modified. */
2314 if (GET_CODE (pat) == SET)
2315 hash_scan_set (pat, insn, table);
2316 else if (GET_CODE (pat) == PARALLEL)
2317 for (i = 0; i < XVECLEN (pat, 0); i++)
2319 rtx x = XVECEXP (pat, 0, i);
2321 if (GET_CODE (x) == SET)
2322 hash_scan_set (x, insn, table);
2323 else if (GET_CODE (x) == CLOBBER)
2324 hash_scan_clobber (x, insn, table);
2325 else if (GET_CODE (x) == CALL)
2326 hash_scan_call (x, insn, table);
2329 else if (GET_CODE (pat) == CLOBBER)
2330 hash_scan_clobber (pat, insn, table);
2331 else if (GET_CODE (pat) == CALL)
2332 hash_scan_call (pat, insn, table);
2335 static void
2336 dump_hash_table (file, name, table)
2337 FILE *file;
2338 const char *name;
2339 struct hash_table *table;
2341 int i;
2342 /* Flattened out table, so it's printed in proper order. */
2343 struct expr **flat_table;
2344 unsigned int *hash_val;
2345 struct expr *expr;
2347 flat_table
2348 = (struct expr **) xcalloc (table->n_elems, sizeof (struct expr *));
2349 hash_val = (unsigned int *) xmalloc (table->n_elems * sizeof (unsigned int));
2351 for (i = 0; i < (int) table->size; i++)
2352 for (expr = table->table[i]; expr != NULL; expr = expr->next_same_hash)
2354 flat_table[expr->bitmap_index] = expr;
2355 hash_val[expr->bitmap_index] = i;
2358 fprintf (file, "%s hash table (%d buckets, %d entries)\n",
2359 name, table->size, table->n_elems);
2361 for (i = 0; i < (int) table->n_elems; i++)
2362 if (flat_table[i] != 0)
2364 expr = flat_table[i];
2365 fprintf (file, "Index %d (hash value %d)\n ",
2366 expr->bitmap_index, hash_val[i]);
2367 print_rtl (file, expr->expr);
2368 fprintf (file, "\n");
2371 fprintf (file, "\n");
2373 free (flat_table);
2374 free (hash_val);
2377 /* Record register first/last/block set information for REGNO in INSN.
2379 first_set records the first place in the block where the register
2380 is set and is used to compute "anticipatability".
2382 last_set records the last place in the block where the register
2383 is set and is used to compute "availability".
2385 last_bb records the block for which first_set and last_set are
2386 valid, as a quick test to invalidate them.
2388 reg_set_in_block records whether the register is set in the block
2389 and is used to compute "transparency". */
2391 static void
2392 record_last_reg_set_info (insn, regno)
2393 rtx insn;
2394 int regno;
2396 struct reg_avail_info *info = &reg_avail_info[regno];
2397 int cuid = INSN_CUID (insn);
2399 info->last_set = cuid;
2400 if (info->last_bb != current_bb)
2402 info->last_bb = current_bb;
2403 info->first_set = cuid;
2404 SET_BIT (reg_set_in_block[current_bb->index], regno);
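/* For illustration only (not part of the compiler): the lazy
   invalidation trick used by record_last_reg_set_info above.  LAST_BB
   tags the block for which FIRST_SET and LAST_SET are valid, so no
   clearing pass over every register is needed between blocks.  Blocks
   and insn ordinals are plain ints; the toy_* names are made up.  */
#if 0
struct toy_avail { int first_set, last_set, last_bb; };

static void
toy_record_set (struct toy_avail *info, int bb, int cuid)
{
  info->last_set = cuid;	/* the last set always advances */
  if (info->last_bb != bb)
    {
      /* First set of this register we have seen in block BB.  */
      info->last_bb = bb;
      info->first_set = cuid;
    }
}
#endif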
2409 /* Record all of the canonicalized MEMs of record_last_mem_set_info's insn.
2410 Note we store a pair of elements in the list, so they have to be
2411 taken off pairwise. */
2413 static void
2414 canon_list_insert (dest, unused1, v_insn)
2415 rtx dest ATTRIBUTE_UNUSED;
2416 rtx unused1 ATTRIBUTE_UNUSED;
2417 void * v_insn;
2419 rtx dest_addr, insn;
2420 int bb;
2422 while (GET_CODE (dest) == SUBREG
2423 || GET_CODE (dest) == ZERO_EXTRACT
2424 || GET_CODE (dest) == SIGN_EXTRACT
2425 || GET_CODE (dest) == STRICT_LOW_PART)
2426 dest = XEXP (dest, 0);
2428 /* If DEST is not a MEM, then it will not conflict with a load. Note
2429 that function calls are assumed to clobber memory, but are handled
2430 elsewhere. */
2432 if (GET_CODE (dest) != MEM)
2433 return;
2435 dest_addr = get_addr (XEXP (dest, 0));
2436 dest_addr = canon_rtx (dest_addr);
2437 insn = (rtx) v_insn;
2438 bb = BLOCK_NUM (insn);
2440 canon_modify_mem_list[bb] =
2441 alloc_EXPR_LIST (VOIDmode, dest_addr, canon_modify_mem_list[bb]);
2442 canon_modify_mem_list[bb] =
2443 alloc_EXPR_LIST (VOIDmode, dest, canon_modify_mem_list[bb]);
2444 bitmap_set_bit (canon_modify_mem_list_set, bb);
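/* For illustration only (not part of the compiler): consuming the
   pairwise list built above.  Each store contributes two adjacent
   nodes, the canonical MEM first and then its canonical address, so a
   reader pops them two at a time, as compute_transp does further down.
   (The real list can also hold a lone CALL_INSN node that ends the walk
   early; that case is omitted here.)  The toy_* names are made up.  */
#if 0
struct toy_list { void *car; struct toy_list *cdr; };

static void
toy_walk_pairs (struct toy_list *l, void (*fn) (void *, void *))
{
  void *mem, *addr;

  while (l)
    {
      mem = l->car;		/* the canonical MEM */
      l = l->cdr;
      addr = l->car;		/* its canonical address */
      l = l->cdr;
      fn (mem, addr);
    }
}
#endif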
2447 /* Record memory modification information for INSN. We do not actually care
2448 about the memory location(s) that are set, or even how they are set (consider
2449 a CALL_INSN). We merely need to record which insns modify memory. */
2451 static void
2452 record_last_mem_set_info (insn)
2453 rtx insn;
2455 int bb = BLOCK_NUM (insn);
2457 /* load_killed_in_block_p will handle the case of calls clobbering
2458 everything. */
2459 modify_mem_list[bb] = alloc_INSN_LIST (insn, modify_mem_list[bb]);
2460 bitmap_set_bit (modify_mem_list_set, bb);
2462 if (GET_CODE (insn) == CALL_INSN)
2464 /* Note that traversals of this loop (other than for free-ing)
2465 will break after encountering a CALL_INSN. So, there's no
2466 need to insert a pair of items, as canon_list_insert does. */
2467 canon_modify_mem_list[bb] =
2468 alloc_INSN_LIST (insn, canon_modify_mem_list[bb]);
2469 bitmap_set_bit (canon_modify_mem_list_set, bb);
2471 else
2472 note_stores (PATTERN (insn), canon_list_insert, (void*) insn);
2475 /* Called from compute_hash_table via note_stores to handle one
2476 SET or CLOBBER in an insn. DATA is really the instruction in which
2477 the SET is taking place. */
2479 static void
2480 record_last_set_info (dest, setter, data)
2481 rtx dest, setter ATTRIBUTE_UNUSED;
2482 void *data;
2484 rtx last_set_insn = (rtx) data;
2486 if (GET_CODE (dest) == SUBREG)
2487 dest = SUBREG_REG (dest);
2489 if (GET_CODE (dest) == REG)
2490 record_last_reg_set_info (last_set_insn, REGNO (dest));
2491 else if (GET_CODE (dest) == MEM
2492 /* Ignore pushes, they clobber nothing. */
2493 && ! push_operand (dest, GET_MODE (dest)))
2494 record_last_mem_set_info (last_set_insn);
2497 /* Top level function to create an expression or assignment hash table.
2499 Expression entries are placed in the hash table if
2500 - they are of the form (set (pseudo-reg) src),
2501 - src is something we want to perform GCSE on,
2502 - none of the operands are subsequently modified in the block
2504 Assignment entries are placed in the hash table if
2505 - they are of the form (set (pseudo-reg) src),
2506 - src is something we want to perform const/copy propagation on,
2507 - none of the operands or target are subsequently modified in the block
2509 Currently src must be a pseudo-reg or a const_int.
2511 TABLE is the table computed. */
2513 static void
2514 compute_hash_table_work (table)
2515 struct hash_table *table;
2517 unsigned int i;
2519 /* While we compute the hash table we also compute a bit array of which
2520 registers are set in which blocks.
2521 ??? This isn't needed during const/copy propagation, but it's cheap to
2522 compute. Later. */
2523 sbitmap_vector_zero (reg_set_in_block, last_basic_block);
2525 /* Re-cache any INSN_LIST nodes we have allocated. */
2526 clear_modify_mem_tables ();
2527 /* Some working arrays used to track first and last set in each block. */
2528 reg_avail_info = (struct reg_avail_info*)
2529 gmalloc (max_gcse_regno * sizeof (struct reg_avail_info));
2531 for (i = 0; i < max_gcse_regno; ++i)
2532 reg_avail_info[i].last_bb = NULL;
2534 FOR_EACH_BB (current_bb)
2536 rtx insn;
2537 unsigned int regno;
2538 int in_libcall_block;
2540 /* First pass over the instructions records information used to
2541 determine when registers and memory are first and last set.
2542 ??? hard-reg reg_set_in_block computation
2543 could be moved to compute_sets since they currently don't change. */
2545 for (insn = current_bb->head;
2546 insn && insn != NEXT_INSN (current_bb->end);
2547 insn = NEXT_INSN (insn))
2549 if (! INSN_P (insn))
2550 continue;
2552 if (GET_CODE (insn) == CALL_INSN)
2554 bool clobbers_all = false;
2555 #ifdef NON_SAVING_SETJMP
2556 if (NON_SAVING_SETJMP
2557 && find_reg_note (insn, REG_SETJMP, NULL_RTX))
2558 clobbers_all = true;
2559 #endif
2561 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2562 if (clobbers_all
2563 || TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
2564 record_last_reg_set_info (insn, regno);
2566 mark_call (insn);
2569 note_stores (PATTERN (insn), record_last_set_info, insn);
2572 /* Insert implicit sets in the hash table. */
2573 if (table->set_p
2574 && implicit_sets[current_bb->index] != NULL_RTX)
2575 hash_scan_set (implicit_sets[current_bb->index],
2576 current_bb->head, table);
2578 /* The next pass builds the hash table. */
2580 for (insn = current_bb->head, in_libcall_block = 0;
2581 insn && insn != NEXT_INSN (current_bb->end);
2582 insn = NEXT_INSN (insn))
2583 if (INSN_P (insn))
2585 if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
2586 in_libcall_block = 1;
2587 else if (table->set_p && find_reg_note (insn, REG_RETVAL, NULL_RTX))
2588 in_libcall_block = 0;
2589 hash_scan_insn (insn, table, in_libcall_block);
2590 if (!table->set_p && find_reg_note (insn, REG_RETVAL, NULL_RTX))
2591 in_libcall_block = 0;
2595 free (reg_avail_info);
2596 reg_avail_info = NULL;
2599 /* Allocate space for the set/expr hash TABLE.
2600 N_INSNS is the number of instructions in the function.
2601 It is used to determine the number of buckets to use.
2602 SET_P determines whether set or expression table will
2603 be created. */
2605 static void
2606 alloc_hash_table (n_insns, table, set_p)
2607 int n_insns;
2608 struct hash_table *table;
2609 int set_p;
2611 int n;
2613 table->size = n_insns / 4;
2614 if (table->size < 11)
2615 table->size = 11;
2617 /* Attempt to maintain efficient use of hash table.
2618 Making it an odd number is simplest for now.
2619 ??? Later take some measurements. */
2620 table->size |= 1;
2621 n = table->size * sizeof (struct expr *);
2622 table->table = (struct expr **) gmalloc (n);
2623 table->set_p = set_p;
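/* For illustration only (not part of the compiler): the bucket-count
   rule used by alloc_hash_table above -- roughly one bucket per four
   insns, never fewer than 11, and forced odd so that the simple modulo
   hashing in hash_expr spreads entries a little better.  */
#if 0
static int
toy_bucket_count (int n_insns)
{
  int size = n_insns / 4;

  if (size < 11)
    size = 11;
  return size | 1;		/* make the count odd */
}
#endif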
2626 /* Free things allocated by alloc_hash_table. */
2628 static void
2629 free_hash_table (table)
2630 struct hash_table *table;
2632 free (table->table);
2635 /* Compute the hash TABLE for doing copy/const propagation or
2636 expression hash table. */
2638 static void
2639 compute_hash_table (table)
2640 struct hash_table *table;
2642 /* Initialize count of number of entries in hash table. */
2643 table->n_elems = 0;
2644 memset ((char *) table->table, 0,
2645 table->size * sizeof (struct expr *));
2647 compute_hash_table_work (table);
2650 /* Expression tracking support. */
2652 /* Lookup pattern PAT in the expression TABLE.
2653 The result is a pointer to the table entry, or NULL if not found. */
2655 static struct expr *
2656 lookup_expr (pat, table)
2657 rtx pat;
2658 struct hash_table *table;
2660 int do_not_record_p;
2661 unsigned int hash = hash_expr (pat, GET_MODE (pat), &do_not_record_p,
2662 table->size);
2663 struct expr *expr;
2665 if (do_not_record_p)
2666 return NULL;
2668 expr = table->table[hash];
2670 while (expr && ! expr_equiv_p (expr->expr, pat))
2671 expr = expr->next_same_hash;
2673 return expr;
2676 /* Lookup REGNO in the set TABLE. The result is a pointer to the
2677 table entry, or NULL if not found. */
2679 static struct expr *
2680 lookup_set (regno, table)
2681 unsigned int regno;
2682 struct hash_table *table;
2684 unsigned int hash = hash_set (regno, table->size);
2685 struct expr *expr;
2687 expr = table->table[hash];
2689 while (expr && REGNO (SET_DEST (expr->expr)) != regno)
2690 expr = expr->next_same_hash;
2692 return expr;
2695 /* Return the next entry for REGNO in list EXPR. */
2697 static struct expr *
2698 next_set (regno, expr)
2699 unsigned int regno;
2700 struct expr *expr;
2702 do
2703 expr = expr->next_same_hash;
2704 while (expr && REGNO (SET_DEST (expr->expr)) != regno);
2706 return expr;
2709 /* Like free_INSN_LIST_list or free_EXPR_LIST_list, except that the node
2710 types may be mixed. */
2712 static void
2713 free_insn_expr_list_list (listp)
2714 rtx *listp;
2716 rtx list, next;
2718 for (list = *listp; list ; list = next)
2720 next = XEXP (list, 1);
2721 if (GET_CODE (list) == EXPR_LIST)
2722 free_EXPR_LIST_node (list);
2723 else
2724 free_INSN_LIST_node (list);
2727 *listp = NULL;
2730 /* Clear canon_modify_mem_list and modify_mem_list tables. */
2731 static void
2732 clear_modify_mem_tables ()
2734 int i;
2736 EXECUTE_IF_SET_IN_BITMAP
2737 (modify_mem_list_set, 0, i, free_INSN_LIST_list (modify_mem_list + i));
2738 bitmap_clear (modify_mem_list_set);
2740 EXECUTE_IF_SET_IN_BITMAP
2741 (canon_modify_mem_list_set, 0, i,
2742 free_insn_expr_list_list (canon_modify_mem_list + i));
2743 bitmap_clear (canon_modify_mem_list_set);
2746 /* Release memory used by modify_mem_list_set and canon_modify_mem_list_set. */
2748 static void
2749 free_modify_mem_tables ()
2751 clear_modify_mem_tables ();
2752 free (modify_mem_list);
2753 free (canon_modify_mem_list);
2754 modify_mem_list = 0;
2755 canon_modify_mem_list = 0;
2758 /* Reset tables used to keep track of what's still available [since the
2759 start of the block]. */
2761 static void
2762 reset_opr_set_tables ()
2764 /* Maintain a bitmap of which regs have been set since beginning of
2765 the block. */
2766 CLEAR_REG_SET (reg_set_bitmap);
2768 /* Also keep a record of the last instruction to modify memory.
2769 For now this is very trivial: we only record whether any memory
2770 location has been modified. */
2771 clear_modify_mem_tables ();
2774 /* Return nonzero if the operands of X are not set before INSN in
2775 INSN's basic block. */
2777 static int
2778 oprs_not_set_p (x, insn)
2779 rtx x, insn;
2781 int i, j;
2782 enum rtx_code code;
2783 const char *fmt;
2785 if (x == 0)
2786 return 1;
2788 code = GET_CODE (x);
2789 switch (code)
2791 case PC:
2792 case CC0:
2793 case CONST:
2794 case CONST_INT:
2795 case CONST_DOUBLE:
2796 case CONST_VECTOR:
2797 case SYMBOL_REF:
2798 case LABEL_REF:
2799 case ADDR_VEC:
2800 case ADDR_DIFF_VEC:
2801 return 1;
2803 case MEM:
2804 if (load_killed_in_block_p (BLOCK_FOR_INSN (insn),
2805 INSN_CUID (insn), x, 0))
2806 return 0;
2807 else
2808 return oprs_not_set_p (XEXP (x, 0), insn);
2810 case REG:
2811 return ! REGNO_REG_SET_P (reg_set_bitmap, REGNO (x));
2813 default:
2814 break;
2817 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
2819 if (fmt[i] == 'e')
2821 /* If we are about to do the last recursive call
2822 needed at this level, change it into iteration.
2823 This function is called enough to be worth it. */
2824 if (i == 0)
2825 return oprs_not_set_p (XEXP (x, i), insn);
2827 if (! oprs_not_set_p (XEXP (x, i), insn))
2828 return 0;
2830 else if (fmt[i] == 'E')
2831 for (j = 0; j < XVECLEN (x, i); j++)
2832 if (! oprs_not_set_p (XVECEXP (x, i, j), insn))
2833 return 0;
2836 return 1;
2839 /* Mark things set by a CALL. */
2841 static void
2842 mark_call (insn)
2843 rtx insn;
2845 if (! CONST_OR_PURE_CALL_P (insn))
2846 record_last_mem_set_info (insn);
2849 /* Mark things set by a SET. */
2851 static void
2852 mark_set (pat, insn)
2853 rtx pat, insn;
2855 rtx dest = SET_DEST (pat);
2857 while (GET_CODE (dest) == SUBREG
2858 || GET_CODE (dest) == ZERO_EXTRACT
2859 || GET_CODE (dest) == SIGN_EXTRACT
2860 || GET_CODE (dest) == STRICT_LOW_PART)
2861 dest = XEXP (dest, 0);
2863 if (GET_CODE (dest) == REG)
2864 SET_REGNO_REG_SET (reg_set_bitmap, REGNO (dest));
2865 else if (GET_CODE (dest) == MEM)
2866 record_last_mem_set_info (insn);
2868 if (GET_CODE (SET_SRC (pat)) == CALL)
2869 mark_call (insn);
2872 /* Record things set by a CLOBBER. */
2874 static void
2875 mark_clobber (pat, insn)
2876 rtx pat, insn;
2878 rtx clob = XEXP (pat, 0);
2880 while (GET_CODE (clob) == SUBREG || GET_CODE (clob) == STRICT_LOW_PART)
2881 clob = XEXP (clob, 0);
2883 if (GET_CODE (clob) == REG)
2884 SET_REGNO_REG_SET (reg_set_bitmap, REGNO (clob));
2885 else
2886 record_last_mem_set_info (insn);
2889 /* Record things set by INSN.
2890 This data is used by oprs_not_set_p. */
2892 static void
2893 mark_oprs_set (insn)
2894 rtx insn;
2896 rtx pat = PATTERN (insn);
2897 int i;
2899 if (GET_CODE (pat) == SET)
2900 mark_set (pat, insn);
2901 else if (GET_CODE (pat) == PARALLEL)
2902 for (i = 0; i < XVECLEN (pat, 0); i++)
2904 rtx x = XVECEXP (pat, 0, i);
2906 if (GET_CODE (x) == SET)
2907 mark_set (x, insn);
2908 else if (GET_CODE (x) == CLOBBER)
2909 mark_clobber (x, insn);
2910 else if (GET_CODE (x) == CALL)
2911 mark_call (insn);
2914 else if (GET_CODE (pat) == CLOBBER)
2915 mark_clobber (pat, insn);
2916 else if (GET_CODE (pat) == CALL)
2917 mark_call (insn);
2921 /* Classic GCSE reaching definition support. */
2923 /* Allocate reaching def variables. */
2925 static void
2926 alloc_rd_mem (n_blocks, n_insns)
2927 int n_blocks, n_insns;
2929 rd_kill = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_insns);
2930 sbitmap_vector_zero (rd_kill, n_blocks);
2932 rd_gen = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_insns);
2933 sbitmap_vector_zero (rd_gen, n_blocks);
2935 reaching_defs = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_insns);
2936 sbitmap_vector_zero (reaching_defs, n_blocks);
2938 rd_out = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_insns);
2939 sbitmap_vector_zero (rd_out, n_blocks);
2942 /* Free reaching def variables. */
2944 static void
2945 free_rd_mem ()
2947 sbitmap_vector_free (rd_kill);
2948 sbitmap_vector_free (rd_gen);
2949 sbitmap_vector_free (reaching_defs);
2950 sbitmap_vector_free (rd_out);
2953 /* Add INSN to the kills of BB. REGNO, set in BB, is killed by INSN. */
2955 static void
2956 handle_rd_kill_set (insn, regno, bb)
2957 rtx insn;
2958 int regno;
2959 basic_block bb;
2961 struct reg_set *this_reg;
2963 for (this_reg = reg_set_table[regno]; this_reg; this_reg = this_reg ->next)
2964 if (BLOCK_NUM (this_reg->insn) != BLOCK_NUM (insn))
2965 SET_BIT (rd_kill[bb->index], INSN_CUID (this_reg->insn));
2968 /* Compute the set of kill's for reaching definitions. */
2970 static void
2971 compute_kill_rd ()
2973 int cuid;
2974 unsigned int regno;
2975 int i;
2976 basic_block bb;
2978 /* For each block
2979 For each set bit in `gen' of the block (i.e. each insn which
2980 generates a definition in the block)
2981 Call the reg set by the insn corresponding to that bit regx
2982 Look at the linked list starting at reg_set_table[regx]
2983 For each setting of regx in the linked list, which is not in
2984 this block
2985 Set the bit in `kill' corresponding to that insn. */
2986 FOR_EACH_BB (bb)
2987 for (cuid = 0; cuid < max_cuid; cuid++)
2988 if (TEST_BIT (rd_gen[bb->index], cuid))
2990 rtx insn = CUID_INSN (cuid);
2991 rtx pat = PATTERN (insn);
2993 if (GET_CODE (insn) == CALL_INSN)
2995 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2996 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
2997 handle_rd_kill_set (insn, regno, bb);
3000 if (GET_CODE (pat) == PARALLEL)
3002 for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
3004 enum rtx_code code = GET_CODE (XVECEXP (pat, 0, i));
3006 if ((code == SET || code == CLOBBER)
3007 && GET_CODE (XEXP (XVECEXP (pat, 0, i), 0)) == REG)
3008 handle_rd_kill_set (insn,
3009 REGNO (XEXP (XVECEXP (pat, 0, i), 0)),
3010 bb);
3013 else if (GET_CODE (pat) == SET && GET_CODE (SET_DEST (pat)) == REG)
3014 /* Each setting of this register outside of this block
3015 must be marked in the set of kills in this block. */
3016 handle_rd_kill_set (insn, REGNO (SET_DEST (pat)), bb);
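/* For illustration only (not part of the compiler): the KILL rule
   implemented above, restated on a toy encoding in which definitions
   are numbered by cuid and DEF_BLOCK[d] gives the block containing
   definition d.  A definition of a register in block BB kills every
   definition of the same register made outside BB.  The toy_* names
   are made up.  */
#if 0
static void
toy_mark_kills (unsigned char **kill, const int *def_block,
		const int *defs_of_reg, int n_defs_of_reg, int bb)
{
  int i;

  for (i = 0; i < n_defs_of_reg; i++)
    if (def_block[defs_of_reg[i]] != bb)
      kill[bb][defs_of_reg[i]] = 1;	/* this def cannot reach through BB */
}
#endif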
3020 /* Compute the reaching definitions as in
3021 Compilers Principles, Techniques, and Tools. Aho, Sethi, Ullman,
3022 Chapter 10. It is the same algorithm as used for computing available
3023 expressions but applied to the gens and kills of reaching definitions. */
3025 static void
3026 compute_rd ()
3028 int changed, passes;
3029 basic_block bb;
3031 FOR_EACH_BB (bb)
3032 sbitmap_copy (rd_out[bb->index] /*dst*/, rd_gen[bb->index] /*src*/);
3034 passes = 0;
3035 changed = 1;
3036 while (changed)
3038 changed = 0;
3039 FOR_EACH_BB (bb)
3041 sbitmap_union_of_preds (reaching_defs[bb->index], rd_out, bb->index);
3042 changed |= sbitmap_union_of_diff_cg (rd_out[bb->index], rd_gen[bb->index],
3043 reaching_defs[bb->index], rd_kill[bb->index]);
3045 passes++;
3048 if (gcse_file)
3049 fprintf (gcse_file, "reaching def computation: %d passes\n", passes);
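/* For illustration only (not part of the compiler): the fixpoint
   iteration of compute_rd above, with each block's sets packed into a
   single unsigned long and PRED[b] a bitmask of b's predecessors (so at
   most 8 * sizeof (long) blocks and definitions -- the sbitmaps in the
   real code remove that limit).  The equations are

     in[b]  = union over predecessors p of out[p]
     out[b] = gen[b] | (in[b] & ~kill[b])

   iterated until nothing changes.  The toy_* names are made up.  */
#if 0
static void
toy_reaching_defs (int n_blocks, const unsigned long *pred,
		   const unsigned long *gen, const unsigned long *kill,
		   unsigned long *in, unsigned long *out)
{
  int b, p, changed = 1;

  for (b = 0; b < n_blocks; b++)
    {
      out[b] = gen[b];		/* same initialization as compute_rd */
      in[b] = 0;
    }

  while (changed)
    {
      changed = 0;
      for (b = 0; b < n_blocks; b++)
	{
	  unsigned long new_in = 0, new_out;

	  for (p = 0; p < n_blocks; p++)
	    if (pred[b] & (1UL << p))
	      new_in |= out[p];

	  new_out = gen[b] | (new_in & ~kill[b]);
	  if (new_out != out[b])
	    changed = 1;
	  in[b] = new_in;
	  out[b] = new_out;
	}
    }
}
#endif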
3052 /* Classic GCSE available expression support. */
3054 /* Allocate memory for available expression computation. */
3056 static void
3057 alloc_avail_expr_mem (n_blocks, n_exprs)
3058 int n_blocks, n_exprs;
3060 ae_kill = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_exprs);
3061 sbitmap_vector_zero (ae_kill, n_blocks);
3063 ae_gen = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_exprs);
3064 sbitmap_vector_zero (ae_gen, n_blocks);
3066 ae_in = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_exprs);
3067 sbitmap_vector_zero (ae_in, n_blocks);
3069 ae_out = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_exprs);
3070 sbitmap_vector_zero (ae_out, n_blocks);
3073 static void
3074 free_avail_expr_mem ()
3076 sbitmap_vector_free (ae_kill);
3077 sbitmap_vector_free (ae_gen);
3078 sbitmap_vector_free (ae_in);
3079 sbitmap_vector_free (ae_out);
3082 /* Compute the set of available expressions generated in each basic block. */
3084 static void
3085 compute_ae_gen (expr_hash_table)
3086 struct hash_table *expr_hash_table;
3088 unsigned int i;
3089 struct expr *expr;
3090 struct occr *occr;
3092 /* For each recorded occurrence of each expression, set ae_gen[bb][expr].
3093 This is all we have to do because an expression is not recorded if it
3094 is not available, and the only expressions we want to work with are the
3095 ones that are recorded. */
3096 for (i = 0; i < expr_hash_table->size; i++)
3097 for (expr = expr_hash_table->table[i]; expr != 0; expr = expr->next_same_hash)
3098 for (occr = expr->avail_occr; occr != 0; occr = occr->next)
3099 SET_BIT (ae_gen[BLOCK_NUM (occr->insn)], expr->bitmap_index);
3102 /* Return nonzero if expression X is killed in BB. */
3104 static int
3105 expr_killed_p (x, bb)
3106 rtx x;
3107 basic_block bb;
3109 int i, j;
3110 enum rtx_code code;
3111 const char *fmt;
3113 if (x == 0)
3114 return 1;
3116 code = GET_CODE (x);
3117 switch (code)
3119 case REG:
3120 return TEST_BIT (reg_set_in_block[bb->index], REGNO (x));
3122 case MEM:
3123 if (load_killed_in_block_p (bb, get_max_uid () + 1, x, 0))
3124 return 1;
3125 else
3126 return expr_killed_p (XEXP (x, 0), bb);
3128 case PC:
3129 case CC0: /*FIXME*/
3130 case CONST:
3131 case CONST_INT:
3132 case CONST_DOUBLE:
3133 case CONST_VECTOR:
3134 case SYMBOL_REF:
3135 case LABEL_REF:
3136 case ADDR_VEC:
3137 case ADDR_DIFF_VEC:
3138 return 0;
3140 default:
3141 break;
3144 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
3146 if (fmt[i] == 'e')
3148 /* If we are about to do the last recursive call
3149 needed at this level, change it into iteration.
3150 This function is called enough to be worth it. */
3151 if (i == 0)
3152 return expr_killed_p (XEXP (x, i), bb);
3153 else if (expr_killed_p (XEXP (x, i), bb))
3154 return 1;
3156 else if (fmt[i] == 'E')
3157 for (j = 0; j < XVECLEN (x, i); j++)
3158 if (expr_killed_p (XVECEXP (x, i, j), bb))
3159 return 1;
3162 return 0;
3165 /* Compute the set of available expressions killed in each basic block. */
3167 static void
3168 compute_ae_kill (ae_gen, ae_kill, expr_hash_table)
3169 sbitmap *ae_gen, *ae_kill;
3170 struct hash_table *expr_hash_table;
3172 basic_block bb;
3173 unsigned int i;
3174 struct expr *expr;
3176 FOR_EACH_BB (bb)
3177 for (i = 0; i < expr_hash_table->size; i++)
3178 for (expr = expr_hash_table->table[i]; expr; expr = expr->next_same_hash)
3180 /* Skip EXPR if generated in this block. */
3181 if (TEST_BIT (ae_gen[bb->index], expr->bitmap_index))
3182 continue;
3184 if (expr_killed_p (expr->expr, bb))
3185 SET_BIT (ae_kill[bb->index], expr->bitmap_index);
3189 /* Actually perform the Classic GCSE optimizations. */
3191 /* Return nonzero if occurrence OCCR of expression EXPR reaches block BB.
3193 CHECK_SELF_LOOP is nonzero if we should consider a block reaching itself
3194 as a positive reach. We want to do this when there are two computations
3195 of the expression in the block.
3197 VISITED is a pointer to a working buffer for tracking which BB's have
3198 been visited. It is NULL for the top-level call.
3200 We treat reaching expressions that go through blocks containing the same
3201 reaching expression as "not reaching". E.g. if EXPR is generated in blocks
3202 2 and 3, INSN is in block 4, and 2->3->4, we treat the expression in block
3203 2 as not reaching. The intent is to improve the probability of finding
3204 only one reaching expression and to reduce register lifetimes by picking
3205 the closest such expression. */
3207 static int
3208 expr_reaches_here_p_work (occr, expr, bb, check_self_loop, visited)
3209 struct occr *occr;
3210 struct expr *expr;
3211 basic_block bb;
3212 int check_self_loop;
3213 char *visited;
3215 edge pred;
3217 for (pred = bb->pred; pred != NULL; pred = pred->pred_next)
3219 basic_block pred_bb = pred->src;
3221 if (visited[pred_bb->index])
3222 /* This predecessor has already been visited. Nothing to do. */
3223 ;
3224 else if (pred_bb == bb)
3226 /* BB loops on itself. */
3227 if (check_self_loop
3228 && TEST_BIT (ae_gen[pred_bb->index], expr->bitmap_index)
3229 && BLOCK_NUM (occr->insn) == pred_bb->index)
3230 return 1;
3232 visited[pred_bb->index] = 1;
3235 /* Ignore this predecessor if it kills the expression. */
3236 else if (TEST_BIT (ae_kill[pred_bb->index], expr->bitmap_index))
3237 visited[pred_bb->index] = 1;
3239 /* Does this predecessor generate this expression? */
3240 else if (TEST_BIT (ae_gen[pred_bb->index], expr->bitmap_index))
3242 /* Is this the occurrence we're looking for?
3243 Note that there's only one generating occurrence per block
3244 so we just need to check the block number. */
3245 if (BLOCK_NUM (occr->insn) == pred_bb->index)
3246 return 1;
3248 visited[pred_bb->index] = 1;
3251 /* Neither gen nor kill. */
3252 else
3254 visited[pred_bb->index] = 1;
3255 if (expr_reaches_here_p_work (occr, expr, pred_bb, check_self_loop,
3256 visited))
3258 return 1;
3262 /* All paths have been checked. */
3263 return 0;
3266 /* This wrapper for expr_reaches_here_p_work() is to ensure that any
3267 memory allocated for that function is returned. */
3269 static int
3270 expr_reaches_here_p (occr, expr, bb, check_self_loop)
3271 struct occr *occr;
3272 struct expr *expr;
3273 basic_block bb;
3274 int check_self_loop;
3276 int rval;
3277 char *visited = (char *) xcalloc (last_basic_block, 1);
3279 rval = expr_reaches_here_p_work (occr, expr, bb, check_self_loop, visited);
3281 free (visited);
3282 return rval;
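/* For illustration only (not part of the compiler): the backward walk
   of expr_reaches_here_p_work above, on a toy CFG.  Starting from BB we
   follow predecessor edges; a block that kills the expression blocks
   that path, a block that generates it ends the path (successfully only
   if it is the target occurrence's block), and VISITED bounds the work
   at one visit per block.  The toy_* names are made up.  */
#if 0
struct toy_bb { int index; int n_preds; struct toy_bb *pred[4]; };

static int
toy_reaches_p (const struct toy_bb *bb, const unsigned char *gen,
	       const unsigned char *kill, int target_bb, char *visited)
{
  int i;

  for (i = 0; i < bb->n_preds; i++)
    {
      const struct toy_bb *p = bb->pred[i];

      if (visited[p->index])
	continue;
      visited[p->index] = 1;

      if (kill[p->index])
	continue;		/* the expression is killed on this path */
      if (gen[p->index])
	{
	  /* A generating block ends the path: success only for the
	     occurrence we are looking for.  */
	  if (p->index == target_bb)
	    return 1;
	  continue;
	}
      if (toy_reaches_p (p, gen, kill, target_bb, visited))
	return 1;
    }
  return 0;			/* all paths checked */
}
#endif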
3285 /* Return the instruction that computes EXPR that reaches INSN's basic block.
3286 If there is more than one such instruction, return NULL.
3288 Called only by handle_avail_expr. */
3290 static rtx
3291 computing_insn (expr, insn)
3292 struct expr *expr;
3293 rtx insn;
3295 basic_block bb = BLOCK_FOR_INSN (insn);
3297 if (expr->avail_occr->next == NULL)
3299 if (BLOCK_FOR_INSN (expr->avail_occr->insn) == bb)
3300 /* The available expression is actually itself
3301 (i.e. a loop in the flow graph) so do nothing. */
3302 return NULL;
3304 /* (FIXME) This is the case where we found a pattern that was created
3305 by a substitution that took place. */
3306 return expr->avail_occr->insn;
3308 else
3310 /* Pattern is computed more than once.
3311 Search backwards from this insn to see how many of these
3312 computations actually reach this insn. */
3313 struct occr *occr;
3314 rtx insn_computes_expr = NULL;
3315 int can_reach = 0;
3317 for (occr = expr->avail_occr; occr != NULL; occr = occr->next)
3319 if (BLOCK_FOR_INSN (occr->insn) == bb)
3321 /* The expression is generated in this block.
3322 The only time we care about this is when the expression
3323 is generated later in the block [and thus there's a loop].
3324 We let the normal cse pass handle the other cases. */
3325 if (INSN_CUID (insn) < INSN_CUID (occr->insn)
3326 && expr_reaches_here_p (occr, expr, bb, 1))
3328 can_reach++;
3329 if (can_reach > 1)
3330 return NULL;
3332 insn_computes_expr = occr->insn;
3335 else if (expr_reaches_here_p (occr, expr, bb, 0))
3337 can_reach++;
3338 if (can_reach > 1)
3339 return NULL;
3341 insn_computes_expr = occr->insn;
3345 if (insn_computes_expr == NULL)
3346 abort ();
3348 return insn_computes_expr;
3352 /* Return nonzero if the definition in DEF_INSN can reach INSN.
3353 Only called by can_disregard_other_sets. */
3355 static int
3356 def_reaches_here_p (insn, def_insn)
3357 rtx insn, def_insn;
3359 rtx reg;
3361 if (TEST_BIT (reaching_defs[BLOCK_NUM (insn)], INSN_CUID (def_insn)))
3362 return 1;
3364 if (BLOCK_NUM (insn) == BLOCK_NUM (def_insn))
3366 if (INSN_CUID (def_insn) < INSN_CUID (insn))
3368 if (GET_CODE (PATTERN (def_insn)) == PARALLEL)
3369 return 1;
3370 else if (GET_CODE (PATTERN (def_insn)) == CLOBBER)
3371 reg = XEXP (PATTERN (def_insn), 0);
3372 else if (GET_CODE (PATTERN (def_insn)) == SET)
3373 reg = SET_DEST (PATTERN (def_insn));
3374 else
3375 abort ();
3377 return ! reg_set_between_p (reg, NEXT_INSN (def_insn), insn);
3379 else
3380 return 0;
3383 return 0;
3386 /* Return nonzero if *ADDR_THIS_REG can only have one value at INSN. The
3387 value returned is the number of definitions that reach INSN. Returning a
3388 value of zero means that [maybe] more than one definition reaches INSN and
3389 the caller can't perform whatever optimization it is trying; i.e. it is
3390 always safe to return zero. */
3392 static int
3393 can_disregard_other_sets (addr_this_reg, insn, for_combine)
3394 struct reg_set **addr_this_reg;
3395 rtx insn;
3396 int for_combine;
3398 int number_of_reaching_defs = 0;
3399 struct reg_set *this_reg;
3401 for (this_reg = *addr_this_reg; this_reg != 0; this_reg = this_reg->next)
3402 if (def_reaches_here_p (insn, this_reg->insn))
3404 number_of_reaching_defs++;
3405 /* Ignore parallels for now. */
3406 if (GET_CODE (PATTERN (this_reg->insn)) == PARALLEL)
3407 return 0;
3409 if (!for_combine
3410 && (GET_CODE (PATTERN (this_reg->insn)) == CLOBBER
3411 || ! rtx_equal_p (SET_SRC (PATTERN (this_reg->insn)),
3412 SET_SRC (PATTERN (insn)))))
3413 /* A setting of the reg to a different value reaches INSN. */
3414 return 0;
3416 if (number_of_reaching_defs > 1)
3418 /* If in this setting the value the register is being set to is
3419 equal to the previous value the register was set to, and this
3420 setting reaches the insn we are trying to do the substitution
3421 on, then we are OK. */
3422 if (GET_CODE (PATTERN (this_reg->insn)) == CLOBBER)
3423 return 0;
3424 else if (! rtx_equal_p (SET_SRC (PATTERN (this_reg->insn)),
3425 SET_SRC (PATTERN (insn))))
3426 return 0;
3429 *addr_this_reg = this_reg;
3432 return number_of_reaching_defs;
3435 /* The expression computed by INSN is available and the substitution is legal,
3436 so try to perform the substitution.
3438 The result is nonzero if any changes were made. */
3440 static int
3441 handle_avail_expr (insn, expr)
3442 rtx insn;
3443 struct expr *expr;
3445 rtx pat, insn_computes_expr, expr_set;
3446 rtx to;
3447 struct reg_set *this_reg;
3448 int found_setting, use_src;
3449 int changed = 0;
3451 /* We only handle the case where one computation of the expression
3452 reaches this instruction. */
3453 insn_computes_expr = computing_insn (expr, insn);
3454 if (insn_computes_expr == NULL)
3455 return 0;
3456 expr_set = single_set (insn_computes_expr);
3457 if (!expr_set)
3458 abort ();
3460 found_setting = 0;
3461 use_src = 0;
3463 /* At this point we know only one computation of EXPR outside of this
3464 block reaches this insn. Now try to find a register that the
3465 expression is computed into. */
3466 if (GET_CODE (SET_SRC (expr_set)) == REG)
3468 /* This is the case when the available expression that reaches
3469 here has already been handled as an available expression. */
3470 unsigned int regnum_for_replacing
3471 = REGNO (SET_SRC (expr_set));
3473 /* If the register was created by GCSE we can't use `reg_set_table',
3474 however we know it's set only once. */
3475 if (regnum_for_replacing >= max_gcse_regno
3476 /* If the register the expression is computed into is set only once,
3477 or only one set reaches this insn, we can use it. */
3478 || (((this_reg = reg_set_table[regnum_for_replacing]),
3479 this_reg->next == NULL)
3480 || can_disregard_other_sets (&this_reg, insn, 0)))
3482 use_src = 1;
3483 found_setting = 1;
3487 if (!found_setting)
3489 unsigned int regnum_for_replacing
3490 = REGNO (SET_DEST (expr_set));
3492 /* This shouldn't happen. */
3493 if (regnum_for_replacing >= max_gcse_regno)
3494 abort ();
3496 this_reg = reg_set_table[regnum_for_replacing];
3498 /* If the register the expression is computed into is set only once,
3499 or only one set reaches this insn, use it. */
3500 if (this_reg->next == NULL
3501 || can_disregard_other_sets (&this_reg, insn, 0))
3502 found_setting = 1;
3505 if (found_setting)
3507 pat = PATTERN (insn);
3508 if (use_src)
3509 to = SET_SRC (expr_set);
3510 else
3511 to = SET_DEST (expr_set);
3512 changed = validate_change (insn, &SET_SRC (pat), to, 0);
3514 /* We should be able to ignore the return code from validate_change but
3515 to play it safe we check. */
3516 if (changed)
3518 gcse_subst_count++;
3519 if (gcse_file != NULL)
3521 fprintf (gcse_file, "GCSE: Replacing the source in insn %d with",
3522 INSN_UID (insn));
3523 fprintf (gcse_file, " reg %d %s insn %d\n",
3524 REGNO (to), use_src ? "from" : "set in",
3525 INSN_UID (insn_computes_expr));
3530 /* The register that the expr is computed into is set more than once. */
3531 else if (1 /*expensive_op(this_pattrn->op) && do_expensive_gcse)*/)
3533 /* Insert an insn after INSN_COMPUTES_EXPR that copies the reg set
3534 there into a new pseudo register; call this new register REGN.
3535 From that point until the end of the basic block, or until the
3536 original register is set again, its uses can be replaced with REGN. */
3537 rtx new_insn;
3539 to = gen_reg_rtx (GET_MODE (SET_DEST (expr_set)));
3541 /* Generate the new insn. */
3542 /* ??? If the change fails, we return 0, even though we created
3543 an insn. I think this is ok. */
3544 new_insn
3545 = emit_insn_after (gen_rtx_SET (VOIDmode, to,
3546 SET_DEST (expr_set)),
3547 insn_computes_expr);
3549 /* Keep register set table up to date. */
3550 record_one_set (REGNO (to), new_insn);
3552 gcse_create_count++;
3553 if (gcse_file != NULL)
3555 fprintf (gcse_file, "GCSE: Creating insn %d to copy value of reg %d",
3556 INSN_UID (NEXT_INSN (insn_computes_expr)),
3557 REGNO (SET_SRC (PATTERN (NEXT_INSN (insn_computes_expr)))));
3558 fprintf (gcse_file, ", computed in insn %d,\n",
3559 INSN_UID (insn_computes_expr));
3560 fprintf (gcse_file, " into newly allocated reg %d\n",
3561 REGNO (to));
3564 pat = PATTERN (insn);
3566 /* Do register replacement for INSN. */
3567 changed = validate_change (insn, &SET_SRC (pat),
3568 SET_DEST (PATTERN
3569 (NEXT_INSN (insn_computes_expr))),
3570 0);
3572 /* We should be able to ignore the return code from validate_change but
3573 to play it safe we check. */
3574 if (changed)
3576 gcse_subst_count++;
3577 if (gcse_file != NULL)
3579 fprintf (gcse_file,
3580 "GCSE: Replacing the source in insn %d with reg %d ",
3581 INSN_UID (insn),
3582 REGNO (SET_DEST (PATTERN (NEXT_INSN
3583 (insn_computes_expr)))));
3584 fprintf (gcse_file, "set in insn %d\n",
3585 INSN_UID (insn_computes_expr));
3590 return changed;
3593 /* Perform classic GCSE. This is called by one_classic_gcse_pass after all
3594 the dataflow analysis has been done.
3596 The result is nonzero if a change was made. */
3598 static int
3599 classic_gcse ()
3601 int changed;
3602 rtx insn;
3603 basic_block bb;
3605 /* Note we start at block 1. */
3607 if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
3608 return 0;
3610 changed = 0;
3611 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb->next_bb, EXIT_BLOCK_PTR, next_bb)
3613 /* Reset tables used to keep track of what's still valid [since the
3614 start of the block]. */
3615 reset_opr_set_tables ();
3617 for (insn = bb->head;
3618 insn != NULL && insn != NEXT_INSN (bb->end);
3619 insn = NEXT_INSN (insn))
3621 /* Is insn of form (set (pseudo-reg) ...)? */
3622 if (GET_CODE (insn) == INSN
3623 && GET_CODE (PATTERN (insn)) == SET
3624 && GET_CODE (SET_DEST (PATTERN (insn))) == REG
3625 && REGNO (SET_DEST (PATTERN (insn))) >= FIRST_PSEUDO_REGISTER)
3627 rtx pat = PATTERN (insn);
3628 rtx src = SET_SRC (pat);
3629 struct expr *expr;
3631 if (want_to_gcse_p (src)
3632 /* Is the expression recorded? */
3633 && ((expr = lookup_expr (src, &expr_hash_table)) != NULL)
3634 /* Is the expression available [at the start of the
3635 block]? */
3636 && TEST_BIT (ae_in[bb->index], expr->bitmap_index)
3637 /* Are the operands unchanged since the start of the
3638 block? */
3639 && oprs_not_set_p (src, insn))
3640 changed |= handle_avail_expr (insn, expr);
3643 /* Keep track of everything modified by this insn. */
3644 /* ??? Need to be careful w.r.t. mods done to INSN. */
3645 if (INSN_P (insn))
3646 mark_oprs_set (insn);
3650 return changed;
3653 /* Top level routine to perform one classic GCSE pass.
3655 Return nonzero if a change was made. */
3657 static int
3658 one_classic_gcse_pass (pass)
3659 int pass;
3661 int changed = 0;
3663 gcse_subst_count = 0;
3664 gcse_create_count = 0;
3666 alloc_hash_table (max_cuid, &expr_hash_table, 0);
3667 alloc_rd_mem (last_basic_block, max_cuid);
3668 compute_hash_table (&expr_hash_table);
3669 if (gcse_file)
3670 dump_hash_table (gcse_file, "Expression", &expr_hash_table);
3672 if (expr_hash_table.n_elems > 0)
3674 compute_kill_rd ();
3675 compute_rd ();
3676 alloc_avail_expr_mem (last_basic_block, expr_hash_table.n_elems);
3677 compute_ae_gen (&expr_hash_table);
3678 compute_ae_kill (ae_gen, ae_kill, &expr_hash_table);
3679 compute_available (ae_gen, ae_kill, ae_out, ae_in);
3680 changed = classic_gcse ();
3681 free_avail_expr_mem ();
3684 free_rd_mem ();
3685 free_hash_table (&expr_hash_table);
3687 if (gcse_file)
3689 fprintf (gcse_file, "\n");
3690 fprintf (gcse_file, "GCSE of %s, pass %d: %d bytes needed, %d substs,",
3691 current_function_name, pass, bytes_used, gcse_subst_count);
3692 fprintf (gcse_file, "%d insns created\n", gcse_create_count);
3695 return changed;
3698 /* Compute copy/constant propagation working variables. */
3700 /* Local properties of assignments. */
3701 static sbitmap *cprop_pavloc;
3702 static sbitmap *cprop_absaltered;
3704 /* Global properties of assignments (computed from the local properties). */
3705 static sbitmap *cprop_avin;
3706 static sbitmap *cprop_avout;
3708 /* Allocate vars used for copy/const propagation. N_BLOCKS is the number of
3709 basic blocks. N_SETS is the number of sets. */
3711 static void
3712 alloc_cprop_mem (n_blocks, n_sets)
3713 int n_blocks, n_sets;
3715 cprop_pavloc = sbitmap_vector_alloc (n_blocks, n_sets);
3716 cprop_absaltered = sbitmap_vector_alloc (n_blocks, n_sets);
3718 cprop_avin = sbitmap_vector_alloc (n_blocks, n_sets);
3719 cprop_avout = sbitmap_vector_alloc (n_blocks, n_sets);
3722 /* Free vars used by copy/const propagation. */
3724 static void
3725 free_cprop_mem ()
3727 sbitmap_vector_free (cprop_pavloc);
3728 sbitmap_vector_free (cprop_absaltered);
3729 sbitmap_vector_free (cprop_avin);
3730 sbitmap_vector_free (cprop_avout);
3733 /* For each block, compute whether X is transparent. X is either an
3734 expression or an assignment [though we don't care which, for this context
3735 an assignment is treated as an expression]. For each block where an
3736 element of X is modified, set (SET_P == 1) or reset (SET_P == 0) the INDX
3737 bit in BMAP. */
3739 static void
3740 compute_transp (x, indx, bmap, set_p)
3741 rtx x;
3742 int indx;
3743 sbitmap *bmap;
3744 int set_p;
3746 int i, j;
3747 basic_block bb;
3748 enum rtx_code code;
3749 reg_set *r;
3750 const char *fmt;
3752 /* repeat is used to turn tail-recursion into iteration since GCC
3753 can't do it when there's no return value. */
3754 repeat:
3756 if (x == 0)
3757 return;
3759 code = GET_CODE (x);
3760 switch (code)
3762 case REG:
3763 if (set_p)
3765 if (REGNO (x) < FIRST_PSEUDO_REGISTER)
3767 FOR_EACH_BB (bb)
3768 if (TEST_BIT (reg_set_in_block[bb->index], REGNO (x)))
3769 SET_BIT (bmap[bb->index], indx);
3771 else
3773 for (r = reg_set_table[REGNO (x)]; r != NULL; r = r->next)
3774 SET_BIT (bmap[BLOCK_NUM (r->insn)], indx);
3777 else
3779 if (REGNO (x) < FIRST_PSEUDO_REGISTER)
3781 FOR_EACH_BB (bb)
3782 if (TEST_BIT (reg_set_in_block[bb->index], REGNO (x)))
3783 RESET_BIT (bmap[bb->index], indx);
3785 else
3787 for (r = reg_set_table[REGNO (x)]; r != NULL; r = r->next)
3788 RESET_BIT (bmap[BLOCK_NUM (r->insn)], indx);
3792 return;
3794 case MEM:
3795 FOR_EACH_BB (bb)
3797 rtx list_entry = canon_modify_mem_list[bb->index];
3799 while (list_entry)
3801 rtx dest, dest_addr;
3803 if (GET_CODE (XEXP (list_entry, 0)) == CALL_INSN)
3805 if (set_p)
3806 SET_BIT (bmap[bb->index], indx);
3807 else
3808 RESET_BIT (bmap[bb->index], indx);
3809 break;
3811 /* LIST_ENTRY must be an INSN of some kind that sets memory.
3812 Examine each hunk of memory that is modified. */
3814 dest = XEXP (list_entry, 0);
3815 list_entry = XEXP (list_entry, 1);
3816 dest_addr = XEXP (list_entry, 0);
3818 if (canon_true_dependence (dest, GET_MODE (dest), dest_addr,
3819 x, rtx_addr_varies_p))
3821 if (set_p)
3822 SET_BIT (bmap[bb->index], indx);
3823 else
3824 RESET_BIT (bmap[bb->index], indx);
3825 break;
3827 list_entry = XEXP (list_entry, 1);
3831 x = XEXP (x, 0);
3832 goto repeat;
3834 case PC:
3835 case CC0: /*FIXME*/
3836 case CONST:
3837 case CONST_INT:
3838 case CONST_DOUBLE:
3839 case CONST_VECTOR:
3840 case SYMBOL_REF:
3841 case LABEL_REF:
3842 case ADDR_VEC:
3843 case ADDR_DIFF_VEC:
3844 return;
3846 default:
3847 break;
3850 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
3852 if (fmt[i] == 'e')
3854 /* If we are about to do the last recursive call
3855 needed at this level, change it into iteration.
3856 This function is called enough to be worth it. */
3857 if (i == 0)
3859 x = XEXP (x, i);
3860 goto repeat;
3863 compute_transp (XEXP (x, i), indx, bmap, set_p);
3865 else if (fmt[i] == 'E')
3866 for (j = 0; j < XVECLEN (x, i); j++)
3867 compute_transp (XVECEXP (x, i, j), indx, bmap, set_p);
3871 /* Top level routine to do the dataflow analysis needed by copy/const
3872 propagation. */
3874 static void
3875 compute_cprop_data ()
3877 compute_local_properties (cprop_absaltered, cprop_pavloc, NULL, &set_hash_table);
3878 compute_available (cprop_pavloc, cprop_absaltered,
3879 cprop_avout, cprop_avin);
3882 /* Copy/constant propagation. */
3884 /* Maximum number of register uses in an insn that we handle. */
3885 #define MAX_USES 8
3887 /* Table of uses found in an insn.
3888 Allocated statically to avoid alloc/free complexity and overhead. */
3889 static struct reg_use reg_use_table[MAX_USES];
3891 /* Index into `reg_use_table' while building it. */
3892 static int reg_use_count;
3894 /* Set up a list of register numbers used in INSN. The found uses are stored
3895 in `reg_use_table'. `reg_use_count' is initialized to zero before entry,
3896 and contains the number of uses in the table upon exit.
3898 ??? If a register appears multiple times we will record it multiple times.
3899 This doesn't hurt anything but it will slow things down. */
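/* Illustrative sketch: when note_uses hands this routine the SET_SRC
   of (set (reg Z) (plus (reg X) (reg Y))), the uses (reg X) and
   (reg Y) are recorded in reg_use_table, but the destination (reg Z)
   is not.  The register names are invented for the example.  */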
3901 static void
3902 find_used_regs (xptr, data)
3903 rtx *xptr;
3904 void *data ATTRIBUTE_UNUSED;
3906 int i, j;
3907 enum rtx_code code;
3908 const char *fmt;
3909 rtx x = *xptr;
3911 /* repeat is used to turn tail-recursion into iteration since GCC
3912 can't do it when there's no return value. */
3913 repeat:
3914 if (x == 0)
3915 return;
3917 code = GET_CODE (x);
3918 if (REG_P (x))
3920 if (reg_use_count == MAX_USES)
3921 return;
3923 reg_use_table[reg_use_count].reg_rtx = x;
3924 reg_use_count++;
3927 /* Recursively scan the operands of this expression. */
3929 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
3931 if (fmt[i] == 'e')
3933 /* If we are about to do the last recursive call
3934 needed at this level, change it into iteration.
3935 This function is called enough to be worth it. */
3936 if (i == 0)
3938 x = XEXP (x, 0);
3939 goto repeat;
3942 find_used_regs (&XEXP (x, i), data);
3944 else if (fmt[i] == 'E')
3945 for (j = 0; j < XVECLEN (x, i); j++)
3946 find_used_regs (&XVECEXP (x, i, j), data);
3950 /* Try to replace all non-SET_DEST occurrences of FROM in INSN with TO.
3951 Returns nonzero if successful. */
3953 static int
3954 try_replace_reg (from, to, insn)
3955 rtx from, to, insn;
3957 rtx note = find_reg_equal_equiv_note (insn);
3958 rtx src = 0;
3959 int success = 0;
3960 rtx set = single_set (insn);
3962 validate_replace_src_group (from, to, insn);
3963 if (num_changes_pending () && apply_change_group ())
3964 success = 1;
3966 /* Try to simplify SET_SRC if we have substituted a constant. */
3967 if (success && set && CONSTANT_P (to))
3969 src = simplify_rtx (SET_SRC (set));
3971 if (src)
3972 validate_change (insn, &SET_SRC (set), src, 0);
3975 if (!success && set && reg_mentioned_p (from, SET_SRC (set)))
3977 /* If above failed and this is a single set, try to simplify the source of
3978 the set given our substitution. We could perhaps try this for multiple
3979 SETs, but it probably won't buy us anything. */
3980 src = simplify_replace_rtx (SET_SRC (set), from, to);
3982 if (!rtx_equal_p (src, SET_SRC (set))
3983 && validate_change (insn, &SET_SRC (set), src, 0))
3984 success = 1;
3986 /* If we've failed to do replacement, have a single SET, and don't already
3987 have a note, add a REG_EQUAL note to not lose information. */
3988 if (!success && note == 0 && set != 0)
3989 note = set_unique_reg_note (insn, REG_EQUAL, copy_rtx (src));
3992 /* If there is already a NOTE, update the expression in it with our
3993 replacement. */
3994 else if (note != 0)
3995 XEXP (note, 0) = simplify_replace_rtx (XEXP (note, 0), from, to);
3997 /* REG_EQUAL may get simplified into a register.
3998 We don't allow that. Remove that note. This case ought
3999 not to happen, because previous code ought to synthesize a
4000 reg-reg move, but be on the safe side. */
4001 if (note && REG_P (XEXP (note, 0)))
4002 remove_note (insn, note);
4004 return success;
4007 /* Find a set of REGNOs that are available on entry to INSN's block. Returns
4008 NULL if no such set is found. */
4010 static struct expr *
4011 find_avail_set (regno, insn)
4012 int regno;
4013 rtx insn;
4015 /* SET1 contains the last set found that can be returned to the caller for
4016 use in a substitution. */
4017 struct expr *set1 = 0;
4019 /* Loops are not possible here. To get a loop we would need two sets
4020 available at the start of the block containing INSN, i.e. we would
4021 need two sets like this available at the start of the block:
4023 (set (reg X) (reg Y))
4024 (set (reg Y) (reg X))
4026 This can not happen since the set of (reg Y) would have killed the
4027 set of (reg X) making it unavailable at the start of this block. */
4028 while (1)
4030 rtx src;
4031 struct expr *set = lookup_set (regno, &set_hash_table);
4033 /* Find a set that is available at the start of the block
4034 which contains INSN. */
4035 while (set)
4037 if (TEST_BIT (cprop_avin[BLOCK_NUM (insn)], set->bitmap_index))
4038 break;
4039 set = next_set (regno, set);
4042 /* If no available set was found we've reached the end of the
4043 (possibly empty) copy chain. */
4044 if (set == 0)
4045 break;
4047 if (GET_CODE (set->expr) != SET)
4048 abort ();
4050 src = SET_SRC (set->expr);
4052 /* We know the set is available.
4053 Now check that SRC is ANTLOC (i.e. none of the source operands
4054 have changed since the start of the block).
4056 If the source operand changed, we may still use it for the next
4057 iteration of this loop, but we may not use it for substitutions. */
4059 if (gcse_constant_p (src) || oprs_not_set_p (src, insn))
4060 set1 = set;
4062 /* If the source of the set is anything except a register, then
4063 we have reached the end of the copy chain. */
4064 if (GET_CODE (src) != REG)
4065 break;
4067 /* Follow the copy chain, i.e. start another iteration of the loop
4068 and see if we have an available copy into SRC. */
4069 regno = REGNO (src);
4072 /* SET1 holds the last set that was available and anticipatable at
4073 INSN. */
4074 return set1;
4077 /* Subroutine of cprop_insn that tries to propagate constants into
4078 JUMP_INSNS. JUMP must be a conditional jump. If SETCC is non-NULL
4079 it is the instruction that immediately precedes JUMP, and must be a
4080 single SET of a register. FROM is what we will try to replace,
4081 SRC is the constant we will try to substitute for it. Returns nonzero
4082 if a change was made. */
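/* A sketch of the transformation, with invented operands: given
   FROM = (reg X), SRC = (const_int 0), and a JUMP whose source is
   (if_then_else (eq (reg X) (const_int 0)) (label_ref L) (pc)),
   the substitution folds the condition to true, the SET_SRC becomes
   (label_ref L), and the jump is now unconditional.  */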
4084 static int
4085 cprop_jump (bb, setcc, jump, from, src)
4086 basic_block bb;
4087 rtx setcc;
4088 rtx jump;
4089 rtx from;
4090 rtx src;
4092 rtx new, set_src, note_src;
4093 rtx set = pc_set (jump);
4094 rtx note = find_reg_equal_equiv_note (jump);
4096 if (note)
4098 note_src = XEXP (note, 0);
4099 if (GET_CODE (note_src) == EXPR_LIST)
4100 note_src = NULL_RTX;
4102 else note_src = NULL_RTX;
4104 /* Prefer REG_EQUAL notes except those containing EXPR_LISTs. */
4105 set_src = note_src ? note_src : SET_SRC (set);
4107 /* First substitute the SETCC condition into the JUMP instruction,
4108 then substitute the given values into this expanded JUMP. */
4109 if (setcc != NULL_RTX
4110 && !modified_between_p (from, setcc, jump)
4111 && !modified_between_p (src, setcc, jump))
4113 rtx setcc_src;
4114 rtx setcc_set = single_set (setcc);
4115 rtx setcc_note = find_reg_equal_equiv_note (setcc);
4116 setcc_src = (setcc_note && GET_CODE (XEXP (setcc_note, 0)) != EXPR_LIST)
4117 ? XEXP (setcc_note, 0) : SET_SRC (setcc_set);
4118 set_src = simplify_replace_rtx (set_src, SET_DEST (setcc_set),
4119 setcc_src);
4121 else
4122 setcc = NULL_RTX;
4124 new = simplify_replace_rtx (set_src, from, src);
4126 /* If no simplification can be made, then try the next register. */
4127 if (rtx_equal_p (new, SET_SRC (set)))
4128 return 0;
4130 /* If this is now a no-op delete it, otherwise this must be a valid insn. */
4131 if (new == pc_rtx)
4132 delete_insn (jump);
4133 else
4135 /* Ensure the value computed inside the jump insn is equivalent
4136 to the one computed by setcc. */
4137 if (setcc && modified_in_p (new, setcc))
4138 return 0;
4139 if (! validate_change (jump, &SET_SRC (set), new, 0))
4141 /* When (some) constants are not valid in a comparison, and there
4142 are two registers to be replaced by constants before the entire
4143 comparison can be folded into a constant, we need to keep
4144 intermediate information in REG_EQUAL notes. For targets with
4145 separate compare insns, such notes are added by try_replace_reg.
4146 When we have a combined compare-and-branch instruction, however,
4147 we need to attach a note to the branch itself to make this
4148 optimization work. */
4150 if (!rtx_equal_p (new, note_src))
4151 set_unique_reg_note (jump, REG_EQUAL, copy_rtx (new));
4152 return 0;
4155 /* Remove REG_EQUAL note after simplification. */
4156 if (note_src)
4157 remove_note (jump, note);
4159 /* If this has turned into an unconditional jump,
4160 then put a barrier after it so that the unreachable
4161 code will be deleted. */
4162 if (GET_CODE (SET_SRC (set)) == LABEL_REF)
4163 emit_barrier_after (jump);
4166 #ifdef HAVE_cc0
4167 /* Delete the cc0 setter. */
4168 if (setcc != NULL && CC0_P (SET_DEST (single_set (setcc))))
4169 delete_insn (setcc);
4170 #endif
4172 run_jump_opt_after_gcse = 1;
4174 const_prop_count++;
4175 if (gcse_file != NULL)
4177 fprintf (gcse_file,
4178 "CONST-PROP: Replacing reg %d in jump_insn %d with constant ",
4179 REGNO (from), INSN_UID (jump));
4180 print_rtl (gcse_file, src);
4181 fprintf (gcse_file, "\n");
4183 purge_dead_edges (bb);
4185 return 1;
4188 static bool
4189 constprop_register (insn, from, to, alter_jumps)
4190 rtx insn;
4191 rtx from;
4192 rtx to;
4193 int alter_jumps;
4195 rtx sset;
4197 /* Check for reg or cc0 setting instructions followed by
4198 conditional branch instructions first. */
4199 if (alter_jumps
4200 && (sset = single_set (insn)) != NULL
4201 && NEXT_INSN (insn)
4202 && any_condjump_p (NEXT_INSN (insn)) && onlyjump_p (NEXT_INSN (insn)))
4204 rtx dest = SET_DEST (sset);
4205 if ((REG_P (dest) || CC0_P (dest))
4206 && cprop_jump (BLOCK_FOR_INSN (insn), insn, NEXT_INSN (insn), from, to))
4207 return 1;
4210 /* Handle normal insns next. */
4211 if (GET_CODE (insn) == INSN
4212 && try_replace_reg (from, to, insn))
4213 return 1;
4215 /* Try to propagate a CONST_INT into a conditional jump.
4216 We're pretty specific about what we will handle in this
4217 code; we can extend this as necessary over time.
4219 Right now the insn in question must look like
4220 (set (pc) (if_then_else ...)) */
4221 else if (alter_jumps && any_condjump_p (insn) && onlyjump_p (insn))
4222 return cprop_jump (BLOCK_FOR_INSN (insn), NULL, insn, from, to);
4223 return 0;
4226 /* Perform constant and copy propagation on INSN.
4227 The result is nonzero if a change was made. */
4229 static int
4230 cprop_insn (insn, alter_jumps)
4231 rtx insn;
4232 int alter_jumps;
4234 struct reg_use *reg_used;
4235 int changed = 0;
4236 rtx note;
4238 if (!INSN_P (insn))
4239 return 0;
4241 reg_use_count = 0;
4242 note_uses (&PATTERN (insn), find_used_regs, NULL);
4244 note = find_reg_equal_equiv_note (insn);
4246 /* We may win even when propagating constants into notes. */
4247 if (note)
4248 find_used_regs (&XEXP (note, 0), NULL);
4250 for (reg_used = &reg_use_table[0]; reg_use_count > 0;
4251 reg_used++, reg_use_count--)
4253 unsigned int regno = REGNO (reg_used->reg_rtx);
4254 rtx pat, src;
4255 struct expr *set;
4257 /* Ignore registers created by GCSE.
4258 We do this because ... */
4259 if (regno >= max_gcse_regno)
4260 continue;
4262 /* If the register has already been set in this block, there's
4263 nothing we can do. */
4264 if (! oprs_not_set_p (reg_used->reg_rtx, insn))
4265 continue;
4267 /* Find an assignment that sets reg_used and is available
4268 at the start of the block. */
4269 set = find_avail_set (regno, insn);
4270 if (! set)
4271 continue;
4273 pat = set->expr;
4274 /* ??? We might be able to handle PARALLELs. Later. */
4275 if (GET_CODE (pat) != SET)
4276 abort ();
4278 src = SET_SRC (pat);
4280 /* Constant propagation. */
4281 if (gcse_constant_p (src))
4283 if (constprop_register (insn, reg_used->reg_rtx, src, alter_jumps))
4285 changed = 1;
4286 const_prop_count++;
4287 if (gcse_file != NULL)
4289 fprintf (gcse_file, "GLOBAL CONST-PROP: Replacing reg %d in ", regno);
4290 fprintf (gcse_file, "insn %d with constant ", INSN_UID (insn));
4291 print_rtl (gcse_file, src);
4292 fprintf (gcse_file, "\n");
4294 if (INSN_DELETED_P (insn))
4295 return 1;
4298 else if (GET_CODE (src) == REG
4299 && REGNO (src) >= FIRST_PSEUDO_REGISTER
4300 && REGNO (src) != regno)
4302 if (try_replace_reg (reg_used->reg_rtx, src, insn))
4304 changed = 1;
4305 copy_prop_count++;
4306 if (gcse_file != NULL)
4308 fprintf (gcse_file, "GLOBAL COPY-PROP: Replacing reg %d in insn %d",
4309 regno, INSN_UID (insn));
4310 fprintf (gcse_file, " with reg %d\n", REGNO (src));
4313 /* The original insn setting reg_used may or may not now be
4314 deletable. We leave the deletion to flow. */
4315 /* FIXME: If it turns out that the insn isn't deletable,
4316 then we may have unnecessarily extended register lifetimes
4317 and made things worse. */
4322 return changed;
4325 /* Like find_used_regs, but avoid recording uses that appear in
4326 input-output contexts such as zero_extract or pre_dec. This
4327 restricts the cases we consider to those for which local cprop
4328 can legitimately make replacements. */
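/* For example, in (set (zero_extract (reg X) ...) ...) the
   occurrence of (reg X) is both read and written, so substituting a
   constant for it would be invalid; such uses are filtered out
   here.  */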
4330 static void
4331 local_cprop_find_used_regs (xptr, data)
4332 rtx *xptr;
4333 void *data;
4335 rtx x = *xptr;
4337 if (x == 0)
4338 return;
4340 switch (GET_CODE (x))
4342 case ZERO_EXTRACT:
4343 case SIGN_EXTRACT:
4344 case STRICT_LOW_PART:
4345 return;
4347 case PRE_DEC:
4348 case PRE_INC:
4349 case POST_DEC:
4350 case POST_INC:
4351 case PRE_MODIFY:
4352 case POST_MODIFY:
4353 /* Can only legitimately appear this early in the context of
4354 stack pushes for function arguments, but handle all of the
4355 codes nonetheless. */
4356 return;
4358 case SUBREG:
4359 /* Setting a subreg of a register larger than word_mode leaves
4360 the non-written words unchanged. */
4361 if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) > BITS_PER_WORD)
4362 return;
4363 break;
4365 default:
4366 break;
4369 find_used_regs (xptr, data);
4372 /* LIBCALL_SP is a zero-terminated array of insns at the end of a libcall;
4373 their REG_EQUAL notes need updating. */
4375 static bool
4376 do_local_cprop (x, insn, alter_jumps, libcall_sp)
4377 rtx x;
4378 rtx insn;
4379 int alter_jumps;
4380 rtx *libcall_sp;
4382 rtx newreg = NULL, newcnst = NULL;
4384 /* Rule out USE instructions and ASM statements as we don't want to
4385 change the hard registers mentioned. */
4386 if (GET_CODE (x) == REG
4387 && (REGNO (x) >= FIRST_PSEUDO_REGISTER
4388 || (GET_CODE (PATTERN (insn)) != USE
4389 && asm_noperands (PATTERN (insn)) < 0)))
4391 cselib_val *val = cselib_lookup (x, GET_MODE (x), 0);
4392 struct elt_loc_list *l;
4394 if (!val)
4395 return false;
4396 for (l = val->locs; l; l = l->next)
4398 rtx this_rtx = l->loc;
4399 rtx note;
4401 if (l->in_libcall)
4402 continue;
4404 if (gcse_constant_p (this_rtx))
4405 newcnst = this_rtx;
4406 if (REG_P (this_rtx) && REGNO (this_rtx) >= FIRST_PSEUDO_REGISTER
4407 /* Don't copy propagate if it has an attached REG_EQUIV note.
4408 At this point only function parameters should have
4409 REG_EQUIV notes, and if the argument slot is used somewhere
4410 explicitly, it means the address of the parameter has been taken,
4411 so we should not extend the lifetime of the pseudo. */
4412 && (!(note = find_reg_note (l->setting_insn, REG_EQUIV, NULL_RTX))
4413 || GET_CODE (XEXP (note, 0)) != MEM))
4414 newreg = this_rtx;
4416 if (newcnst && constprop_register (insn, x, newcnst, alter_jumps))
4418 /* If we find a case where we can't fix the retval REG_EQUAL notes
4419 to match the new register, we either have to abandon this replacement
4420 or fix delete_trivially_dead_insns to preserve the setting insn,
4421 or make it delete the REG_EQUAL note, and fix up all passes that
4422 require the REG_EQUAL note there. */
4423 if (!adjust_libcall_notes (x, newcnst, insn, libcall_sp))
4424 abort ();
4425 if (gcse_file != NULL)
4427 fprintf (gcse_file, "LOCAL CONST-PROP: Replacing reg %d in ",
4428 REGNO (x));
4429 fprintf (gcse_file, "insn %d with constant ",
4430 INSN_UID (insn));
4431 print_rtl (gcse_file, newcnst);
4432 fprintf (gcse_file, "\n");
4434 const_prop_count++;
4435 return true;
4437 else if (newreg && newreg != x && try_replace_reg (x, newreg, insn))
4439 adjust_libcall_notes (x, newreg, insn, libcall_sp);
4440 if (gcse_file != NULL)
4442 fprintf (gcse_file,
4443 "LOCAL COPY-PROP: Replacing reg %d in insn %d",
4444 REGNO (x), INSN_UID (insn));
4445 fprintf (gcse_file, " with reg %d\n", REGNO (newreg));
4447 copy_prop_count++;
4448 return true;
4451 return false;
4454 /* LIBCALL_SP is a zero-terminated array of insns at the end of a libcall;
4455 their REG_EQUAL notes need updating to reflect that OLDREG has been
4456 replaced with NEWVAL in INSN. Return true if all substitutions could
4457 be made. */
4458 static bool
4459 adjust_libcall_notes (oldreg, newval, insn, libcall_sp)
4460 rtx oldreg, newval, insn, *libcall_sp;
4462 rtx end;
4464 while ((end = *libcall_sp++))
4466 rtx note = find_reg_equal_equiv_note (end);
4468 if (! note)
4469 continue;
4471 if (REG_P (newval))
4473 if (reg_set_between_p (newval, PREV_INSN (insn), end))
4477 note = find_reg_equal_equiv_note (end);
4478 if (! note)
4479 continue;
4480 if (reg_mentioned_p (newval, XEXP (note, 0)))
4481 return false;
4483 while ((end = *libcall_sp++));
4484 return true;
4487 XEXP (note, 0) = replace_rtx (XEXP (note, 0), oldreg, newval);
4488 insn = end;
4490 return true;
4493 #define MAX_NESTED_LIBCALLS 9
4495 static void
4496 local_cprop_pass (alter_jumps)
4497 int alter_jumps;
4499 rtx insn;
4500 struct reg_use *reg_used;
4501 rtx libcall_stack[MAX_NESTED_LIBCALLS + 1], *libcall_sp;
4502 bool changed = false;
4504 cselib_init ();
4505 libcall_sp = &libcall_stack[MAX_NESTED_LIBCALLS];
4506 *libcall_sp = 0;
4507 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4509 if (INSN_P (insn))
4511 rtx note = find_reg_note (insn, REG_LIBCALL, NULL_RTX);
4513 if (note)
4515 if (libcall_sp == libcall_stack)
4516 abort ();
4517 *--libcall_sp = XEXP (note, 0);
4519 note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
4520 if (note)
4521 libcall_sp++;
4522 note = find_reg_equal_equiv_note (insn);
4525 reg_use_count = 0;
4526 note_uses (&PATTERN (insn), local_cprop_find_used_regs, NULL);
4527 if (note)
4528 local_cprop_find_used_regs (&XEXP (note, 0), NULL);
4530 for (reg_used = &reg_use_table[0]; reg_use_count > 0;
4531 reg_used++, reg_use_count--)
4532 if (do_local_cprop (reg_used->reg_rtx, insn, alter_jumps,
4533 libcall_sp))
4535 changed = true;
4536 break;
4538 if (INSN_DELETED_P (insn))
4539 break;
4541 while (reg_use_count);
4543 cselib_process_insn (insn);
4545 cselib_finish ();
4546 /* Global analysis may get into infinite loops for unreachable blocks. */
4547 if (changed && alter_jumps)
4549 delete_unreachable_blocks ();
4550 free_reg_set_mem ();
4551 alloc_reg_set_mem (max_reg_num ());
4552 compute_sets (get_insns ());
4556 /* Forward propagate copies. This includes copies and constants. Return
4557 nonzero if a change was made. */
4559 static int
4560 cprop (alter_jumps)
4561 int alter_jumps;
4563 int changed;
4564 basic_block bb;
4565 rtx insn;
4567 /* Note we start at block 1. */
4568 if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
4570 if (gcse_file != NULL)
4571 fprintf (gcse_file, "\n");
4572 return 0;
4575 changed = 0;
4576 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb->next_bb, EXIT_BLOCK_PTR, next_bb)
4578 /* Reset tables used to keep track of what's still valid [since the
4579 start of the block]. */
4580 reset_opr_set_tables ();
4582 for (insn = bb->head;
4583 insn != NULL && insn != NEXT_INSN (bb->end);
4584 insn = NEXT_INSN (insn))
4585 if (INSN_P (insn))
4587 changed |= cprop_insn (insn, alter_jumps);
4589 /* Keep track of everything modified by this insn. */
4590 /* ??? Need to be careful w.r.t. mods done to INSN. Don't
4591 call mark_oprs_set if we turned the insn into a NOTE. */
4592 if (GET_CODE (insn) != NOTE)
4593 mark_oprs_set (insn);
4597 if (gcse_file != NULL)
4598 fprintf (gcse_file, "\n");
4600 return changed;
4603 /* Similar to get_condition, only the resulting condition must be
4604 valid at JUMP, instead of at EARLIEST.
4606 This differs from noce_get_condition in ifcvt.c in that we prefer not to
4607 settle for the condition variable in the jump instruction being integral.
4608 We prefer to be able to record the value of a user variable, rather than
4609 the value of a temporary used in a condition. This could be solved by
4610 recording the value of *every* register scanned by canonicalize_condition,
4611 but this would require some code reorganization. */
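/* E.g. for a jump guarded by "x < 10" whose comparison result lives
   in a CC register, this returns something like
   (lt (reg X) (const_int 10)), provided neither operand is modified
   between the comparison and the jump.  The register name is
   invented for the example.  */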
4613 static rtx
4614 fis_get_condition (jump)
4615 rtx jump;
4617 rtx cond, set, tmp, insn, earliest;
4618 bool reverse;
4620 if (! any_condjump_p (jump))
4621 return NULL_RTX;
4623 set = pc_set (jump);
4624 cond = XEXP (SET_SRC (set), 0);
4626 /* If this branches to JUMP_LABEL when the condition is false,
4627 reverse the condition. */
4628 reverse = (GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
4629 && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (jump));
4631 /* Use canonicalize_condition to do the dirty work of manipulating
4632 MODE_CC values and COMPARE rtx codes. */
4633 tmp = canonicalize_condition (jump, cond, reverse, &earliest, NULL_RTX);
4634 if (!tmp)
4635 return NULL_RTX;
4637 /* Verify that the given condition is valid at JUMP by virtue of not
4638 having been modified since EARLIEST. */
4639 for (insn = earliest; insn != jump; insn = NEXT_INSN (insn))
4640 if (INSN_P (insn) && modified_in_p (tmp, insn))
4641 break;
4642 if (insn == jump)
4643 return tmp;
4645 /* The condition was modified. See if we can get a partial result
4646 that doesn't follow all the reversals. Perhaps combine can fold
4647 them together later. */
4648 tmp = XEXP (tmp, 0);
4649 if (!REG_P (tmp) || GET_MODE_CLASS (GET_MODE (tmp)) != MODE_INT)
4650 return NULL_RTX;
4651 tmp = canonicalize_condition (jump, cond, reverse, &earliest, tmp);
4652 if (!tmp)
4653 return NULL_RTX;
4655 /* For sanity's sake, re-validate the new result. */
4656 for (insn = earliest; insn != jump; insn = NEXT_INSN (insn))
4657 if (INSN_P (insn) && modified_in_p (tmp, insn))
4658 return NULL_RTX;
4660 return tmp;
4663 /* Find the implicit sets of a function. An "implicit set" is a constraint
4664 on the value of a variable, implied by a conditional jump. For example,
4665 following "if (x == 2)", the then branch may be optimized as though the
4666 conditional performed an "explicit set", in this example, "x = 2". This
4667 function records the set patterns that are implicit at the start of each
4668 basic block. */
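/* Sketch: for a block ending in "if (x == 2)", the successor reached
   when the test is true -- provided it has no other predecessors --
   is tagged with the implicit set (set (reg X) (const_int 2)), where
   X stands for the pseudo holding x.  */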
4670 static void
4671 find_implicit_sets ()
4673 basic_block bb, dest;
4674 unsigned int count;
4675 rtx cond, new;
4677 count = 0;
4678 FOR_EACH_BB (bb)
4679 /* Check for more than one successor. */
4680 if (bb->succ && bb->succ->succ_next)
4682 cond = fis_get_condition (bb->end);
4684 if (cond
4685 && (GET_CODE (cond) == EQ || GET_CODE (cond) == NE)
4686 && GET_CODE (XEXP (cond, 0)) == REG
4687 && REGNO (XEXP (cond, 0)) >= FIRST_PSEUDO_REGISTER
4688 && gcse_constant_p (XEXP (cond, 1)))
4690 dest = GET_CODE (cond) == EQ ? BRANCH_EDGE (bb)->dest
4691 : FALLTHRU_EDGE (bb)->dest;
4693 if (dest && ! dest->pred->pred_next
4694 && dest != EXIT_BLOCK_PTR)
4696 new = gen_rtx_SET (VOIDmode, XEXP (cond, 0),
4697 XEXP (cond, 1));
4698 implicit_sets[dest->index] = new;
4699 if (gcse_file)
4701 fprintf (gcse_file, "Implicit set of reg %d in ",
4702 REGNO (XEXP (cond, 0)));
4703 fprintf (gcse_file, "basic block %d\n", dest->index);
4705 count++;
4710 if (gcse_file)
4711 fprintf (gcse_file, "Found %d implicit sets\n", count);
4714 /* Perform one copy/constant propagation pass.
4715 PASS is the pass count. If CPROP_JUMPS is true, perform constant
4716 propagation into conditional jumps. If BYPASS_JUMPS is true,
4717 perform conditional jump bypassing optimizations. */
4719 static int
4720 one_cprop_pass (pass, cprop_jumps, bypass_jumps)
4721 int pass;
4722 int cprop_jumps;
4723 int bypass_jumps;
4725 int changed = 0;
4727 const_prop_count = 0;
4728 copy_prop_count = 0;
4730 local_cprop_pass (cprop_jumps);
4732 /* Determine implicit sets. */
4733 implicit_sets = (rtx *) xcalloc (last_basic_block, sizeof (rtx));
4734 find_implicit_sets ();
4736 alloc_hash_table (max_cuid, &set_hash_table, 1);
4737 compute_hash_table (&set_hash_table);
4739 /* Free implicit_sets before peak usage. */
4740 free (implicit_sets);
4741 implicit_sets = NULL;
4743 if (gcse_file)
4744 dump_hash_table (gcse_file, "SET", &set_hash_table);
4745 if (set_hash_table.n_elems > 0)
4747 alloc_cprop_mem (last_basic_block, set_hash_table.n_elems);
4748 compute_cprop_data ();
4749 changed = cprop (cprop_jumps);
4750 if (bypass_jumps)
4751 changed |= bypass_conditional_jumps ();
4752 free_cprop_mem ();
4755 free_hash_table (&set_hash_table);
4757 if (gcse_file)
4759 fprintf (gcse_file, "CPROP of %s, pass %d: %d bytes needed, ",
4760 current_function_name, pass, bytes_used);
4761 fprintf (gcse_file, "%d const props, %d copy props\n\n",
4762 const_prop_count, copy_prop_count);
4764 /* Global analysis may get into infinite loops for unreachable blocks. */
4765 if (changed && cprop_jumps)
4766 delete_unreachable_blocks ();
4768 return changed;
4771 /* Bypass conditional jumps. */
4773 /* The value of last_basic_block at the beginning of the jump_bypass
4774 pass. The use of redirect_edge_and_branch_force may introduce new
4775 basic blocks, but the data flow analysis is only valid for basic
4776 block indices less than bypass_last_basic_block. */
4778 static int bypass_last_basic_block;
4780 /* Find a set of REGNO to a constant that is available at the end of basic
4781 block BB. Returns NULL if no such set is found. Based heavily upon
4782 find_avail_set. */
4784 static struct expr *
4785 find_bypass_set (regno, bb)
4786 int regno;
4787 int bb;
4789 struct expr *result = 0;
4791 for (;;)
4793 rtx src;
4794 struct expr *set = lookup_set (regno, &set_hash_table);
4796 while (set)
4798 if (TEST_BIT (cprop_avout[bb], set->bitmap_index))
4799 break;
4800 set = next_set (regno, set);
4803 if (set == 0)
4804 break;
4806 if (GET_CODE (set->expr) != SET)
4807 abort ();
4809 src = SET_SRC (set->expr);
4810 if (gcse_constant_p (src))
4811 result = set;
4813 if (GET_CODE (src) != REG)
4814 break;
4816 regno = REGNO (src);
4818 return result;
4822 /* Subroutine of bypass_block that checks whether a pseudo is killed by
4823 any of the instructions inserted on an edge. Jump bypassing places
4824 condition code setters on CFG edges using insert_insn_on_edge. This
4825 function is required to check that our data flow analysis is still
4826 valid prior to commit_edge_insertions. */
4828 static bool
4829 reg_killed_on_edge (reg, e)
4830 rtx reg;
4831 edge e;
4833 rtx insn;
4835 for (insn = e->insns; insn; insn = NEXT_INSN (insn))
4836 if (INSN_P (insn) && reg_set_p (reg, insn))
4837 return true;
4839 return false;
4842 /* Subroutine of bypass_conditional_jumps that attempts to bypass the given
4843 basic block BB which has more than one predecessor. If not NULL, SETCC
4844 is the first instruction of BB, which is immediately followed by JUMP_INSN
4845 JUMP. Otherwise, SETCC is NULL, and JUMP is the first insn of BB.
4846 Returns nonzero if a change was made.
4848 During the jump bypassing pass, we may place copies of SETCC instructions
4849 on CFG edges. The following routine must be careful to pay attention to
4850 these inserted insns when performing its transformations. */
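/* Illustrative case, with invented operands: if BB consists solely of
   (set (pc) (if_then_else (eq (reg X) (const_int 0))
   (label_ref L) (pc))) and (set (reg X) (const_int 0)) is available
   at the end of predecessor P, then the edge P->BB can be redirected
   straight to the block at L.  */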
4852 static int
4853 bypass_block (bb, setcc, jump)
4854 basic_block bb;
4855 rtx setcc, jump;
4857 rtx insn, note;
4858 edge e, enext, edest;
4859 int i, change;
4860 int may_be_loop_header;
4862 insn = (setcc != NULL) ? setcc : jump;
4864 /* Determine set of register uses in INSN. */
4865 reg_use_count = 0;
4866 note_uses (&PATTERN (insn), find_used_regs, NULL);
4867 note = find_reg_equal_equiv_note (insn);
4868 if (note)
4869 find_used_regs (&XEXP (note, 0), NULL);
4871 may_be_loop_header = false;
4872 for (e = bb->pred; e; e = e->pred_next)
4873 if (e->flags & EDGE_DFS_BACK)
4875 may_be_loop_header = true;
4876 break;
4879 change = 0;
4880 for (e = bb->pred; e; e = enext)
4882 enext = e->pred_next;
4883 if (e->flags & EDGE_COMPLEX)
4884 continue;
4886 /* We can't redirect edges from new basic blocks. */
4887 if (e->src->index >= bypass_last_basic_block)
4888 continue;
4890 /* The irreducible loops created by redirecting edges entering the
4891 loop from outside would decrease the effectiveness of some of the
4892 following optimizations, so prevent this. */
4893 if (may_be_loop_header
4894 && !(e->flags & EDGE_DFS_BACK))
4895 continue;
4897 for (i = 0; i < reg_use_count; i++)
4899 struct reg_use *reg_used = &reg_use_table[i];
4900 unsigned int regno = REGNO (reg_used->reg_rtx);
4901 basic_block dest, old_dest;
4902 struct expr *set;
4903 rtx src, new;
4905 if (regno >= max_gcse_regno)
4906 continue;
4908 set = find_bypass_set (regno, e->src->index);
4910 if (! set)
4911 continue;
4913 /* Check the data flow is valid after edge insertions. */
4914 if (e->insns && reg_killed_on_edge (reg_used->reg_rtx, e))
4915 continue;
4917 src = SET_SRC (pc_set (jump));
4919 if (setcc != NULL)
4920 src = simplify_replace_rtx (src,
4921 SET_DEST (PATTERN (setcc)),
4922 SET_SRC (PATTERN (setcc)));
4924 new = simplify_replace_rtx (src, reg_used->reg_rtx,
4925 SET_SRC (set->expr));
4927 /* Jump bypassing may have already placed instructions on
4928 edges of the CFG. We can't bypass an outgoing edge that
4929 has instructions associated with it, as these insns won't
4930 get executed if the incoming edge is redirected. */
4932 if (new == pc_rtx)
4934 edest = FALLTHRU_EDGE (bb);
4935 dest = edest->insns ? NULL : edest->dest;
4937 else if (GET_CODE (new) == LABEL_REF)
4939 dest = BLOCK_FOR_INSN (XEXP (new, 0));
4940 /* Don't bypass edges containing instructions. */
4941 for (edest = bb->succ; edest; edest = edest->succ_next)
4942 if (edest->dest == dest && edest->insns)
4944 dest = NULL;
4945 break;
4948 else
4949 dest = NULL;
4951 old_dest = e->dest;
4952 if (dest != NULL
4953 && dest != old_dest
4954 && dest != EXIT_BLOCK_PTR)
4956 redirect_edge_and_branch_force (e, dest);
4958 /* Copy the register setter to the redirected edge.
4959 Don't copy CC0 setters, as CC0 is dead after jump. */
4960 if (setcc)
4962 rtx pat = PATTERN (setcc);
4963 if (!CC0_P (SET_DEST (pat)))
4964 insert_insn_on_edge (copy_insn (pat), e);
4967 if (gcse_file != NULL)
4969 fprintf (gcse_file, "JUMP-BYPASS: Proved reg %d in jump_insn %d equals constant ",
4970 regno, INSN_UID (jump));
4971 print_rtl (gcse_file, SET_SRC (set->expr));
4972 fprintf (gcse_file, "\nBypass edge from %d->%d to %d\n",
4973 e->src->index, old_dest->index, dest->index);
4975 change = 1;
4976 break;
4980 return change;
4983 /* Find basic blocks with more than one predecessor that only contain a
4984 single conditional jump. If the result of the comparison is known at
4985 compile-time from any incoming edge, redirect that edge to the
4986 appropriate target. Returns nonzero if a change was made.
4988 This function is now mis-named, because we also handle indirect jumps. */
4990 static int
4991 bypass_conditional_jumps ()
4993 basic_block bb;
4994 int changed;
4995 rtx setcc;
4996 rtx insn;
4997 rtx dest;
4999 /* Note we start at block 1. */
5000 if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
5001 return 0;
5003 bypass_last_basic_block = last_basic_block;
5004 mark_dfs_back_edges ();
5006 changed = 0;
5007 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb->next_bb,
5008 EXIT_BLOCK_PTR, next_bb)
5010 /* Check for more than one predecessor. */
5011 if (bb->pred && bb->pred->pred_next)
5013 setcc = NULL_RTX;
5014 for (insn = bb->head;
5015 insn != NULL && insn != NEXT_INSN (bb->end);
5016 insn = NEXT_INSN (insn))
5017 if (GET_CODE (insn) == INSN)
5019 if (setcc)
5020 break;
5021 if (GET_CODE (PATTERN (insn)) != SET)
5022 break;
5024 dest = SET_DEST (PATTERN (insn));
5025 if (REG_P (dest) || CC0_P (dest))
5026 setcc = insn;
5027 else
5028 break;
5030 else if (GET_CODE (insn) == JUMP_INSN)
5032 if ((any_condjump_p (insn) || computed_jump_p (insn))
5033 && onlyjump_p (insn))
5034 changed |= bypass_block (bb, setcc, insn);
5035 break;
5037 else if (INSN_P (insn))
5038 break;
5042 /* If we bypassed any register setting insns, we inserted a
5043 copy on the redirected edge. These need to be committed. */
5044 if (changed)
5045 commit_edge_insertions ();
5047 return changed;
5050 /* Compute PRE+LCM working variables. */
5052 /* Local properties of expressions. */
5053 /* Nonzero for expressions that are transparent in the block. */
5054 static sbitmap *transp;
5056 /* Nonzero for expressions that are transparent at the end of the block.
5057 This is only zero for expressions killed by an abnormal critical
5058 edge created by a call. */
5059 static sbitmap *transpout;
5061 /* Nonzero for expressions that are computed (available) in the block. */
5062 static sbitmap *comp;
5064 /* Nonzero for expressions that are locally anticipatable in the block. */
5065 static sbitmap *antloc;
5067 /* Nonzero for expressions where this block is an optimal computation
5068 point. */
5069 static sbitmap *pre_optimal;
5071 /* Nonzero for expressions which are redundant in a particular block. */
5072 static sbitmap *pre_redundant;
5074 /* Nonzero for expressions which should be inserted on a specific edge. */
5075 static sbitmap *pre_insert_map;
5077 /* Nonzero for expressions which should be deleted in a specific block. */
5078 static sbitmap *pre_delete_map;
5080 /* Contains the edge_list returned by pre_edge_lcm. */
5081 static struct edge_list *edge_list;
5083 /* Redundant insns. */
5084 static sbitmap pre_redundant_insns;
5086 /* Allocate vars used for PRE analysis. */
5088 static void
5089 alloc_pre_mem (n_blocks, n_exprs)
5090 int n_blocks, n_exprs;
5092 transp = sbitmap_vector_alloc (n_blocks, n_exprs);
5093 comp = sbitmap_vector_alloc (n_blocks, n_exprs);
5094 antloc = sbitmap_vector_alloc (n_blocks, n_exprs);
5096 pre_optimal = NULL;
5097 pre_redundant = NULL;
5098 pre_insert_map = NULL;
5099 pre_delete_map = NULL;
5100 ae_in = NULL;
5101 ae_out = NULL;
5102 ae_kill = sbitmap_vector_alloc (n_blocks, n_exprs);
5104 /* pre_insert and pre_delete are allocated later. */
5107 /* Free vars used for PRE analysis. */
5109 static void
5110 free_pre_mem ()
5112 sbitmap_vector_free (transp);
5113 sbitmap_vector_free (comp);
5115 /* ANTLOC and AE_KILL are freed just after pre_lcm finishes. */
5117 if (pre_optimal)
5118 sbitmap_vector_free (pre_optimal);
5119 if (pre_redundant)
5120 sbitmap_vector_free (pre_redundant);
5121 if (pre_insert_map)
5122 sbitmap_vector_free (pre_insert_map);
5123 if (pre_delete_map)
5124 sbitmap_vector_free (pre_delete_map);
5125 if (ae_in)
5126 sbitmap_vector_free (ae_in);
5127 if (ae_out)
5128 sbitmap_vector_free (ae_out);
5130 transp = comp = NULL;
5131 pre_optimal = pre_redundant = pre_insert_map = pre_delete_map = NULL;
5132 ae_in = ae_out = NULL;
5135 /* Top level routine to do the dataflow analysis needed by PRE. */
5137 static void
5138 compute_pre_data ()
5140 sbitmap trapping_expr;
5141 basic_block bb;
5142 unsigned int ui;
5144 compute_local_properties (transp, comp, antloc, &expr_hash_table);
5145 sbitmap_vector_zero (ae_kill, last_basic_block);
5147 /* Collect expressions which might trap. */
5148 trapping_expr = sbitmap_alloc (expr_hash_table.n_elems);
5149 sbitmap_zero (trapping_expr);
5150 for (ui = 0; ui < expr_hash_table.size; ui++)
5152 struct expr *e;
5153 for (e = expr_hash_table.table[ui]; e != NULL; e = e->next_same_hash)
5154 if (may_trap_p (e->expr))
5155 SET_BIT (trapping_expr, e->bitmap_index);
5158 /* Compute ae_kill for each basic block using:
5160 ~(TRANSP | COMP)
5162 This is significantly faster than compute_ae_kill. */
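/* The identity holds because an expression survives to the end of a
   block exactly when it is computed there (COMP) or passes through
   unmodified (TRANSP); everything else is killed.  */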
5164 FOR_EACH_BB (bb)
5166 edge e;
5168 /* If the current block is the destination of an abnormal edge, we
5169 kill all trapping expressions because we won't be able to properly
5170 place the instruction on the edge. So make them neither
5171 anticipatable nor transparent. This is fairly conservative. */
5172 for (e = bb->pred; e ; e = e->pred_next)
5173 if (e->flags & EDGE_ABNORMAL)
5175 sbitmap_difference (antloc[bb->index], antloc[bb->index], trapping_expr);
5176 sbitmap_difference (transp[bb->index], transp[bb->index], trapping_expr);
5177 break;
5180 sbitmap_a_or_b (ae_kill[bb->index], transp[bb->index], comp[bb->index]);
5181 sbitmap_not (ae_kill[bb->index], ae_kill[bb->index]);
5184 edge_list = pre_edge_lcm (gcse_file, expr_hash_table.n_elems, transp, comp, antloc,
5185 ae_kill, &pre_insert_map, &pre_delete_map);
5186 sbitmap_vector_free (antloc);
5187 antloc = NULL;
5188 sbitmap_vector_free (ae_kill);
5189 ae_kill = NULL;
5190 sbitmap_free (trapping_expr);
5193 /* PRE utilities */
5195 /* Return nonzero if an occurrence of expression EXPR in OCCR_BB would reach
5196 block BB.
5198 VISITED is a pointer to a working buffer for tracking which BB's have
5199 been visited. It is NULL for the top-level call.
5201 We treat reaching expressions that go through blocks containing the same
5202 reaching expression as "not reaching". E.g. if EXPR is generated in blocks
5203 2 and 3, INSN is in block 4, and 2->3->4, we treat the expression in block
5204 2 as not reaching. The intent is to improve the probability of finding
5205 only one reaching expression and to reduce register lifetimes by picking
5206 the closest such expression. */
5208 static int
5209 pre_expr_reaches_here_p_work (occr_bb, expr, bb, visited)
5210 basic_block occr_bb;
5211 struct expr *expr;
5212 basic_block bb;
5213 char *visited;
5215 edge pred;
5217 for (pred = bb->pred; pred != NULL; pred = pred->pred_next)
5219 basic_block pred_bb = pred->src;
5221 if (pred->src == ENTRY_BLOCK_PTR
5222 /* Has this predecessor already been visited? */
5223 || visited[pred_bb->index])
5224 ;/* Nothing to do. */
5226 /* Does this predecessor generate this expression? */
5227 else if (TEST_BIT (comp[pred_bb->index], expr->bitmap_index))
5229 /* Is this the occurrence we're looking for?
5230 Note that there's only one generating occurrence per block
5231 so we just need to check the block number. */
5232 if (occr_bb == pred_bb)
5233 return 1;
5235 visited[pred_bb->index] = 1;
5237 /* Ignore this predecessor if it kills the expression. */
5238 else if (! TEST_BIT (transp[pred_bb->index], expr->bitmap_index))
5239 visited[pred_bb->index] = 1;
5241 /* Neither gen nor kill. */
5242 else
5244 visited[pred_bb->index] = 1;
5245 if (pre_expr_reaches_here_p_work (occr_bb, expr, pred_bb, visited))
5246 return 1;
5250 /* All paths have been checked. */
5251 return 0;
5254 /* The wrapper for pre_expr_reaches_here_p_work that ensures that any
5255 memory allocated for that function is returned. */
5257 static int
5258 pre_expr_reaches_here_p (occr_bb, expr, bb)
5259 basic_block occr_bb;
5260 struct expr *expr;
5261 basic_block bb;
5263 int rval;
5264 char *visited = (char *) xcalloc (last_basic_block, 1);
5266 rval = pre_expr_reaches_here_p_work (occr_bb, expr, bb, visited);
5268 free (visited);
5269 return rval;
5273 /* Given an expr, generate RTL which we can insert at the end of a BB,
5274 or on an edge. Set the block number of any insns generated to
5275 the value of BB. */
5277 static rtx
5278 process_insert_insn (expr)
5279 struct expr *expr;
5281 rtx reg = expr->reaching_reg;
5282 rtx exp = copy_rtx (expr->expr);
5283 rtx pat;
5285 start_sequence ();
5287 /* If the expression is something that's an operand, like a constant,
5288 just copy it to a register. */
5289 if (general_operand (exp, GET_MODE (reg)))
5290 emit_move_insn (reg, exp);
5292 /* Otherwise, make a new insn to compute this expression and make sure the
5293 insn will be recognized (this also adds any needed CLOBBERs). Copy the
5294 expression to make sure we don't have any sharing issues. */
5295 else if (insn_invalid_p (emit_insn (gen_rtx_SET (VOIDmode, reg, exp))))
5296 abort ();
5298 pat = get_insns ();
5299 end_sequence ();
5301 return pat;
5304 /* Add EXPR to the end of basic block BB.
5306 This is used by both the PRE and code hoisting.
5308 For PRE, we want to verify that the expr is either transparent
5309 or locally anticipatable in the target block. This check makes
5310 no sense for code hoisting. */
5312 static void
5313 insert_insn_end_bb (expr, bb, pre)
5314 struct expr *expr;
5315 basic_block bb;
5316 int pre;
5318 rtx insn = bb->end;
5319 rtx new_insn;
5320 rtx reg = expr->reaching_reg;
5321 int regno = REGNO (reg);
5322 rtx pat, pat_end;
5324 pat = process_insert_insn (expr);
5325 if (pat == NULL_RTX || ! INSN_P (pat))
5326 abort ();
5328 pat_end = pat;
5329 while (NEXT_INSN (pat_end) != NULL_RTX)
5330 pat_end = NEXT_INSN (pat_end);
5332 /* If the last insn is a jump, insert EXPR in front [taking care to
5333 handle cc0, etc. properly]. Similarly, we need to take care of
5334 trapping instructions in the presence of non-call exceptions. */
5336 if (GET_CODE (insn) == JUMP_INSN
5337 || (GET_CODE (insn) == INSN
5338 && (bb->succ->succ_next || (bb->succ->flags & EDGE_ABNORMAL))))
5340 #ifdef HAVE_cc0
5341 rtx note;
5342 #endif
5343 /* It should always be the case that we can put these instructions
5344 anywhere in the basic block when performing PRE optimizations.
5345 Check this. */
5346 if (GET_CODE (insn) == INSN && pre
5347 && !TEST_BIT (antloc[bb->index], expr->bitmap_index)
5348 && !TEST_BIT (transp[bb->index], expr->bitmap_index))
5349 abort ();
5351 /* If this is a jump table, then we can't insert stuff here. Since
5352 we know the previous real insn must be the tablejump, we insert
5353 the new instruction just before the tablejump. */
5354 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
5355 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
5356 insn = prev_real_insn (insn);
5358 #ifdef HAVE_cc0
5359 /* FIXME: 'twould be nice to call prev_cc0_setter here but it aborts
5360 if cc0 isn't set. */
5361 note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
5362 if (note)
5363 insn = XEXP (note, 0);
5364 else
5366 rtx maybe_cc0_setter = prev_nonnote_insn (insn);
5367 if (maybe_cc0_setter
5368 && INSN_P (maybe_cc0_setter)
5369 && sets_cc0_p (PATTERN (maybe_cc0_setter)))
5370 insn = maybe_cc0_setter;
5372 #endif
5373 /* FIXME: What if something in cc0/jump uses value set in new insn? */
5374 new_insn = emit_insn_before (pat, insn);
5377 /* Likewise if the last insn is a call, as will happen in the presence
5378 of exception handling. */
5379 else if (GET_CODE (insn) == CALL_INSN
5380 && (bb->succ->succ_next || (bb->succ->flags & EDGE_ABNORMAL)))
5382 /* Keeping in mind SMALL_REGISTER_CLASSES and parameters in registers,
5383 we search backward and place the instructions before the first
5384 parameter is loaded. Do this for everyone for consistency and a
5385 presumption that we'll get better code elsewhere as well.
5387 It should always be the case that we can put these instructions
5388 anywhere in the basic block when performing PRE optimizations.
5389 Check this. */
5391 if (pre
5392 && !TEST_BIT (antloc[bb->index], expr->bitmap_index)
5393 && !TEST_BIT (transp[bb->index], expr->bitmap_index))
5394 abort ();
5396 /* Since different machines initialize their parameter registers
5397 in different orders, assume nothing. Collect the set of all
5398 parameter registers. */
5399 insn = find_first_parameter_load (insn, bb->head);
5401 /* If we found all the parameter loads, then we want to insert
5402 before the first parameter load.
5404 If we did not find all the parameter loads, then we might have
5405 stopped on the head of the block, which could be a CODE_LABEL.
5406 If we inserted before the CODE_LABEL, then we would be putting
5407 the insn in the wrong basic block. In that case, put the insn
5408 after the CODE_LABEL. Also, respect NOTE_INSN_BASIC_BLOCK. */
5409 while (GET_CODE (insn) == CODE_LABEL
5410 || NOTE_INSN_BASIC_BLOCK_P (insn))
5411 insn = NEXT_INSN (insn);
5413 new_insn = emit_insn_before (pat, insn);
5415 else
5416 new_insn = emit_insn_after (pat, insn);
5418 while (1)
5420 if (INSN_P (pat))
5422 add_label_notes (PATTERN (pat), new_insn);
5423 note_stores (PATTERN (pat), record_set_info, pat);
5425 if (pat == pat_end)
5426 break;
5427 pat = NEXT_INSN (pat);
5430 gcse_create_count++;
5432 if (gcse_file)
5434 fprintf (gcse_file, "PRE/HOIST: end of bb %d, insn %d, ",
5435 bb->index, INSN_UID (new_insn));
5436 fprintf (gcse_file, "copying expression %d to reg %d\n",
5437 expr->bitmap_index, regno);
5441 /* Insert partially redundant expressions on edges in the CFG to make
5442 the expressions fully redundant. */
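/* The standard picture: if "a + b" is computed along one path into a
   join block but not the other, inserting a computation of it into a
   fresh pseudo on the other incoming edge turns the partially
   redundant occurrence at the join into a fully redundant one.  */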
5444 static int
5445 pre_edge_insert (edge_list, index_map)
5446 struct edge_list *edge_list;
5447 struct expr **index_map;
5449 int e, i, j, num_edges, set_size, did_insert = 0;
5450 sbitmap *inserted;
5452 /* Where PRE_INSERT_MAP is nonzero, we add the expression on that edge
5453 if it reaches any of the deleted expressions. */
5455 set_size = pre_insert_map[0]->size;
5456 num_edges = NUM_EDGES (edge_list);
5457 inserted = sbitmap_vector_alloc (num_edges, expr_hash_table.n_elems);
5458 sbitmap_vector_zero (inserted, num_edges);
5460 for (e = 0; e < num_edges; e++)
5462 int indx;
5463 basic_block bb = INDEX_EDGE_PRED_BB (edge_list, e);
5465 for (i = indx = 0; i < set_size; i++, indx += SBITMAP_ELT_BITS)
5467 SBITMAP_ELT_TYPE insert = pre_insert_map[e]->elms[i];
5469 for (j = indx; insert && j < (int) expr_hash_table.n_elems; j++, insert >>= 1)
5470 if ((insert & 1) != 0 && index_map[j]->reaching_reg != NULL_RTX)
5472 struct expr *expr = index_map[j];
5473 struct occr *occr;
5475 /* Now look at each deleted occurrence of this expression. */
5476 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
5478 if (! occr->deleted_p)
5479 continue;
5481 /* Insert this expression on this edge if it would
5482 reach the deleted occurrence in BB. */
5483 if (!TEST_BIT (inserted[e], j))
5485 rtx insn;
5486 edge eg = INDEX_EDGE (edge_list, e);
5488 /* We can't insert anything on an abnormal and
5489 critical edge, so we insert the insn at the end of
5490 the previous block. There are several alternatives
5491 detailed in Morgan's book, p. 277 (sec. 10.5) for
5492 handling this situation. This one is easiest for
5493 now. */
5495 if ((eg->flags & EDGE_ABNORMAL) == EDGE_ABNORMAL)
5496 insert_insn_end_bb (index_map[j], bb, 0);
5497 else
5499 insn = process_insert_insn (index_map[j]);
5500 insert_insn_on_edge (insn, eg);
5503 if (gcse_file)
5505 fprintf (gcse_file, "PRE/HOIST: edge (%d,%d), ",
5506 bb->index,
5507 INDEX_EDGE_SUCC_BB (edge_list, e)->index);
5508 fprintf (gcse_file, "copy expression %d\n",
5509 expr->bitmap_index);
5512 update_ld_motion_stores (expr);
5513 SET_BIT (inserted[e], j);
5514 did_insert = 1;
5515 gcse_create_count++;
5522 sbitmap_vector_free (inserted);
5523 return did_insert;
5526 /* Copy the result of INSN to EXPR's reaching_reg. */
5528 static void
5529 pre_insert_copy_insn (expr, insn)
5530 struct expr *expr;
5531 rtx insn;
5533 rtx reg = expr->reaching_reg;
5534 int regno = REGNO (reg);
5535 int indx = expr->bitmap_index;
5536 rtx set = single_set (insn);
5537 rtx new_insn;
5539 if (!set)
5540 abort ();
5542 new_insn = emit_insn_after (gen_move_insn (reg, copy_rtx (SET_DEST (set))), insn);
5544 /* Keep register set table up to date. */
5545 record_one_set (regno, new_insn);
5547 gcse_create_count++;
5549 if (gcse_file)
5550 fprintf (gcse_file,
5551 "PRE: bb %d, insn %d, copy expression %d in insn %d to reg %d\n",
5552 BLOCK_NUM (insn), INSN_UID (new_insn), indx,
5553 INSN_UID (insn), regno);
5554 update_ld_motion_stores (expr);
5557 /* Copy available expressions that reach the redundant expression
5558 to `reaching_reg'. */
5560 static void
5561 pre_insert_copies ()
5563 unsigned int i;
5564 struct expr *expr;
5565 struct occr *occr;
5566 struct occr *avail;
5568 /* For each available expression in the table, copy the result to
5569 `reaching_reg' if the expression reaches a deleted one.
5571 ??? The current algorithm is rather brute force.
5572 Need to do some profiling. */
5574 for (i = 0; i < expr_hash_table.size; i++)
5575 for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash)
5577 /* If the basic block isn't reachable, PPOUT will be TRUE. However,
5578 we don't want to insert a copy here because the expression may not
5579 really be redundant. So only insert an insn if the expression was
5580 deleted. This test also avoids further processing if the
5581 expression wasn't deleted anywhere. */
5582 if (expr->reaching_reg == NULL)
5583 continue;
5585 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
5587 if (! occr->deleted_p)
5588 continue;
5590 for (avail = expr->avail_occr; avail != NULL; avail = avail->next)
5592 rtx insn = avail->insn;
5594 /* No need to handle this one if handled already. */
5595 if (avail->copied_p)
5596 continue;
5598 /* Don't handle this one if it's a redundant one. */
5599 if (TEST_BIT (pre_redundant_insns, INSN_CUID (insn)))
5600 continue;
5602 /* Or if the expression doesn't reach the deleted one. */
5603 if (! pre_expr_reaches_here_p (BLOCK_FOR_INSN (avail->insn),
5604 expr,
5605 BLOCK_FOR_INSN (occr->insn)))
5606 continue;
5608 /* Copy the result of avail to reaching_reg. */
5609 pre_insert_copy_insn (expr, insn);
5610 avail->copied_p = 1;
5616 /* Emit move from SRC to DEST noting the equivalence with expression computed
5617 in INSN. */
5618 static rtx
5619 gcse_emit_move_after (src, dest, insn)
5620 rtx src, dest, insn;
5622 rtx new;
5623 rtx set = single_set (insn), set2;
5624 rtx note;
5625 rtx eqv;
5627 /* This should never fail since we're creating a reg->reg copy
5628 we've verified to be valid. */
5630 new = emit_insn_after (gen_move_insn (dest, src), insn);
5632 /* Note the equivalence for local CSE pass. */
5633 set2 = single_set (new);
5634 if (!set2 || !rtx_equal_p (SET_DEST (set2), dest))
5635 return new;
5636 if ((note = find_reg_equal_equiv_note (insn)))
5637 eqv = XEXP (note, 0);
5638 else
5639 eqv = SET_SRC (set);
5641 set_unique_reg_note (new, REG_EQUAL, copy_insn_1 (eqv));
5643 return new;
5646 /* Delete redundant computations.
5647 Deletion is done by changing the insn to copy the `reaching_reg' of
5648 the expression into the result of the SET. It is left to later passes
5649 (cprop, cse2, flow, combine, regmove) to propagate the copy or eliminate it.
5651 Returns nonzero if a change is made. */
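/* Sketch: a redundant (set (reg X) (plus (reg A) (reg B))) becomes
   (set (reg X) (reg R)), where R is the reaching_reg pseudo holding
   the expression's value on every path to this point.  The register
   names are invented for the example.  */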
5653 static int
5654 pre_delete ()
5656 unsigned int i;
5657 int changed;
5658 struct expr *expr;
5659 struct occr *occr;
5661 changed = 0;
5662 for (i = 0; i < expr_hash_table.size; i++)
5663 for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash)
5665 int indx = expr->bitmap_index;
5667 /* We only need to search antic_occr since we require
5668 ANTLOC != 0. */
5670 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
5672 rtx insn = occr->insn;
5673 rtx set;
5674 basic_block bb = BLOCK_FOR_INSN (insn);
5676 if (TEST_BIT (pre_delete_map[bb->index], indx))
5678 set = single_set (insn);
5679 if (! set)
5680 abort ();
5682 /* Create a pseudo-reg to store the result of reaching
5683 expressions into. Get the mode for the new pseudo from
5684 the mode of the original destination pseudo. */
5685 if (expr->reaching_reg == NULL)
5686 expr->reaching_reg
5687 = gen_reg_rtx (GET_MODE (SET_DEST (set)));
5689 gcse_emit_move_after (expr->reaching_reg, SET_DEST (set), insn);
5690 delete_insn (insn);
5691 occr->deleted_p = 1;
5692 SET_BIT (pre_redundant_insns, INSN_CUID (insn));
5693 changed = 1;
5694 gcse_subst_count++;
5696 if (gcse_file)
5698 fprintf (gcse_file,
5699 "PRE: redundant insn %d (expression %d) in ",
5700 INSN_UID (insn), indx);
5701 fprintf (gcse_file, "bb %d, reaching reg is %d\n",
5702 bb->index, REGNO (expr->reaching_reg));
5708 return changed;
5711 /* Perform GCSE optimizations using PRE.
5712 This is called by one_pre_gcse_pass after all the dataflow analysis
5713 has been done.
5715 This is based on the original Morel-Renvoise paper, Fred Chow's thesis, and
5716 lazy code motion from Knoop, Ruthing and Steffen as described in Advanced
5717 Compiler Design and Implementation.
5719 ??? A new pseudo reg is created to hold the reaching expression. The nice
5720 thing about the classical approach is that it would try to use an existing
5721 reg. If the register can't be adequately optimized [i.e. we introduce
5722 reload problems], one could add a pass here to propagate the new register
5723 through the block.
5725 ??? We don't handle single sets in PARALLELs because we're [currently] not
5726 able to copy the rest of the parallel when we insert copies to create full
5727 redundancies from partial redundancies. However, there's no reason why we
5728 can't handle PARALLELs in the cases where there are no partial
5729 redundancies. */
5731 static int
5732 pre_gcse ()
5734 unsigned int i;
5735 int did_insert, changed;
5736 struct expr **index_map;
5737 struct expr *expr;
5739 /* Compute a mapping from expression number (`bitmap_index') to
5740 hash table entry. */
5742 index_map = (struct expr **) xcalloc (expr_hash_table.n_elems, sizeof (struct expr *));
5743 for (i = 0; i < expr_hash_table.size; i++)
5744 for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash)
5745 index_map[expr->bitmap_index] = expr;
5747 /* Reset bitmap used to track which insns are redundant. */
5748 pre_redundant_insns = sbitmap_alloc (max_cuid);
5749 sbitmap_zero (pre_redundant_insns);
5751 /* Delete the redundant insns first so that
5752 - we know what register to use for the new insns and for the other
5753 ones with reaching expressions
5754 - we know which insns are redundant when we go to create copies */
5756 changed = pre_delete ();
5758 did_insert = pre_edge_insert (edge_list, index_map);
5760 /* In other places with reaching expressions, copy the expression to the
5761 specially allocated pseudo-reg that reaches the redundant expr. */
5762 pre_insert_copies ();
5763 if (did_insert)
5765 commit_edge_insertions ();
5766 changed = 1;
5769 free (index_map);
5770 sbitmap_free (pre_redundant_insns);
5771 return changed;
5774 /* Top level routine to perform one PRE GCSE pass.
5776 Return nonzero if a change was made. */
5778 static int
5779 one_pre_gcse_pass (pass)
5780 int pass;
5782 int changed = 0;
5784 gcse_subst_count = 0;
5785 gcse_create_count = 0;
5787 alloc_hash_table (max_cuid, &expr_hash_table, 0);
5788 add_noreturn_fake_exit_edges ();
5789 if (flag_gcse_lm)
5790 compute_ld_motion_mems ();
5792 compute_hash_table (&expr_hash_table);
5793 trim_ld_motion_mems ();
5794 if (gcse_file)
5795 dump_hash_table (gcse_file, "Expression", &expr_hash_table);
5797 if (expr_hash_table.n_elems > 0)
5799 alloc_pre_mem (last_basic_block, expr_hash_table.n_elems);
5800 compute_pre_data ();
5801 changed |= pre_gcse ();
5802 free_edge_list (edge_list);
5803 free_pre_mem ();
5806 free_ldst_mems ();
5807 remove_fake_edges ();
5808 free_hash_table (&expr_hash_table);
5810 if (gcse_file)
5812 fprintf (gcse_file, "\nPRE GCSE of %s, pass %d: %d bytes needed, ",
5813 current_function_name, pass, bytes_used);
5814 fprintf (gcse_file, "%d substs, %d insns created\n",
5815 gcse_subst_count, gcse_create_count);
5818 return changed;
5821 /* If X contains any LABEL_REF's, add REG_LABEL notes for them to INSN.
5822 If notes are added to an insn which references a CODE_LABEL, the
5823 LABEL_NUSES count is incremented. We have to add REG_LABEL notes,
5824 because the following loop optimization pass requires them. */
5826 /* ??? This is very similar to the loop.c add_label_notes function. We
5827 could probably share code here. */
5829 /* ??? If there was a jump optimization pass after gcse and before loop,
5830 then we would not need to do this here, because jump would add the
5831 necessary REG_LABEL notes. */
5833 static void
5834 add_label_notes (x, insn)
5835 rtx x;
5836 rtx insn;
5838 enum rtx_code code = GET_CODE (x);
5839 int i, j;
5840 const char *fmt;
5842 if (code == LABEL_REF && !LABEL_REF_NONLOCAL_P (x))
5844 /* This code used to ignore labels that referred to dispatch tables to
5845 avoid flow generating (slightly) worse code.
5847 We no longer ignore such label references (see LABEL_REF handling in
5848 mark_jump_label for additional information). */
5850 REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_LABEL, XEXP (x, 0),
5851 REG_NOTES (insn));
5852 if (LABEL_P (XEXP (x, 0)))
5853 LABEL_NUSES (XEXP (x, 0))++;
5854 return;
5857 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
5859 if (fmt[i] == 'e')
5860 add_label_notes (XEXP (x, i), insn);
5861 else if (fmt[i] == 'E')
5862 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
5863 add_label_notes (XVECEXP (x, i, j), insn);
5867 /* Compute transparent outgoing information for each block.
5869 An expression is transparent to an edge unless it is killed by
5870 the edge itself. This can only happen with abnormal control flow,
5871 when the edge is traversed through a call. This happens with
5872 non-local labels and exceptions.
5874 This would not be necessary if we split the edge. While this is
5875 normally impossible for abnormal critical edges, with some effort
5876 it should be possible with exception handling, since we still have
5877 control over which handler should be invoked. But due to increased
5878 EH table sizes, this may not be worthwhile. */
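/* E.g. (a sketch): for a block ending in

     x = g;      the expression (mem (symbol_ref g)) is computed
     foo ();     the call may store to g

   the MEM expression is not transparent out of the block, unless its
   address is a constant pool reference that no call can modify.  */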
5880 static void
5881 compute_transpout ()
5883 basic_block bb;
5884 unsigned int i;
5885 struct expr *expr;
5887 sbitmap_vector_ones (transpout, last_basic_block);
5889 FOR_EACH_BB (bb)
5891 /* Note that flow inserted a nop at the end of basic blocks that
5892 end in call instructions for reasons other than abnormal
5893 control flow. */
5894 if (GET_CODE (bb->end) != CALL_INSN)
5895 continue;
5897 for (i = 0; i < expr_hash_table.size; i++)
5898 for (expr = expr_hash_table.table[i]; expr ; expr = expr->next_same_hash)
5899 if (GET_CODE (expr->expr) == MEM)
5901 if (GET_CODE (XEXP (expr->expr, 0)) == SYMBOL_REF
5902 && CONSTANT_POOL_ADDRESS_P (XEXP (expr->expr, 0)))
5903 continue;
5905 /* ??? Optimally, we would use interprocedural alias
5906 analysis to determine if this mem is actually killed
5907 by this call. */
5908 RESET_BIT (transpout[bb->index], expr->bitmap_index);
5913 /* Removal of useless null pointer checks */
5915 /* Called via note_stores. X is set by SETTER. If X is a register we must
5916 invalidate nonnull_local and set nonnull_killed. DATA is really a
5917 `null_pointer_info *'.
5919 We ignore hard registers. */
5921 static void
5922 invalidate_nonnull_info (x, setter, data)
5923 rtx x;
5924 rtx setter ATTRIBUTE_UNUSED;
5925 void *data;
5927 unsigned int regno;
5928 struct null_pointer_info *npi = (struct null_pointer_info *) data;
5930 while (GET_CODE (x) == SUBREG)
5931 x = SUBREG_REG (x);
5933 /* Ignore anything that is not a register or is a hard register. */
5934 if (GET_CODE (x) != REG
5935 || REGNO (x) < npi->min_reg
5936 || REGNO (x) >= npi->max_reg)
5937 return;
5939 regno = REGNO (x) - npi->min_reg;
5941 RESET_BIT (npi->nonnull_local[npi->current_block->index], regno);
5942 SET_BIT (npi->nonnull_killed[npi->current_block->index], regno);
5945 /* Do null-pointer check elimination for the registers indicated in
5946 NPI. NONNULL_AVIN and NONNULL_AVOUT are pre-allocated sbitmaps;
5947 they are not our responsibility to free. */
5949 static int
5950 delete_null_pointer_checks_1 (block_reg, nonnull_avin,
5951 nonnull_avout, npi)
5952 unsigned int *block_reg;
5953 sbitmap *nonnull_avin;
5954 sbitmap *nonnull_avout;
5955 struct null_pointer_info *npi;
5957 basic_block bb, current_block;
5958 sbitmap *nonnull_local = npi->nonnull_local;
5959 sbitmap *nonnull_killed = npi->nonnull_killed;
5960 int something_changed = 0;
5962 /* Compute local properties, nonnull and killed. A register will have
5963 the nonnull property if at the end of the current block its value is
5964 known to be nonnull. The killed property indicates that somewhere in
5965 the block any information we had about the register is killed.
5967 Note that a register can have both properties in a single block. That
5968 indicates that it's killed, then later in the block a new value is
5969 computed. */
5970 sbitmap_vector_zero (nonnull_local, last_basic_block);
5971 sbitmap_vector_zero (nonnull_killed, last_basic_block);
5973 FOR_EACH_BB (current_block)
5975 rtx insn, stop_insn;
5977 /* Set the current block for invalidate_nonnull_info. */
5978 npi->current_block = current_block;
5980 /* Scan each insn in the basic block looking for memory references and
5981 register sets. */
5982 stop_insn = NEXT_INSN (current_block->end);
5983 for (insn = current_block->head;
5984 insn != stop_insn;
5985 insn = NEXT_INSN (insn))
5987 rtx set;
5988 rtx reg;
5990 /* Ignore anything that is not a normal insn. */
5991 if (! INSN_P (insn))
5992 continue;
5994 /* Basically ignore anything that is not a simple SET. We do have
5995 to make sure to invalidate nonnull_local and set nonnull_killed
5996 for such insns though. */
5997 set = single_set (insn);
5998 if (!set)
6000 note_stores (PATTERN (insn), invalidate_nonnull_info, npi);
6001 continue;
6004 /* See if we've got a usable memory load. We handle it first
6005 in case it uses its address register as a dest (which kills
6006 the nonnull property). */
6007 if (GET_CODE (SET_SRC (set)) == MEM
6008 && GET_CODE ((reg = XEXP (SET_SRC (set), 0))) == REG
6009 && REGNO (reg) >= npi->min_reg
6010 && REGNO (reg) < npi->max_reg)
6011 SET_BIT (nonnull_local[current_block->index],
6012 REGNO (reg) - npi->min_reg);
6014 /* Now invalidate stuff clobbered by this insn. */
6015 note_stores (PATTERN (insn), invalidate_nonnull_info, npi);
6017 /* And handle stores; we do these last since any sets in INSN can
6018 not kill the nonnull property if it is derived from a MEM
6019 appearing in a SET_DEST. */
6020 if (GET_CODE (SET_DEST (set)) == MEM
6021 && GET_CODE ((reg = XEXP (SET_DEST (set), 0))) == REG
6022 && REGNO (reg) >= npi->min_reg
6023 && REGNO (reg) < npi->max_reg)
6024 SET_BIT (nonnull_local[current_block->index],
6025 REGNO (reg) - npi->min_reg);
6029 /* Now compute global properties based on the local properties. This
6030 is a classic global availability algorithm. */
6031 compute_available (nonnull_local, nonnull_killed,
6032 nonnull_avout, nonnull_avin);
6034 /* Now look at each bb and see if it ends with a compare of a value
6035 against zero. */
6036 FOR_EACH_BB (bb)
6038 rtx last_insn = bb->end;
6039 rtx condition, earliest;
6040 int compare_and_branch;
6042 /* Since MIN_REG is always at least FIRST_PSEUDO_REGISTER, and
6043 since BLOCK_REG[BB] is zero if this block did not end with a
6044 comparison against zero, this condition works. */
6045 if (block_reg[bb->index] < npi->min_reg
6046 || block_reg[bb->index] >= npi->max_reg)
6047 continue;
6049 /* LAST_INSN is a conditional jump. Get its condition. */
6050 condition = get_condition (last_insn, &earliest);
6052 /* If we can't determine the condition then skip. */
6053 if (! condition)
6054 continue;
6056 /* Is the register known to have a nonzero value? */
6057 if (!TEST_BIT (nonnull_avout[bb->index], block_reg[bb->index] - npi->min_reg))
6058 continue;
6060 /* Try to compute whether the compare/branch at the loop end is one or
6061 two instructions. */
6062 if (earliest == last_insn)
6063 compare_and_branch = 1;
6064 else if (earliest == prev_nonnote_insn (last_insn))
6065 compare_and_branch = 2;
6066 else
6067 continue;
6069 /* We know the register in this comparison is nonnull at exit from
6070 this block. We can optimize this comparison. */
6071 if (GET_CODE (condition) == NE)
6073 rtx new_jump;
6075 new_jump = emit_jump_insn_after (gen_jump (JUMP_LABEL (last_insn)),
6076 last_insn);
6077 JUMP_LABEL (new_jump) = JUMP_LABEL (last_insn);
6078 LABEL_NUSES (JUMP_LABEL (new_jump))++;
6079 emit_barrier_after (new_jump);
6082 something_changed = 1;
6083 delete_insn (last_insn);
6084 if (compare_and_branch == 2)
6085 delete_insn (earliest);
6086 purge_dead_edges (bb);
6088 /* Don't check this block again. (Note that BLOCK_END is
6089 invalid here; we deleted the last instruction in the
6090 block.) */
6091 block_reg[bb->index] = 0;
6094 return something_changed;
6097 /* Find EQ/NE comparisons against zero which can be (indirectly) evaluated
6098 at compile time.
6100 This is conceptually similar to global constant/copy propagation and
6101 classic global CSE (it even uses the same dataflow equations as cprop).
6103 If a register is used as memory address with the form (mem (reg)), then we
6104 know that REG can not be zero at that point in the program. Any instruction
6105 which sets REG "kills" this property.
6107 So, if every path leading to a conditional branch has an available memory
6108 reference of that form, then we know the register can not have the value
6109 zero at the conditional branch.
6111 So we merely need to compute the local properties and propagate that data
6112 around the cfg, then optimize where possible.
6114 We run this pass two times. Once before CSE, then again after CSE. This
6115 has proven to be the most profitable approach. It is rare for new
6116 optimization opportunities of this nature to appear after the first CSE
6117 pass.
6119 This could probably be integrated with global cprop with a little work. */
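/* For illustration (a sketch): in

     x = *p;
     if (p == 0)
       abort ();

   the load makes P nonnull on every path reaching the comparison, so
   the EQ branch can never be taken and the check is deleted; an NE
   test would instead be turned into an unconditional jump.  */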
6121 int
6122 delete_null_pointer_checks (f)
6123 rtx f ATTRIBUTE_UNUSED;
6125 sbitmap *nonnull_avin, *nonnull_avout;
6126 unsigned int *block_reg;
6127 basic_block bb;
6128 int reg;
6129 int regs_per_pass;
6130 int max_reg;
6131 struct null_pointer_info npi;
6132 int something_changed = 0;
6134 /* If we have only a single block, then there's nothing to do. */
6135 if (n_basic_blocks <= 1)
6136 return 0;
6138 /* Trying to perform global optimizations on flow graphs which have
6139 a high connectivity will take a long time and is unlikely to be
6140 particularly useful.
6142 In normal circumstances a cfg should have about twice as many edges
6143 as blocks. But we do not want to punish small functions which have
6144 a couple switch statements. So we require a relatively large number
6145 of basic blocks and the ratio of edges to blocks to be high. */
6146 if (n_basic_blocks > 1000 && n_edges / n_basic_blocks >= 20)
6147 return 0;
6149 /* We need four bitmaps, each with a bit for each register in each
6150 basic block. */
6151 max_reg = max_reg_num ();
6152 regs_per_pass = get_bitmap_width (4, last_basic_block, max_reg);
6154 /* Allocate bitmaps to hold local and global properties. */
6155 npi.nonnull_local = sbitmap_vector_alloc (last_basic_block, regs_per_pass);
6156 npi.nonnull_killed = sbitmap_vector_alloc (last_basic_block, regs_per_pass);
6157 nonnull_avin = sbitmap_vector_alloc (last_basic_block, regs_per_pass);
6158 nonnull_avout = sbitmap_vector_alloc (last_basic_block, regs_per_pass);
6160 /* Go through the basic blocks, seeing whether or not each block
6161 ends with a conditional branch whose condition is a comparison
6162 against zero. Record the register compared in BLOCK_REG. */
6163 block_reg = (unsigned int *) xcalloc (last_basic_block, sizeof (int));
6164 FOR_EACH_BB (bb)
6166 rtx last_insn = bb->end;
6167 rtx condition, earliest, reg;
6169 /* We only want conditional branches. */
6170 if (GET_CODE (last_insn) != JUMP_INSN
6171 || !any_condjump_p (last_insn)
6172 || !onlyjump_p (last_insn))
6173 continue;
6175 /* LAST_INSN is a conditional jump. Get its condition. */
6176 condition = get_condition (last_insn, &earliest);
6178 /* If we were unable to get the condition, or it is not an equality
6179 comparison against zero then there's nothing we can do. */
6180 if (!condition
6181 || (GET_CODE (condition) != NE && GET_CODE (condition) != EQ)
6182 || GET_CODE (XEXP (condition, 1)) != CONST_INT
6183 || (XEXP (condition, 1)
6184 != CONST0_RTX (GET_MODE (XEXP (condition, 0)))))
6185 continue;
6187 /* We must be checking a register against zero. */
6188 reg = XEXP (condition, 0);
6189 if (GET_CODE (reg) != REG)
6190 continue;
6192 block_reg[bb->index] = REGNO (reg);
6195 /* Go through the algorithm for each block of registers. */
6196 for (reg = FIRST_PSEUDO_REGISTER; reg < max_reg; reg += regs_per_pass)
6198 npi.min_reg = reg;
6199 npi.max_reg = MIN (reg + regs_per_pass, max_reg);
6200 something_changed |= delete_null_pointer_checks_1 (block_reg,
6201 nonnull_avin,
6202 nonnull_avout,
6203 &npi);
6206 /* Free the table of registers compared at the end of every block. */
6207 free (block_reg);
6209 /* Free bitmaps. */
6210 sbitmap_vector_free (npi.nonnull_local);
6211 sbitmap_vector_free (npi.nonnull_killed);
6212 sbitmap_vector_free (nonnull_avin);
6213 sbitmap_vector_free (nonnull_avout);
6215 return something_changed;
6218 /* Code Hoisting variables and subroutines. */
6220 /* Very busy expressions. */
6221 static sbitmap *hoist_vbein;
6222 static sbitmap *hoist_vbeout;
6224 /* Hoistable expressions. */
6225 static sbitmap *hoist_exprs;
6227 /* Dominator bitmaps. */
6228 dominance_info dominators;
6230 /* ??? We could compute post dominators and run this algorithm in
6231 reverse to perform tail merging, doing so would probably be
6232 more effective than the tail merging code in jump.c.
6234 It's unclear if tail merging could be run in parallel with
6235 code hoisting. It would be nice. */
6237 /* Allocate vars used for code hoisting analysis. */
6239 static void
6240 alloc_code_hoist_mem (n_blocks, n_exprs)
6241 int n_blocks, n_exprs;
6243 antloc = sbitmap_vector_alloc (n_blocks, n_exprs);
6244 transp = sbitmap_vector_alloc (n_blocks, n_exprs);
6245 comp = sbitmap_vector_alloc (n_blocks, n_exprs);
6247 hoist_vbein = sbitmap_vector_alloc (n_blocks, n_exprs);
6248 hoist_vbeout = sbitmap_vector_alloc (n_blocks, n_exprs);
6249 hoist_exprs = sbitmap_vector_alloc (n_blocks, n_exprs);
6250 transpout = sbitmap_vector_alloc (n_blocks, n_exprs);
6253 /* Free vars used for code hoisting analysis. */
6255 static void
6256 free_code_hoist_mem ()
6258 sbitmap_vector_free (antloc);
6259 sbitmap_vector_free (transp);
6260 sbitmap_vector_free (comp);
6262 sbitmap_vector_free (hoist_vbein);
6263 sbitmap_vector_free (hoist_vbeout);
6264 sbitmap_vector_free (hoist_exprs);
6265 sbitmap_vector_free (transpout);
6267 free_dominance_info (dominators);
6270 /* Compute the very busy expressions at entry/exit from each block.
6272 An expression is very busy if all paths from a given point
6273 compute the expression. */
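/* E.g. (a sketch): in

     if (c)
       return a * b;
     else
       return a * b + 1;

   "a * b" is very busy at the branch, since every path from that
   point computes it; such expressions are the candidates considered
   for hoisting above the branch.  */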
6275 static void
6276 compute_code_hoist_vbeinout ()
6278 int changed, passes;
6279 basic_block bb;
6281 sbitmap_vector_zero (hoist_vbeout, last_basic_block);
6282 sbitmap_vector_zero (hoist_vbein, last_basic_block);
6284 passes = 0;
6285 changed = 1;
6287 while (changed)
6289 changed = 0;
6291 /* We scan the blocks in the reverse order to speed up
6292 the convergence. */
6293 FOR_EACH_BB_REVERSE (bb)
6295 changed |= sbitmap_a_or_b_and_c_cg (hoist_vbein[bb->index], antloc[bb->index],
6296 hoist_vbeout[bb->index], transp[bb->index]);
6297 if (bb->next_bb != EXIT_BLOCK_PTR)
6298 sbitmap_intersection_of_succs (hoist_vbeout[bb->index], hoist_vbein, bb->index);
6301 passes++;
6304 if (gcse_file)
6305 fprintf (gcse_file, "hoisting vbeinout computation: %d passes\n", passes);
6308 /* Top level routine to do the dataflow analysis needed by code hoisting. */
6310 static void
6311 compute_code_hoist_data ()
6313 compute_local_properties (transp, comp, antloc, &expr_hash_table);
6314 compute_transpout ();
6315 compute_code_hoist_vbeinout ();
6316 dominators = calculate_dominance_info (CDI_DOMINATORS);
6317 if (gcse_file)
6318 fprintf (gcse_file, "\n");
6321 /* Determine if the expression identified by EXPR_INDEX would
6322 reach BB unimpaired if it was placed at the end of EXPR_BB.
6324 It's unclear exactly what Muchnick meant by "unimpaired". It seems
6325 to me that the expression must either be computed or transparent in
6326 *every* block in the path(s) from EXPR_BB to BB. Any other definition
6327 would allow the expression to be hoisted out of loops, even if
6328 the expression wasn't a loop invariant.
6330 Contrast this to reachability for PRE where an expression is
6331 considered reachable if *any* path reaches instead of *all*
6332 paths. */
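/* A sketch: with the paths

     EXPR_BB -> B1 -> BB
     EXPR_BB -> B2 -> BB

   the expression reaches BB unimpaired only if it survives both B1
   and B2; a kill, or another computation of the expression, in
   either block stops the search and the answer is zero.  */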
6334 static int
6335 hoist_expr_reaches_here_p (expr_bb, expr_index, bb, visited)
6336 basic_block expr_bb;
6337 int expr_index;
6338 basic_block bb;
6339 char *visited;
6341 edge pred;
6342 int visited_allocated_locally = 0;
6345 if (visited == NULL)
6347 visited_allocated_locally = 1;
6348 visited = xcalloc (last_basic_block, 1);
6351 for (pred = bb->pred; pred != NULL; pred = pred->pred_next)
6353 basic_block pred_bb = pred->src;
6355 if (pred->src == ENTRY_BLOCK_PTR)
6356 break;
6357 else if (pred_bb == expr_bb)
6358 continue;
6359 else if (visited[pred_bb->index])
6360 continue;
6362 /* Does this predecessor generate this expression? */
6363 else if (TEST_BIT (comp[pred_bb->index], expr_index))
6364 break;
6365 else if (! TEST_BIT (transp[pred_bb->index], expr_index))
6366 break;
6368 /* Not killed. */
6369 else
6371 visited[pred_bb->index] = 1;
6372 if (! hoist_expr_reaches_here_p (expr_bb, expr_index,
6373 pred_bb, visited))
6374 break;
6377 if (visited_allocated_locally)
6378 free (visited);
6380 return (pred == NULL);
6383 /* Actually perform code hoisting. */
6385 static void
6386 hoist_code ()
6388 basic_block bb, dominated;
6389 basic_block *domby;
6390 unsigned int domby_len;
6391 unsigned int i,j;
6392 struct expr **index_map;
6393 struct expr *expr;
6395 sbitmap_vector_zero (hoist_exprs, last_basic_block);
6397 /* Compute a mapping from expression number (`bitmap_index') to
6398 hash table entry. */
6400 index_map = (struct expr **) xcalloc (expr_hash_table.n_elems, sizeof (struct expr *));
6401 for (i = 0; i < expr_hash_table.size; i++)
6402 for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash)
6403 index_map[expr->bitmap_index] = expr;
6405 /* Walk over each basic block looking for potentially hoistable
6406 expressions; nothing gets hoisted from the entry block. */
6407 FOR_EACH_BB (bb)
6409 int found = 0;
6410 int insn_inserted_p;
6412 domby_len = get_dominated_by (dominators, bb, &domby);
6413 /* Examine each expression that is very busy at the exit of this
6414 block. These are the potentially hoistable expressions. */
6415 for (i = 0; i < hoist_vbeout[bb->index]->n_bits; i++)
6417 int hoistable = 0;
6419 if (TEST_BIT (hoist_vbeout[bb->index], i)
6420 && TEST_BIT (transpout[bb->index], i))
6422 /* We've found a potentially hoistable expression, now
6423 we look at every block BB dominates to see if it
6424 computes the expression. */
6425 for (j = 0; j < domby_len; j++)
6427 dominated = domby[j];
6428 /* Ignore self dominance. */
6429 if (bb == dominated)
6430 continue;
6431 /* We've found a dominated block, now see if it computes
6432 the busy expression and whether or not moving that
6433 expression to the "beginning" of that block is safe. */
6434 if (!TEST_BIT (antloc[dominated->index], i))
6435 continue;
6437 /* Note if the expression would reach the dominated block
6438 unimpaired if it was placed at the end of BB.
6440 Keep track of how many times this expression is hoistable
6441 from a dominated block into BB. */
6442 if (hoist_expr_reaches_here_p (bb, i, dominated, NULL))
6443 hoistable++;
6446 /* If we found more than one hoistable occurrence of this
6447 expression, then note it in the bitmap of expressions to
6448 hoist. It makes no sense to hoist things which are computed
6449 in only one BB, and doing so tends to pessimize register
6450 allocation. One could increase this value to try harder
6451 to avoid any possible code expansion due to register
6452 allocation issues; however experiments have shown that
6453 the vast majority of hoistable expressions are only movable
6454 from two successors, so raising this threshold is likely
6455 to nullify any benefit we get from code hoisting. */
6456 if (hoistable > 1)
6458 SET_BIT (hoist_exprs[bb->index], i);
6459 found = 1;
6463 /* If we found nothing to hoist, then quit now. */
6464 if (! found)
6466 free (domby);
6467 continue;
6470 /* Loop over all the hoistable expressions. */
6471 for (i = 0; i < hoist_exprs[bb->index]->n_bits; i++)
6473 /* We want to insert the expression into BB only once, so
6474 note when we've inserted it. */
6475 insn_inserted_p = 0;
6477 /* These tests should be the same as the tests above. */
6478 if (TEST_BIT (hoist_vbeout[bb->index], i))
6480 /* We've found a potentially hoistable expression, now
6481 we look at every block BB dominates to see if it
6482 computes the expression. */
6483 for (j = 0; j < domby_len; j++)
6485 dominated = domby[j];
6486 /* Ignore self dominance. */
6487 if (bb == dominated)
6488 continue;
6490 /* We've found a dominated block, now see if it computes
6491 the busy expression and whether or not moving that
6492 expression to the "beginning" of that block is safe. */
6493 if (!TEST_BIT (antloc[dominated->index], i))
6494 continue;
6496 /* The expression is computed in the dominated block and
6497 it would be safe to compute it at the start of the
6498 dominated block. Now we have to determine if the
6499 expression would reach the dominated block if it was
6500 placed at the end of BB. */
6501 if (hoist_expr_reaches_here_p (bb, i, dominated, NULL))
6503 struct expr *expr = index_map[i];
6504 struct occr *occr = expr->antic_occr;
6505 rtx insn;
6506 rtx set;
6508 /* Find the right occurrence of this expression. */
6509 while (occr && BLOCK_FOR_INSN (occr->insn) != dominated)
6510 occr = occr->next;
6512 /* Should never happen. */
6513 if (!occr)
6514 abort ();
6516 insn = occr->insn;
6518 set = single_set (insn);
6519 if (! set)
6520 abort ();
6522 /* Create a pseudo-reg to store the result of reaching
6523 expressions into. Get the mode for the new pseudo
6524 from the mode of the original destination pseudo. */
6525 if (expr->reaching_reg == NULL)
6526 expr->reaching_reg
6527 = gen_reg_rtx (GET_MODE (SET_DEST (set)));
6529 gcse_emit_move_after (expr->reaching_reg, SET_DEST (set), insn);
6530 delete_insn (insn);
6531 occr->deleted_p = 1;
6532 if (!insn_inserted_p)
6534 insert_insn_end_bb (index_map[i], bb, 0);
6535 insn_inserted_p = 1;
6541 free (domby);
6544 free (index_map);
6547 /* Top level routine to perform one code hoisting (aka unification) pass.
6549 Return nonzero if a change was made. */
6551 static int
6552 one_code_hoisting_pass ()
6554 int changed = 0;
6556 alloc_hash_table (max_cuid, &expr_hash_table, 0);
6557 compute_hash_table (&expr_hash_table);
6558 if (gcse_file)
6559 dump_hash_table (gcse_file, "Code Hoisting Expressions", &expr_hash_table);
6561 if (expr_hash_table.n_elems > 0)
6563 alloc_code_hoist_mem (last_basic_block, expr_hash_table.n_elems);
6564 compute_code_hoist_data ();
6565 hoist_code ();
6566 free_code_hoist_mem ();
6569 free_hash_table (&expr_hash_table);
6571 return changed;
6574 /* Here we provide the things required to do store motion towards
6575 the exit. In order for this to be effective, gcse also needed to
6576 be taught how to move a load when it is killed only by a store to itself.
6578 int i;
6579 float a[10];
6581 void foo(float scale)
6583 for (i=0; i<10; i++)
6584 a[i] *= scale;
6587 'i' is both loaded and stored to in the loop. Normally, gcse cannot move
6588 the load out since it's live around the loop, and stored at the bottom
6589 of the loop.
6591 The 'Load Motion' referred to and implemented in this file is
6592 an enhancement to gcse which, when using edge based lcm, recognizes
6593 this situation and allows gcse to move the load out of the loop.
6595 Once gcse has hoisted the load, store motion can then push this
6596 load towards the exit, and we end up with no loads or stores of 'i'
6597 in the loop. */
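/* Roughly, for the example above the combined effect is

     for (reg = 0; reg < 10; reg++)
       a[reg] *= scale;
     i = reg;

   i.e. no load or store of 'i' remains inside the loop, and a single
   store after it keeps memory up to date.  (A sketch of the intended
   result, not the exact RTL emitted.)  */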
6599 /* This will search the ldst list for a matching expression. If it
6600 doesn't find one, we create one and initialize it. */
6602 static struct ls_expr *
6603 ldst_entry (x)
6604 rtx x;
6606 struct ls_expr * ptr;
6608 for (ptr = first_ls_expr(); ptr != NULL; ptr = next_ls_expr (ptr))
6609 if (expr_equiv_p (ptr->pattern, x))
6610 break;
6612 if (!ptr)
6614 ptr = (struct ls_expr *) xmalloc (sizeof (struct ls_expr));
6616 ptr->next = pre_ldst_mems;
6617 ptr->expr = NULL;
6618 ptr->pattern = x;
6619 ptr->pattern_regs = NULL_RTX;
6620 ptr->loads = NULL_RTX;
6621 ptr->stores = NULL_RTX;
6622 ptr->reaching_reg = NULL_RTX;
6623 ptr->invalid = 0;
6624 ptr->index = 0;
6625 ptr->hash_index = 0;
6626 pre_ldst_mems = ptr;
6629 return ptr;
6632 /* Free up an individual ldst entry. */
6634 static void
6635 free_ldst_entry (ptr)
6636 struct ls_expr * ptr;
6638 free_INSN_LIST_list (& ptr->loads);
6639 free_INSN_LIST_list (& ptr->stores);
6641 free (ptr);
6644 /* Free up all memory associated with the ldst list. */
6646 static void
6647 free_ldst_mems ()
6649 while (pre_ldst_mems)
6651 struct ls_expr * tmp = pre_ldst_mems;
6653 pre_ldst_mems = pre_ldst_mems->next;
6655 free_ldst_entry (tmp);
6658 pre_ldst_mems = NULL;
6661 /* Dump debugging info about the ldst list. */
6663 static void
6664 print_ldst_list (file)
6665 FILE * file;
6667 struct ls_expr * ptr;
6669 fprintf (file, "LDST list: \n");
6671 for (ptr = first_ls_expr(); ptr != NULL; ptr = next_ls_expr (ptr))
6673 fprintf (file, " Pattern (%3d): ", ptr->index);
6675 print_rtl (file, ptr->pattern);
6677 fprintf (file, "\n Loads : ");
6679 if (ptr->loads)
6680 print_rtl (file, ptr->loads);
6681 else
6682 fprintf (file, "(nil)");
6684 fprintf (file, "\n Stores : ");
6686 if (ptr->stores)
6687 print_rtl (file, ptr->stores);
6688 else
6689 fprintf (file, "(nil)");
6691 fprintf (file, "\n\n");
6694 fprintf (file, "\n");
6697 /* Return X's entry in the list of ldst only expressions if it is present and valid, else NULL. */
6699 static struct ls_expr *
6700 find_rtx_in_ldst (x)
6701 rtx x;
6703 struct ls_expr * ptr;
6705 for (ptr = pre_ldst_mems; ptr != NULL; ptr = ptr->next)
6706 if (expr_equiv_p (ptr->pattern, x) && ! ptr->invalid)
6707 return ptr;
6709 return NULL;
6712 /* Assign each element of the list of mems a monotonically increasing value, and return the number of elements. */
6714 static int
6715 enumerate_ldsts ()
6717 struct ls_expr * ptr;
6718 int n = 0;
6720 for (ptr = pre_ldst_mems; ptr != NULL; ptr = ptr->next)
6721 ptr->index = n++;
6723 return n;
6726 /* Return first item in the list. */
6728 static inline struct ls_expr *
6729 first_ls_expr ()
6731 return pre_ldst_mems;
6734 /* Return the next item in the list after the specified one. */
6736 static inline struct ls_expr *
6737 next_ls_expr (ptr)
6738 struct ls_expr * ptr;
6740 return ptr->next;
6743 /* Load Motion for loads which only kill themselves. */
6745 /* Return true if x is a simple MEM operation, with no side effects and
6746 no reference to the stack pointer. These are the types of loads we
6747 consider for the ld_motion list; otherwise we let the usual aliasing take care of it. */
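/* E.g. a load such as (mem:SI (symbol_ref "i")) qualifies, while a
   volatile MEM, a BLKmode MEM, or one that mentions the stack
   pointer does not.  */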
6749 static int
6750 simple_mem (x)
6751 rtx x;
6753 if (GET_CODE (x) != MEM)
6754 return 0;
6756 if (MEM_VOLATILE_P (x))
6757 return 0;
6759 if (GET_MODE (x) == BLKmode)
6760 return 0;
6762 /* If we are handling exceptions, we must be careful with memory references
6763 that may trap. If we are not, the behavior is undefined, so we may just
6764 continue. */
6765 if (flag_non_call_exceptions && may_trap_p (x))
6766 return 0;
6768 if (side_effects_p (x))
6769 return 0;
6771 /* Do not consider function arguments passed on the stack. */
6772 if (reg_mentioned_p (stack_pointer_rtx, x))
6773 return 0;
6775 if (flag_float_store && FLOAT_MODE_P (GET_MODE (x)))
6776 return 0;
6778 return 1;
6781 /* Make sure there isn't a buried reference in this pattern anywhere.
6782 If there is, invalidate the entry for it since we're not capable
6783 of fixing it up just yet. We have to be sure we know about ALL
6784 loads since the aliasing code will allow all entries in the
6785 ld_motion list to not-alias one another. If we miss a load, we will get
6786 the wrong value since gcse might common it and we won't know to
6787 fix it up. */
6789 static void
6790 invalidate_any_buried_refs (x)
6791 rtx x;
6793 const char * fmt;
6794 int i, j;
6795 struct ls_expr * ptr;
6797 /* Invalidate it in the list. */
6798 if (GET_CODE (x) == MEM && simple_mem (x))
6800 ptr = ldst_entry (x);
6801 ptr->invalid = 1;
6804 /* Recursively process the insn. */
6805 fmt = GET_RTX_FORMAT (GET_CODE (x));
6807 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
6809 if (fmt[i] == 'e')
6810 invalidate_any_buried_refs (XEXP (x, i));
6811 else if (fmt[i] == 'E')
6812 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
6813 invalidate_any_buried_refs (XVECEXP (x, i, j));
6817 /* Find all the 'simple' MEMs which are used in LOADs and STORES. Simple
6818 being defined as MEM loads and stores to symbols, with no side effects
6819 and no registers in the expression. For a MEM destination, we also
6820 check that the insn is still valid if we replace the destination with a
6821 REG, as is done in update_ld_motion_stores. If there are any uses/defs
6822 which don't match these criteria, they are invalidated and trimmed out
6823 later. */
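/* E.g. (set (reg R) (mem M)) adds the insn to the loads of M's ldst
   entry, and (set (mem M) (reg R)) adds it to the stores; a MEM
   source feeding a non-REG destination invalidates the entry
   instead.  */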
6825 static void
6826 compute_ld_motion_mems ()
6828 struct ls_expr * ptr;
6829 basic_block bb;
6830 rtx insn;
6832 pre_ldst_mems = NULL;
6834 FOR_EACH_BB (bb)
6836 for (insn = bb->head;
6837 insn && insn != NEXT_INSN (bb->end);
6838 insn = NEXT_INSN (insn))
6840 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
6842 if (GET_CODE (PATTERN (insn)) == SET)
6844 rtx src = SET_SRC (PATTERN (insn));
6845 rtx dest = SET_DEST (PATTERN (insn));
6847 /* Check for a simple LOAD... */
6848 if (GET_CODE (src) == MEM && simple_mem (src))
6850 ptr = ldst_entry (src);
6851 if (GET_CODE (dest) == REG)
6852 ptr->loads = alloc_INSN_LIST (insn, ptr->loads);
6853 else
6854 ptr->invalid = 1;
6856 else
6858 /* Make sure there isn't a buried load somewhere. */
6859 invalidate_any_buried_refs (src);
6862 /* Check for stores. Don't worry about aliased ones, they
6863 will block any movement we might do later. We only care
6864 about this exact pattern since that is the only
6865 circumstance in which we will ignore the aliasing info. */
6866 if (GET_CODE (dest) == MEM && simple_mem (dest))
6868 ptr = ldst_entry (dest);
6870 if (GET_CODE (src) != MEM
6871 && GET_CODE (src) != ASM_OPERANDS
6872 /* Check for REG manually since want_to_gcse_p
6873 returns 0 for all REGs. */
6874 && (REG_P (src) || want_to_gcse_p (src)))
6875 ptr->stores = alloc_INSN_LIST (insn, ptr->stores);
6876 else
6877 ptr->invalid = 1;
6880 else
6881 invalidate_any_buried_refs (PATTERN (insn));
6887 /* Remove any references that have been either invalidated or are not in the
6888 expression list for pre gcse. */
6890 static void
6891 trim_ld_motion_mems ()
6893 struct ls_expr * last = NULL;
6894 struct ls_expr * ptr = first_ls_expr ();
6896 while (ptr != NULL)
6898 int del = ptr->invalid;
6899 struct expr * expr = NULL;
6901 /* Delete if entry has been made invalid. */
6902 if (!del)
6904 unsigned int i;
6906 del = 1;
6907 /* Delete if we cannot find this mem in the expression list. */
6908 for (i = 0; i < expr_hash_table.size && del; i++)
6910 for (expr = expr_hash_table.table[i];
6911 expr != NULL;
6912 expr = expr->next_same_hash)
6913 if (expr_equiv_p (expr->expr, ptr->pattern))
6915 del = 0;
6916 break;
6921 if (del)
6923 if (last != NULL)
6925 last->next = ptr->next;
6926 free_ldst_entry (ptr);
6927 ptr = last->next;
6929 else
6931 pre_ldst_mems = pre_ldst_mems->next;
6932 free_ldst_entry (ptr);
6933 ptr = pre_ldst_mems;
6936 else
6938 /* Set the expression field if we are keeping it. */
6939 last = ptr;
6940 ptr->expr = expr;
6941 ptr = ptr->next;
6945 /* Show the world what we've found. */
6946 if (gcse_file && pre_ldst_mems != NULL)
6947 print_ldst_list (gcse_file);
6950 /* This routine will take an expression which we are replacing with
6951 a reaching register, and update any stores that are needed if
6952 that expression is in the ld_motion list. Stores are updated by
6953 copying their SRC to the reaching register, and then storing
6954 the reaching register into the store location. This keeps the
6955 correct value in the reaching register for the loads. */
6957 static void
6958 update_ld_motion_stores (expr)
6959 struct expr * expr;
6961 struct ls_expr * mem_ptr;
6963 if ((mem_ptr = find_rtx_in_ldst (expr->expr)))
6965 /* We can try to find just the REACHED stores, but it shouldn't
6966 matter to set the reaching reg everywhere... some might be
6967 dead and should be eliminated later. */
6969 /* We replace (set mem expr) with (set reg expr) (set mem reg)
6970 where reg is the reaching reg used in the load. We checked in
6971 compute_ld_motion_mems that we can replace (set mem expr) with
6972 (set reg expr) in that insn. */
6973 rtx list = mem_ptr->stores;
6975 for ( ; list != NULL_RTX; list = XEXP (list, 1))
6977 rtx insn = XEXP (list, 0);
6978 rtx pat = PATTERN (insn);
6979 rtx src = SET_SRC (pat);
6980 rtx reg = expr->reaching_reg;
6981 rtx copy, new;
6983 /* If we've already copied it, continue. */
6984 if (expr->reaching_reg == src)
6985 continue;
6987 if (gcse_file)
6989 fprintf (gcse_file, "PRE: store updated with reaching reg ");
6990 print_rtl (gcse_file, expr->reaching_reg);
6991 fprintf (gcse_file, ":\n ");
6992 print_inline_rtx (gcse_file, insn, 8);
6993 fprintf (gcse_file, "\n");
6996 copy = gen_move_insn (reg, copy_rtx (SET_SRC (pat)));
6997 new = emit_insn_before (copy, insn);
6998 record_one_set (REGNO (reg), new);
6999 SET_SRC (pat) = reg;
7001 /* Un-recognize this pattern since it's probably different now. */
7002 INSN_CODE (insn) = -1;
7003 gcse_create_count++;
7008 /* Store motion code. */
7010 #define ANTIC_STORE_LIST(x) ((x)->loads)
7011 #define AVAIL_STORE_LIST(x) ((x)->stores)
7012 #define LAST_AVAIL_CHECK_FAILURE(x) ((x)->reaching_reg)
7014 /* This is used to communicate the target bitvector we want to use in the
7015 reg_set_info routine when called via the note_stores mechanism. */
7016 static int * regvec;
7018 /* And current insn, for the same routine. */
7019 static rtx compute_store_table_current_insn;
7021 /* Used in computing the reverse edge graph bit vectors. */
7022 static sbitmap * st_antloc;
7024 /* Global holding the number of store expressions we are dealing with. */
7025 static int num_stores;
7027 /* Called via note_stores to record, in REGVEC, the UID of the insn that sets each register. */
7029 static void
7030 reg_set_info (dest, setter, data)
7031 rtx dest, setter ATTRIBUTE_UNUSED;
7032 void * data ATTRIBUTE_UNUSED;
7034 if (GET_CODE (dest) == SUBREG)
7035 dest = SUBREG_REG (dest);
7037 if (GET_CODE (dest) == REG)
7038 regvec[REGNO (dest)] = INSN_UID (compute_store_table_current_insn);
7041 /* Return false if some of the registers in list X are killed
7042 by a register set recorded in REGS_SET; otherwise return true. */
7044 static bool
7045 store_ops_ok (x, regs_set)
7046 rtx x;
7047 int *regs_set;
7049 rtx reg;
7051 for (; x; x = XEXP (x, 1))
7053 reg = XEXP (x, 0);
7054 if (regs_set[REGNO(reg)])
7055 return false;
7058 return true;
7061 /* Returns a list of registers mentioned in X. */
7062 static rtx
7063 extract_mentioned_regs (x)
7064 rtx x;
7066 return extract_mentioned_regs_helper (x, NULL_RTX);
7069 /* Helper for extract_mentioned_regs; ACCUM is used to accumulate used
7070 registers. */
7071 static rtx
7072 extract_mentioned_regs_helper (x, accum)
7073 rtx x;
7074 rtx accum;
7076 int i;
7077 enum rtx_code code;
7078 const char * fmt;
7080 /* Repeat is used to turn tail-recursion into iteration. */
7081 repeat:
7083 if (x == 0)
7084 return accum;
7086 code = GET_CODE (x);
7087 switch (code)
7089 case REG:
7090 return alloc_EXPR_LIST (0, x, accum);
7092 case MEM:
7093 x = XEXP (x, 0);
7094 goto repeat;
7096 case PRE_DEC:
7097 case PRE_INC:
7098 case POST_DEC:
7099 case POST_INC:
7100 /* We do not run this function with arguments having side effects. */
7101 abort ();
7103 case PC:
7104 case CC0: /*FIXME*/
7105 case CONST:
7106 case CONST_INT:
7107 case CONST_DOUBLE:
7108 case CONST_VECTOR:
7109 case SYMBOL_REF:
7110 case LABEL_REF:
7111 case ADDR_VEC:
7112 case ADDR_DIFF_VEC:
7113 return accum;
7115 default:
7116 break;
7119 i = GET_RTX_LENGTH (code) - 1;
7120 fmt = GET_RTX_FORMAT (code);
7122 for (; i >= 0; i--)
7124 if (fmt[i] == 'e')
7126 rtx tem = XEXP (x, i);
7128 /* If we are about to do the last recursive call
7129 needed at this level, change it into iteration. */
7130 if (i == 0)
7132 x = tem;
7133 goto repeat;
7136 accum = extract_mentioned_regs_helper (tem, accum);
7138 else if (fmt[i] == 'E')
7140 int j;
7142 for (j = 0; j < XVECLEN (x, i); j++)
7143 accum = extract_mentioned_regs_helper (XVECEXP (x, i, j), accum);
7147 return accum;
7150 /* Determine whether INSN is a MEM store pattern that we will consider moving.
7151 REGS_SET_BEFORE is bitmap of registers set before (and including) the
7152 current insn, REGS_SET_AFTER is bitmap of registers set after (and
7153 including) the insn in this basic block. We must be passing through BB from
7154 head to end, as we are using this fact to speed things up.
7156 The results are stored this way:
7158 -- the first anticipatable expression is added into ANTIC_STORE_LIST
7159 -- if the processed expression is not anticipatable, NULL_RTX is added
7160 there instead, so that we can use it as an indicator that no further
7161 expression of this type may be anticipatable
7162 -- if the expression is available, it is added as head of AVAIL_STORE_LIST;
7163 consequently, all of them but this head are dead and may be deleted.
7164 -- if the expression is not available, the insn that causes it to fail to be
7165 available is stored in reaching_reg.
7167 Things are complicated a bit by the fact that there already may be stores
7168 to the same MEM from other blocks; also the caller must take care of the
7169 necessary cleanup of the temporary markers after the end of the basic block. */
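/* E.g. (a sketch), within a single basic block:

     *p = x;      anticipatable: no aliasing read precedes it here
     y = *q;      if *q may alias *p, this kills the availability of
                  the first store and the anticipatability of the next
     *p = z;      available at the block end

   so ANTIC_STORE_LIST records the first store and AVAIL_STORE_LIST
   the last.  */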
7172 static void
7173 find_moveable_store (insn, regs_set_before, regs_set_after)
7174 rtx insn;
7175 int *regs_set_before;
7176 int *regs_set_after;
7178 struct ls_expr * ptr;
7179 rtx dest, set, tmp;
7180 int check_anticipatable, check_available;
7181 basic_block bb = BLOCK_FOR_INSN (insn);
7183 set = single_set (insn);
7184 if (!set)
7185 return;
7187 dest = SET_DEST (set);
7189 if (GET_CODE (dest) != MEM || MEM_VOLATILE_P (dest)
7190 || GET_MODE (dest) == BLKmode)
7191 return;
7193 if (side_effects_p (dest))
7194 return;
7196 /* If we are handling exceptions, we must be careful with memory references
7197 that may trap. If we are not, the behavior is undefined, so we may just
7198 continue. */
7199 if (flag_non_call_exceptions && may_trap_p (dest))
7200 return;
7202 ptr = ldst_entry (dest);
7203 if (!ptr->pattern_regs)
7204 ptr->pattern_regs = extract_mentioned_regs (dest);
7206 /* Do not check for anticipatability if we either found one anticipatable
7207 store already, or tested for one and found out that it was killed. */
7208 check_anticipatable = 0;
7209 if (!ANTIC_STORE_LIST (ptr))
7210 check_anticipatable = 1;
7211 else
7213 tmp = XEXP (ANTIC_STORE_LIST (ptr), 0);
7214 if (tmp != NULL_RTX
7215 && BLOCK_FOR_INSN (tmp) != bb)
7216 check_anticipatable = 1;
7218 if (check_anticipatable)
7220 if (store_killed_before (dest, ptr->pattern_regs, insn, bb, regs_set_before))
7221 tmp = NULL_RTX;
7222 else
7223 tmp = insn;
7224 ANTIC_STORE_LIST (ptr) = alloc_INSN_LIST (tmp,
7225 ANTIC_STORE_LIST (ptr));
7228 /* It is not necessary to check whether the store is available if we did
7229 it successfully before; if we failed before, do not bother to check
7230 until we reach the insn that caused us to fail. */
7231 check_available = 0;
7232 if (!AVAIL_STORE_LIST (ptr))
7233 check_available = 1;
7234 else
7236 tmp = XEXP (AVAIL_STORE_LIST (ptr), 0);
7237 if (BLOCK_FOR_INSN (tmp) != bb)
7238 check_available = 1;
7240 if (check_available)
7242 /* Check whether we have already reached the insn at which the check
7243 failed last time. */
7244 if (LAST_AVAIL_CHECK_FAILURE (ptr))
7246 for (tmp = bb->end;
7247 tmp != insn && tmp != LAST_AVAIL_CHECK_FAILURE (ptr);
7248 tmp = PREV_INSN (tmp))
7249 continue;
7250 if (tmp == insn)
7251 check_available = 0;
7253 else
7254 check_available = store_killed_after (dest, ptr->pattern_regs, insn,
7255 bb, regs_set_after,
7256 &LAST_AVAIL_CHECK_FAILURE (ptr));
7258 if (!check_available)
7259 AVAIL_STORE_LIST (ptr) = alloc_INSN_LIST (insn, AVAIL_STORE_LIST (ptr));
7262 /* Find available and anticipatable stores. */
7264 static int
7265 compute_store_table ()
7267 int ret;
7268 basic_block bb;
7269 unsigned regno;
7270 rtx insn, pat, tmp;
7271 int *last_set_in, *already_set;
7272 struct ls_expr * ptr, **prev_next_ptr_ptr;
7274 max_gcse_regno = max_reg_num ();
7276 reg_set_in_block = (sbitmap *) sbitmap_vector_alloc (last_basic_block,
7277 max_gcse_regno);
7278 sbitmap_vector_zero (reg_set_in_block, last_basic_block);
7279 pre_ldst_mems = 0;
7280 last_set_in = xmalloc (sizeof (int) * max_gcse_regno);
7281 already_set = xmalloc (sizeof (int) * max_gcse_regno);
7283 /* Find all the stores we care about. */
7284 FOR_EACH_BB (bb)
7286 /* First compute the registers set in this block. */
7287 memset (last_set_in, 0, sizeof (int) * max_gcse_regno);
7288 regvec = last_set_in;
7290 for (insn = bb->head;
7291 insn != NEXT_INSN (bb->end);
7292 insn = NEXT_INSN (insn))
7294 if (! INSN_P (insn))
7295 continue;
7297 if (GET_CODE (insn) == CALL_INSN)
7299 bool clobbers_all = false;
7300 #ifdef NON_SAVING_SETJMP
7301 if (NON_SAVING_SETJMP
7302 && find_reg_note (insn, REG_SETJMP, NULL_RTX))
7303 clobbers_all = true;
7304 #endif
7306 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7307 if (clobbers_all
7308 || TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
7309 last_set_in[regno] = INSN_UID (insn);
7312 pat = PATTERN (insn);
7313 compute_store_table_current_insn = insn;
7314 note_stores (pat, reg_set_info, NULL);
7317 /* Record the set registers. */
7318 for (regno = 0; regno < max_gcse_regno; regno++)
7319 if (last_set_in[regno])
7320 SET_BIT (reg_set_in_block[bb->index], regno);
7322 /* Now find the stores. */
7323 memset (already_set, 0, sizeof (int) * max_gcse_regno);
7324 regvec = already_set;
7325 for (insn = bb->head;
7326 insn != NEXT_INSN (bb->end);
7327 insn = NEXT_INSN (insn))
7329 if (! INSN_P (insn))
7330 continue;
7332 if (GET_CODE (insn) == CALL_INSN)
7334 bool clobbers_all = false;
7335 #ifdef NON_SAVING_SETJMP
7336 if (NON_SAVING_SETJMP
7337 && find_reg_note (insn, REG_SETJMP, NULL_RTX))
7338 clobbers_all = true;
7339 #endif
7341 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7342 if (clobbers_all
7343 || TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
7344 already_set[regno] = 1;
7347 pat = PATTERN (insn);
7348 note_stores (pat, reg_set_info, NULL);
7350 /* Now that we've marked regs, look for stores. */
7351 find_moveable_store (insn, already_set, last_set_in);
7353 /* Unmark regs that are no longer set. */
7354 for (regno = 0; regno < max_gcse_regno; regno++)
7355 if (last_set_in[regno] == INSN_UID (insn))
7356 last_set_in[regno] = 0;
7359 /* Clear temporary marks. */
7360 for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
7362 LAST_AVAIL_CHECK_FAILURE(ptr) = NULL_RTX;
7363 if (ANTIC_STORE_LIST (ptr)
7364 && (tmp = XEXP (ANTIC_STORE_LIST (ptr), 0)) == NULL_RTX)
7365 ANTIC_STORE_LIST (ptr) = XEXP (ANTIC_STORE_LIST (ptr), 1);
7369 /* Remove the stores that are not available anywhere, as there will
7370 be no opportunity to optimize them. */
7371 for (ptr = pre_ldst_mems, prev_next_ptr_ptr = &pre_ldst_mems;
7372 ptr != NULL;
7373 ptr = *prev_next_ptr_ptr)
7375 if (!AVAIL_STORE_LIST (ptr))
7377 *prev_next_ptr_ptr = ptr->next;
7378 free_ldst_entry (ptr);
7380 else
7381 prev_next_ptr_ptr = &ptr->next;
7384 ret = enumerate_ldsts ();
7386 if (gcse_file)
7388 fprintf (gcse_file, "ST_avail and ST_antic (shown under loads..)\n");
7389 print_ldst_list (gcse_file);
7392 free (last_set_in);
7393 free (already_set);
7394 return ret;
7397 /* Check to see if the load X is aliased with STORE_PATTERN. */
7399 static bool
7400 load_kills_store (x, store_pattern)
7401 rtx x, store_pattern;
7403 if (true_dependence (x, GET_MODE (x), store_pattern, rtx_addr_varies_p))
7404 return true;
7405 return false;
7408 /* Go through the entire insn X, looking for any loads which might alias
7409 STORE_PATTERN. Return true if found. */
7411 static bool
7412 find_loads (x, store_pattern)
7413 rtx x, store_pattern;
7415 const char * fmt;
7416 int i, j;
7417 int ret = false;
7419 if (!x)
7420 return false;
7422 if (GET_CODE (x) == SET)
7423 x = SET_SRC (x);
7425 if (GET_CODE (x) == MEM)
7427 if (load_kills_store (x, store_pattern))
7428 return true;
7431 /* Recursively process the insn. */
7432 fmt = GET_RTX_FORMAT (GET_CODE (x));
7434 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0 && !ret; i--)
7436 if (fmt[i] == 'e')
7437 ret |= find_loads (XEXP (x, i), store_pattern);
7438 else if (fmt[i] == 'E')
7439 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
7440 ret |= find_loads (XVECEXP (x, i, j), store_pattern);
7442 return ret;
7445 /* Check if INSN kills the store pattern X (is aliased with it).
7446 Return true if it does. */
7448 static bool
7449 store_killed_in_insn (x, x_regs, insn)
7450 rtx x, x_regs, insn;
7452 rtx reg, base;
7454 if (GET_RTX_CLASS (GET_CODE (insn)) != 'i')
7455 return false;
7457 if (GET_CODE (insn) == CALL_INSN)
7459 /* A normal or pure call might read from pattern,
7460 but a const call will not. */
7461 if (! CONST_OR_PURE_CALL_P (insn) || pure_call_p (insn))
7462 return true;
7464 /* But even a const call reads its parameters. Check whether the
7465 base of some of the registers used in the mem is the stack pointer. */
7466 for (reg = x_regs; reg; reg = XEXP (reg, 1))
7468 base = find_base_term (reg);
7469 if (!base
7470 || (GET_CODE (base) == ADDRESS
7471 && GET_MODE (base) == Pmode
7472 && XEXP (base, 0) == stack_pointer_rtx))
7473 return true;
7476 return false;
7479 if (GET_CODE (PATTERN (insn)) == SET)
7481 rtx pat = PATTERN (insn);
7482 /* Check for memory stores to aliased objects. */
7483 if (GET_CODE (SET_DEST (pat)) == MEM && !expr_equiv_p (SET_DEST (pat), x))
7484 /* Pretend it's a load and check for aliasing. */
7485 if (find_loads (SET_DEST (pat), x))
7486 return true;
7487 return find_loads (SET_SRC (pat), x);
7489 else
7490 return find_loads (PATTERN (insn), x);
7493 /* Returns true if the expression X is loaded or clobbered on or after INSN
7494 within basic block BB. REGS_SET_AFTER is bitmap of registers set in
7495 or after the insn. X_REGS is list of registers mentioned in X. If the store
7496 is killed, return in FAIL_INSN the last insn in which the kill occurs. */
7498 static bool
7499 store_killed_after (x, x_regs, insn, bb, regs_set_after, fail_insn)
7500 rtx x, x_regs, insn;
7501 basic_block bb;
7502 int *regs_set_after;
7503 rtx *fail_insn;
7505 rtx last = bb->end, act;
7507 if (!store_ops_ok (x_regs, regs_set_after))
7509 /* We do not know where it will happen. */
7510 if (fail_insn)
7511 *fail_insn = NULL_RTX;
7512 return true;
7515 /* Scan from the end, so that fail_insn is determined correctly. */
7516 for (act = last; act != PREV_INSN (insn); act = PREV_INSN (act))
7517 if (store_killed_in_insn (x, x_regs, act))
7519 if (fail_insn)
7520 *fail_insn = act;
7521 return true;
7524 return false;
7527 /* Returns true if the expression X is loaded or clobbered on or before INSN
7528 within basic block BB. X_REGS is list of registers mentioned in X.
7529 REGS_SET_BEFORE is bitmap of registers set before or in this insn. */
7530 static bool
7531 store_killed_before (x, x_regs, insn, bb, regs_set_before)
7532 rtx x, x_regs, insn;
7533 basic_block bb;
7534 int *regs_set_before;
7536 rtx first = bb->head;
7538 if (!store_ops_ok (x_regs, regs_set_before))
7539 return true;
7541 for ( ; insn != PREV_INSN (first); insn = PREV_INSN (insn))
7542 if (store_killed_in_insn (x, x_regs, insn))
7543 return true;
7545 return false;
7548 /* Fill in available, anticipatable, transparent and kill vectors in
7549 STORE_DATA, based on lists of available and anticipatable stores. */
7550 static void
7551 build_store_vectors ()
7553 basic_block bb;
7554 int *regs_set_in_block;
7555 rtx insn, st;
7556 struct ls_expr * ptr;
7557 unsigned regno;
7559 /* Build the gen_vector. This is any store in the table which is not killed
7560 by aliasing later in its block. */
7561 ae_gen = (sbitmap *) sbitmap_vector_alloc (last_basic_block, num_stores);
7562 sbitmap_vector_zero (ae_gen, last_basic_block);
7564 st_antloc = (sbitmap *) sbitmap_vector_alloc (last_basic_block, num_stores);
7565 sbitmap_vector_zero (st_antloc, last_basic_block);
7567 for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
7569 for (st = AVAIL_STORE_LIST (ptr); st != NULL; st = XEXP (st, 1))
7571 insn = XEXP (st, 0);
7572 bb = BLOCK_FOR_INSN (insn);
7574 /* If we've already seen an available expression in this block,
7575 we can delete this one (it occurs earlier in the block). We'll
7576 copy the SRC expression to an unused register in case there
7577 are any side effects. */
7578 if (TEST_BIT (ae_gen[bb->index], ptr->index))
7580 rtx r = gen_reg_rtx (GET_MODE (ptr->pattern));
7581 if (gcse_file)
7582 fprintf (gcse_file, "Removing redundant store:\n");
7583 replace_store_insn (r, XEXP (st, 0), bb);
7584 continue;
7586 SET_BIT (ae_gen[bb->index], ptr->index);
7589 for (st = ANTIC_STORE_LIST (ptr); st != NULL; st = XEXP (st, 1))
7591 insn = XEXP (st, 0);
7592 bb = BLOCK_FOR_INSN (insn);
7593 SET_BIT (st_antloc[bb->index], ptr->index);
7597 ae_kill = (sbitmap *) sbitmap_vector_alloc (last_basic_block, num_stores);
7598 sbitmap_vector_zero (ae_kill, last_basic_block);
7600 transp = (sbitmap *) sbitmap_vector_alloc (last_basic_block, num_stores);
7601 sbitmap_vector_zero (transp, last_basic_block);
7602 regs_set_in_block = xmalloc (sizeof (int) * max_gcse_regno);
7604 FOR_EACH_BB (bb)
7606 for (regno = 0; regno < max_gcse_regno; regno++)
7607 regs_set_in_block[regno] = TEST_BIT (reg_set_in_block[bb->index], regno);
7609 for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
7611 if (store_killed_after (ptr->pattern, ptr->pattern_regs, bb->head,
7612 bb, regs_set_in_block, NULL))
7614 /* It should not be necessary to consider the expression
7615 killed if it is both anticipatable and available. */
7616 if (!TEST_BIT (st_antloc[bb->index], ptr->index)
7617 || !TEST_BIT (ae_gen[bb->index], ptr->index))
7618 SET_BIT (ae_kill[bb->index], ptr->index);
7620 else
7621 SET_BIT (transp[bb->index], ptr->index);
7625 free (regs_set_in_block);
7627 if (gcse_file)
7629 dump_sbitmap_vector (gcse_file, "st_antloc", "", st_antloc, last_basic_block);
7630 dump_sbitmap_vector (gcse_file, "st_kill", "", ae_kill, last_basic_block);
7631 dump_sbitmap_vector (gcse_file, "Transpt", "", transp, last_basic_block);
7632 dump_sbitmap_vector (gcse_file, "st_avloc", "", ae_gen, last_basic_block);
7636 /* Insert an instruction at the beginning of a basic block, and update
7637 the BLOCK_HEAD if needed. */
7639 static void
7640 insert_insn_start_bb (insn, bb)
7641 rtx insn;
7642 basic_block bb;
7644 /* Insert at start of successor block. */
7645 rtx prev = PREV_INSN (bb->head);
7646 rtx before = bb->head;
7647 while (before != 0)
7649 if (GET_CODE (before) != CODE_LABEL
7650 && (GET_CODE (before) != NOTE
7651 || NOTE_LINE_NUMBER (before) != NOTE_INSN_BASIC_BLOCK))
7652 break;
7653 prev = before;
7654 if (prev == bb->end)
7655 break;
7656 before = NEXT_INSN (before);
7659 insn = emit_insn_after (insn, prev);
7661 if (gcse_file)
7663 fprintf (gcse_file, "STORE_MOTION insert store at start of BB %d:\n",
7664 bb->index);
7665 print_inline_rtx (gcse_file, insn, 6);
7666 fprintf (gcse_file, "\n");
7670 /* This routine will insert a store on an edge. EXPR is the ldst entry for
7671 the memory reference, and E is the edge to insert it on. Returns nonzero
7672 if an edge insertion was performed. */
7674 static int
7675 insert_store (expr, e)
7676 struct ls_expr * expr;
7677 edge e;
7679 rtx reg, insn;
7680 basic_block bb;
7681 edge tmp;
7683 /* We did all the deletes before this insert, so if we didn't delete a
7684 store, then we haven't set the reaching reg yet either. */
7685 if (expr->reaching_reg == NULL_RTX)
7686 return 0;
7688 reg = expr->reaching_reg;
7689 insn = gen_move_insn (copy_rtx (expr->pattern), reg);
7691 /* If we are inserting this expression on ALL predecessor edges of a BB,
7692 insert it at the start of the BB, and reset the insert bits on the other
7693 edges so we don't try to insert it twice. */
7694 bb = e->dest;
7695 for (tmp = e->dest->pred; tmp ; tmp = tmp->pred_next)
7697 int index = EDGE_INDEX (edge_list, tmp->src, tmp->dest);
7698 if (index == EDGE_INDEX_NO_EDGE)
7699 abort ();
7700 if (! TEST_BIT (pre_insert_map[index], expr->index))
7701 break;
7704 /* If tmp is NULL, we found an insertion on every edge, blank the
7705 insertion vector for these edges, and insert at the start of the BB. */
7706 if (!tmp && bb != EXIT_BLOCK_PTR)
7708 for (tmp = e->dest->pred; tmp ; tmp = tmp->pred_next)
7710 int index = EDGE_INDEX (edge_list, tmp->src, tmp->dest);
7711 RESET_BIT (pre_insert_map[index], expr->index);
7713 insert_insn_start_bb (insn, bb);
7714 return 0;
7717 /* We can't insert on this edge, so we'll insert at the head of the
7718 successor block. See Morgan, sec 10.5. */
7719 if ((e->flags & EDGE_ABNORMAL) == EDGE_ABNORMAL)
7721 insert_insn_start_bb (insn, bb);
7722 return 0;
7725 insert_insn_on_edge (insn, e);
7727 if (gcse_file)
7729 fprintf (gcse_file, "STORE_MOTION insert insn on edge (%d, %d):\n",
7730 e->src->index, e->dest->index);
7731 print_inline_rtx (gcse_file, insn, 6);
7732 fprintf (gcse_file, "\n");
7735 return 1;
7738 /* This routine will replace a store with a SET to a specified register. */
7740 static void
7741 replace_store_insn (reg, del, bb)
7742 rtx reg, del;
7743 basic_block bb;
7745 rtx insn;
7747 insn = gen_move_insn (reg, SET_SRC (single_set (del)));
7748 insn = emit_insn_after (insn, del);
7750 if (gcse_file)
7752 fprintf (gcse_file,
7753 "STORE_MOTION delete insn in BB %d:\n ", bb->index);
7754 print_inline_rtx (gcse_file, del, 6);
7755 fprintf (gcse_file, "\nSTORE_MOTION replaced with insn:\n ");
7756 print_inline_rtx (gcse_file, insn, 6);
7757 fprintf (gcse_file, "\n");
7760 delete_insn (del);
7764 /* Delete a store, but copy the value that would have been stored into
7765 the reaching_reg for later storing. */
7767 static void
7768 delete_store (expr, bb)
7769 struct ls_expr * expr;
7770 basic_block bb;
7772 rtx reg, i, del;
7774 if (expr->reaching_reg == NULL_RTX)
7775 expr->reaching_reg = gen_reg_rtx (GET_MODE (expr->pattern));
7777 reg = expr->reaching_reg;
7779 for (i = AVAIL_STORE_LIST (expr); i; i = XEXP (i, 1))
7781 del = XEXP (i, 0);
7782 if (BLOCK_FOR_INSN (del) == bb)
7784 /* We know there is only one since we deleted redundant
7785 ones during the available computation. */
7786 replace_store_insn (reg, del, bb);
7787 break;
/* Free memory used by store motion. */

static void
free_store_memory ()
{
  free_ldst_mems ();

  if (ae_gen)
    sbitmap_vector_free (ae_gen);
  if (ae_kill)
    sbitmap_vector_free (ae_kill);
  if (transp)
    sbitmap_vector_free (transp);
  if (st_antloc)
    sbitmap_vector_free (st_antloc);
  if (pre_insert_map)
    sbitmap_vector_free (pre_insert_map);
  if (pre_delete_map)
    sbitmap_vector_free (pre_delete_map);
  if (reg_set_in_block)
    sbitmap_vector_free (reg_set_in_block);

  ae_gen = ae_kill = transp = st_antloc = NULL;
  pre_insert_map = pre_delete_map = reg_set_in_block = NULL;
}
/* Perform store motion. Much like gcse, except we move expressions the
   other way by looking at the flowgraph in reverse. */
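/* In outline: build the store table, compute the local anticipatable,
   available, kill and transparency vectors, run the reverse LCM solver to
   obtain per-block delete and per-edge insert bitmaps, rewrite the insn
   stream accordingly, and finally commit any pending edge insertions. */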
static void
store_motion ()
{
  basic_block bb;
  int x;
  struct ls_expr * ptr;
  int update_flow = 0;

  if (gcse_file)
    {
      fprintf (gcse_file, "before store motion\n");
      print_rtl (gcse_file, get_insns ());
    }

  init_alias_analysis ();

  /* Find all the available and anticipatable stores. */
  num_stores = compute_store_table ();
  if (num_stores == 0)
    {
      sbitmap_vector_free (reg_set_in_block);
      end_alias_analysis ();
      return;
    }

  /* Now compute kill & transp vectors. */
  build_store_vectors ();
  add_noreturn_fake_exit_edges ();

  edge_list = pre_edge_rev_lcm (gcse_file, num_stores, transp, ae_gen,
                                st_antloc, ae_kill, &pre_insert_map,
                                &pre_delete_map);
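  /* The solver's output is indexed by expression: pre_delete_map[bb->index]
     identifies the stores to remove from block BB, and pre_insert_map[x]
     the stores to add on edge number X of EDGE_LIST. */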
  /* Now we want to insert the new stores which are going to be needed. */
  for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
    {
      FOR_EACH_BB (bb)
        if (TEST_BIT (pre_delete_map[bb->index], ptr->index))
          delete_store (ptr, bb);

      for (x = 0; x < NUM_EDGES (edge_list); x++)
        if (TEST_BIT (pre_insert_map[x], ptr->index))
          update_flow |= insert_store (ptr, INDEX_EDGE (edge_list, x));
    }

  if (update_flow)
    commit_edge_insertions ();

  free_store_memory ();
  free_edge_list (edge_list);
  remove_fake_edges ();
  end_alias_analysis ();
}
/* Entry point for the jump bypassing optimization pass. */

int
bypass_jumps (file)
     FILE *file;
{
  int changed;

  /* We do not construct an accurate cfg in functions which call
     setjmp, so just punt to be safe. */
  if (current_function_calls_setjmp)
    return 0;

  /* For calling dump_foo fns from gdb. */
  debug_stderr = stderr;
  gcse_file = file;

  /* Record the maximum register number in use; it determines the size
     of the bitmaps checked below. */
  max_gcse_regno = max_reg_num ();

  if (file)
    dump_flow_info (file);

  /* Return if there's nothing to do. */
  if (n_basic_blocks <= 1)
    return 0;
  /* Trying to perform global optimizations on flow graphs which have
     a high connectivity will take a long time and is unlikely to be
     particularly useful.

     In normal circumstances a cfg should have about twice as many edges
     as blocks. But we do not want to punish small functions which have
     a couple of switch statements. So we require both a relatively large
     number of basic blocks and a high ratio of edges to blocks. */
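  /* For example, a function with 2000 basic blocks and 40000 edges has an
     edge/block ratio of 20 and is rejected by the test below, whereas a
     function with 500 blocks is always accepted, however dense its flow
     graph. */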
  if (n_basic_blocks > 1000 && n_edges / n_basic_blocks >= 20)
    {
      if (warn_disabled_optimization)
        warning ("BYPASS disabled: %d > 1000 basic blocks and %d >= 20 edges/basic block",
                 n_basic_blocks, n_edges / n_basic_blocks);
      return 0;
    }
  /* If allocating memory for the cprop bitmap would take up too much
     storage it's better just to disable the optimization. */
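  /* The product below approximates the bitmap footprint: one bitset over
     all pseudo registers per basic block. As a rough sketch, assuming
     64-bit sbitmap elements, 10000 pseudos need 157 elements (about 1.2kB)
     per block, so 1000 blocks would cost roughly 1.2MB. */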
  if ((n_basic_blocks
       * SBITMAP_SET_SIZE (max_gcse_regno)
       * sizeof (SBITMAP_ELT_TYPE)) > MAX_GCSE_MEMORY)
    {
      if (warn_disabled_optimization)
        warning ("GCSE disabled: %d basic blocks and %d registers",
                 n_basic_blocks, max_gcse_regno);

      return 0;
    }
  gcc_obstack_init (&gcse_obstack);
  bytes_used = 0;

  /* We need alias analysis. */
  init_alias_analysis ();

  /* Record where pseudo-registers are set. This data is kept accurate
     during each pass. ??? We could also record hard-reg information here
     [since it's unchanging], however it is currently done during hash table
     computation.

     It may be tempting to compute MEM set information here too, but MEM sets
     will be subject to code motion one day and thus we need to compute
     information about memory sets when we build the hash tables. */

  alloc_reg_set_mem (max_gcse_regno);
  compute_sets (get_insns ());

  max_gcse_regno = max_reg_num ();
  alloc_gcse_mem (get_insns ());
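  /* A single constant/copy propagation pass does the actual bypassing.
     The three arguments appear to be the pass number (used for dump
     labelling) and flags enabling propagation into jumps and jump
     bypassing itself -- an assumption from context, since the parameter
     names are not visible here; all are enabled. */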
  changed = one_cprop_pass (1, 1, 1);
  free_gcse_mem ();

  if (file)
    {
      fprintf (file, "BYPASS of %s: %d basic blocks, ",
               current_function_name, n_basic_blocks);
      fprintf (file, "%d bytes\n\n", bytes_used);
    }

  obstack_free (&gcse_obstack, NULL);
  free_reg_set_mem ();

  /* We are finished with alias analysis. */
  end_alias_analysis ();
  allocate_reg_info (max_reg_num (), FALSE, FALSE);

  return changed;
}

#include "gt-gcse.h"