[official-gcc.git] / gcc / gcse.c
1 /* Global common subexpression elimination/Partial redundancy elimination
2 and global constant/copy propagation for GNU compiler.
3 Copyright (C) 1997, 1998, 1999, 2000, 2001, 2002, 2003
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 2, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the Free
20 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
21 02111-1307, USA. */
23 /* TODO
24 - reordering of memory allocation and freeing to be more space efficient
25 - do rough calc of how many regs are needed in each block, and a rough
26 calc of how many regs are available in each class and use that to
27 throttle back the code in cases where RTX_COST is minimal.
28 - a store to the same address as a load does not kill the load if the
29 source of the store is also the destination of the load. Handling this
30 allows more load motion, particularly out of loops.
31 - ability to realloc sbitmap vectors would allow one initial computation
32 of reg_set_in_block with only subsequent additions, rather than
33 recomputing it for each pass
37 /* References searched while implementing this.
39 Compilers Principles, Techniques and Tools
40 Aho, Sethi, Ullman
41 Addison-Wesley, 1988
43 Global Optimization by Suppression of Partial Redundancies
44 E. Morel, C. Renvoise
45    Communications of the ACM, Vol. 22, Num. 2, Feb. 1979
47 A Portable Machine-Independent Global Optimizer - Design and Measurements
48 Frederick Chow
49 Stanford Ph.D. thesis, Dec. 1983
51 A Fast Algorithm for Code Movement Optimization
52 D.M. Dhamdhere
53 SIGPLAN Notices, Vol. 23, Num. 10, Oct. 1988
55 A Solution to a Problem with Morel and Renvoise's
56 Global Optimization by Suppression of Partial Redundancies
57 K-H Drechsler, M.P. Stadel
58 ACM TOPLAS, Vol. 10, Num. 4, Oct. 1988
60 Practical Adaptation of the Global Optimization
61 Algorithm of Morel and Renvoise
62 D.M. Dhamdhere
63 ACM TOPLAS, Vol. 13, Num. 2. Apr. 1991
65 Efficiently Computing Static Single Assignment Form and the Control
66 Dependence Graph
67 R. Cytron, J. Ferrante, B.K. Rosen, M.N. Wegman, and F.K. Zadeck
68 ACM TOPLAS, Vol. 13, Num. 4, Oct. 1991
70 Lazy Code Motion
71 J. Knoop, O. Ruthing, B. Steffen
72 ACM SIGPLAN Notices Vol. 27, Num. 7, Jul. 1992, '92 Conference on PLDI
74 What's In a Region? Or Computing Control Dependence Regions in Near-Linear
75 Time for Reducible Flow Control
76 Thomas Ball
77 ACM Letters on Programming Languages and Systems,
78 Vol. 2, Num. 1-4, Mar-Dec 1993
80 An Efficient Representation for Sparse Sets
81 Preston Briggs, Linda Torczon
82 ACM Letters on Programming Languages and Systems,
83 Vol. 2, Num. 1-4, Mar-Dec 1993
85 A Variation of Knoop, Ruthing, and Steffen's Lazy Code Motion
86 K-H Drechsler, M.P. Stadel
87 ACM SIGPLAN Notices, Vol. 28, Num. 5, May 1993
89 Partial Dead Code Elimination
90 J. Knoop, O. Ruthing, B. Steffen
91 ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994
93 Effective Partial Redundancy Elimination
94 P. Briggs, K.D. Cooper
95 ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994
97 The Program Structure Tree: Computing Control Regions in Linear Time
98 R. Johnson, D. Pearson, K. Pingali
99 ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994
101 Optimal Code Motion: Theory and Practice
102 J. Knoop, O. Ruthing, B. Steffen
103 ACM TOPLAS, Vol. 16, Num. 4, Jul. 1994
105 The power of assignment motion
106 J. Knoop, O. Ruthing, B. Steffen
107 ACM SIGPLAN Notices Vol. 30, Num. 6, Jun. 1995, '95 Conference on PLDI
109 Global code motion / global value numbering
110 C. Click
111 ACM SIGPLAN Notices Vol. 30, Num. 6, Jun. 1995, '95 Conference on PLDI
113 Value Driven Redundancy Elimination
114 L.T. Simpson
115 Rice University Ph.D. thesis, Apr. 1996
117 Value Numbering
118 L.T. Simpson
119 Massively Scalar Compiler Project, Rice University, Sep. 1996
121 High Performance Compilers for Parallel Computing
122 Michael Wolfe
123 Addison-Wesley, 1996
125 Advanced Compiler Design and Implementation
126 Steven Muchnick
127 Morgan Kaufmann, 1997
129 Building an Optimizing Compiler
130 Robert Morgan
131 Digital Press, 1998
133 People wishing to speed up the code here should read:
134 Elimination Algorithms for Data Flow Analysis
135 B.G. Ryder, M.C. Paull
136 ACM Computing Surveys, Vol. 18, Num. 3, Sep. 1986
138 How to Analyze Large Programs Efficiently and Informatively
139 D.M. Dhamdhere, B.K. Rosen, F.K. Zadeck
140 ACM SIGPLAN Notices Vol. 27, Num. 7, Jul. 1992, '92 Conference on PLDI
142 People wishing to do something different can find various possibilities
143 in the above papers and elsewhere.
146 #include "config.h"
147 #include "system.h"
148 #include "coretypes.h"
149 #include "tm.h"
150 #include "toplev.h"
152 #include "rtl.h"
153 #include "tm_p.h"
154 #include "regs.h"
155 #include "hard-reg-set.h"
156 #include "flags.h"
157 #include "real.h"
158 #include "insn-config.h"
159 #include "recog.h"
160 #include "basic-block.h"
161 #include "output.h"
162 #include "function.h"
163 #include "expr.h"
164 #include "except.h"
165 #include "ggc.h"
166 #include "params.h"
167 #include "cselib.h"
169 #include "obstack.h"
171 /* Propagate flow information through back edges and thus enable PRE's
172 moving loop invariant calculations out of loops.
174 Originally this tended to create worse overall code, but several
175 improvements during the development of PRE seem to have made following
176 back edges generally a win.
178 Note much of the loop invariant code motion done here would normally
179 be done by loop.c, which has more heuristics for when to move invariants
180 out of loops. At some point we might need to move some of those
181 heuristics into gcse.c. */
183 /* We support GCSE via Partial Redundancy Elimination. PRE optimizations
184 are a superset of those done by GCSE.
186 We perform the following steps:
188 1) Compute basic block information.
190 2) Compute table of places where registers are set.
192 3) Perform copy/constant propagation.
194 4) Perform global cse.
196 5) Perform another pass of copy/constant propagation.
198 Two passes of copy/constant propagation are done because the first one
199 enables more GCSE and the second one helps to clean up the copies that
200 GCSE creates. This is needed more for PRE than for Classic because Classic
201 GCSE will try to use an existing register containing the common
202 subexpression rather than create a new one. This is harder to do for PRE
203 because of the code motion (which Classic GCSE doesn't do).
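   As an illustrative sketch (made-up pseudo insns and register numbers,
   not taken from a real dump):

       r1 = r2                  r1 = r2                   r1 = r2
       r3 = r1 + 4    cprop     r3 = r2 + 4    GCSE/PRE   r5 = r2 + 4; r3 = r5
       r4 = r2 + 4    ----->    r4 = r2 + 4    -------->  r4 = r5

   The first copy/constant propagation pass rewrites r1 + 4 as r2 + 4,
   exposing the common subexpression; the second pass then cleans up the
   copies (such as r3 = r5) that GCSE itself introduces.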
205 Expressions we are interested in GCSE-ing are of the form
206 (set (pseudo-reg) (expression)).
207 Function want_to_gcse_p says what these are.
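   For example (hypothetical register numbers), a candidate looks like
       (set (reg:SI 105) (plus:SI (reg:SI 103) (reg:SI 104)))
   whereas a set whose source is just a REG or a CONST_INT is rejected by
   want_to_gcse_p and left to copy/constant propagation instead.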
209 PRE handles moving invariant expressions out of loops (by treating them as
210 partially redundant).
212 Eventually it would be nice to replace cse.c/gcse.c with SSA (static single
213 assignment) based GVN (global value numbering). L. T. Simpson's paper
214 (Rice University) on value numbering is a useful reference for this.
216 **********************
218 We used to support multiple passes but there are diminishing returns in
219 doing so. The first pass usually makes 90% of the changes that are doable.
220 A second pass can make a few more changes made possible by the first pass.
221 Experiments show any further passes don't make enough changes to justify
222 the expense.
224 A study of spec92 using an unlimited number of passes:
225 [1 pass] = 1208 substitutions, [2] = 577, [3] = 202, [4] = 192, [5] = 83,
226 [6] = 34, [7] = 17, [8] = 9, [9] = 4, [10] = 4, [11] = 2,
227 [12] = 2, [13] = 1, [15] = 1, [16] = 2, [41] = 1
229 It was found that doing copy propagation between passes enables further
230 substitutions.
232 PRE is quite expensive in complicated functions because the DFA can take
233 a while to converge.  Hence we only perform one pass.  The parameter
234 max-gcse-passes can be modified if one wants to experiment.
236 **********************
238 The steps for PRE are:
240 1) Build the hash table of expressions we wish to GCSE (expr_hash_table).
242 2) Perform the data flow analysis for PRE.
244 3) Delete the redundant instructions
246 4) Insert the required copies [if any] that make the partially
247 redundant instructions fully redundant.
249 5) For other reaching expressions, insert an instruction to copy the value
250 to a newly created pseudo that will reach the redundant instruction.
252 The deletion is done first so that when we do insertions we
253 know which pseudo reg to use.
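   A small illustration of steps 3-5 (hypothetical pseudo-RTL, not from a
   real dump):

       before PRE                      after PRE
       if (cond)                       if (cond)
         x = a + b                       r9 = a + b; x = r9
       else                            else
         y = c                           y = c; r9 = a + b     <- insertion
       z = a + b                       z = r9                  <- deletion

   The a + b below the join is only partially redundant; deleting it and
   inserting the computation on the path that lacked it makes it fully
   redundant, with the new pseudo r9 carrying the value to the use.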
255 Various papers have argued that PRE DFA is expensive (O(n^2)) and others
256 argue it is not. The number of iterations for the algorithm to converge
257 is typically 2-4 so I don't view it as that expensive (relatively speaking).
259 PRE GCSE depends heavily on the second CSE pass to clean up the copies
260 we create. To make an expression reach the place where it's redundant,
261 the result of the expression is copied to a new register, and the redundant
262 expression is deleted by replacing it with this new register. Classic GCSE
263 doesn't have this problem as much, since it computes the reaching defs of
264 each register in each block and thus can try to use an existing register.
266 **********************
268 A fair bit of simplicity is gained by writing small functions for simple
269 tasks, even when a function is only called in one place.  This may
270 measurably slow things down [or may not] by creating more function call
271 overhead than is necessary.  The source is laid out so that it's trivial
272 to make the affected functions inline, so that one can measure what
273 speedup, if any, can be achieved; maybe later, when things settle, things
274 can be rearranged.
276 Help stamp out big monolithic functions! */
278 /* GCSE global vars. */
280 /* -dG dump file. */
281 static FILE *gcse_file;
283 /* Note whether or not we should run jump optimization after gcse. We
284 want to do this for two cases.
286 * If we changed any jumps via cprop.
288 * If we added any labels via edge splitting. */
290 static int run_jump_opt_after_gcse;
292 /* Bitmaps are normally not included in debugging dumps.
293 However it's useful to be able to print them from GDB.
294 We could create special functions for this, but it's simpler to
295 just allow passing stderr to the dump_foo fns. Since stderr can
296 be a macro, we store a copy here. */
297 static FILE *debug_stderr;
299 /* An obstack for our working variables. */
300 static struct obstack gcse_obstack;
302 struct reg_use {rtx reg_rtx; };
304 /* Hash table of expressions. */
306 struct expr
308 /* The expression (SET_SRC for expressions, PATTERN for assignments). */
309 rtx expr;
310 /* Index in the available expression bitmaps. */
311 int bitmap_index;
312 /* Next entry with the same hash. */
313 struct expr *next_same_hash;
314 /* List of anticipatable occurrences in basic blocks in the function.
315 An "anticipatable occurrence" is one that is the first occurrence in the
316 basic block, the operands are not modified in the basic block prior
317 to the occurrence and the output is not used between the start of
318 the block and the occurrence. */
319 struct occr *antic_occr;
320   /* List of available occurrences in basic blocks in the function.
321 An "available occurrence" is one that is the last occurrence in the
322 basic block and the operands are not modified by following statements in
323 the basic block [including this insn]. */
324 struct occr *avail_occr;
325 /* Non-null if the computation is PRE redundant.
326 The value is the newly created pseudo-reg to record a copy of the
327 expression in all the places that reach the redundant copy. */
328 rtx reaching_reg;
331 /* Occurrence of an expression.
332 There is one per basic block. If a pattern appears more than once the
333 last appearance is used [or first for anticipatable expressions]. */
335 struct occr
337 /* Next occurrence of this expression. */
338 struct occr *next;
339 /* The insn that computes the expression. */
340 rtx insn;
341 /* Nonzero if this [anticipatable] occurrence has been deleted. */
342 char deleted_p;
343 /* Nonzero if this [available] occurrence has been copied to
344 reaching_reg. */
345 /* ??? This is mutually exclusive with deleted_p, so they could share
346 the same byte. */
347 char copied_p;
350 /* Expression and copy propagation hash tables.
351 Each hash table is an array of buckets.
352 ??? It is known that if it were an array of entries, structure elements
353 `next_same_hash' and `bitmap_index' wouldn't be necessary. However, it is
354 not clear whether in the final analysis a sufficient amount of memory would
355 be saved as the size of the available expression bitmaps would be larger
356 [one could build a mapping table without holes afterwards though].
357 Someday I'll perform the computation and figure it out. */
359 struct hash_table
361 /* The table itself.
362 This is an array of `expr_hash_table_size' elements. */
363 struct expr **table;
365 /* Size of the hash table, in elements. */
366 unsigned int size;
368 /* Number of hash table elements. */
369 unsigned int n_elems;
371   /* Whether the table is the expression hash table or the copy propagation (set) one. */
372 int set_p;
375 /* Expression hash table. */
376 static struct hash_table expr_hash_table;
378 /* Copy propagation hash table. */
379 static struct hash_table set_hash_table;
381 /* Mapping of uids to cuids.
382 Only real insns get cuids. */
383 static int *uid_cuid;
385 /* Highest UID in UID_CUID. */
386 static int max_uid;
388 /* Get the cuid of an insn. */
389 #ifdef ENABLE_CHECKING
390 #define INSN_CUID(INSN) (INSN_UID (INSN) > max_uid ? (abort (), 0) : uid_cuid[INSN_UID (INSN)])
391 #else
392 #define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])
393 #endif
395 /* Number of cuids. */
396 static int max_cuid;
398 /* Mapping of cuids to insns. */
399 static rtx *cuid_insn;
401 /* Get insn from cuid. */
402 #define CUID_INSN(CUID) (cuid_insn[CUID])
404 /* Maximum register number in the function prior to doing gcse, plus 1.
405    Registers created during this pass have regno >= max_gcse_regno.
406    This is named with "gcse" so as not to collide with a global of the same name. */
407 static unsigned int max_gcse_regno;
409 /* Table of registers that are modified.
411 For each register, each element is a list of places where the pseudo-reg
412 is set.
414 For simplicity, GCSE is done on sets of pseudo-regs only. PRE GCSE only
415 requires knowledge of which blocks kill which regs [and thus could use
416 a bitmap instead of the lists `reg_set_table' uses].
418    `reg_set_table' could be turned into an array of bitmaps (num-bbs x
419 num-regs) [however perhaps it may be useful to keep the data as is]. One
420 advantage of recording things this way is that `reg_set_table' is fairly
421 sparse with respect to pseudo regs but for hard regs could be fairly dense
422 [relatively speaking]. And recording sets of pseudo-regs in lists speeds
423 up functions like compute_transp since in the case of pseudo-regs we only
424 need to iterate over the number of times a pseudo-reg is set, not over the
425 number of basic blocks [clearly there is a bit of a slow down in the cases
426 where a pseudo is set more than once in a block, however it is believed
427 that the net effect is to speed things up]. This isn't done for hard-regs
428 because recording call-clobbered hard-regs in `reg_set_table' at each
429 function call can consume a fair bit of memory, and iterating over
430 hard-regs stored this way in compute_transp will be more expensive. */
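/* Illustrative example (hypothetical insn names): if pseudo-reg 42 is set
   first by insn I7 and later by insn I12, reg_set_table[42] ends up as the
   list { insn = I12 } -> { insn = I7 } -> NULL, since record_one_set below
   links each new entry in at the head.  */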
432 typedef struct reg_set
434 /* The next setting of this register. */
435 struct reg_set *next;
436 /* The insn where it was set. */
437 rtx insn;
438 } reg_set;
440 static reg_set **reg_set_table;
442 /* Size of `reg_set_table'.
443 The table starts out at max_gcse_regno + slop, and is enlarged as
444 necessary. */
445 static int reg_set_table_size;
447 /* Amount to grow `reg_set_table' by when it's full. */
448 #define REG_SET_TABLE_SLOP 100
450 /* This is a list of expressions which are MEMs and will be used by load
451 or store motion.
452    Load motion tracks MEMs which aren't killed by
453    anything except themselves (i.e., loads and stores to a single location).
454    We can then allow movement of these MEM refs with a little special
455    allowance (all stores copy the same value to the reaching reg used
456    for the loads).  This means all values used to store into memory must have
457    no side effects, so we can re-issue the setter value.
458    Store motion uses this structure as an expression table to track stores
459    which look interesting, and might be movable towards the exit block. */
461 struct ls_expr
463 struct expr * expr; /* Gcse expression reference for LM. */
464 rtx pattern; /* Pattern of this mem. */
465 rtx pattern_regs; /* List of registers mentioned by the mem. */
466 rtx loads; /* INSN list of loads seen. */
467 rtx stores; /* INSN list of stores seen. */
468 struct ls_expr * next; /* Next in the list. */
469 int invalid; /* Invalid for some reason. */
470 int index; /* If it maps to a bitmap index. */
471 int hash_index; /* Index when in a hash table. */
472 rtx reaching_reg; /* Register to use when re-writing. */
475 /* Array of implicit set patterns indexed by basic block index. */
476 static rtx *implicit_sets;
478 /* Head of the list of load/store memory refs. */
479 static struct ls_expr * pre_ldst_mems = NULL;
481 /* Bitmap containing one bit for each register in the program.
482 Used when performing GCSE to track which registers have been set since
483 the start of the basic block. */
484 static regset reg_set_bitmap;
486 /* For each block, a bitmap of registers set in the block.
487 This is used by expr_killed_p and compute_transp.
488 It is computed during hash table computation and not by compute_sets
489 as it includes registers added since the last pass (or between cprop and
490 gcse) and it's currently not easy to realloc sbitmap vectors. */
491 static sbitmap *reg_set_in_block;
493 /* Array, indexed by basic block number for a list of insns which modify
494 memory within that block. */
495 static rtx * modify_mem_list;
496 bitmap modify_mem_list_set;
498 /* This array parallels modify_mem_list, but is kept canonicalized. */
499 static rtx * canon_modify_mem_list;
500 bitmap canon_modify_mem_list_set;
501 /* Various variables for statistics gathering. */
503 /* Memory used in a pass.
504 This isn't intended to be absolutely precise. Its intent is only
505 to keep an eye on memory usage. */
506 static int bytes_used;
508 /* GCSE substitutions made. */
509 static int gcse_subst_count;
510 /* Number of copy instructions created. */
511 static int gcse_create_count;
512 /* Number of constants propagated. */
513 static int const_prop_count;
514 /* Number of copies propagated. */
515 static int copy_prop_count;
517 /* These variables are used by classic GCSE.
518 Normally they'd be defined a bit later, but `rd_gen' needs to
519 be declared sooner. */
521 /* Each block has a bitmap of each type.
522    The length of each block's bitmap is:
524 max_cuid - for reaching definitions
525 n_exprs - for available expressions
527    Thus we view the bitmaps as two-dimensional arrays, i.e.
528 rd_kill[block_num][cuid_num]
529 ae_kill[block_num][expr_num] */
531 /* For reaching defs */
532 static sbitmap *rd_kill, *rd_gen, *reaching_defs, *rd_out;
534 /* for available exprs */
535 static sbitmap *ae_kill, *ae_gen, *ae_in, *ae_out;
537 /* Objects of this type are passed around by the null-pointer check
538 removal routines. */
539 struct null_pointer_info
541 /* The basic block being processed. */
542 basic_block current_block;
543 /* The first register to be handled in this pass. */
544 unsigned int min_reg;
545 /* One greater than the last register to be handled in this pass. */
546 unsigned int max_reg;
547 sbitmap *nonnull_local;
548 sbitmap *nonnull_killed;
551 static void compute_can_copy PARAMS ((void));
552 static char *gmalloc PARAMS ((unsigned int));
553 static char *grealloc PARAMS ((char *, unsigned int));
554 static char *gcse_alloc PARAMS ((unsigned long));
555 static void alloc_gcse_mem PARAMS ((rtx));
556 static void free_gcse_mem PARAMS ((void));
557 static void alloc_reg_set_mem PARAMS ((int));
558 static void free_reg_set_mem PARAMS ((void));
559 static int get_bitmap_width PARAMS ((int, int, int));
560 static void record_one_set PARAMS ((int, rtx));
561 static void record_set_info PARAMS ((rtx, rtx, void *));
562 static void compute_sets PARAMS ((rtx));
563 static void hash_scan_insn PARAMS ((rtx, struct hash_table *, int));
564 static void hash_scan_set PARAMS ((rtx, rtx, struct hash_table *));
565 static void hash_scan_clobber PARAMS ((rtx, rtx, struct hash_table *));
566 static void hash_scan_call PARAMS ((rtx, rtx, struct hash_table *));
567 static int want_to_gcse_p PARAMS ((rtx));
568 static bool gcse_constant_p PARAMS ((rtx));
569 static int oprs_unchanged_p PARAMS ((rtx, rtx, int));
570 static int oprs_anticipatable_p PARAMS ((rtx, rtx));
571 static int oprs_available_p PARAMS ((rtx, rtx));
572 static void insert_expr_in_table PARAMS ((rtx, enum machine_mode, rtx,
573 int, int, struct hash_table *));
574 static void insert_set_in_table PARAMS ((rtx, rtx, struct hash_table *));
575 static unsigned int hash_expr PARAMS ((rtx, enum machine_mode, int *, int));
576 static unsigned int hash_expr_1 PARAMS ((rtx, enum machine_mode, int *));
577 static unsigned int hash_string_1 PARAMS ((const char *));
578 static unsigned int hash_set PARAMS ((int, int));
579 static int expr_equiv_p PARAMS ((rtx, rtx));
580 static void record_last_reg_set_info PARAMS ((rtx, int));
581 static void record_last_mem_set_info PARAMS ((rtx));
582 static void record_last_set_info PARAMS ((rtx, rtx, void *));
583 static void compute_hash_table PARAMS ((struct hash_table *));
584 static void alloc_hash_table PARAMS ((int, struct hash_table *, int));
585 static void free_hash_table PARAMS ((struct hash_table *));
586 static void compute_hash_table_work PARAMS ((struct hash_table *));
587 static void dump_hash_table PARAMS ((FILE *, const char *,
588 struct hash_table *));
589 static struct expr *lookup_expr PARAMS ((rtx, struct hash_table *));
590 static struct expr *lookup_set PARAMS ((unsigned int, struct hash_table *));
591 static struct expr *next_set PARAMS ((unsigned int, struct expr *));
592 static void reset_opr_set_tables PARAMS ((void));
593 static int oprs_not_set_p PARAMS ((rtx, rtx));
594 static void mark_call PARAMS ((rtx));
595 static void mark_set PARAMS ((rtx, rtx));
596 static void mark_clobber PARAMS ((rtx, rtx));
597 static void mark_oprs_set PARAMS ((rtx));
598 static void alloc_cprop_mem PARAMS ((int, int));
599 static void free_cprop_mem PARAMS ((void));
600 static void compute_transp PARAMS ((rtx, int, sbitmap *, int));
601 static void compute_transpout PARAMS ((void));
602 static void compute_local_properties PARAMS ((sbitmap *, sbitmap *, sbitmap *,
603 struct hash_table *));
604 static void compute_cprop_data PARAMS ((void));
605 static void find_used_regs PARAMS ((rtx *, void *));
606 static int try_replace_reg PARAMS ((rtx, rtx, rtx));
607 static struct expr *find_avail_set PARAMS ((int, rtx));
608 static int cprop_jump PARAMS ((basic_block, rtx, rtx, rtx, rtx));
609 static void mems_conflict_for_gcse_p PARAMS ((rtx, rtx, void *));
610 static int load_killed_in_block_p PARAMS ((basic_block, int, rtx, int));
611 static void canon_list_insert PARAMS ((rtx, rtx, void *));
612 static int cprop_insn PARAMS ((rtx, int));
613 static int cprop PARAMS ((int));
614 static void find_implicit_sets PARAMS ((void));
615 static int one_cprop_pass PARAMS ((int, int, int));
616 static bool constprop_register PARAMS ((rtx, rtx, rtx, int));
617 static struct expr *find_bypass_set PARAMS ((int, int));
618 static bool reg_killed_on_edge PARAMS ((rtx, edge));
619 static int bypass_block PARAMS ((basic_block, rtx, rtx));
620 static int bypass_conditional_jumps PARAMS ((void));
621 static void alloc_pre_mem PARAMS ((int, int));
622 static void free_pre_mem PARAMS ((void));
623 static void compute_pre_data PARAMS ((void));
624 static int pre_expr_reaches_here_p PARAMS ((basic_block, struct expr *,
625 basic_block));
626 static void insert_insn_end_bb PARAMS ((struct expr *, basic_block, int));
627 static void pre_insert_copy_insn PARAMS ((struct expr *, rtx));
628 static void pre_insert_copies PARAMS ((void));
629 static int pre_delete PARAMS ((void));
630 static int pre_gcse PARAMS ((void));
631 static int one_pre_gcse_pass PARAMS ((int));
632 static void add_label_notes PARAMS ((rtx, rtx));
633 static void alloc_code_hoist_mem PARAMS ((int, int));
634 static void free_code_hoist_mem PARAMS ((void));
635 static void compute_code_hoist_vbeinout PARAMS ((void));
636 static void compute_code_hoist_data PARAMS ((void));
637 static int hoist_expr_reaches_here_p PARAMS ((basic_block, int, basic_block,
638 char *));
639 static void hoist_code PARAMS ((void));
640 static int one_code_hoisting_pass PARAMS ((void));
641 static void alloc_rd_mem PARAMS ((int, int));
642 static void free_rd_mem PARAMS ((void));
643 static void handle_rd_kill_set PARAMS ((rtx, int, basic_block));
644 static void compute_kill_rd PARAMS ((void));
645 static void compute_rd PARAMS ((void));
646 static void alloc_avail_expr_mem PARAMS ((int, int));
647 static void free_avail_expr_mem PARAMS ((void));
648 static void compute_ae_gen PARAMS ((struct hash_table *));
649 static int expr_killed_p PARAMS ((rtx, basic_block));
650 static void compute_ae_kill PARAMS ((sbitmap *, sbitmap *, struct hash_table *));
651 static int expr_reaches_here_p PARAMS ((struct occr *, struct expr *,
652 basic_block, int));
653 static rtx computing_insn PARAMS ((struct expr *, rtx));
654 static int def_reaches_here_p PARAMS ((rtx, rtx));
655 static int can_disregard_other_sets PARAMS ((struct reg_set **, rtx, int));
656 static int handle_avail_expr PARAMS ((rtx, struct expr *));
657 static int classic_gcse PARAMS ((void));
658 static int one_classic_gcse_pass PARAMS ((int));
659 static void invalidate_nonnull_info PARAMS ((rtx, rtx, void *));
660 static int delete_null_pointer_checks_1 PARAMS ((unsigned int *,
661 sbitmap *, sbitmap *,
662 struct null_pointer_info *));
663 static rtx process_insert_insn PARAMS ((struct expr *));
664 static int pre_edge_insert PARAMS ((struct edge_list *, struct expr **));
665 static int expr_reaches_here_p_work PARAMS ((struct occr *, struct expr *,
666 basic_block, int, char *));
667 static int pre_expr_reaches_here_p_work PARAMS ((basic_block, struct expr *,
668 basic_block, char *));
669 static struct ls_expr * ldst_entry PARAMS ((rtx));
670 static void free_ldst_entry PARAMS ((struct ls_expr *));
671 static void free_ldst_mems PARAMS ((void));
672 static void print_ldst_list PARAMS ((FILE *));
673 static struct ls_expr * find_rtx_in_ldst PARAMS ((rtx));
674 static int enumerate_ldsts PARAMS ((void));
675 static inline struct ls_expr * first_ls_expr PARAMS ((void));
676 static inline struct ls_expr * next_ls_expr PARAMS ((struct ls_expr *));
677 static int simple_mem PARAMS ((rtx));
678 static void invalidate_any_buried_refs PARAMS ((rtx));
679 static void compute_ld_motion_mems PARAMS ((void));
680 static void trim_ld_motion_mems PARAMS ((void));
681 static void update_ld_motion_stores PARAMS ((struct expr *));
682 static void reg_set_info PARAMS ((rtx, rtx, void *));
683 static bool store_ops_ok PARAMS ((rtx, int *));
684 static rtx extract_mentioned_regs PARAMS ((rtx));
685 static rtx extract_mentioned_regs_helper PARAMS ((rtx, rtx));
686 static void find_moveable_store PARAMS ((rtx, int *, int *));
687 static int compute_store_table PARAMS ((void));
688 static bool load_kills_store PARAMS ((rtx, rtx));
689 static bool find_loads PARAMS ((rtx, rtx));
690 static bool store_killed_in_insn PARAMS ((rtx, rtx, rtx));
691 static bool store_killed_after PARAMS ((rtx, rtx, rtx, basic_block,
692 int *, rtx *));
693 static bool store_killed_before PARAMS ((rtx, rtx, rtx, basic_block,
694 int *));
695 static void build_store_vectors PARAMS ((void));
696 static void insert_insn_start_bb PARAMS ((rtx, basic_block));
697 static int insert_store PARAMS ((struct ls_expr *, edge));
698 static void replace_store_insn PARAMS ((rtx, rtx, basic_block));
699 static void delete_store PARAMS ((struct ls_expr *,
700 basic_block));
701 static void free_store_memory PARAMS ((void));
702 static void store_motion PARAMS ((void));
703 static void free_insn_expr_list_list PARAMS ((rtx *));
704 static void clear_modify_mem_tables PARAMS ((void));
705 static void free_modify_mem_tables PARAMS ((void));
706 static rtx gcse_emit_move_after PARAMS ((rtx, rtx, rtx));
707 static void local_cprop_find_used_regs PARAMS ((rtx *, void *));
708 static bool do_local_cprop PARAMS ((rtx, rtx, int, rtx*));
709 static bool adjust_libcall_notes PARAMS ((rtx, rtx, rtx, rtx*));
710 static void local_cprop_pass PARAMS ((int));
712 /* Entry point for global common subexpression elimination.
713 F is the first instruction in the function. */
716 gcse_main (f, file)
717 rtx f;
718 FILE *file;
720 int changed, pass;
721 /* Bytes used at start of pass. */
722 int initial_bytes_used;
723 /* Maximum number of bytes used by a pass. */
724 int max_pass_bytes;
725 /* Point to release obstack data from for each pass. */
726 char *gcse_obstack_bottom;
728 /* We do not construct an accurate cfg in functions which call
729 setjmp, so just punt to be safe. */
730 if (current_function_calls_setjmp)
731 return 0;
733 /* Assume that we do not need to run jump optimizations after gcse. */
734 run_jump_opt_after_gcse = 0;
736 /* For calling dump_foo fns from gdb. */
737 debug_stderr = stderr;
738 gcse_file = file;
740 /* Identify the basic block information for this function, including
741 successors and predecessors. */
742 max_gcse_regno = max_reg_num ();
744 if (file)
745 dump_flow_info (file);
747 /* Return if there's nothing to do. */
748 if (n_basic_blocks <= 1)
749 return 0;
751 /* Trying to perform global optimizations on flow graphs which have
752 a high connectivity will take a long time and is unlikely to be
753 particularly useful.
755 In normal circumstances a cfg should have about twice as many edges
756 as blocks. But we do not want to punish small functions which have
757      a couple of switch statements.  So we require a relatively large number
758 of basic blocks and the ratio of edges to blocks to be high. */
759 if (n_basic_blocks > 1000 && n_edges / n_basic_blocks >= 20)
761 if (warn_disabled_optimization)
762 warning ("GCSE disabled: %d > 1000 basic blocks and %d >= 20 edges/basic block",
763 n_basic_blocks, n_edges / n_basic_blocks);
764 return 0;
767 /* If allocating memory for the cprop bitmap would take up too much
768 storage it's better just to disable the optimization. */
769 if ((n_basic_blocks
770 * SBITMAP_SET_SIZE (max_gcse_regno)
771 * sizeof (SBITMAP_ELT_TYPE)) > MAX_GCSE_MEMORY)
773 if (warn_disabled_optimization)
774 warning ("GCSE disabled: %d basic blocks and %d registers",
775 n_basic_blocks, max_gcse_regno);
777 return 0;
780 gcc_obstack_init (&gcse_obstack);
781 bytes_used = 0;
783 /* We need alias. */
784 init_alias_analysis ();
785 /* Record where pseudo-registers are set. This data is kept accurate
786 during each pass. ??? We could also record hard-reg information here
787 [since it's unchanging], however it is currently done during hash table
788 computation.
790 It may be tempting to compute MEM set information here too, but MEM sets
791 will be subject to code motion one day and thus we need to compute
792 information about memory sets when we build the hash tables. */
794 alloc_reg_set_mem (max_gcse_regno);
795 compute_sets (f);
797 pass = 0;
798 initial_bytes_used = bytes_used;
799 max_pass_bytes = 0;
800 gcse_obstack_bottom = gcse_alloc (1);
801 changed = 1;
802 while (changed && pass < MAX_GCSE_PASSES)
804 changed = 0;
805 if (file)
806 fprintf (file, "GCSE pass %d\n\n", pass + 1);
808 /* Initialize bytes_used to the space for the pred/succ lists,
809 and the reg_set_table data. */
810 bytes_used = initial_bytes_used;
812 /* Each pass may create new registers, so recalculate each time. */
813 max_gcse_regno = max_reg_num ();
815 alloc_gcse_mem (f);
817 /* Don't allow constant propagation to modify jumps
818 during this pass. */
819 changed = one_cprop_pass (pass + 1, 0, 0);
821 if (optimize_size)
822 changed |= one_classic_gcse_pass (pass + 1);
823 else
825 changed |= one_pre_gcse_pass (pass + 1);
826 /* We may have just created new basic blocks. Release and
827 recompute various things which are sized on the number of
828 basic blocks. */
829 if (changed)
831 free_modify_mem_tables ();
832 modify_mem_list
833 = (rtx *) gmalloc (last_basic_block * sizeof (rtx));
834 canon_modify_mem_list
835 = (rtx *) gmalloc (last_basic_block * sizeof (rtx));
836 memset ((char *) modify_mem_list, 0, last_basic_block * sizeof (rtx));
837 memset ((char *) canon_modify_mem_list, 0, last_basic_block * sizeof (rtx));
839 free_reg_set_mem ();
840 alloc_reg_set_mem (max_reg_num ());
841 compute_sets (f);
842 run_jump_opt_after_gcse = 1;
845 if (max_pass_bytes < bytes_used)
846 max_pass_bytes = bytes_used;
848       /* Free up memory, then reallocate for code hoisting.  We cannot
849          reuse the existing allocated memory because the tables
850 will not have info for the insns or registers created by
851 partial redundancy elimination. */
852 free_gcse_mem ();
854       /* It does not make sense to run code hoisting unless we are optimizing
855 for code size -- it rarely makes programs faster, and can make
856 them bigger if we did partial redundancy elimination (when optimizing
857 for space, we use a classic gcse algorithm instead of partial
858 redundancy algorithms). */
859 if (optimize_size)
861 max_gcse_regno = max_reg_num ();
862 alloc_gcse_mem (f);
863 changed |= one_code_hoisting_pass ();
864 free_gcse_mem ();
866 if (max_pass_bytes < bytes_used)
867 max_pass_bytes = bytes_used;
870 if (file)
872 fprintf (file, "\n");
873 fflush (file);
876 obstack_free (&gcse_obstack, gcse_obstack_bottom);
877 pass++;
880 /* Do one last pass of copy propagation, including cprop into
881 conditional jumps. */
883 max_gcse_regno = max_reg_num ();
884 alloc_gcse_mem (f);
885 /* This time, go ahead and allow cprop to alter jumps. */
886 one_cprop_pass (pass + 1, 1, 0);
887 free_gcse_mem ();
889 if (file)
891 fprintf (file, "GCSE of %s: %d basic blocks, ",
892 current_function_name, n_basic_blocks);
893 fprintf (file, "%d pass%s, %d bytes\n\n",
894 pass, pass > 1 ? "es" : "", max_pass_bytes);
897 obstack_free (&gcse_obstack, NULL);
898 free_reg_set_mem ();
899 /* We are finished with alias. */
900 end_alias_analysis ();
901 allocate_reg_info (max_reg_num (), FALSE, FALSE);
903 if (!optimize_size && flag_gcse_sm)
904 store_motion ();
906 /* Record where pseudo-registers are set. */
907 return run_jump_opt_after_gcse;
910 /* Misc. utilities. */
912 /* Nonzero for each mode that supports (set (reg) (reg)).
913 This is trivially true for integer and floating point values.
914 It may or may not be true for condition codes. */
915 static char can_copy[(int) NUM_MACHINE_MODES];
917 /* Compute which modes support reg/reg copy operations. */
919 static void
920 compute_can_copy ()
922 int i;
923 #ifndef AVOID_CCMODE_COPIES
924 rtx reg, insn;
925 #endif
926 memset (can_copy, 0, NUM_MACHINE_MODES);
928 start_sequence ();
929 for (i = 0; i < NUM_MACHINE_MODES; i++)
930 if (GET_MODE_CLASS (i) == MODE_CC)
932 #ifdef AVOID_CCMODE_COPIES
933 can_copy[i] = 0;
934 #else
935 reg = gen_rtx_REG ((enum machine_mode) i, LAST_VIRTUAL_REGISTER + 1);
936 insn = emit_insn (gen_rtx_SET (VOIDmode, reg, reg));
937 if (recog (PATTERN (insn), insn, NULL) >= 0)
938 can_copy[i] = 1;
939 #endif
941 else
942 can_copy[i] = 1;
944 end_sequence ();
947 /* Returns whether the mode supports reg/reg copy operations. */
949 bool
950 can_copy_p (mode)
951 enum machine_mode mode;
953 static bool can_copy_init_p = false;
955 if (! can_copy_init_p)
957 compute_can_copy ();
958 can_copy_init_p = true;
961 return can_copy[mode] != 0;
964 /* Cover function to xmalloc to record bytes allocated. */
966 static char *
967 gmalloc (size)
968 unsigned int size;
970 bytes_used += size;
971 return xmalloc (size);
974 /* Cover function to xrealloc.
975 We don't record the additional size since we don't know it.
976 It won't affect memory usage stats much anyway. */
978 static char *
979 grealloc (ptr, size)
980 char *ptr;
981 unsigned int size;
983 return xrealloc (ptr, size);
986 /* Cover function to obstack_alloc. */
988 static char *
989 gcse_alloc (size)
990 unsigned long size;
992 bytes_used += size;
993 return (char *) obstack_alloc (&gcse_obstack, size);
996 /* Allocate memory for the cuid mapping array,
997 and reg/memory set tracking tables.
999 This is called at the start of each pass. */
1001 static void
1002 alloc_gcse_mem (f)
1003 rtx f;
1005 int i, n;
1006 rtx insn;
1008 /* Find the largest UID and create a mapping from UIDs to CUIDs.
1009 CUIDs are like UIDs except they increase monotonically, have no gaps,
1010 and only apply to real insns. */
1012 max_uid = get_max_uid ();
1013 n = (max_uid + 1) * sizeof (int);
1014 uid_cuid = (int *) gmalloc (n);
1015 memset ((char *) uid_cuid, 0, n);
1016 for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
1018 if (INSN_P (insn))
1019 uid_cuid[INSN_UID (insn)] = i++;
1020 else
1021 uid_cuid[INSN_UID (insn)] = i;
1024 /* Create a table mapping cuids to insns. */
1026 max_cuid = i;
1027 n = (max_cuid + 1) * sizeof (rtx);
1028 cuid_insn = (rtx *) gmalloc (n);
1029 memset ((char *) cuid_insn, 0, n);
1030 for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
1031 if (INSN_P (insn))
1032 CUID_INSN (i++) = insn;
1034 /* Allocate vars to track sets of regs. */
1035 reg_set_bitmap = BITMAP_XMALLOC ();
1037 /* Allocate vars to track sets of regs, memory per block. */
1038 reg_set_in_block = (sbitmap *) sbitmap_vector_alloc (last_basic_block,
1039 max_gcse_regno);
1040 /* Allocate array to keep a list of insns which modify memory in each
1041 basic block. */
1042 modify_mem_list = (rtx *) gmalloc (last_basic_block * sizeof (rtx));
1043 canon_modify_mem_list = (rtx *) gmalloc (last_basic_block * sizeof (rtx));
1044 memset ((char *) modify_mem_list, 0, last_basic_block * sizeof (rtx));
1045 memset ((char *) canon_modify_mem_list, 0, last_basic_block * sizeof (rtx));
1046 modify_mem_list_set = BITMAP_XMALLOC ();
1047 canon_modify_mem_list_set = BITMAP_XMALLOC ();
1050 /* Free memory allocated by alloc_gcse_mem. */
1052 static void
1053 free_gcse_mem ()
1055 free (uid_cuid);
1056 free (cuid_insn);
1058 BITMAP_XFREE (reg_set_bitmap);
1060 sbitmap_vector_free (reg_set_in_block);
1061 free_modify_mem_tables ();
1062 BITMAP_XFREE (modify_mem_list_set);
1063 BITMAP_XFREE (canon_modify_mem_list_set);
1066 /* Many of the global optimization algorithms work by solving dataflow
1067 equations for various expressions. Initially, some local value is
1068 computed for each expression in each block. Then, the values across the
1069 various blocks are combined (by following flow graph edges) to arrive at
1070 global values. Conceptually, each set of equations is independent. We
1071 may therefore solve all the equations in parallel, solve them one at a
1072 time, or pick any intermediate approach.
1074 When you're going to need N two-dimensional bitmaps, each X (say, the
1075 number of blocks) by Y (say, the number of expressions), call this
1076 function. It's not important what X and Y represent; only that Y
1077 correspond to the things that can be done in parallel. This function will
1078 return an appropriate chunking factor C; you should solve C sets of
1079 equations in parallel. By going through this function, we can easily
1080 trade space against time; by solving fewer equations in parallel we use
1081 less space. */
1083 static int
1084 get_bitmap_width (n, x, y)
1085 int n;
1086 int x;
1087 int y;
1089 /* It's not really worth figuring out *exactly* how much memory will
1090 be used by a particular choice. The important thing is to get
1091 something approximately right. */
1092 size_t max_bitmap_memory = 10 * 1024 * 1024;
1094 /* The number of bytes we'd use for a single column of minimum
1095 width. */
1096 size_t column_size = n * x * sizeof (SBITMAP_ELT_TYPE);
1098 /* Often, it's reasonable just to solve all the equations in
1099 parallel. */
1100 if (column_size * SBITMAP_SET_SIZE (y) <= max_bitmap_memory)
1101 return y;
1103 /* Otherwise, pick the largest width we can, without going over the
1104 limit. */
1105 return SBITMAP_ELT_BITS * ((max_bitmap_memory + column_size - 1)
1106 / column_size);
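/* Worked example with made-up numbers (assuming an 8-byte SBITMAP_ELT_TYPE):
   for n = 2 bitmaps over x = 500 blocks, column_size is 2 * 500 * 8 = 8000
   bytes.  With y = 100000 expressions a full parallel solve would need about
   8000 * SBITMAP_SET_SIZE (100000) bytes, roughly 12.5 MB, which exceeds the
   10 MB cap, so the function instead returns a chunk width of about
   SBITMAP_ELT_BITS * (10 MB / 8000) ~= 84000 equations per solve.  */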
1109 /* Compute the local properties of each recorded expression.
1111 Local properties are those that are defined by the block, irrespective of
1112 other blocks.
1114 An expression is transparent in a block if its operands are not modified
1115 in the block.
1117 An expression is computed (locally available) in a block if it is computed
1118    at least once and the expression would contain the same value if the
1119 computation was moved to the end of the block.
1121 An expression is locally anticipatable in a block if it is computed at
1122    least once and the expression would contain the same value if the computation
1123 was moved to the beginning of the block.
1125 We call this routine for cprop, pre and code hoisting. They all compute
1126 basically the same information and thus can easily share this code.
1128 TRANSP, COMP, and ANTLOC are destination sbitmaps for recording local
1129 properties. If NULL, then it is not necessary to compute or record that
1130 particular property.
1132    TABLE controls which hash table to look at.  If it is the set hash table,
1133    TRANSP is additionally computed as ~TRANSP, since this is really cprop's
1134 ABSALTERED. */
1136 static void
1137 compute_local_properties (transp, comp, antloc, table)
1138 sbitmap *transp;
1139 sbitmap *comp;
1140 sbitmap *antloc;
1141 struct hash_table *table;
1143 unsigned int i;
1145 /* Initialize any bitmaps that were passed in. */
1146 if (transp)
1148 if (table->set_p)
1149 sbitmap_vector_zero (transp, last_basic_block);
1150 else
1151 sbitmap_vector_ones (transp, last_basic_block);
1154 if (comp)
1155 sbitmap_vector_zero (comp, last_basic_block);
1156 if (antloc)
1157 sbitmap_vector_zero (antloc, last_basic_block);
1159 for (i = 0; i < table->size; i++)
1161 struct expr *expr;
1163 for (expr = table->table[i]; expr != NULL; expr = expr->next_same_hash)
1165 int indx = expr->bitmap_index;
1166 struct occr *occr;
1168 /* The expression is transparent in this block if it is not killed.
1169 We start by assuming all are transparent [none are killed], and
1170 then reset the bits for those that are. */
1171 if (transp)
1172 compute_transp (expr->expr, indx, transp, table->set_p);
1174 /* The occurrences recorded in antic_occr are exactly those that
1175 we want to set to nonzero in ANTLOC. */
1176 if (antloc)
1177 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
1179 SET_BIT (antloc[BLOCK_NUM (occr->insn)], indx);
1181 /* While we're scanning the table, this is a good place to
1182 initialize this. */
1183 occr->deleted_p = 0;
1186 /* The occurrences recorded in avail_occr are exactly those that
1187 we want to set to nonzero in COMP. */
1188 if (comp)
1189 for (occr = expr->avail_occr; occr != NULL; occr = occr->next)
1191 SET_BIT (comp[BLOCK_NUM (occr->insn)], indx);
1193 /* While we're scanning the table, this is a good place to
1194 initialize this. */
1195 occr->copied_p = 0;
1198 /* While we're scanning the table, this is a good place to
1199 initialize this. */
1200 expr->reaching_reg = 0;
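/* Illustrative example (hypothetical registers): in a block containing
       r5 = r2 + r3
       r2 = r6
   the expression r2 + r3 is locally anticipatable (ANTLOC) because it is
   computed before any of its operands change, but it is not locally
   available (COMP) since r2 is modified afterwards, and it is not
   transparent (TRANSP) because one of its operands is set in the block.  */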
1205 /* Register set information.
1207 `reg_set_table' records where each register is set or otherwise
1208 modified. */
1210 static struct obstack reg_set_obstack;
1212 static void
1213 alloc_reg_set_mem (n_regs)
1214 int n_regs;
1216 unsigned int n;
1218 reg_set_table_size = n_regs + REG_SET_TABLE_SLOP;
1219 n = reg_set_table_size * sizeof (struct reg_set *);
1220 reg_set_table = (struct reg_set **) gmalloc (n);
1221 memset ((char *) reg_set_table, 0, n);
1223 gcc_obstack_init (&reg_set_obstack);
1226 static void
1227 free_reg_set_mem ()
1229 free (reg_set_table);
1230 obstack_free (&reg_set_obstack, NULL);
1233 /* Record REGNO in the reg_set table. */
1235 static void
1236 record_one_set (regno, insn)
1237 int regno;
1238 rtx insn;
1240 /* Allocate a new reg_set element and link it onto the list. */
1241 struct reg_set *new_reg_info;
1243 /* If the table isn't big enough, enlarge it. */
1244 if (regno >= reg_set_table_size)
1246 int new_size = regno + REG_SET_TABLE_SLOP;
1248 reg_set_table
1249 = (struct reg_set **) grealloc ((char *) reg_set_table,
1250 new_size * sizeof (struct reg_set *));
1251 memset ((char *) (reg_set_table + reg_set_table_size), 0,
1252 (new_size - reg_set_table_size) * sizeof (struct reg_set *));
1253 reg_set_table_size = new_size;
1256 new_reg_info = (struct reg_set *) obstack_alloc (&reg_set_obstack,
1257 sizeof (struct reg_set));
1258 bytes_used += sizeof (struct reg_set);
1259 new_reg_info->insn = insn;
1260 new_reg_info->next = reg_set_table[regno];
1261 reg_set_table[regno] = new_reg_info;
1264 /* Called from compute_sets via note_stores to handle one SET or CLOBBER in
1265 an insn. The DATA is really the instruction in which the SET is
1266 occurring. */
1268 static void
1269 record_set_info (dest, setter, data)
1270 rtx dest, setter ATTRIBUTE_UNUSED;
1271 void *data;
1273 rtx record_set_insn = (rtx) data;
1275 if (GET_CODE (dest) == REG && REGNO (dest) >= FIRST_PSEUDO_REGISTER)
1276 record_one_set (REGNO (dest), record_set_insn);
1279 /* Scan the function and record each set of each pseudo-register.
1281 This is called once, at the start of the gcse pass. See the comments for
1282 `reg_set_table' for further documentation. */
1284 static void
1285 compute_sets (f)
1286 rtx f;
1288 rtx insn;
1290 for (insn = f; insn != 0; insn = NEXT_INSN (insn))
1291 if (INSN_P (insn))
1292 note_stores (PATTERN (insn), record_set_info, insn);
1295 /* Hash table support. */
1297 struct reg_avail_info
1299 basic_block last_bb;
1300 int first_set;
1301 int last_set;
1304 static struct reg_avail_info *reg_avail_info;
1305 static basic_block current_bb;
1308 /* See whether X, the source of a set, is something we want to consider for
1309 GCSE. */
1311 static GTY(()) rtx test_insn;
1312 static int
1313 want_to_gcse_p (x)
1314 rtx x;
1316 int num_clobbers = 0;
1317 int icode;
1319 switch (GET_CODE (x))
1321 case REG:
1322 case SUBREG:
1323 case CONST_INT:
1324 case CONST_DOUBLE:
1325 case CONST_VECTOR:
1326 case CALL:
1327 case CONSTANT_P_RTX:
1328 return 0;
1330 default:
1331 break;
1334 /* If this is a valid operand, we are OK. If it's VOIDmode, we aren't. */
1335 if (general_operand (x, GET_MODE (x)))
1336 return 1;
1337 else if (GET_MODE (x) == VOIDmode)
1338 return 0;
1340 /* Otherwise, check if we can make a valid insn from it. First initialize
1341 our test insn if we haven't already. */
1342 if (test_insn == 0)
1344 test_insn
1345 = make_insn_raw (gen_rtx_SET (VOIDmode,
1346 gen_rtx_REG (word_mode,
1347 FIRST_PSEUDO_REGISTER * 2),
1348 const0_rtx));
1349 NEXT_INSN (test_insn) = PREV_INSN (test_insn) = 0;
1352 /* Now make an insn like the one we would make when GCSE'ing and see if
1353 valid. */
1354 PUT_MODE (SET_DEST (PATTERN (test_insn)), GET_MODE (x));
1355 SET_SRC (PATTERN (test_insn)) = x;
1356 return ((icode = recog (PATTERN (test_insn), test_insn, &num_clobbers)) >= 0
1357 && (num_clobbers == 0 || ! added_clobbers_hard_reg_p (icode)));
1360 /* Return nonzero if the operands of expression X are unchanged from the
1361 start of INSN's basic block up to but not including INSN (if AVAIL_P == 0),
1362 or from INSN to the end of INSN's basic block (if AVAIL_P != 0). */
1364 static int
1365 oprs_unchanged_p (x, insn, avail_p)
1366 rtx x, insn;
1367 int avail_p;
1369 int i, j;
1370 enum rtx_code code;
1371 const char *fmt;
1373 if (x == 0)
1374 return 1;
1376 code = GET_CODE (x);
1377 switch (code)
1379 case REG:
1381 struct reg_avail_info *info = &reg_avail_info[REGNO (x)];
1383 if (info->last_bb != current_bb)
1384 return 1;
1385 if (avail_p)
1386 return info->last_set < INSN_CUID (insn);
1387 else
1388 return info->first_set >= INSN_CUID (insn);
1391 case MEM:
1392 if (load_killed_in_block_p (current_bb, INSN_CUID (insn),
1393 x, avail_p))
1394 return 0;
1395 else
1396 return oprs_unchanged_p (XEXP (x, 0), insn, avail_p);
1398 case PRE_DEC:
1399 case PRE_INC:
1400 case POST_DEC:
1401 case POST_INC:
1402 case PRE_MODIFY:
1403 case POST_MODIFY:
1404 return 0;
1406 case PC:
1407 case CC0: /*FIXME*/
1408 case CONST:
1409 case CONST_INT:
1410 case CONST_DOUBLE:
1411 case CONST_VECTOR:
1412 case SYMBOL_REF:
1413 case LABEL_REF:
1414 case ADDR_VEC:
1415 case ADDR_DIFF_VEC:
1416 return 1;
1418 default:
1419 break;
1422 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
1424 if (fmt[i] == 'e')
1426 /* If we are about to do the last recursive call needed at this
1427 level, change it into iteration. This function is called enough
1428 to be worth it. */
1429 if (i == 0)
1430 return oprs_unchanged_p (XEXP (x, i), insn, avail_p);
1432 else if (! oprs_unchanged_p (XEXP (x, i), insn, avail_p))
1433 return 0;
1435 else if (fmt[i] == 'E')
1436 for (j = 0; j < XVECLEN (x, i); j++)
1437 if (! oprs_unchanged_p (XVECEXP (x, i, j), insn, avail_p))
1438 return 0;
1441 return 1;
1444 /* Used for communication between mems_conflict_for_gcse_p and
1445 load_killed_in_block_p. Nonzero if mems_conflict_for_gcse_p finds a
1446 conflict between two memory references. */
1447 static int gcse_mems_conflict_p;
1449 /* Used for communication between mems_conflict_for_gcse_p and
1450    load_killed_in_block_p.  This is the memory reference of a load instruction;
1451    mems_conflict_for_gcse_p checks whether a memory store conflicts with
1452 this memory load. */
1453 static rtx gcse_mem_operand;
1455 /* DEST is the output of an instruction. If it is a memory reference, and
1456 possibly conflicts with the load found in gcse_mem_operand, then set
1457 gcse_mems_conflict_p to a nonzero value. */
1459 static void
1460 mems_conflict_for_gcse_p (dest, setter, data)
1461 rtx dest, setter ATTRIBUTE_UNUSED;
1462 void *data ATTRIBUTE_UNUSED;
1464 while (GET_CODE (dest) == SUBREG
1465 || GET_CODE (dest) == ZERO_EXTRACT
1466 || GET_CODE (dest) == SIGN_EXTRACT
1467 || GET_CODE (dest) == STRICT_LOW_PART)
1468 dest = XEXP (dest, 0);
1470 /* If DEST is not a MEM, then it will not conflict with the load. Note
1471 that function calls are assumed to clobber memory, but are handled
1472 elsewhere. */
1473 if (GET_CODE (dest) != MEM)
1474 return;
1476 /* If we are setting a MEM in our list of specially recognized MEMs,
1477 don't mark as killed this time. */
1479 if (expr_equiv_p (dest, gcse_mem_operand) && pre_ldst_mems != NULL)
1481 if (!find_rtx_in_ldst (dest))
1482 gcse_mems_conflict_p = 1;
1483 return;
1486 if (true_dependence (dest, GET_MODE (dest), gcse_mem_operand,
1487 rtx_addr_varies_p))
1488 gcse_mems_conflict_p = 1;
1491 /* Return nonzero if the expression in X (a memory reference) is killed
1492 in block BB before or after the insn with the CUID in UID_LIMIT.
1493 AVAIL_P is nonzero for kills after UID_LIMIT, and zero for kills
1494 before UID_LIMIT.
1496 To check the entire block, set UID_LIMIT to max_uid + 1 and
1497 AVAIL_P to 0. */
1499 static int
1500 load_killed_in_block_p (bb, uid_limit, x, avail_p)
1501 basic_block bb;
1502 int uid_limit;
1503 rtx x;
1504 int avail_p;
1506 rtx list_entry = modify_mem_list[bb->index];
1507 while (list_entry)
1509 rtx setter;
1510 /* Ignore entries in the list that do not apply. */
1511 if ((avail_p
1512 && INSN_CUID (XEXP (list_entry, 0)) < uid_limit)
1513 || (! avail_p
1514 && INSN_CUID (XEXP (list_entry, 0)) > uid_limit))
1516 list_entry = XEXP (list_entry, 1);
1517 continue;
1520 setter = XEXP (list_entry, 0);
1522 /* If SETTER is a call everything is clobbered. Note that calls
1523 to pure functions are never put on the list, so we need not
1524 worry about them. */
1525 if (GET_CODE (setter) == CALL_INSN)
1526 return 1;
1528 /* SETTER must be an INSN of some kind that sets memory. Call
1529 note_stores to examine each hunk of memory that is modified.
1531 The note_stores interface is pretty limited, so we have to
1532 communicate via global variables. Yuk. */
1533 gcse_mem_operand = x;
1534 gcse_mems_conflict_p = 0;
1535 note_stores (PATTERN (setter), mems_conflict_for_gcse_p, NULL);
1536 if (gcse_mems_conflict_p)
1537 return 1;
1538 list_entry = XEXP (list_entry, 1);
1540 return 0;
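/* For example, oprs_unchanged_p above uses this as
       load_killed_in_block_p (current_bb, INSN_CUID (insn), x, avail_p)
   to ask whether the MEM X is killed before or after INSN within its block;
   passing max_uid + 1 with AVAIL_P == 0 scans the whole block, as described
   in the comment above.  */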
1543 /* Return nonzero if the operands of expression X are unchanged from
1544 the start of INSN's basic block up to but not including INSN. */
1546 static int
1547 oprs_anticipatable_p (x, insn)
1548 rtx x, insn;
1550 return oprs_unchanged_p (x, insn, 0);
1553 /* Return nonzero if the operands of expression X are unchanged from
1554 INSN to the end of INSN's basic block. */
1556 static int
1557 oprs_available_p (x, insn)
1558 rtx x, insn;
1560 return oprs_unchanged_p (x, insn, 1);
1563 /* Hash expression X.
1565 MODE is only used if X is a CONST_INT. DO_NOT_RECORD_P is a boolean
1566 indicating if a volatile operand is found or if the expression contains
1567 something we don't want to insert in the table.
1569 ??? One might want to merge this with canon_hash. Later. */
1571 static unsigned int
1572 hash_expr (x, mode, do_not_record_p, hash_table_size)
1573 rtx x;
1574 enum machine_mode mode;
1575 int *do_not_record_p;
1576 int hash_table_size;
1578 unsigned int hash;
1580 *do_not_record_p = 0;
1582 hash = hash_expr_1 (x, mode, do_not_record_p);
1583 return hash % hash_table_size;
1586 /* Hash a string. Just add its bytes up. */
1588 static inline unsigned
1589 hash_string_1 (ps)
1590 const char *ps;
1592 unsigned hash = 0;
1593 const unsigned char *p = (const unsigned char *) ps;
1595 if (p)
1596 while (*p)
1597 hash += *p++;
1599 return hash;
1602 /* Subroutine of hash_expr to do the actual work. */
1604 static unsigned int
1605 hash_expr_1 (x, mode, do_not_record_p)
1606 rtx x;
1607 enum machine_mode mode;
1608 int *do_not_record_p;
1610 int i, j;
1611 unsigned hash = 0;
1612 enum rtx_code code;
1613 const char *fmt;
1615 /* Used to turn recursion into iteration. We can't rely on GCC's
1616 tail-recursion elimination since we need to keep accumulating values
1617 in HASH. */
1619 if (x == 0)
1620 return hash;
1622 repeat:
1623 code = GET_CODE (x);
1624 switch (code)
1626 case REG:
1627 hash += ((unsigned int) REG << 7) + REGNO (x);
1628 return hash;
1630 case CONST_INT:
1631 hash += (((unsigned int) CONST_INT << 7) + (unsigned int) mode
1632 + (unsigned int) INTVAL (x));
1633 return hash;
1635 case CONST_DOUBLE:
1636 /* This is like the general case, except that it only counts
1637 the integers representing the constant. */
1638 hash += (unsigned int) code + (unsigned int) GET_MODE (x);
1639 if (GET_MODE (x) != VOIDmode)
1640 for (i = 2; i < GET_RTX_LENGTH (CONST_DOUBLE); i++)
1641 hash += (unsigned int) XWINT (x, i);
1642 else
1643 hash += ((unsigned int) CONST_DOUBLE_LOW (x)
1644 + (unsigned int) CONST_DOUBLE_HIGH (x));
1645 return hash;
1647 case CONST_VECTOR:
1649 int units;
1650 rtx elt;
1652 units = CONST_VECTOR_NUNITS (x);
1654 for (i = 0; i < units; ++i)
1656 elt = CONST_VECTOR_ELT (x, i);
1657 hash += hash_expr_1 (elt, GET_MODE (elt), do_not_record_p);
1660 return hash;
1663 /* Assume there is only one rtx object for any given label. */
1664 case LABEL_REF:
1665 /* We don't hash on the address of the CODE_LABEL to avoid bootstrap
1666 differences and differences between each stage's debugging dumps. */
1667 hash += (((unsigned int) LABEL_REF << 7)
1668 + CODE_LABEL_NUMBER (XEXP (x, 0)));
1669 return hash;
1671 case SYMBOL_REF:
1673 /* Don't hash on the symbol's address to avoid bootstrap differences.
1674 Different hash values may cause expressions to be recorded in
1675 different orders and thus different registers to be used in the
1676 final assembler. This also avoids differences in the dump files
1677 between various stages. */
1678 unsigned int h = 0;
1679 const unsigned char *p = (const unsigned char *) XSTR (x, 0);
1681 while (*p)
1682 h += (h << 7) + *p++; /* ??? revisit */
1684 hash += ((unsigned int) SYMBOL_REF << 7) + h;
1685 return hash;
1688 case MEM:
1689 if (MEM_VOLATILE_P (x))
1691 *do_not_record_p = 1;
1692 return 0;
1695 hash += (unsigned int) MEM;
1696   /* We used to use the alias set for hashing, but this is not good, since
1697      the alias set may differ between -fprofile-arcs and -fbranch-probabilities
1698      compilations, causing the profiles to fail to match.  */
1699 x = XEXP (x, 0);
1700 goto repeat;
1702 case PRE_DEC:
1703 case PRE_INC:
1704 case POST_DEC:
1705 case POST_INC:
1706 case PC:
1707 case CC0:
1708 case CALL:
1709 case UNSPEC_VOLATILE:
1710 *do_not_record_p = 1;
1711 return 0;
1713 case ASM_OPERANDS:
1714 if (MEM_VOLATILE_P (x))
1716 *do_not_record_p = 1;
1717 return 0;
1719 else
1721 /* We don't want to take the filename and line into account. */
1722 hash += (unsigned) code + (unsigned) GET_MODE (x)
1723 + hash_string_1 (ASM_OPERANDS_TEMPLATE (x))
1724 + hash_string_1 (ASM_OPERANDS_OUTPUT_CONSTRAINT (x))
1725 + (unsigned) ASM_OPERANDS_OUTPUT_IDX (x);
1727 if (ASM_OPERANDS_INPUT_LENGTH (x))
1729 for (i = 1; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
1731 hash += (hash_expr_1 (ASM_OPERANDS_INPUT (x, i),
1732 GET_MODE (ASM_OPERANDS_INPUT (x, i)),
1733 do_not_record_p)
1734 + hash_string_1 (ASM_OPERANDS_INPUT_CONSTRAINT
1735 (x, i)));
1738 hash += hash_string_1 (ASM_OPERANDS_INPUT_CONSTRAINT (x, 0));
1739 x = ASM_OPERANDS_INPUT (x, 0);
1740 mode = GET_MODE (x);
1741 goto repeat;
1743 return hash;
1746 default:
1747 break;
1750 hash += (unsigned) code + (unsigned) GET_MODE (x);
1751 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
1753 if (fmt[i] == 'e')
1755 /* If we are about to do the last recursive call
1756 needed at this level, change it into iteration.
1757 This function is called enough to be worth it. */
1758 if (i == 0)
1760 x = XEXP (x, i);
1761 goto repeat;
1764 hash += hash_expr_1 (XEXP (x, i), 0, do_not_record_p);
1765 if (*do_not_record_p)
1766 return 0;
1769 else if (fmt[i] == 'E')
1770 for (j = 0; j < XVECLEN (x, i); j++)
1772 hash += hash_expr_1 (XVECEXP (x, i, j), 0, do_not_record_p);
1773 if (*do_not_record_p)
1774 return 0;
1777 else if (fmt[i] == 's')
1778 hash += hash_string_1 (XSTR (x, i));
1779 else if (fmt[i] == 'i')
1780 hash += (unsigned int) XINT (x, i);
1781 else
1782 abort ();
1785 return hash;
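/* Illustrative sketch (not part of gcse.c proper): the "goto repeat"
   pattern above recurses on every operand except the last and then loops
   on the last one, so the partially accumulated HASH is kept without
   relying on the compiler's tail-call elimination.  The toy_node/toy_hash
   names below are hypothetical; the block is wrapped in #if 0 so it is
   never compiled here.  */
#if 0
struct toy_node
{
  unsigned value;
  struct toy_node *left, *right;
};

static unsigned
toy_hash (const struct toy_node *x)
{
  unsigned hash = 0;

 repeat:
  if (x == 0)
    return hash;

  hash += x->value;
  hash += toy_hash (x->left);	/* genuine recursion for the non-last child */
  x = x->right;			/* last child: iterate instead of recursing */
  goto repeat;
}
#endif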
1788 /* Hash a set of register REGNO.
1790 Sets are hashed on the register that is set. This simplifies the PRE copy
1791 propagation code.
1793 ??? May need to make things more elaborate. Later, as necessary. */
1795 static unsigned int
1796 hash_set (regno, hash_table_size)
1797 int regno;
1798 int hash_table_size;
1800 unsigned int hash;
1802 hash = regno;
1803 return hash % hash_table_size;
1806 /* Return nonzero if X is equivalent to Y.
1807 ??? Borrowed from cse.c. Might want to remerge with cse.c. Later. */
1809 static int
1810 expr_equiv_p (x, y)
1811 rtx x, y;
1813 int i, j;
1814 enum rtx_code code;
1815 const char *fmt;
1817 if (x == y)
1818 return 1;
1820 if (x == 0 || y == 0)
1821 return 0;
1823 code = GET_CODE (x);
1824 if (code != GET_CODE (y))
1825 return 0;
1827 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
1828 if (GET_MODE (x) != GET_MODE (y))
1829 return 0;
1831 switch (code)
1833 case PC:
1834 case CC0:
1835 case CONST_INT:
1836 return 0;
1838 case LABEL_REF:
1839 return XEXP (x, 0) == XEXP (y, 0);
1841 case SYMBOL_REF:
1842 return XSTR (x, 0) == XSTR (y, 0);
1844 case REG:
1845 return REGNO (x) == REGNO (y);
1847 case MEM:
1848 /* Can't merge two expressions in different alias sets, since we can
1849 decide that the expression is transparent in a block when it isn't,
1850 	 due to it being set with a different alias set.  */
1851 if (MEM_ALIAS_SET (x) != MEM_ALIAS_SET (y))
1852 return 0;
1853 break;
1855 /* For commutative operations, check both orders. */
1856 case PLUS:
1857 case MULT:
1858 case AND:
1859 case IOR:
1860 case XOR:
1861 case NE:
1862 case EQ:
1863 return ((expr_equiv_p (XEXP (x, 0), XEXP (y, 0))
1864 && expr_equiv_p (XEXP (x, 1), XEXP (y, 1)))
1865 || (expr_equiv_p (XEXP (x, 0), XEXP (y, 1))
1866 && expr_equiv_p (XEXP (x, 1), XEXP (y, 0))));
1868 case ASM_OPERANDS:
1869 /* We don't use the generic code below because we want to
1870 disregard filename and line numbers. */
1872 /* A volatile asm isn't equivalent to any other. */
1873 if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
1874 return 0;
1876 if (GET_MODE (x) != GET_MODE (y)
1877 || strcmp (ASM_OPERANDS_TEMPLATE (x), ASM_OPERANDS_TEMPLATE (y))
1878 || strcmp (ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
1879 ASM_OPERANDS_OUTPUT_CONSTRAINT (y))
1880 || ASM_OPERANDS_OUTPUT_IDX (x) != ASM_OPERANDS_OUTPUT_IDX (y)
1881 || ASM_OPERANDS_INPUT_LENGTH (x) != ASM_OPERANDS_INPUT_LENGTH (y))
1882 return 0;
1884 if (ASM_OPERANDS_INPUT_LENGTH (x))
1886 for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
1887 if (! expr_equiv_p (ASM_OPERANDS_INPUT (x, i),
1888 ASM_OPERANDS_INPUT (y, i))
1889 || strcmp (ASM_OPERANDS_INPUT_CONSTRAINT (x, i),
1890 ASM_OPERANDS_INPUT_CONSTRAINT (y, i)))
1891 return 0;
1894 return 1;
1896 default:
1897 break;
1900 /* Compare the elements. If any pair of corresponding elements
1901 fail to match, return 0 for the whole thing. */
1903 fmt = GET_RTX_FORMAT (code);
1904 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1906 switch (fmt[i])
1908 case 'e':
1909 if (! expr_equiv_p (XEXP (x, i), XEXP (y, i)))
1910 return 0;
1911 break;
1913 case 'E':
1914 if (XVECLEN (x, i) != XVECLEN (y, i))
1915 return 0;
1916 for (j = 0; j < XVECLEN (x, i); j++)
1917 if (! expr_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j)))
1918 return 0;
1919 break;
1921 case 's':
1922 if (strcmp (XSTR (x, i), XSTR (y, i)))
1923 return 0;
1924 break;
1926 case 'i':
1927 if (XINT (x, i) != XINT (y, i))
1928 return 0;
1929 break;
1931 case 'w':
1932 if (XWINT (x, i) != XWINT (y, i))
1933 return 0;
1934 break;
1936 case '0':
1937 break;
1939 default:
1940 abort ();
1944 return 1;
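/* Illustrative sketch (not part of gcse.c proper): for commutative codes
   the function above accepts the operands in either order.  The fragment
   below shows just that two-order test on a hypothetical toy expression
   type; it is wrapped in #if 0 so it is never compiled here.  */
#if 0
struct toy_expr
{
  int code;			/* operator, or a leaf marker */
  struct toy_expr *op0, *op1;	/* both NULL for leaves */
  int leaf_value;
};

static int toy_equiv (const struct toy_expr *, const struct toy_expr *);

/* Accept (op a b) == (op a b) as well as (op a b) == (op b a).  */
static int
toy_commutative_equiv (const struct toy_expr *x, const struct toy_expr *y)
{
  return ((toy_equiv (x->op0, y->op0) && toy_equiv (x->op1, y->op1))
	  || (toy_equiv (x->op0, y->op1) && toy_equiv (x->op1, y->op0)));
}

static int
toy_equiv (const struct toy_expr *x, const struct toy_expr *y)
{
  if (x == y)
    return 1;
  if (x == 0 || y == 0 || x->code != y->code)
    return 0;
  if (x->op0 == 0 && y->op0 == 0)
    return x->leaf_value == y->leaf_value;
  return toy_commutative_equiv (x, y);
}
#endif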
1947 /* Insert expression X in INSN in the hash TABLE.
1948 If it is already present, record it as the last occurrence in INSN's
1949 basic block.
1951 MODE is the mode of the value X is being stored into.
1952 It is only used if X is a CONST_INT.
1954 ANTIC_P is nonzero if X is an anticipatable expression.
1955 AVAIL_P is nonzero if X is an available expression. */
1957 static void
1958 insert_expr_in_table (x, mode, insn, antic_p, avail_p, table)
1959 rtx x;
1960 enum machine_mode mode;
1961 rtx insn;
1962 int antic_p, avail_p;
1963 struct hash_table *table;
1965 int found, do_not_record_p;
1966 unsigned int hash;
1967 struct expr *cur_expr, *last_expr = NULL;
1968 struct occr *antic_occr, *avail_occr;
1969 struct occr *last_occr = NULL;
1971 hash = hash_expr (x, mode, &do_not_record_p, table->size);
1973 /* Do not insert expression in table if it contains volatile operands,
1974 or if hash_expr determines the expression is something we don't want
1975 to or can't handle. */
1976 if (do_not_record_p)
1977 return;
1979 cur_expr = table->table[hash];
1980 found = 0;
1982 while (cur_expr && 0 == (found = expr_equiv_p (cur_expr->expr, x)))
1984 /* If the expression isn't found, save a pointer to the end of
1985 the list. */
1986 last_expr = cur_expr;
1987 cur_expr = cur_expr->next_same_hash;
1990 if (! found)
1992 cur_expr = (struct expr *) gcse_alloc (sizeof (struct expr));
1993 bytes_used += sizeof (struct expr);
1994 if (table->table[hash] == NULL)
1995 /* This is the first pattern that hashed to this index. */
1996 table->table[hash] = cur_expr;
1997 else
1998 /* Add EXPR to end of this hash chain. */
1999 last_expr->next_same_hash = cur_expr;
2001 /* Set the fields of the expr element. */
2002 cur_expr->expr = x;
2003 cur_expr->bitmap_index = table->n_elems++;
2004 cur_expr->next_same_hash = NULL;
2005 cur_expr->antic_occr = NULL;
2006 cur_expr->avail_occr = NULL;
2009 /* Now record the occurrence(s). */
2010 if (antic_p)
2012 antic_occr = cur_expr->antic_occr;
2014 /* Search for another occurrence in the same basic block. */
2015 while (antic_occr && BLOCK_NUM (antic_occr->insn) != BLOCK_NUM (insn))
2017 /* If an occurrence isn't found, save a pointer to the end of
2018 the list. */
2019 last_occr = antic_occr;
2020 antic_occr = antic_occr->next;
2023 if (antic_occr)
2024 /* Found another instance of the expression in the same basic block.
2025 Prefer the currently recorded one. We want the first one in the
2026 block and the block is scanned from start to end. */
2027 ; /* nothing to do */
2028 else
2030 /* First occurrence of this expression in this basic block. */
2031 antic_occr = (struct occr *) gcse_alloc (sizeof (struct occr));
2032 bytes_used += sizeof (struct occr);
2033 /* First occurrence of this expression in any block? */
2034 if (cur_expr->antic_occr == NULL)
2035 cur_expr->antic_occr = antic_occr;
2036 else
2037 last_occr->next = antic_occr;
2039 antic_occr->insn = insn;
2040 antic_occr->next = NULL;
2044 if (avail_p)
2046 avail_occr = cur_expr->avail_occr;
2048 /* Search for another occurrence in the same basic block. */
2049 while (avail_occr && BLOCK_NUM (avail_occr->insn) != BLOCK_NUM (insn))
2051 /* If an occurrence isn't found, save a pointer to the end of
2052 the list. */
2053 last_occr = avail_occr;
2054 avail_occr = avail_occr->next;
2057 if (avail_occr)
2058 /* Found another instance of the expression in the same basic block.
2059 Prefer this occurrence to the currently recorded one. We want
2060 the last one in the block and the block is scanned from start
2061 to end. */
2062 avail_occr->insn = insn;
2063 else
2065 /* First occurrence of this expression in this basic block. */
2066 avail_occr = (struct occr *) gcse_alloc (sizeof (struct occr));
2067 bytes_used += sizeof (struct occr);
2069 /* First occurrence of this expression in any block? */
2070 if (cur_expr->avail_occr == NULL)
2071 cur_expr->avail_occr = avail_occr;
2072 else
2073 last_occr->next = avail_occr;
2075 avail_occr->insn = insn;
2076 avail_occr->next = NULL;
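/* Illustrative sketch (not part of gcse.c proper): the occurrence lists
   above keep the *first* insn per block for the anticipatable list and the
   *last* insn per block for the available list, because each block is
   scanned from start to end.  The toy functions below show only that
   policy; the names are hypothetical and the block is wrapped in #if 0 so
   it is never compiled here.  */
#if 0
#include <stdlib.h>

struct toy_occr
{
  int block;
  int insn_uid;
  struct toy_occr *next;
};

/* Record UID for BLOCK in *LISTP, keeping only the first UID seen.  */
static void
toy_record_antic (struct toy_occr **listp, int block, int insn_uid)
{
  struct toy_occr *o;

  for (o = *listp; o; o = o->next)
    if (o->block == block)
      return;				/* first one wins; do nothing */

  o = (struct toy_occr *) malloc (sizeof (struct toy_occr));
  o->block = block;
  o->insn_uid = insn_uid;
  o->next = *listp;
  *listp = o;
}

/* Record UID for BLOCK in *LISTP, keeping only the last UID seen.  */
static void
toy_record_avail (struct toy_occr **listp, int block, int insn_uid)
{
  struct toy_occr *o;

  for (o = *listp; o; o = o->next)
    if (o->block == block)
      {
	o->insn_uid = insn_uid;		/* last one wins; overwrite */
	return;
      }

  o = (struct toy_occr *) malloc (sizeof (struct toy_occr));
  o->block = block;
  o->insn_uid = insn_uid;
  o->next = *listp;
  *listp = o;
}
#endif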
2081 /* Insert pattern X in INSN in the hash table.
2082 X is a SET of a reg to either another reg or a constant.
2083 If it is already present, record it as the last occurrence in INSN's
2084 basic block. */
2086 static void
2087 insert_set_in_table (x, insn, table)
2088 rtx x;
2089 rtx insn;
2090 struct hash_table *table;
2092 int found;
2093 unsigned int hash;
2094 struct expr *cur_expr, *last_expr = NULL;
2095 struct occr *cur_occr, *last_occr = NULL;
2097 if (GET_CODE (x) != SET
2098 || GET_CODE (SET_DEST (x)) != REG)
2099 abort ();
2101 hash = hash_set (REGNO (SET_DEST (x)), table->size);
2103 cur_expr = table->table[hash];
2104 found = 0;
2106 while (cur_expr && 0 == (found = expr_equiv_p (cur_expr->expr, x)))
2108 /* If the expression isn't found, save a pointer to the end of
2109 the list. */
2110 last_expr = cur_expr;
2111 cur_expr = cur_expr->next_same_hash;
2114 if (! found)
2116 cur_expr = (struct expr *) gcse_alloc (sizeof (struct expr));
2117 bytes_used += sizeof (struct expr);
2118 if (table->table[hash] == NULL)
2119 /* This is the first pattern that hashed to this index. */
2120 table->table[hash] = cur_expr;
2121 else
2122 /* Add EXPR to end of this hash chain. */
2123 last_expr->next_same_hash = cur_expr;
2125 /* Set the fields of the expr element.
2126 We must copy X because it can be modified when copy propagation is
2127 performed on its operands. */
2128 cur_expr->expr = copy_rtx (x);
2129 cur_expr->bitmap_index = table->n_elems++;
2130 cur_expr->next_same_hash = NULL;
2131 cur_expr->antic_occr = NULL;
2132 cur_expr->avail_occr = NULL;
2135 /* Now record the occurrence. */
2136 cur_occr = cur_expr->avail_occr;
2138 /* Search for another occurrence in the same basic block. */
2139 while (cur_occr && BLOCK_NUM (cur_occr->insn) != BLOCK_NUM (insn))
2141 /* If an occurrence isn't found, save a pointer to the end of
2142 the list. */
2143 last_occr = cur_occr;
2144 cur_occr = cur_occr->next;
2147 if (cur_occr)
2148 /* Found another instance of the expression in the same basic block.
2149 Prefer this occurrence to the currently recorded one. We want the
2150 last one in the block and the block is scanned from start to end. */
2151 cur_occr->insn = insn;
2152 else
2154 /* First occurrence of this expression in this basic block. */
2155 cur_occr = (struct occr *) gcse_alloc (sizeof (struct occr));
2156 bytes_used += sizeof (struct occr);
2158 /* First occurrence of this expression in any block? */
2159 if (cur_expr->avail_occr == NULL)
2160 cur_expr->avail_occr = cur_occr;
2161 else
2162 last_occr->next = cur_occr;
2164 cur_occr->insn = insn;
2165 cur_occr->next = NULL;
2169 /* Determine whether the rtx X should be treated as a constant for
2170 the purposes of GCSE's constant propagation. */
2172 static bool
2173 gcse_constant_p (x)
2174 rtx x;
2176 /* Consider a COMPARE of two integers constant. */
2177 if (GET_CODE (x) == COMPARE
2178 && GET_CODE (XEXP (x, 0)) == CONST_INT
2179 && GET_CODE (XEXP (x, 1)) == CONST_INT)
2180 return true;
2182 if (GET_CODE (x) == CONSTANT_P_RTX)
2183 return false;
2185 return CONSTANT_P (x);
2188 /* Scan pattern PAT of INSN and add an entry to the hash TABLE (set or
2189 expression one). */
2191 static void
2192 hash_scan_set (pat, insn, table)
2193 rtx pat, insn;
2194 struct hash_table *table;
2196 rtx src = SET_SRC (pat);
2197 rtx dest = SET_DEST (pat);
2198 rtx note;
2200 if (GET_CODE (src) == CALL)
2201 hash_scan_call (src, insn, table);
2203 else if (GET_CODE (dest) == REG)
2205 unsigned int regno = REGNO (dest);
2206 rtx tmp;
2208 /* If this is a single set and we are doing constant propagation,
2209 	 see if a REG_NOTE shows this to be equivalent to a constant.  */
2210 if (table->set_p && (note = find_reg_equal_equiv_note (insn)) != 0
2211 && gcse_constant_p (XEXP (note, 0)))
2212 src = XEXP (note, 0), pat = gen_rtx_SET (VOIDmode, dest, src);
2214 /* Only record sets of pseudo-regs in the hash table. */
2215 if (! table->set_p
2216 && regno >= FIRST_PSEUDO_REGISTER
2217 /* Don't GCSE something if we can't do a reg/reg copy. */
2218 && can_copy_p (GET_MODE (dest))
2219 	     /* GCSE commonly inserts instructions after the insn.  We can't
2220 do that easily for EH_REGION notes so disable GCSE on these
2221 for now. */
2222 && !find_reg_note (insn, REG_EH_REGION, NULL_RTX)
2223 /* Is SET_SRC something we want to gcse? */
2224 && want_to_gcse_p (src)
2225 /* Don't CSE a nop. */
2226 && ! set_noop_p (pat)
2227 	     /* Don't GCSE if it has an attached REG_EQUIV note.
2228 		At this point only function parameters should have
2229 		REG_EQUIV notes, and if the argument slot is used somewhere
2230 		explicitly, it means the address of the parameter has been
2231 		taken, so we should not extend the lifetime of the pseudo.  */
2232 && ((note = find_reg_note (insn, REG_EQUIV, NULL_RTX)) == 0
2233 || GET_CODE (XEXP (note, 0)) != MEM))
2235 /* An expression is not anticipatable if its operands are
2236 modified before this insn or if this is not the only SET in
2237 this insn. */
2238 int antic_p = oprs_anticipatable_p (src, insn) && single_set (insn);
2239 /* An expression is not available if its operands are
2240 subsequently modified, including this insn. It's also not
2241 available if this is a branch, because we can't insert
2242 a set after the branch. */
2243 int avail_p = (oprs_available_p (src, insn)
2244 && ! JUMP_P (insn));
2246 insert_expr_in_table (src, GET_MODE (dest), insn, antic_p, avail_p, table);
2249 /* Record sets for constant/copy propagation. */
2250 else if (table->set_p
2251 && regno >= FIRST_PSEUDO_REGISTER
2252 && ((GET_CODE (src) == REG
2253 && REGNO (src) >= FIRST_PSEUDO_REGISTER
2254 && can_copy_p (GET_MODE (dest))
2255 && REGNO (src) != regno)
2256 || gcse_constant_p (src))
2257 /* A copy is not available if its src or dest is subsequently
2258 modified. Here we want to search from INSN+1 on, but
2259 oprs_available_p searches from INSN on. */
2260 && (insn == BLOCK_END (BLOCK_NUM (insn))
2261 || ((tmp = next_nonnote_insn (insn)) != NULL_RTX
2262 && oprs_available_p (pat, tmp))))
2263 insert_set_in_table (pat, insn, table);
2267 static void
2268 hash_scan_clobber (x, insn, table)
2269 rtx x ATTRIBUTE_UNUSED, insn ATTRIBUTE_UNUSED;
2270 struct hash_table *table ATTRIBUTE_UNUSED;
2272 /* Currently nothing to do. */
2275 static void
2276 hash_scan_call (x, insn, table)
2277 rtx x ATTRIBUTE_UNUSED, insn ATTRIBUTE_UNUSED;
2278 struct hash_table *table ATTRIBUTE_UNUSED;
2280 /* Currently nothing to do. */
2283 /* Process INSN and add hash table entries as appropriate.
2285 Only available expressions that set a single pseudo-reg are recorded.
2287 Single sets in a PARALLEL could be handled, but it's an extra complication
2288 that isn't dealt with right now. The trick is handling the CLOBBERs that
2289 are also in the PARALLEL. Later.
2291 If SET_P is nonzero, this is for the assignment hash table,
2292 otherwise it is for the expression hash table.
2293    If IN_LIBCALL_BLOCK is nonzero, we are in a libcall block and should
2294 not record any expressions. */
2296 static void
2297 hash_scan_insn (insn, table, in_libcall_block)
2298 rtx insn;
2299 struct hash_table *table;
2300 int in_libcall_block;
2302 rtx pat = PATTERN (insn);
2303 int i;
2305 if (in_libcall_block)
2306 return;
2308 /* Pick out the sets of INSN and for other forms of instructions record
2309 what's been modified. */
2311 if (GET_CODE (pat) == SET)
2312 hash_scan_set (pat, insn, table);
2313 else if (GET_CODE (pat) == PARALLEL)
2314 for (i = 0; i < XVECLEN (pat, 0); i++)
2316 rtx x = XVECEXP (pat, 0, i);
2318 if (GET_CODE (x) == SET)
2319 hash_scan_set (x, insn, table);
2320 else if (GET_CODE (x) == CLOBBER)
2321 hash_scan_clobber (x, insn, table);
2322 else if (GET_CODE (x) == CALL)
2323 hash_scan_call (x, insn, table);
2326 else if (GET_CODE (pat) == CLOBBER)
2327 hash_scan_clobber (pat, insn, table);
2328 else if (GET_CODE (pat) == CALL)
2329 hash_scan_call (pat, insn, table);
2332 static void
2333 dump_hash_table (file, name, table)
2334 FILE *file;
2335 const char *name;
2336 struct hash_table *table;
2338 int i;
2339 /* Flattened out table, so it's printed in proper order. */
2340 struct expr **flat_table;
2341 unsigned int *hash_val;
2342 struct expr *expr;
2344 flat_table
2345 = (struct expr **) xcalloc (table->n_elems, sizeof (struct expr *));
2346 hash_val = (unsigned int *) xmalloc (table->n_elems * sizeof (unsigned int));
2348 for (i = 0; i < (int) table->size; i++)
2349 for (expr = table->table[i]; expr != NULL; expr = expr->next_same_hash)
2351 flat_table[expr->bitmap_index] = expr;
2352 hash_val[expr->bitmap_index] = i;
2355 fprintf (file, "%s hash table (%d buckets, %d entries)\n",
2356 name, table->size, table->n_elems);
2358 for (i = 0; i < (int) table->n_elems; i++)
2359 if (flat_table[i] != 0)
2361 expr = flat_table[i];
2362 fprintf (file, "Index %d (hash value %d)\n ",
2363 expr->bitmap_index, hash_val[i]);
2364 print_rtl (file, expr->expr);
2365 fprintf (file, "\n");
2368 fprintf (file, "\n");
2370 free (flat_table);
2371 free (hash_val);
2374 /* Record register first/last/block set information for REGNO in INSN.
2376 first_set records the first place in the block where the register
2377 is set and is used to compute "anticipatability".
2379 last_set records the last place in the block where the register
2380 is set and is used to compute "availability".
2382 last_bb records the block for which first_set and last_set are
2383 valid, as a quick test to invalidate them.
2385 reg_set_in_block records whether the register is set in the block
2386 and is used to compute "transparency". */
2388 static void
2389 record_last_reg_set_info (insn, regno)
2390 rtx insn;
2391 int regno;
2393 struct reg_avail_info *info = &reg_avail_info[regno];
2394 int cuid = INSN_CUID (insn);
2396 info->last_set = cuid;
2397 if (info->last_bb != current_bb)
2399 info->last_bb = current_bb;
2400 info->first_set = cuid;
2401 SET_BIT (reg_set_in_block[current_bb->index], regno);
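/* Illustrative sketch (not part of gcse.c proper): the first_set/last_set
   cuids recorded above answer the two questions the hash-table scan asks
   -- "unchanged before this insn" (anticipatable) and "unchanged from this
   insn on" (available).  The toy queries below are hypothetical
   restatements of those comparisons; the block is wrapped in #if 0 so it
   is never compiled here.  */
#if 0
struct toy_reg_info
{
  int first_set;	/* cuid of the first set in the current block */
  int last_set;		/* cuid of the last set in the current block */
  int set_in_bb;	/* nonzero if the reg is set in this block at all */
};

/* Anticipatable: no set of the register before CUID in the block.  */
static int
toy_unchanged_before (const struct toy_reg_info *info, int cuid)
{
  return !info->set_in_bb || info->first_set >= cuid;
}

/* Available: no set of the register at or after CUID in the block.  */
static int
toy_unchanged_after (const struct toy_reg_info *info, int cuid)
{
  return !info->set_in_bb || info->last_set < cuid;
}
#endif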
2406 /* Record all of the canonicalized MEMs of record_last_mem_set_info's insn.
2407 Note we store a pair of elements in the list, so they have to be
2408 taken off pairwise. */
2410 static void
2411 canon_list_insert (dest, unused1, v_insn)
2412 rtx dest ATTRIBUTE_UNUSED;
2413 rtx unused1 ATTRIBUTE_UNUSED;
2414 void * v_insn;
2416 rtx dest_addr, insn;
2417 int bb;
2419 while (GET_CODE (dest) == SUBREG
2420 || GET_CODE (dest) == ZERO_EXTRACT
2421 || GET_CODE (dest) == SIGN_EXTRACT
2422 || GET_CODE (dest) == STRICT_LOW_PART)
2423 dest = XEXP (dest, 0);
2425 /* If DEST is not a MEM, then it will not conflict with a load. Note
2426 that function calls are assumed to clobber memory, but are handled
2427 elsewhere. */
2429 if (GET_CODE (dest) != MEM)
2430 return;
2432 dest_addr = get_addr (XEXP (dest, 0));
2433 dest_addr = canon_rtx (dest_addr);
2434 insn = (rtx) v_insn;
2435 bb = BLOCK_NUM (insn);
2437 canon_modify_mem_list[bb] =
2438 alloc_EXPR_LIST (VOIDmode, dest_addr, canon_modify_mem_list[bb]);
2439 canon_modify_mem_list[bb] =
2440 alloc_EXPR_LIST (VOIDmode, dest, canon_modify_mem_list[bb]);
2441 bitmap_set_bit (canon_modify_mem_list_set, bb);
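/* Illustrative sketch (not part of gcse.c proper): the list built above
   holds two nodes per store -- the canonical address is pushed first and
   the MEM itself on top of it -- so consumers such as compute_transp walk
   it two nodes at a time.  The toy list below shows that pairwise
   discipline; all names are hypothetical and the block is wrapped in
   #if 0 so it is never compiled here.  */
#if 0
#include <stdlib.h>

struct toy_mem_node
{
  const void *payload;
  struct toy_mem_node *next;
};

static struct toy_mem_node *
toy_push (struct toy_mem_node *list, const void *payload)
{
  struct toy_mem_node *n = (struct toy_mem_node *) malloc (sizeof (*n));
  n->payload = payload;
  n->next = list;
  return n;
}

/* Record one store: address first, MEM second, mirroring canon_list_insert.  */
static void
toy_record_store (struct toy_mem_node **listp, const void *mem, const void *addr)
{
  *listp = toy_push (*listp, addr);
  *listp = toy_push (*listp, mem);
}

/* Walk the list pairwise, handing each (MEM, address) pair to VISIT.  */
static void
toy_walk_stores (const struct toy_mem_node *list,
		 void (*visit) (const void *mem, const void *addr))
{
  while (list)
    {
      const void *mem = list->payload;		/* first node of the pair */
      const void *addr = list->next->payload;	/* second node of the pair */

      visit (mem, addr);
      list = list->next->next;			/* advance past the pair */
    }
}
#endif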
2444 /* Record memory modification information for INSN. We do not actually care
2445 about the memory location(s) that are set, or even how they are set (consider
2446 a CALL_INSN). We merely need to record which insns modify memory. */
2448 static void
2449 record_last_mem_set_info (insn)
2450 rtx insn;
2452 int bb = BLOCK_NUM (insn);
2454 /* load_killed_in_block_p will handle the case of calls clobbering
2455 everything. */
2456 modify_mem_list[bb] = alloc_INSN_LIST (insn, modify_mem_list[bb]);
2457 bitmap_set_bit (modify_mem_list_set, bb);
2459 if (GET_CODE (insn) == CALL_INSN)
2461       /* Note that traversals of this loop (other than for freeing)
2462 will break after encountering a CALL_INSN. So, there's no
2463 need to insert a pair of items, as canon_list_insert does. */
2464 canon_modify_mem_list[bb] =
2465 alloc_INSN_LIST (insn, canon_modify_mem_list[bb]);
2466 bitmap_set_bit (canon_modify_mem_list_set, bb);
2468 else
2469 note_stores (PATTERN (insn), canon_list_insert, (void*) insn);
2472 /* Called from compute_hash_table via note_stores to handle one
2473 SET or CLOBBER in an insn. DATA is really the instruction in which
2474 the SET is taking place. */
2476 static void
2477 record_last_set_info (dest, setter, data)
2478 rtx dest, setter ATTRIBUTE_UNUSED;
2479 void *data;
2481 rtx last_set_insn = (rtx) data;
2483 if (GET_CODE (dest) == SUBREG)
2484 dest = SUBREG_REG (dest);
2486 if (GET_CODE (dest) == REG)
2487 record_last_reg_set_info (last_set_insn, REGNO (dest));
2488 else if (GET_CODE (dest) == MEM
2489 /* Ignore pushes, they clobber nothing. */
2490 && ! push_operand (dest, GET_MODE (dest)))
2491 record_last_mem_set_info (last_set_insn);
2494 /* Top level function to create an expression or assignment hash table.
2496 Expression entries are placed in the hash table if
2497 - they are of the form (set (pseudo-reg) src),
2498 - src is something we want to perform GCSE on,
2499 - none of the operands are subsequently modified in the block
2501 Assignment entries are placed in the hash table if
2502 - they are of the form (set (pseudo-reg) src),
2503 - src is something we want to perform const/copy propagation on,
2504 - none of the operands or target are subsequently modified in the block
2506 Currently src must be a pseudo-reg or a const_int.
2508 TABLE is the table computed. */
2510 static void
2511 compute_hash_table_work (table)
2512 struct hash_table *table;
2514 unsigned int i;
2516 /* While we compute the hash table we also compute a bit array of which
2517 registers are set in which blocks.
2518 ??? This isn't needed during const/copy propagation, but it's cheap to
2519 compute. Later. */
2520 sbitmap_vector_zero (reg_set_in_block, last_basic_block);
2522   /* Re-cache any INSN_LIST nodes we have allocated.  */
2523 clear_modify_mem_tables ();
2524 /* Some working arrays used to track first and last set in each block. */
2525 reg_avail_info = (struct reg_avail_info*)
2526 gmalloc (max_gcse_regno * sizeof (struct reg_avail_info));
2528 for (i = 0; i < max_gcse_regno; ++i)
2529 reg_avail_info[i].last_bb = NULL;
2531 FOR_EACH_BB (current_bb)
2533 rtx insn;
2534 unsigned int regno;
2535 int in_libcall_block;
2537 /* First pass over the instructions records information used to
2538 determine when registers and memory are first and last set.
2539 ??? hard-reg reg_set_in_block computation
2540 could be moved to compute_sets since they currently don't change. */
2542 for (insn = current_bb->head;
2543 insn && insn != NEXT_INSN (current_bb->end);
2544 insn = NEXT_INSN (insn))
2546 if (! INSN_P (insn))
2547 continue;
2549 if (GET_CODE (insn) == CALL_INSN)
2551 bool clobbers_all = false;
2552 #ifdef NON_SAVING_SETJMP
2553 if (NON_SAVING_SETJMP
2554 && find_reg_note (insn, REG_SETJMP, NULL_RTX))
2555 clobbers_all = true;
2556 #endif
2558 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2559 if (clobbers_all
2560 || TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
2561 record_last_reg_set_info (insn, regno);
2563 mark_call (insn);
2566 note_stores (PATTERN (insn), record_last_set_info, insn);
2569 /* Insert implicit sets in the hash table. */
2570 if (table->set_p
2571 && implicit_sets[current_bb->index] != NULL_RTX)
2572 hash_scan_set (implicit_sets[current_bb->index],
2573 current_bb->head, table);
2575 /* The next pass builds the hash table. */
2577 for (insn = current_bb->head, in_libcall_block = 0;
2578 insn && insn != NEXT_INSN (current_bb->end);
2579 insn = NEXT_INSN (insn))
2580 if (INSN_P (insn))
2582 if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
2583 in_libcall_block = 1;
2584 else if (table->set_p && find_reg_note (insn, REG_RETVAL, NULL_RTX))
2585 in_libcall_block = 0;
2586 hash_scan_insn (insn, table, in_libcall_block);
2587 if (!table->set_p && find_reg_note (insn, REG_RETVAL, NULL_RTX))
2588 in_libcall_block = 0;
2592 free (reg_avail_info);
2593 reg_avail_info = NULL;
2596 /* Allocate space for the set/expr hash TABLE.
2597 N_INSNS is the number of instructions in the function.
2598 It is used to determine the number of buckets to use.
2599    SET_P determines whether the set or the expression table will
2600 be created. */
2602 static void
2603 alloc_hash_table (n_insns, table, set_p)
2604 int n_insns;
2605 struct hash_table *table;
2606 int set_p;
2608 int n;
2610 table->size = n_insns / 4;
2611 if (table->size < 11)
2612 table->size = 11;
2614 /* Attempt to maintain efficient use of hash table.
2615 Making it an odd number is simplest for now.
2616 ??? Later take some measurements. */
2617 table->size |= 1;
2618 n = table->size * sizeof (struct expr *);
2619 table->table = (struct expr **) gmalloc (n);
2620 table->set_p = set_p;
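/* Illustrative sketch (not part of gcse.c proper): the sizing rule above
   allocates roughly one bucket per four insns, never fewer than 11, and
   forces the count odd.  The standalone function below restates that
   heuristic so it can be exercised by hand; the names are hypothetical and
   the block is wrapped in #if 0 so it is never compiled here.  */
#if 0
#include <stdio.h>

static int
toy_bucket_count (int n_insns)
{
  int size = n_insns / 4;

  if (size < 11)
    size = 11;
  size |= 1;			/* force an odd bucket count */
  return size;
}

int
main (void)
{
  /* 30 insns -> 11 buckets (the minimum); 200 insns -> 50|1 = 51 buckets.  */
  printf ("%d %d\n", toy_bucket_count (30), toy_bucket_count (200));
  return 0;
}
#endif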
2623 /* Free things allocated by alloc_hash_table. */
2625 static void
2626 free_hash_table (table)
2627 struct hash_table *table;
2629 free (table->table);
2632 /* Compute the hash TABLE, used either for copy/const propagation or
2633    as the expression hash table.  */
2635 static void
2636 compute_hash_table (table)
2637 struct hash_table *table;
2639 /* Initialize count of number of entries in hash table. */
2640 table->n_elems = 0;
2641 memset ((char *) table->table, 0,
2642 table->size * sizeof (struct expr *));
2644 compute_hash_table_work (table);
2647 /* Expression tracking support. */
2649 /* Lookup pattern PAT in the expression TABLE.
2650 The result is a pointer to the table entry, or NULL if not found. */
2652 static struct expr *
2653 lookup_expr (pat, table)
2654 rtx pat;
2655 struct hash_table *table;
2657 int do_not_record_p;
2658 unsigned int hash = hash_expr (pat, GET_MODE (pat), &do_not_record_p,
2659 table->size);
2660 struct expr *expr;
2662 if (do_not_record_p)
2663 return NULL;
2665 expr = table->table[hash];
2667 while (expr && ! expr_equiv_p (expr->expr, pat))
2668 expr = expr->next_same_hash;
2670 return expr;
2673 /* Lookup REGNO in the set TABLE. The result is a pointer to the
2674 table entry, or NULL if not found. */
2676 static struct expr *
2677 lookup_set (regno, table)
2678 unsigned int regno;
2679 struct hash_table *table;
2681 unsigned int hash = hash_set (regno, table->size);
2682 struct expr *expr;
2684 expr = table->table[hash];
2686 while (expr && REGNO (SET_DEST (expr->expr)) != regno)
2687 expr = expr->next_same_hash;
2689 return expr;
2692 /* Return the next entry for REGNO in list EXPR. */
2694 static struct expr *
2695 next_set (regno, expr)
2696 unsigned int regno;
2697 struct expr *expr;
2699   do
2700     expr = expr->next_same_hash;
2701   while (expr && REGNO (SET_DEST (expr->expr)) != regno);
2703 return expr;
2706 /* Like free_INSN_LIST_list or free_EXPR_LIST_list, except that the node
2707 types may be mixed. */
2709 static void
2710 free_insn_expr_list_list (listp)
2711 rtx *listp;
2713 rtx list, next;
2715 for (list = *listp; list ; list = next)
2717 next = XEXP (list, 1);
2718 if (GET_CODE (list) == EXPR_LIST)
2719 free_EXPR_LIST_node (list);
2720 else
2721 free_INSN_LIST_node (list);
2724 *listp = NULL;
2727 /* Clear canon_modify_mem_list and modify_mem_list tables. */
2728 static void
2729 clear_modify_mem_tables ()
2731 int i;
2733 EXECUTE_IF_SET_IN_BITMAP
2734 (modify_mem_list_set, 0, i, free_INSN_LIST_list (modify_mem_list + i));
2735 bitmap_clear (modify_mem_list_set);
2737 EXECUTE_IF_SET_IN_BITMAP
2738 (canon_modify_mem_list_set, 0, i,
2739 free_insn_expr_list_list (canon_modify_mem_list + i));
2740 bitmap_clear (canon_modify_mem_list_set);
2743 /* Release memory used by modify_mem_list_set and canon_modify_mem_list_set. */
2745 static void
2746 free_modify_mem_tables ()
2748 clear_modify_mem_tables ();
2749 free (modify_mem_list);
2750 free (canon_modify_mem_list);
2751 modify_mem_list = 0;
2752 canon_modify_mem_list = 0;
2755 /* Reset tables used to keep track of what's still available [since the
2756 start of the block]. */
2758 static void
2759 reset_opr_set_tables ()
2761 /* Maintain a bitmap of which regs have been set since beginning of
2762 the block. */
2763 CLEAR_REG_SET (reg_set_bitmap);
2765 /* Also keep a record of the last instruction to modify memory.
2766      For now this is very trivial; we only record whether any memory
2767 location has been modified. */
2768 clear_modify_mem_tables ();
2771 /* Return nonzero if the operands of X are not set before INSN in
2772 INSN's basic block. */
2774 static int
2775 oprs_not_set_p (x, insn)
2776 rtx x, insn;
2778 int i, j;
2779 enum rtx_code code;
2780 const char *fmt;
2782 if (x == 0)
2783 return 1;
2785 code = GET_CODE (x);
2786 switch (code)
2788 case PC:
2789 case CC0:
2790 case CONST:
2791 case CONST_INT:
2792 case CONST_DOUBLE:
2793 case CONST_VECTOR:
2794 case SYMBOL_REF:
2795 case LABEL_REF:
2796 case ADDR_VEC:
2797 case ADDR_DIFF_VEC:
2798 return 1;
2800 case MEM:
2801 if (load_killed_in_block_p (BLOCK_FOR_INSN (insn),
2802 INSN_CUID (insn), x, 0))
2803 return 0;
2804 else
2805 return oprs_not_set_p (XEXP (x, 0), insn);
2807 case REG:
2808 return ! REGNO_REG_SET_P (reg_set_bitmap, REGNO (x));
2810 default:
2811 break;
2814 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
2816 if (fmt[i] == 'e')
2818 /* If we are about to do the last recursive call
2819 needed at this level, change it into iteration.
2820 This function is called enough to be worth it. */
2821 if (i == 0)
2822 return oprs_not_set_p (XEXP (x, i), insn);
2824 if (! oprs_not_set_p (XEXP (x, i), insn))
2825 return 0;
2827 else if (fmt[i] == 'E')
2828 for (j = 0; j < XVECLEN (x, i); j++)
2829 if (! oprs_not_set_p (XVECEXP (x, i, j), insn))
2830 return 0;
2833 return 1;
2836 /* Mark things set by a CALL. */
2838 static void
2839 mark_call (insn)
2840 rtx insn;
2842 if (! CONST_OR_PURE_CALL_P (insn))
2843 record_last_mem_set_info (insn);
2846 /* Mark things set by a SET. */
2848 static void
2849 mark_set (pat, insn)
2850 rtx pat, insn;
2852 rtx dest = SET_DEST (pat);
2854 while (GET_CODE (dest) == SUBREG
2855 || GET_CODE (dest) == ZERO_EXTRACT
2856 || GET_CODE (dest) == SIGN_EXTRACT
2857 || GET_CODE (dest) == STRICT_LOW_PART)
2858 dest = XEXP (dest, 0);
2860 if (GET_CODE (dest) == REG)
2861 SET_REGNO_REG_SET (reg_set_bitmap, REGNO (dest));
2862 else if (GET_CODE (dest) == MEM)
2863 record_last_mem_set_info (insn);
2865 if (GET_CODE (SET_SRC (pat)) == CALL)
2866 mark_call (insn);
2869 /* Record things set by a CLOBBER. */
2871 static void
2872 mark_clobber (pat, insn)
2873 rtx pat, insn;
2875 rtx clob = XEXP (pat, 0);
2877 while (GET_CODE (clob) == SUBREG || GET_CODE (clob) == STRICT_LOW_PART)
2878 clob = XEXP (clob, 0);
2880 if (GET_CODE (clob) == REG)
2881 SET_REGNO_REG_SET (reg_set_bitmap, REGNO (clob));
2882 else
2883 record_last_mem_set_info (insn);
2886 /* Record things set by INSN.
2887 This data is used by oprs_not_set_p. */
2889 static void
2890 mark_oprs_set (insn)
2891 rtx insn;
2893 rtx pat = PATTERN (insn);
2894 int i;
2896 if (GET_CODE (pat) == SET)
2897 mark_set (pat, insn);
2898 else if (GET_CODE (pat) == PARALLEL)
2899 for (i = 0; i < XVECLEN (pat, 0); i++)
2901 rtx x = XVECEXP (pat, 0, i);
2903 if (GET_CODE (x) == SET)
2904 mark_set (x, insn);
2905 else if (GET_CODE (x) == CLOBBER)
2906 mark_clobber (x, insn);
2907 else if (GET_CODE (x) == CALL)
2908 mark_call (insn);
2911 else if (GET_CODE (pat) == CLOBBER)
2912 mark_clobber (pat, insn);
2913 else if (GET_CODE (pat) == CALL)
2914 mark_call (insn);
2918 /* Classic GCSE reaching definition support. */
2920 /* Allocate reaching def variables. */
2922 static void
2923 alloc_rd_mem (n_blocks, n_insns)
2924 int n_blocks, n_insns;
2926 rd_kill = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_insns);
2927 sbitmap_vector_zero (rd_kill, n_blocks);
2929 rd_gen = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_insns);
2930 sbitmap_vector_zero (rd_gen, n_blocks);
2932 reaching_defs = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_insns);
2933 sbitmap_vector_zero (reaching_defs, n_blocks);
2935 rd_out = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_insns);
2936 sbitmap_vector_zero (rd_out, n_blocks);
2939 /* Free reaching def variables. */
2941 static void
2942 free_rd_mem ()
2944 sbitmap_vector_free (rd_kill);
2945 sbitmap_vector_free (rd_gen);
2946 sbitmap_vector_free (reaching_defs);
2947 sbitmap_vector_free (rd_out);
2950 /* Add INSN to the kills of BB. REGNO, set in BB, is killed by INSN. */
2952 static void
2953 handle_rd_kill_set (insn, regno, bb)
2954 rtx insn;
2955 int regno;
2956 basic_block bb;
2958 struct reg_set *this_reg;
2960 for (this_reg = reg_set_table[regno]; this_reg; this_reg = this_reg ->next)
2961 if (BLOCK_NUM (this_reg->insn) != BLOCK_NUM (insn))
2962 SET_BIT (rd_kill[bb->index], INSN_CUID (this_reg->insn));
2965 /* Compute the set of kills for reaching definitions.  */
2967 static void
2968 compute_kill_rd ()
2970 int cuid;
2971 unsigned int regno;
2972 int i;
2973 basic_block bb;
2975 /* For each block
2976        For each set bit in `gen' of the block (i.e. each insn which
2977 generates a definition in the block)
2978 Call the reg set by the insn corresponding to that bit regx
2979 Look at the linked list starting at reg_set_table[regx]
2980 For each setting of regx in the linked list, which is not in
2981 this block
2982 Set the bit in `kill' corresponding to that insn. */
2983 FOR_EACH_BB (bb)
2984 for (cuid = 0; cuid < max_cuid; cuid++)
2985 if (TEST_BIT (rd_gen[bb->index], cuid))
2987 rtx insn = CUID_INSN (cuid);
2988 rtx pat = PATTERN (insn);
2990 if (GET_CODE (insn) == CALL_INSN)
2992 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2993 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
2994 handle_rd_kill_set (insn, regno, bb);
2997 if (GET_CODE (pat) == PARALLEL)
2999 for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
3001 enum rtx_code code = GET_CODE (XVECEXP (pat, 0, i));
3003 if ((code == SET || code == CLOBBER)
3004 && GET_CODE (XEXP (XVECEXP (pat, 0, i), 0)) == REG)
3005 handle_rd_kill_set (insn,
3006 REGNO (XEXP (XVECEXP (pat, 0, i), 0)),
3007 bb);
3010 else if (GET_CODE (pat) == SET && GET_CODE (SET_DEST (pat)) == REG)
3011 /* Each setting of this register outside of this block
3012 must be marked in the set of kills in this block. */
3013 handle_rd_kill_set (insn, REGNO (SET_DEST (pat)), bb);
3017 /* Compute the reaching definitions as in
3018 Compilers Principles, Techniques, and Tools. Aho, Sethi, Ullman,
3019 Chapter 10. It is the same algorithm as used for computing available
3020 expressions but applied to the gens and kills of reaching definitions. */
3022 static void
3023 compute_rd ()
3025 int changed, passes;
3026 basic_block bb;
3028 FOR_EACH_BB (bb)
3029 sbitmap_copy (rd_out[bb->index] /*dst*/, rd_gen[bb->index] /*src*/);
3031 passes = 0;
3032 changed = 1;
3033 while (changed)
3035 changed = 0;
3036 FOR_EACH_BB (bb)
3038 sbitmap_union_of_preds (reaching_defs[bb->index], rd_out, bb->index);
3039 changed |= sbitmap_union_of_diff_cg (rd_out[bb->index], rd_gen[bb->index],
3040 reaching_defs[bb->index], rd_kill[bb->index]);
3042 passes++;
3045 if (gcse_file)
3046 fprintf (gcse_file, "reaching def computation: %d passes\n", passes);
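/* Illustrative sketch (not part of gcse.c proper): compute_rd above
   iterates the classic forward dataflow equations
     IN[b]  = union of OUT[p] over predecessors p of b
     OUT[b] = GEN[b] | (IN[b] & ~KILL[b])
   until nothing changes.  The toy program below runs the same iteration on
   a straight-line CFG 0 -> 1 -> 2 with at most 32 definitions kept in
   plain unsigned bitsets; all of the data is made up and the block is
   wrapped in #if 0 so it is never compiled here.  */
#if 0
#include <stdio.h>

#define N_BLOCKS 3

int
main (void)
{
  unsigned gen[N_BLOCKS]  = { 0x1, 0x2, 0x4 };	/* defs created per block */
  unsigned kill[N_BLOCKS] = { 0x0, 0x1, 0x2 };	/* other defs of the same regs */
  unsigned in[N_BLOCKS]   = { 0, 0, 0 };
  unsigned out[N_BLOCKS];
  int changed = 1, passes = 0, b;

  for (b = 0; b < N_BLOCKS; b++)
    out[b] = gen[b];

  while (changed)
    {
      changed = 0;
      for (b = 0; b < N_BLOCKS; b++)
	{
	  /* IN[b] is the union over predecessors; block 0 has none and
	     every other block has exactly one in this toy CFG.  */
	  unsigned new_in = (b == 0) ? 0 : out[b - 1];
	  unsigned new_out = gen[b] | (new_in & ~kill[b]);

	  if (new_in != in[b] || new_out != out[b])
	    changed = 1;
	  in[b] = new_in;
	  out[b] = new_out;
	}
      passes++;
    }

  for (b = 0; b < N_BLOCKS; b++)
    printf ("block %d: in=%#x out=%#x\n", b, in[b], out[b]);
  printf ("%d passes\n", passes);
  return 0;
}
#endif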
3049 /* Classic GCSE available expression support. */
3051 /* Allocate memory for available expression computation. */
3053 static void
3054 alloc_avail_expr_mem (n_blocks, n_exprs)
3055 int n_blocks, n_exprs;
3057 ae_kill = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_exprs);
3058 sbitmap_vector_zero (ae_kill, n_blocks);
3060 ae_gen = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_exprs);
3061 sbitmap_vector_zero (ae_gen, n_blocks);
3063 ae_in = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_exprs);
3064 sbitmap_vector_zero (ae_in, n_blocks);
3066 ae_out = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_exprs);
3067 sbitmap_vector_zero (ae_out, n_blocks);
3070 static void
3071 free_avail_expr_mem ()
3073 sbitmap_vector_free (ae_kill);
3074 sbitmap_vector_free (ae_gen);
3075 sbitmap_vector_free (ae_in);
3076 sbitmap_vector_free (ae_out);
3079 /* Compute the set of available expressions generated in each basic block. */
3081 static void
3082 compute_ae_gen (expr_hash_table)
3083 struct hash_table *expr_hash_table;
3085 unsigned int i;
3086 struct expr *expr;
3087 struct occr *occr;
3089 /* For each recorded occurrence of each expression, set ae_gen[bb][expr].
3090 This is all we have to do because an expression is not recorded if it
3091 is not available, and the only expressions we want to work with are the
3092 ones that are recorded. */
3093 for (i = 0; i < expr_hash_table->size; i++)
3094 for (expr = expr_hash_table->table[i]; expr != 0; expr = expr->next_same_hash)
3095 for (occr = expr->avail_occr; occr != 0; occr = occr->next)
3096 SET_BIT (ae_gen[BLOCK_NUM (occr->insn)], expr->bitmap_index);
3099 /* Return nonzero if expression X is killed in BB. */
3101 static int
3102 expr_killed_p (x, bb)
3103 rtx x;
3104 basic_block bb;
3106 int i, j;
3107 enum rtx_code code;
3108 const char *fmt;
3110 if (x == 0)
3111 return 1;
3113 code = GET_CODE (x);
3114 switch (code)
3116 case REG:
3117 return TEST_BIT (reg_set_in_block[bb->index], REGNO (x));
3119 case MEM:
3120 if (load_killed_in_block_p (bb, get_max_uid () + 1, x, 0))
3121 return 1;
3122 else
3123 return expr_killed_p (XEXP (x, 0), bb);
3125 case PC:
3126 case CC0: /*FIXME*/
3127 case CONST:
3128 case CONST_INT:
3129 case CONST_DOUBLE:
3130 case CONST_VECTOR:
3131 case SYMBOL_REF:
3132 case LABEL_REF:
3133 case ADDR_VEC:
3134 case ADDR_DIFF_VEC:
3135 return 0;
3137 default:
3138 break;
3141 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
3143 if (fmt[i] == 'e')
3145 /* If we are about to do the last recursive call
3146 needed at this level, change it into iteration.
3147 This function is called enough to be worth it. */
3148 if (i == 0)
3149 return expr_killed_p (XEXP (x, i), bb);
3150 else if (expr_killed_p (XEXP (x, i), bb))
3151 return 1;
3153 else if (fmt[i] == 'E')
3154 for (j = 0; j < XVECLEN (x, i); j++)
3155 if (expr_killed_p (XVECEXP (x, i, j), bb))
3156 return 1;
3159 return 0;
3162 /* Compute the set of available expressions killed in each basic block. */
3164 static void
3165 compute_ae_kill (ae_gen, ae_kill, expr_hash_table)
3166 sbitmap *ae_gen, *ae_kill;
3167 struct hash_table *expr_hash_table;
3169 basic_block bb;
3170 unsigned int i;
3171 struct expr *expr;
3173 FOR_EACH_BB (bb)
3174 for (i = 0; i < expr_hash_table->size; i++)
3175 for (expr = expr_hash_table->table[i]; expr; expr = expr->next_same_hash)
3177 /* Skip EXPR if generated in this block. */
3178 if (TEST_BIT (ae_gen[bb->index], expr->bitmap_index))
3179 continue;
3181 if (expr_killed_p (expr->expr, bb))
3182 SET_BIT (ae_kill[bb->index], expr->bitmap_index);
3186 /* Actually perform the Classic GCSE optimizations. */
3188 /* Return nonzero if occurrence OCCR of expression EXPR reaches block BB.
3190 CHECK_SELF_LOOP is nonzero if we should consider a block reaching itself
3191 as a positive reach. We want to do this when there are two computations
3192 of the expression in the block.
3194 VISITED is a pointer to a working buffer for tracking which BB's have
3195 been visited. It is NULL for the top-level call.
3197 We treat reaching expressions that go through blocks containing the same
3198 reaching expression as "not reaching". E.g. if EXPR is generated in blocks
3199 2 and 3, INSN is in block 4, and 2->3->4, we treat the expression in block
3200 2 as not reaching. The intent is to improve the probability of finding
3201 only one reaching expression and to reduce register lifetimes by picking
3202 the closest such expression. */
3204 static int
3205 expr_reaches_here_p_work (occr, expr, bb, check_self_loop, visited)
3206 struct occr *occr;
3207 struct expr *expr;
3208 basic_block bb;
3209 int check_self_loop;
3210 char *visited;
3212 edge pred;
3214 for (pred = bb->pred; pred != NULL; pred = pred->pred_next)
3216 basic_block pred_bb = pred->src;
3218 if (visited[pred_bb->index])
3219 /* This predecessor has already been visited. Nothing to do. */
3221 else if (pred_bb == bb)
3223 /* BB loops on itself. */
3224 if (check_self_loop
3225 && TEST_BIT (ae_gen[pred_bb->index], expr->bitmap_index)
3226 && BLOCK_NUM (occr->insn) == pred_bb->index)
3227 return 1;
3229 visited[pred_bb->index] = 1;
3232 /* Ignore this predecessor if it kills the expression. */
3233 else if (TEST_BIT (ae_kill[pred_bb->index], expr->bitmap_index))
3234 visited[pred_bb->index] = 1;
3236 /* Does this predecessor generate this expression? */
3237 else if (TEST_BIT (ae_gen[pred_bb->index], expr->bitmap_index))
3239 /* Is this the occurrence we're looking for?
3240 Note that there's only one generating occurrence per block
3241 so we just need to check the block number. */
3242 if (BLOCK_NUM (occr->insn) == pred_bb->index)
3243 return 1;
3245 visited[pred_bb->index] = 1;
3248 /* Neither gen nor kill. */
3249 else
3251 visited[pred_bb->index] = 1;
3252 if (expr_reaches_here_p_work (occr, expr, pred_bb, check_self_loop,
3253 visited))
3255 return 1;
3259 /* All paths have been checked. */
3260 return 0;
3263 /* This wrapper for expr_reaches_here_p_work() is to ensure that any
3264 memory allocated for that function is returned. */
3266 static int
3267 expr_reaches_here_p (occr, expr, bb, check_self_loop)
3268 struct occr *occr;
3269 struct expr *expr;
3270 basic_block bb;
3271 int check_self_loop;
3273 int rval;
3274 char *visited = (char *) xcalloc (last_basic_block, 1);
3276 rval = expr_reaches_here_p_work (occr, expr, bb, check_self_loop, visited);
3278 free (visited);
3279 return rval;
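/* Illustrative sketch (not part of gcse.c proper): the reachability test
   above walks predecessor edges backwards, marking blocks visited,
   stopping at blocks that kill the expression or that regenerate it, and
   succeeding only when it reaches the particular generating block it was
   asked about.  The toy walk below has the same shape on a fixed-size
   adjacency table; everything here is hypothetical and the block is
   wrapped in #if 0 so it is never compiled here.  */
#if 0
#define TOY_MAX_BB 8

struct toy_cfg
{
  int n_preds[TOY_MAX_BB];
  int preds[TOY_MAX_BB][TOY_MAX_BB];
  unsigned char gen[TOY_MAX_BB];	/* block regenerates the expression */
  unsigned char kill[TOY_MAX_BB];	/* block kills the expression */
};

/* Return 1 if the occurrence generated in TARGET_BB reaches the start of
   BB; VISITED must be zeroed by the caller.  */
static int
toy_reaches_p (const struct toy_cfg *cfg, int target_bb, int bb, char *visited)
{
  int i;

  for (i = 0; i < cfg->n_preds[bb]; i++)
    {
      int pred = cfg->preds[bb][i];

      if (visited[pred])
	continue;
      visited[pred] = 1;

      if (pred == target_bb && cfg->gen[pred])
	return 1;			/* found the generating occurrence */
      if (cfg->kill[pred] || cfg->gen[pred])
	continue;			/* killed, or shadowed by another gen */
      if (toy_reaches_p (cfg, target_bb, pred, visited))
	return 1;
    }

  return 0;
}
#endif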
3282 /* Return the instruction that computes EXPR that reaches INSN's basic block.
3283 If there is more than one such instruction, return NULL.
3285 Called only by handle_avail_expr. */
3287 static rtx
3288 computing_insn (expr, insn)
3289 struct expr *expr;
3290 rtx insn;
3292 basic_block bb = BLOCK_FOR_INSN (insn);
3294 if (expr->avail_occr->next == NULL)
3296 if (BLOCK_FOR_INSN (expr->avail_occr->insn) == bb)
3297 /* The available expression is actually itself
3298 (i.e. a loop in the flow graph) so do nothing. */
3299 return NULL;
3301 /* (FIXME) Case that we found a pattern that was created by
3302 a substitution that took place. */
3303 return expr->avail_occr->insn;
3305 else
3307 /* Pattern is computed more than once.
3308 Search backwards from this insn to see how many of these
3309 computations actually reach this insn. */
3310 struct occr *occr;
3311 rtx insn_computes_expr = NULL;
3312 int can_reach = 0;
3314 for (occr = expr->avail_occr; occr != NULL; occr = occr->next)
3316 if (BLOCK_FOR_INSN (occr->insn) == bb)
3318 /* The expression is generated in this block.
3319 The only time we care about this is when the expression
3320 is generated later in the block [and thus there's a loop].
3321 We let the normal cse pass handle the other cases. */
3322 if (INSN_CUID (insn) < INSN_CUID (occr->insn)
3323 && expr_reaches_here_p (occr, expr, bb, 1))
3325 can_reach++;
3326 if (can_reach > 1)
3327 return NULL;
3329 insn_computes_expr = occr->insn;
3332 else if (expr_reaches_here_p (occr, expr, bb, 0))
3334 can_reach++;
3335 if (can_reach > 1)
3336 return NULL;
3338 insn_computes_expr = occr->insn;
3342 if (insn_computes_expr == NULL)
3343 abort ();
3345 return insn_computes_expr;
3349 /* Return nonzero if the definition in DEF_INSN can reach INSN.
3350 Only called by can_disregard_other_sets. */
3352 static int
3353 def_reaches_here_p (insn, def_insn)
3354 rtx insn, def_insn;
3356 rtx reg;
3358 if (TEST_BIT (reaching_defs[BLOCK_NUM (insn)], INSN_CUID (def_insn)))
3359 return 1;
3361 if (BLOCK_NUM (insn) == BLOCK_NUM (def_insn))
3363 if (INSN_CUID (def_insn) < INSN_CUID (insn))
3365 if (GET_CODE (PATTERN (def_insn)) == PARALLEL)
3366 return 1;
3367 else if (GET_CODE (PATTERN (def_insn)) == CLOBBER)
3368 reg = XEXP (PATTERN (def_insn), 0);
3369 else if (GET_CODE (PATTERN (def_insn)) == SET)
3370 reg = SET_DEST (PATTERN (def_insn));
3371 else
3372 abort ();
3374 return ! reg_set_between_p (reg, NEXT_INSN (def_insn), insn);
3376 else
3377 return 0;
3380 return 0;
3383 /* Return nonzero if *ADDR_THIS_REG can only have one value at INSN. The
3384 value returned is the number of definitions that reach INSN. Returning a
3385 value of zero means that [maybe] more than one definition reaches INSN and
3386    the caller can't perform whatever optimization it is trying; i.e., it is
3387 always safe to return zero. */
3389 static int
3390 can_disregard_other_sets (addr_this_reg, insn, for_combine)
3391 struct reg_set **addr_this_reg;
3392 rtx insn;
3393 int for_combine;
3395 int number_of_reaching_defs = 0;
3396 struct reg_set *this_reg;
3398 for (this_reg = *addr_this_reg; this_reg != 0; this_reg = this_reg->next)
3399 if (def_reaches_here_p (insn, this_reg->insn))
3401 number_of_reaching_defs++;
3402 /* Ignore parallels for now. */
3403 if (GET_CODE (PATTERN (this_reg->insn)) == PARALLEL)
3404 return 0;
3406 if (!for_combine
3407 && (GET_CODE (PATTERN (this_reg->insn)) == CLOBBER
3408 || ! rtx_equal_p (SET_SRC (PATTERN (this_reg->insn)),
3409 SET_SRC (PATTERN (insn)))))
3410 /* A setting of the reg to a different value reaches INSN. */
3411 return 0;
3413 if (number_of_reaching_defs > 1)
3415 	/* If, in this setting, the value the register is being set to is
3416 	   equal to the previous value the register was set to, and this
3417 	   setting reaches the insn we are trying to do the substitution
3418 	   on, then we are ok.  */
3419 if (GET_CODE (PATTERN (this_reg->insn)) == CLOBBER)
3420 return 0;
3421 else if (! rtx_equal_p (SET_SRC (PATTERN (this_reg->insn)),
3422 SET_SRC (PATTERN (insn))))
3423 return 0;
3426 *addr_this_reg = this_reg;
3429 return number_of_reaching_defs;
3432 /* Expression computed by insn is available and the substitution is legal,
3433 so try to perform the substitution.
3435 The result is nonzero if any changes were made. */
3437 static int
3438 handle_avail_expr (insn, expr)
3439 rtx insn;
3440 struct expr *expr;
3442 rtx pat, insn_computes_expr, expr_set;
3443 rtx to;
3444 struct reg_set *this_reg;
3445 int found_setting, use_src;
3446 int changed = 0;
3448 /* We only handle the case where one computation of the expression
3449 reaches this instruction. */
3450 insn_computes_expr = computing_insn (expr, insn);
3451 if (insn_computes_expr == NULL)
3452 return 0;
3453 expr_set = single_set (insn_computes_expr);
3454 if (!expr_set)
3455 abort ();
3457 found_setting = 0;
3458 use_src = 0;
3460 /* At this point we know only one computation of EXPR outside of this
3461 block reaches this insn. Now try to find a register that the
3462 expression is computed into. */
3463 if (GET_CODE (SET_SRC (expr_set)) == REG)
3465 /* This is the case when the available expression that reaches
3466 here has already been handled as an available expression. */
3467 unsigned int regnum_for_replacing
3468 = REGNO (SET_SRC (expr_set));
3470       /* If the register was created by GCSE we can't use `reg_set_table';
3471 	 however, we know it's set only once.  */
3472 if (regnum_for_replacing >= max_gcse_regno
3473 /* If the register the expression is computed into is set only once,
3474 or only one set reaches this insn, we can use it. */
3475 || (((this_reg = reg_set_table[regnum_for_replacing]),
3476 this_reg->next == NULL)
3477 || can_disregard_other_sets (&this_reg, insn, 0)))
3479 use_src = 1;
3480 found_setting = 1;
3484 if (!found_setting)
3486 unsigned int regnum_for_replacing
3487 = REGNO (SET_DEST (expr_set));
3489 /* This shouldn't happen. */
3490 if (regnum_for_replacing >= max_gcse_regno)
3491 abort ();
3493 this_reg = reg_set_table[regnum_for_replacing];
3495 /* If the register the expression is computed into is set only once,
3496 or only one set reaches this insn, use it. */
3497 if (this_reg->next == NULL
3498 || can_disregard_other_sets (&this_reg, insn, 0))
3499 found_setting = 1;
3502 if (found_setting)
3504 pat = PATTERN (insn);
3505 if (use_src)
3506 to = SET_SRC (expr_set);
3507 else
3508 to = SET_DEST (expr_set);
3509 changed = validate_change (insn, &SET_SRC (pat), to, 0);
3511 /* We should be able to ignore the return code from validate_change but
3512 to play it safe we check. */
3513 if (changed)
3515 gcse_subst_count++;
3516 if (gcse_file != NULL)
3518 fprintf (gcse_file, "GCSE: Replacing the source in insn %d with",
3519 INSN_UID (insn));
3520 fprintf (gcse_file, " reg %d %s insn %d\n",
3521 REGNO (to), use_src ? "from" : "set in",
3522 INSN_UID (insn_computes_expr));
3527 /* The register that the expr is computed into is set more than once. */
3528 else if (1 /*expensive_op(this_pattrn->op) && do_expensive_gcse)*/)
3530       /* Insert an insn after insnx that copies the reg set in insnx
3531 	 into a new pseudo register; call this new register REGN.
3532 	 From insnb until the end of the basic block, or until REGB is set,
3533 	 replace all uses of REGB with REGN.  */
3534 rtx new_insn;
3536 to = gen_reg_rtx (GET_MODE (SET_DEST (expr_set)));
3538 /* Generate the new insn. */
3539 /* ??? If the change fails, we return 0, even though we created
3540 an insn. I think this is ok. */
3541 new_insn
3542 = emit_insn_after (gen_rtx_SET (VOIDmode, to,
3543 SET_DEST (expr_set)),
3544 insn_computes_expr);
3546 /* Keep register set table up to date. */
3547 record_one_set (REGNO (to), new_insn);
3549 gcse_create_count++;
3550 if (gcse_file != NULL)
3552 fprintf (gcse_file, "GCSE: Creating insn %d to copy value of reg %d",
3553 INSN_UID (NEXT_INSN (insn_computes_expr)),
3554 REGNO (SET_SRC (PATTERN (NEXT_INSN (insn_computes_expr)))));
3555 fprintf (gcse_file, ", computed in insn %d,\n",
3556 INSN_UID (insn_computes_expr));
3557 fprintf (gcse_file, " into newly allocated reg %d\n",
3558 REGNO (to));
3561 pat = PATTERN (insn);
3563 /* Do register replacement for INSN. */
3564 changed = validate_change (insn, &SET_SRC (pat),
3565 SET_DEST (PATTERN
3566 (NEXT_INSN (insn_computes_expr))),
3569 /* We should be able to ignore the return code from validate_change but
3570 to play it safe we check. */
3571 if (changed)
3573 gcse_subst_count++;
3574 if (gcse_file != NULL)
3576 fprintf (gcse_file,
3577 "GCSE: Replacing the source in insn %d with reg %d ",
3578 INSN_UID (insn),
3579 REGNO (SET_DEST (PATTERN (NEXT_INSN
3580 (insn_computes_expr)))));
3581 fprintf (gcse_file, "set in insn %d\n",
3582 INSN_UID (insn_computes_expr));
3587 return changed;
3590 /* Perform classic GCSE. This is called by one_classic_gcse_pass after all
3591 the dataflow analysis has been done.
3593 The result is nonzero if a change was made. */
3595 static int
3596 classic_gcse ()
3598 int changed;
3599 rtx insn;
3600 basic_block bb;
3602 /* Note we start at block 1. */
3604 if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
3605 return 0;
3607 changed = 0;
3608 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb->next_bb, EXIT_BLOCK_PTR, next_bb)
3610 /* Reset tables used to keep track of what's still valid [since the
3611 start of the block]. */
3612 reset_opr_set_tables ();
3614 for (insn = bb->head;
3615 insn != NULL && insn != NEXT_INSN (bb->end);
3616 insn = NEXT_INSN (insn))
3618 /* Is insn of form (set (pseudo-reg) ...)? */
3619 if (GET_CODE (insn) == INSN
3620 && GET_CODE (PATTERN (insn)) == SET
3621 && GET_CODE (SET_DEST (PATTERN (insn))) == REG
3622 && REGNO (SET_DEST (PATTERN (insn))) >= FIRST_PSEUDO_REGISTER)
3624 rtx pat = PATTERN (insn);
3625 rtx src = SET_SRC (pat);
3626 struct expr *expr;
3628 if (want_to_gcse_p (src)
3629 /* Is the expression recorded? */
3630 && ((expr = lookup_expr (src, &expr_hash_table)) != NULL)
3631 /* Is the expression available [at the start of the
3632 block]? */
3633 && TEST_BIT (ae_in[bb->index], expr->bitmap_index)
3634 /* Are the operands unchanged since the start of the
3635 block? */
3636 && oprs_not_set_p (src, insn))
3637 changed |= handle_avail_expr (insn, expr);
3640 /* Keep track of everything modified by this insn. */
3641 /* ??? Need to be careful w.r.t. mods done to INSN. */
3642 if (INSN_P (insn))
3643 mark_oprs_set (insn);
3647 return changed;
3650 /* Top level routine to perform one classic GCSE pass.
3652 Return nonzero if a change was made. */
3654 static int
3655 one_classic_gcse_pass (pass)
3656 int pass;
3658 int changed = 0;
3660 gcse_subst_count = 0;
3661 gcse_create_count = 0;
3663 alloc_hash_table (max_cuid, &expr_hash_table, 0);
3664 alloc_rd_mem (last_basic_block, max_cuid);
3665 compute_hash_table (&expr_hash_table);
3666 if (gcse_file)
3667 dump_hash_table (gcse_file, "Expression", &expr_hash_table);
3669 if (expr_hash_table.n_elems > 0)
3671 compute_kill_rd ();
3672 compute_rd ();
3673 alloc_avail_expr_mem (last_basic_block, expr_hash_table.n_elems);
3674 compute_ae_gen (&expr_hash_table);
3675 compute_ae_kill (ae_gen, ae_kill, &expr_hash_table);
3676 compute_available (ae_gen, ae_kill, ae_out, ae_in);
3677 changed = classic_gcse ();
3678 free_avail_expr_mem ();
3681 free_rd_mem ();
3682 free_hash_table (&expr_hash_table);
3684 if (gcse_file)
3686 fprintf (gcse_file, "\n");
3687 fprintf (gcse_file, "GCSE of %s, pass %d: %d bytes needed, %d substs,",
3688 current_function_name, pass, bytes_used, gcse_subst_count);
3689 fprintf (gcse_file, "%d insns created\n", gcse_create_count);
3692 return changed;
3695 /* Compute copy/constant propagation working variables. */
3697 /* Local properties of assignments. */
3698 static sbitmap *cprop_pavloc;
3699 static sbitmap *cprop_absaltered;
3701 /* Global properties of assignments (computed from the local properties). */
3702 static sbitmap *cprop_avin;
3703 static sbitmap *cprop_avout;
3705 /* Allocate vars used for copy/const propagation. N_BLOCKS is the number of
3706 basic blocks. N_SETS is the number of sets. */
3708 static void
3709 alloc_cprop_mem (n_blocks, n_sets)
3710 int n_blocks, n_sets;
3712 cprop_pavloc = sbitmap_vector_alloc (n_blocks, n_sets);
3713 cprop_absaltered = sbitmap_vector_alloc (n_blocks, n_sets);
3715 cprop_avin = sbitmap_vector_alloc (n_blocks, n_sets);
3716 cprop_avout = sbitmap_vector_alloc (n_blocks, n_sets);
3719 /* Free vars used by copy/const propagation. */
3721 static void
3722 free_cprop_mem ()
3724 sbitmap_vector_free (cprop_pavloc);
3725 sbitmap_vector_free (cprop_absaltered);
3726 sbitmap_vector_free (cprop_avin);
3727 sbitmap_vector_free (cprop_avout);
3730 /* For each block, compute whether X is transparent. X is either an
3731 expression or an assignment [though we don't care which, for this context
3732 an assignment is treated as an expression]. For each block where an
3733 element of X is modified, set (SET_P == 1) or reset (SET_P == 0) the INDX
3734 bit in BMAP. */
3736 static void
3737 compute_transp (x, indx, bmap, set_p)
3738 rtx x;
3739 int indx;
3740 sbitmap *bmap;
3741 int set_p;
3743 int i, j;
3744 basic_block bb;
3745 enum rtx_code code;
3746 reg_set *r;
3747 const char *fmt;
3749 /* repeat is used to turn tail-recursion into iteration since GCC
3750 can't do it when there's no return value. */
3751 repeat:
3753 if (x == 0)
3754 return;
3756 code = GET_CODE (x);
3757 switch (code)
3759 case REG:
3760 if (set_p)
3762 if (REGNO (x) < FIRST_PSEUDO_REGISTER)
3764 FOR_EACH_BB (bb)
3765 if (TEST_BIT (reg_set_in_block[bb->index], REGNO (x)))
3766 SET_BIT (bmap[bb->index], indx);
3768 else
3770 for (r = reg_set_table[REGNO (x)]; r != NULL; r = r->next)
3771 SET_BIT (bmap[BLOCK_NUM (r->insn)], indx);
3774 else
3776 if (REGNO (x) < FIRST_PSEUDO_REGISTER)
3778 FOR_EACH_BB (bb)
3779 if (TEST_BIT (reg_set_in_block[bb->index], REGNO (x)))
3780 RESET_BIT (bmap[bb->index], indx);
3782 else
3784 for (r = reg_set_table[REGNO (x)]; r != NULL; r = r->next)
3785 RESET_BIT (bmap[BLOCK_NUM (r->insn)], indx);
3789 return;
3791 case MEM:
3792 FOR_EACH_BB (bb)
3794 rtx list_entry = canon_modify_mem_list[bb->index];
3796 while (list_entry)
3798 rtx dest, dest_addr;
3800 if (GET_CODE (XEXP (list_entry, 0)) == CALL_INSN)
3802 if (set_p)
3803 SET_BIT (bmap[bb->index], indx);
3804 else
3805 RESET_BIT (bmap[bb->index], indx);
3806 break;
3808 /* LIST_ENTRY must be an INSN of some kind that sets memory.
3809 Examine each hunk of memory that is modified. */
3811 dest = XEXP (list_entry, 0);
3812 list_entry = XEXP (list_entry, 1);
3813 dest_addr = XEXP (list_entry, 0);
3815 if (canon_true_dependence (dest, GET_MODE (dest), dest_addr,
3816 x, rtx_addr_varies_p))
3818 if (set_p)
3819 SET_BIT (bmap[bb->index], indx);
3820 else
3821 RESET_BIT (bmap[bb->index], indx);
3822 break;
3824 list_entry = XEXP (list_entry, 1);
3828 x = XEXP (x, 0);
3829 goto repeat;
3831 case PC:
3832 case CC0: /*FIXME*/
3833 case CONST:
3834 case CONST_INT:
3835 case CONST_DOUBLE:
3836 case CONST_VECTOR:
3837 case SYMBOL_REF:
3838 case LABEL_REF:
3839 case ADDR_VEC:
3840 case ADDR_DIFF_VEC:
3841 return;
3843 default:
3844 break;
3847 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
3849 if (fmt[i] == 'e')
3851 /* If we are about to do the last recursive call
3852 needed at this level, change it into iteration.
3853 This function is called enough to be worth it. */
3854 if (i == 0)
3856 x = XEXP (x, i);
3857 goto repeat;
3860 compute_transp (XEXP (x, i), indx, bmap, set_p);
3862 else if (fmt[i] == 'E')
3863 for (j = 0; j < XVECLEN (x, i); j++)
3864 compute_transp (XVECEXP (x, i, j), indx, bmap, set_p);
3868 /* Top level routine to do the dataflow analysis needed by copy/const
3869 propagation. */
3871 static void
3872 compute_cprop_data ()
3874 compute_local_properties (cprop_absaltered, cprop_pavloc, NULL, &set_hash_table);
3875 compute_available (cprop_pavloc, cprop_absaltered,
3876 cprop_avout, cprop_avin);
3879 /* Copy/constant propagation. */
3881 /* Maximum number of register uses in an insn that we handle. */
3882 #define MAX_USES 8
3884 /* Table of uses found in an insn.
3885 Allocated statically to avoid alloc/free complexity and overhead. */
3886 static struct reg_use reg_use_table[MAX_USES];
3888 /* Index into `reg_use_table' while building it. */
3889 static int reg_use_count;
3891 /* Set up a list of register numbers used in INSN. The found uses are stored
3892 in `reg_use_table'. `reg_use_count' is initialized to zero before entry,
3893 and contains the number of uses in the table upon exit.
3895 ??? If a register appears multiple times we will record it multiple times.
3896 This doesn't hurt anything but it will slow things down. */
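/* For example (register numbers arbitrary): for an insn with pattern
   (set (reg 105) (plus (reg 100) (reg 100))), note_uses hands the SET_SRC
   to this function, which records (reg 100) twice in reg_use_table and
   leaves reg_use_count == 2, as noted above.  */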
3898 static void
3899 find_used_regs (xptr, data)
3900 rtx *xptr;
3901 void *data ATTRIBUTE_UNUSED;
3903 int i, j;
3904 enum rtx_code code;
3905 const char *fmt;
3906 rtx x = *xptr;
3908 /* repeat is used to turn tail-recursion into iteration since GCC
3909 can't do it when there's no return value. */
3910 repeat:
3911 if (x == 0)
3912 return;
3914 code = GET_CODE (x);
3915 if (REG_P (x))
3917 if (reg_use_count == MAX_USES)
3918 return;
3920 reg_use_table[reg_use_count].reg_rtx = x;
3921 reg_use_count++;
3924 /* Recursively scan the operands of this expression. */
3926 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
3928 if (fmt[i] == 'e')
3930 /* If we are about to do the last recursive call
3931 needed at this level, change it into iteration.
3932 This function is called enough to be worth it. */
3933 if (i == 0)
3935 x = XEXP (x, 0);
3936 goto repeat;
3939 find_used_regs (&XEXP (x, i), data);
3941 else if (fmt[i] == 'E')
3942 for (j = 0; j < XVECLEN (x, i); j++)
3943 find_used_regs (&XVECEXP (x, i, j), data);
3947 /* Try to replace all non-SET_DEST occurrences of FROM in INSN with TO.
3948 Returns nonzero if successful. */
3950 static int
3951 try_replace_reg (from, to, insn)
3952 rtx from, to, insn;
3954 rtx note = find_reg_equal_equiv_note (insn);
3955 rtx src = 0;
3956 int success = 0;
3957 rtx set = single_set (insn);
3959 validate_replace_src_group (from, to, insn);
3960 if (num_changes_pending () && apply_change_group ())
3961 success = 1;
3963 /* Try to simplify SET_SRC if we have substituted a constant. */
3964 if (success && set && CONSTANT_P (to))
3966 src = simplify_rtx (SET_SRC (set));
3968 if (src)
3969 validate_change (insn, &SET_SRC (set), src, 0);
3972 if (!success && set && reg_mentioned_p (from, SET_SRC (set)))
3974 /* If the above failed and this is a single set, try to simplify the source of
3975 the set given our substitution. We could perhaps try this for multiple
3976 SETs, but it probably won't buy us anything. */
3977 src = simplify_replace_rtx (SET_SRC (set), from, to);
3979 if (!rtx_equal_p (src, SET_SRC (set))
3980 && validate_change (insn, &SET_SRC (set), src, 0))
3981 success = 1;
3983 /* If we've failed to do replacement, have a single SET, and don't already
3984 have a note, add a REG_EQUAL note to not lose information. */
3985 if (!success && note == 0 && set != 0)
3986 note = set_unique_reg_note (insn, REG_EQUAL, copy_rtx (src));
3989 /* If there is already a NOTE, update the expression in it with our
3990 replacement. */
3991 else if (note != 0)
3992 XEXP (note, 0) = simplify_replace_rtx (XEXP (note, 0), from, to);
3994 /* REG_EQUAL may get simplified into a register.
3995 We don't allow that. Remove that note. This case ought
3996 not to happen, because previous code ought to synthesize a
3997 reg-reg move, but be on the safe side. */
3998 if (note && REG_P (XEXP (note, 0)))
3999 remove_note (insn, note);
4001 return success;
4004 /* Find a set of register REGNO that is available on entry to INSN's block.
4005 Returns NULL if no such set is found. */
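/* For example (register numbers arbitrary): if both
   (set (reg 102) (reg 101)) and (set (reg 101) (const_int 5)) are
   available at the start of INSN's block, a lookup for register 102
   follows the copy chain 102 -> 101 and returns the set whose source is
   (const_int 5), provided the operands of each set are unchanged between
   the start of the block and INSN.  */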
4007 static struct expr *
4008 find_avail_set (regno, insn)
4009 int regno;
4010 rtx insn;
4012 /* SET1 contains the last set found that can be returned to the caller for
4013 use in a substitution. */
4014 struct expr *set1 = 0;
4016 /* Loops are not possible here. To get a loop we would need two sets
4017 available at the start of the block containing INSN. I.e. we would
4018 need two sets like this available at the start of the block:
4020 (set (reg X) (reg Y))
4021 (set (reg Y) (reg X))
4023 This cannot happen since the set of (reg Y) would have killed the
4024 set of (reg X) making it unavailable at the start of this block. */
4025 while (1)
4027 rtx src;
4028 struct expr *set = lookup_set (regno, &set_hash_table);
4030 /* Find a set that is available at the start of the block
4031 which contains INSN. */
4032 while (set)
4034 if (TEST_BIT (cprop_avin[BLOCK_NUM (insn)], set->bitmap_index))
4035 break;
4036 set = next_set (regno, set);
4039 /* If no available set was found we've reached the end of the
4040 (possibly empty) copy chain. */
4041 if (set == 0)
4042 break;
4044 if (GET_CODE (set->expr) != SET)
4045 abort ();
4047 src = SET_SRC (set->expr);
4049 /* We know the set is available.
4050 Now check that SRC is ANTLOC (i.e. none of the source operands
4051 have changed since the start of the block).
4053 If the source operand changed, we may still use it for the next
4054 iteration of this loop, but we may not use it for substitutions. */
4056 if (gcse_constant_p (src) || oprs_not_set_p (src, insn))
4057 set1 = set;
4059 /* If the source of the set is anything except a register, then
4060 we have reached the end of the copy chain. */
4061 if (GET_CODE (src) != REG)
4062 break;
4064 /* Follow the copy chain, i.e. start another iteration of the loop
4065 and see if we have an available copy into SRC. */
4066 regno = REGNO (src);
4069 /* SET1 holds the last set that was available and anticipatable at
4070 INSN. */
4071 return set1;
4074 /* Subroutine of cprop_insn that tries to propagate constants into
4075 JUMP_INSNS. JUMP must be a conditional jump. If SETCC is non-NULL
4076 it is the instruction that immediately precedes JUMP, and must be a
4077 single SET of a register. FROM is what we will try to replace,
4078 SRC is the constant we will try to substitute for it. Returns nonzero
4079 if a change was made. */
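/* For example (register number arbitrary): given the jump
   (set (pc) (if_then_else (eq (reg 100) (const_int 0)) (label_ref L) (pc)))
   with FROM = (reg 100) and SRC = (const_int 0), the condition folds to
   true, the SET_SRC collapses to (label_ref L) and the jump becomes
   unconditional; had it folded to (pc), the jump would be deleted.  */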
4081 static int
4082 cprop_jump (bb, setcc, jump, from, src)
4083 basic_block bb;
4084 rtx setcc;
4085 rtx jump;
4086 rtx from;
4087 rtx src;
4089 rtx new, set_src, note_src;
4090 rtx set = pc_set (jump);
4091 rtx note = find_reg_equal_equiv_note (jump);
4093 if (note)
4095 note_src = XEXP (note, 0);
4096 if (GET_CODE (note_src) == EXPR_LIST)
4097 note_src = NULL_RTX;
4099 else note_src = NULL_RTX;
4101 /* Prefer REG_EQUAL notes except those containing EXPR_LISTs. */
4102 set_src = note_src ? note_src : SET_SRC (set);
4104 /* First substitute the SETCC condition into the JUMP instruction,
4105 then substitute the given values into this expanded JUMP. */
4106 if (setcc != NULL_RTX
4107 && !modified_between_p (from, setcc, jump)
4108 && !modified_between_p (src, setcc, jump))
4110 rtx setcc_src;
4111 rtx setcc_set = single_set (setcc);
4112 rtx setcc_note = find_reg_equal_equiv_note (setcc);
4113 setcc_src = (setcc_note && GET_CODE (XEXP (setcc_note, 0)) != EXPR_LIST)
4114 ? XEXP (setcc_note, 0) : SET_SRC (setcc_set);
4115 set_src = simplify_replace_rtx (set_src, SET_DEST (setcc_set),
4116 setcc_src);
4118 else
4119 setcc = NULL_RTX;
4121 new = simplify_replace_rtx (set_src, from, src);
4123 /* If no simplification can be made, then try the next register. */
4124 if (rtx_equal_p (new, SET_SRC (set)))
4125 return 0;
4127 /* If this is now a no-op delete it, otherwise this must be a valid insn. */
4128 if (new == pc_rtx)
4129 delete_insn (jump);
4130 else
4132 /* Ensure the value computed inside the jump insn is equivalent
4133 to the one computed by setcc. */
4134 if (setcc && modified_in_p (new, setcc))
4135 return 0;
4136 if (! validate_change (jump, &SET_SRC (set), new, 0))
4138 /* When (some) constants are not valid in a comparison, and there
4139 are two registers to be replaced by constants before the entire
4140 comparison can be folded into a constant, we need to keep
4141 intermediate information in REG_EQUAL notes. For targets with
4142 separate compare insns, such notes are added by try_replace_reg.
4143 When we have a combined compare-and-branch instruction, however,
4144 we need to attach a note to the branch itself to make this
4145 optimization work. */
4147 if (!rtx_equal_p (new, note_src))
4148 set_unique_reg_note (jump, REG_EQUAL, copy_rtx (new));
4149 return 0;
4152 /* Remove REG_EQUAL note after simplification. */
4153 if (note_src)
4154 remove_note (jump, note);
4156 /* If this has turned into an unconditional jump,
4157 then put a barrier after it so that the unreachable
4158 code will be deleted. */
4159 if (GET_CODE (SET_SRC (set)) == LABEL_REF)
4160 emit_barrier_after (jump);
4163 #ifdef HAVE_cc0
4164 /* Delete the cc0 setter. */
4165 if (setcc != NULL && CC0_P (SET_DEST (single_set (setcc))))
4166 delete_insn (setcc);
4167 #endif
4169 run_jump_opt_after_gcse = 1;
4171 const_prop_count++;
4172 if (gcse_file != NULL)
4174 fprintf (gcse_file,
4175 "CONST-PROP: Replacing reg %d in jump_insn %d with constant ",
4176 REGNO (from), INSN_UID (jump));
4177 print_rtl (gcse_file, src);
4178 fprintf (gcse_file, "\n");
4180 purge_dead_edges (bb);
4182 return 1;
4185 static bool
4186 constprop_register (insn, from, to, alter_jumps)
4187 rtx insn;
4188 rtx from;
4189 rtx to;
4190 int alter_jumps;
4192 rtx sset;
4194 /* Check for reg or cc0 setting instructions followed by
4195 conditional branch instructions first. */
4196 if (alter_jumps
4197 && (sset = single_set (insn)) != NULL
4198 && NEXT_INSN (insn)
4199 && any_condjump_p (NEXT_INSN (insn)) && onlyjump_p (NEXT_INSN (insn)))
4201 rtx dest = SET_DEST (sset);
4202 if ((REG_P (dest) || CC0_P (dest))
4203 && cprop_jump (BLOCK_FOR_INSN (insn), insn, NEXT_INSN (insn), from, to))
4204 return 1;
4207 /* Handle normal insns next. */
4208 if (GET_CODE (insn) == INSN
4209 && try_replace_reg (from, to, insn))
4210 return 1;
4212 /* Try to propagate a CONST_INT into a conditional jump.
4213 We're pretty specific about what we will handle in this
4214 code; we can extend it as necessary over time.
4216 Right now the insn in question must look like
4217 (set (pc) (if_then_else ...)) */
4218 else if (alter_jumps && any_condjump_p (insn) && onlyjump_p (insn))
4219 return cprop_jump (BLOCK_FOR_INSN (insn), NULL, insn, from, to);
4220 return 0;
4223 /* Perform constant and copy propagation on INSN.
4224 The result is nonzero if a change was made. */
4226 static int
4227 cprop_insn (insn, alter_jumps)
4228 rtx insn;
4229 int alter_jumps;
4231 struct reg_use *reg_used;
4232 int changed = 0;
4233 rtx note;
4235 if (!INSN_P (insn))
4236 return 0;
4238 reg_use_count = 0;
4239 note_uses (&PATTERN (insn), find_used_regs, NULL);
4241 note = find_reg_equal_equiv_note (insn);
4243 /* We may win even when propagating constants into notes. */
4244 if (note)
4245 find_used_regs (&XEXP (note, 0), NULL);
4247 for (reg_used = &reg_use_table[0]; reg_use_count > 0;
4248 reg_used++, reg_use_count--)
4250 unsigned int regno = REGNO (reg_used->reg_rtx);
4251 rtx pat, src;
4252 struct expr *set;
4254 /* Ignore registers created by GCSE.
4255 We do this because ... */
4256 if (regno >= max_gcse_regno)
4257 continue;
4259 /* If the register has already been set in this block, there's
4260 nothing we can do. */
4261 if (! oprs_not_set_p (reg_used->reg_rtx, insn))
4262 continue;
4264 /* Find an assignment that sets reg_used and is available
4265 at the start of the block. */
4266 set = find_avail_set (regno, insn);
4267 if (! set)
4268 continue;
4270 pat = set->expr;
4271 /* ??? We might be able to handle PARALLELs. Later. */
4272 if (GET_CODE (pat) != SET)
4273 abort ();
4275 src = SET_SRC (pat);
4277 /* Constant propagation. */
4278 if (gcse_constant_p (src))
4280 if (constprop_register (insn, reg_used->reg_rtx, src, alter_jumps))
4282 changed = 1;
4283 const_prop_count++;
4284 if (gcse_file != NULL)
4286 fprintf (gcse_file, "GLOBAL CONST-PROP: Replacing reg %d in ", regno);
4287 fprintf (gcse_file, "insn %d with constant ", INSN_UID (insn));
4288 print_rtl (gcse_file, src);
4289 fprintf (gcse_file, "\n");
4291 if (INSN_DELETED_P (insn))
4292 return 1;
4295 else if (GET_CODE (src) == REG
4296 && REGNO (src) >= FIRST_PSEUDO_REGISTER
4297 && REGNO (src) != regno)
4299 if (try_replace_reg (reg_used->reg_rtx, src, insn))
4301 changed = 1;
4302 copy_prop_count++;
4303 if (gcse_file != NULL)
4305 fprintf (gcse_file, "GLOBAL COPY-PROP: Replacing reg %d in insn %d",
4306 regno, INSN_UID (insn));
4307 fprintf (gcse_file, " with reg %d\n", REGNO (src));
4310 /* The original insn setting reg_used may or may not now be
4311 deletable. We leave the deletion to flow. */
4312 /* FIXME: If it turns out that the insn isn't deletable,
4313 then we may have unnecessarily extended register lifetimes
4314 and made things worse. */
4319 return changed;
4322 /* Like find_used_regs, but avoid recording uses that appear in
4323 input-output contexts such as zero_extract or pre_dec. This
4324 restricts the cases we consider to those for which local cprop
4325 can legitimately make replacements. */
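/* For example, in (set (zero_extract (reg 100) ...) ...) the register
   inside the ZERO_EXTRACT is both read and written, so substituting a
   constant for it would be invalid; uses in such contexts are skipped,
   while an ordinary use like (plus (reg 100) (const_int 1)) elsewhere in
   the pattern is still recorded.  */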
4327 static void
4328 local_cprop_find_used_regs (xptr, data)
4329 rtx *xptr;
4330 void *data;
4332 rtx x = *xptr;
4334 if (x == 0)
4335 return;
4337 switch (GET_CODE (x))
4339 case ZERO_EXTRACT:
4340 case SIGN_EXTRACT:
4341 case STRICT_LOW_PART:
4342 return;
4344 case PRE_DEC:
4345 case PRE_INC:
4346 case POST_DEC:
4347 case POST_INC:
4348 case PRE_MODIFY:
4349 case POST_MODIFY:
4350 /* Can only legitimately appear this early in the context of
4351 stack pushes for function arguments, but handle all of the
4352 codes nonetheless. */
4353 return;
4355 case SUBREG:
4356 /* Setting a subreg of a register larger than word_mode leaves
4357 the non-written words unchanged. */
4358 if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) > BITS_PER_WORD)
4359 return;
4360 break;
4362 default:
4363 break;
4366 find_used_regs (xptr, data);
4369 /* LIBCALL_SP is a zero-terminated array of insns at the end of a libcall;
4370 their REG_EQUAL notes need updating. */
4372 static bool
4373 do_local_cprop (x, insn, alter_jumps, libcall_sp)
4374 rtx x;
4375 rtx insn;
4376 int alter_jumps;
4377 rtx *libcall_sp;
4379 rtx newreg = NULL, newcnst = NULL;
4381 /* Rule out USE instructions and ASM statements as we don't want to
4382 change the hard registers mentioned. */
4383 if (GET_CODE (x) == REG
4384 && (REGNO (x) >= FIRST_PSEUDO_REGISTER
4385 || (GET_CODE (PATTERN (insn)) != USE
4386 && asm_noperands (PATTERN (insn)) < 0)))
4388 cselib_val *val = cselib_lookup (x, GET_MODE (x), 0);
4389 struct elt_loc_list *l;
4391 if (!val)
4392 return false;
4393 for (l = val->locs; l; l = l->next)
4395 rtx this_rtx = l->loc;
4396 rtx note;
4398 if (l->in_libcall)
4399 continue;
4401 if (gcse_constant_p (this_rtx))
4402 newcnst = this_rtx;
4403 if (REG_P (this_rtx) && REGNO (this_rtx) >= FIRST_PSEUDO_REGISTER
4404 /* Don't copy propagate if it has an attached REG_EQUIV note.
4405 At this point only function parameters should have
4406 REG_EQUIV notes, and if the argument slot is used somewhere
4407 explicitly, it means the address of the parameter has been taken,
4408 so we should not extend the lifetime of the pseudo. */
4409 && (!(note = find_reg_note (l->setting_insn, REG_EQUIV, NULL_RTX))
4410 || GET_CODE (XEXP (note, 0)) != MEM))
4411 newreg = this_rtx;
4413 if (newcnst && constprop_register (insn, x, newcnst, alter_jumps))
4415 /* If we find a case where we can't fix the retval REG_EQUAL notes to
4416 match the new register, we either have to abandon this replacement,
4417 fix delete_trivially_dead_insns to preserve the setting insn,
4418 or make it delete the REG_EQUAL note, and fix up all passes that
4419 require the REG_EQUAL note there. */
4420 if (!adjust_libcall_notes (x, newcnst, insn, libcall_sp))
4421 abort ();
4422 if (gcse_file != NULL)
4424 fprintf (gcse_file, "LOCAL CONST-PROP: Replacing reg %d in ",
4425 REGNO (x));
4426 fprintf (gcse_file, "insn %d with constant ",
4427 INSN_UID (insn));
4428 print_rtl (gcse_file, newcnst);
4429 fprintf (gcse_file, "\n");
4431 const_prop_count++;
4432 return true;
4434 else if (newreg && newreg != x && try_replace_reg (x, newreg, insn))
4436 adjust_libcall_notes (x, newreg, insn, libcall_sp);
4437 if (gcse_file != NULL)
4439 fprintf (gcse_file,
4440 "LOCAL COPY-PROP: Replacing reg %d in insn %d",
4441 REGNO (x), INSN_UID (insn));
4442 fprintf (gcse_file, " with reg %d\n", REGNO (newreg));
4444 copy_prop_count++;
4445 return true;
4448 return false;
4451 /* LIBCALL_SP is a zero-terminated array of insns at the end of a libcall;
4452 their REG_EQUAL notes need updating to reflect that OLDREG has been
4453 replaced with NEWVAL in INSN. Return true if all substitutions could
4454 be made. */
4455 static bool
4456 adjust_libcall_notes (oldreg, newval, insn, libcall_sp)
4457 rtx oldreg, newval, insn, *libcall_sp;
4459 rtx end;
4461 while ((end = *libcall_sp++))
4463 rtx note = find_reg_equal_equiv_note (end);
4465 if (! note)
4466 continue;
4468 if (REG_P (newval))
4470 if (reg_set_between_p (newval, PREV_INSN (insn), end))
4474 note = find_reg_equal_equiv_note (end);
4475 if (! note)
4476 continue;
4477 if (reg_mentioned_p (newval, XEXP (note, 0)))
4478 return false;
4480 while ((end = *libcall_sp++));
4481 return true;
4484 XEXP (note, 0) = replace_rtx (XEXP (note, 0), oldreg, newval);
4485 insn = end;
4487 return true;
4490 #define MAX_NESTED_LIBCALLS 9
4492 static void
4493 local_cprop_pass (alter_jumps)
4494 int alter_jumps;
4496 rtx insn;
4497 struct reg_use *reg_used;
4498 rtx libcall_stack[MAX_NESTED_LIBCALLS + 1], *libcall_sp;
4499 bool changed = false;
4501 cselib_init ();
4502 libcall_sp = &libcall_stack[MAX_NESTED_LIBCALLS];
4503 *libcall_sp = 0;
4504 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4506 if (INSN_P (insn))
4508 rtx note = find_reg_note (insn, REG_LIBCALL, NULL_RTX);
4510 if (note)
4512 if (libcall_sp == libcall_stack)
4513 abort ();
4514 *--libcall_sp = XEXP (note, 0);
4516 note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
4517 if (note)
4518 libcall_sp++;
4519 note = find_reg_equal_equiv_note (insn);
4522 reg_use_count = 0;
4523 note_uses (&PATTERN (insn), local_cprop_find_used_regs, NULL);
4524 if (note)
4525 local_cprop_find_used_regs (&XEXP (note, 0), NULL);
4527 for (reg_used = &reg_use_table[0]; reg_use_count > 0;
4528 reg_used++, reg_use_count--)
4529 if (do_local_cprop (reg_used->reg_rtx, insn, alter_jumps,
4530 libcall_sp))
4532 changed = true;
4533 break;
4535 if (INSN_DELETED_P (insn))
4536 break;
4538 while (reg_use_count);
4540 cselib_process_insn (insn);
4542 cselib_finish ();
4543 /* Global analysis may get into infinite loops for unreachable blocks. */
4544 if (changed && alter_jumps)
4546 delete_unreachable_blocks ();
4547 free_reg_set_mem ();
4548 alloc_reg_set_mem (max_reg_num ());
4549 compute_sets (get_insns ());
4553 /* Forward propagate copies. This includes copies and constants. Return
4554 nonzero if a change was made. */
4556 static int
4557 cprop (alter_jumps)
4558 int alter_jumps;
4560 int changed;
4561 basic_block bb;
4562 rtx insn;
4564 /* Note we start at block 1. */
4565 if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
4567 if (gcse_file != NULL)
4568 fprintf (gcse_file, "\n");
4569 return 0;
4572 changed = 0;
4573 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb->next_bb, EXIT_BLOCK_PTR, next_bb)
4575 /* Reset tables used to keep track of what's still valid [since the
4576 start of the block]. */
4577 reset_opr_set_tables ();
4579 for (insn = bb->head;
4580 insn != NULL && insn != NEXT_INSN (bb->end);
4581 insn = NEXT_INSN (insn))
4582 if (INSN_P (insn))
4584 changed |= cprop_insn (insn, alter_jumps);
4586 /* Keep track of everything modified by this insn. */
4587 /* ??? Need to be careful w.r.t. mods done to INSN. Don't
4588 call mark_oprs_set if we turned the insn into a NOTE. */
4589 if (GET_CODE (insn) != NOTE)
4590 mark_oprs_set (insn);
4594 if (gcse_file != NULL)
4595 fprintf (gcse_file, "\n");
4597 return changed;
4600 /* Similar to get_condition, only the resulting condition must be
4601 valid at JUMP, instead of at EARLIEST.
4603 This differs from noce_get_condition in ifcvt.c in that we prefer not to
4604 settle for the condition variable in the jump instruction being integral.
4605 We prefer to be able to record the value of a user variable, rather than
4606 the value of a temporary used in a condition. This could be solved by
4607 recording the value of *every* register scanned by canonicalize_condition,
4608 but this would require some code reorganization. */
static rtx
4611 fis_get_condition (jump)
4612 rtx jump;
4614 rtx cond, set, tmp, insn, earliest;
4615 bool reverse;
4617 if (! any_condjump_p (jump))
4618 return NULL_RTX;
4620 set = pc_set (jump);
4621 cond = XEXP (SET_SRC (set), 0);
4623 /* If this branches to JUMP_LABEL when the condition is false,
4624 reverse the condition. */
4625 reverse = (GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
4626 && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (jump));
4628 /* Use canonicalize_condition to do the dirty work of manipulating
4629 MODE_CC values and COMPARE rtx codes. */
4630 tmp = canonicalize_condition (jump, cond, reverse, &earliest, NULL_RTX);
4631 if (!tmp)
4632 return NULL_RTX;
4634 /* Verify that the given condition is valid at JUMP by virtue of not
4635 having been modified since EARLIEST. */
4636 for (insn = earliest; insn != jump; insn = NEXT_INSN (insn))
4637 if (INSN_P (insn) && modified_in_p (tmp, insn))
4638 break;
4639 if (insn == jump)
4640 return tmp;
4642 /* The condition was modified. See if we can get a partial result
4643 that doesn't follow all the reversals. Perhaps combine can fold
4644 them together later. */
4645 tmp = XEXP (tmp, 0);
4646 if (!REG_P (tmp) || GET_MODE_CLASS (GET_MODE (tmp)) != MODE_INT)
4647 return NULL_RTX;
4648 tmp = canonicalize_condition (jump, cond, reverse, &earliest, tmp);
4649 if (!tmp)
4650 return NULL_RTX;
4652 /* For sanity's sake, re-validate the new result. */
4653 for (insn = earliest; insn != jump; insn = NEXT_INSN (insn))
4654 if (INSN_P (insn) && modified_in_p (tmp, insn))
4655 return NULL_RTX;
4657 return tmp;
4660 /* Find the implicit sets of a function. An "implicit set" is a constraint
4661 on the value of a variable, implied by a conditional jump. For example,
4662 following "if (x == 2)", the then branch may be optimized as though the
4663 conditional performed an "explicit set", in this example, "x = 2". This
4664 function records the set patterns that are implicit at the start of each
4665 basic block. */
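/* For example (register number arbitrary): for a block whose final jump is
   taken when (eq (reg 100) (const_int 2)) holds, the branch target gets the
   implicit set (set (reg 100) (const_int 2)) recorded in implicit_sets[],
   provided that target has no other predecessors; when the branch is
   instead taken on NE, the fall-through block gets the set.  */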
4667 static void
4668 find_implicit_sets ()
4670 basic_block bb, dest;
4671 unsigned int count;
4672 rtx cond, new;
4674 count = 0;
4675 FOR_EACH_BB (bb)
4676 /* Check for more than one successor. */
4677 if (bb->succ && bb->succ->succ_next)
4679 cond = fis_get_condition (bb->end);
4681 if (cond
4682 && (GET_CODE (cond) == EQ || GET_CODE (cond) == NE)
4683 && GET_CODE (XEXP (cond, 0)) == REG
4684 && REGNO (XEXP (cond, 0)) >= FIRST_PSEUDO_REGISTER
4685 && gcse_constant_p (XEXP (cond, 1)))
4687 dest = GET_CODE (cond) == EQ ? BRANCH_EDGE (bb)->dest
4688 : FALLTHRU_EDGE (bb)->dest;
4690 if (dest && ! dest->pred->pred_next
4691 && dest != EXIT_BLOCK_PTR)
4693 new = gen_rtx_SET (VOIDmode, XEXP (cond, 0),
4694 XEXP (cond, 1));
4695 implicit_sets[dest->index] = new;
4696 if (gcse_file)
4698 fprintf(gcse_file, "Implicit set of reg %d in ",
4699 REGNO (XEXP (cond, 0)));
4700 fprintf(gcse_file, "basic block %d\n", dest->index);
4702 count++;
4707 if (gcse_file)
4708 fprintf (gcse_file, "Found %d implicit sets\n", count);
4711 /* Perform one copy/constant propagation pass.
4712 PASS is the pass count. If CPROP_JUMPS is true, perform constant
4713 propagation into conditional jumps. If BYPASS_JUMPS is true,
4714 perform conditional jump bypassing optimizations. */
4716 static int
4717 one_cprop_pass (pass, cprop_jumps, bypass_jumps)
4718 int pass;
4719 int cprop_jumps;
4720 int bypass_jumps;
4722 int changed = 0;
4724 const_prop_count = 0;
4725 copy_prop_count = 0;
4727 local_cprop_pass (cprop_jumps);
4729 /* Determine implicit sets. */
4730 implicit_sets = (rtx *) xcalloc (last_basic_block, sizeof (rtx));
4731 find_implicit_sets ();
4733 alloc_hash_table (max_cuid, &set_hash_table, 1);
4734 compute_hash_table (&set_hash_table);
4736 /* Free implicit_sets before peak usage. */
4737 free (implicit_sets);
4738 implicit_sets = NULL;
4740 if (gcse_file)
4741 dump_hash_table (gcse_file, "SET", &set_hash_table);
4742 if (set_hash_table.n_elems > 0)
4744 alloc_cprop_mem (last_basic_block, set_hash_table.n_elems);
4745 compute_cprop_data ();
4746 changed = cprop (cprop_jumps);
4747 if (bypass_jumps)
4748 changed |= bypass_conditional_jumps ();
4749 free_cprop_mem ();
4752 free_hash_table (&set_hash_table);
4754 if (gcse_file)
4756 fprintf (gcse_file, "CPROP of %s, pass %d: %d bytes needed, ",
4757 current_function_name, pass, bytes_used);
4758 fprintf (gcse_file, "%d const props, %d copy props\n\n",
4759 const_prop_count, copy_prop_count);
4761 /* Global analysis may get into infinite loops for unreachable blocks. */
4762 if (changed && cprop_jumps)
4763 delete_unreachable_blocks ();
4765 return changed;
4768 /* Bypass conditional jumps. */
4770 /* The value of last_basic_block at the beginning of the jump_bypass
4771 pass. The use of redirect_edge_and_branch_force may introduce new
4772 basic blocks, but the data flow analysis is only valid for basic
4773 block indices less than bypass_last_basic_block. */
4775 static int bypass_last_basic_block;
4777 /* Find a set of REGNO to a constant that is available at the end of basic
4778 block BB. Returns NULL if no such set is found. Based heavily upon
4779 find_avail_set. */
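/* This follows copy chains exactly as find_avail_set does, except that
   availability is tested at the end of block BB (cprop_avout) rather than
   at the start of INSN's block, since jump bypassing asks what value a
   register carries on an edge leaving BB.  */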
4781 static struct expr *
4782 find_bypass_set (regno, bb)
4783 int regno;
4784 int bb;
4786 struct expr *result = 0;
4788 for (;;)
4790 rtx src;
4791 struct expr *set = lookup_set (regno, &set_hash_table);
4793 while (set)
4795 if (TEST_BIT (cprop_avout[bb], set->bitmap_index))
4796 break;
4797 set = next_set (regno, set);
4800 if (set == 0)
4801 break;
4803 if (GET_CODE (set->expr) != SET)
4804 abort ();
4806 src = SET_SRC (set->expr);
4807 if (gcse_constant_p (src))
4808 result = set;
4810 if (GET_CODE (src) != REG)
4811 break;
4813 regno = REGNO (src);
4815 return result;
4819 /* Subroutine of bypass_block that checks whether a pseudo is killed by
4820 any of the instructions inserted on an edge. Jump bypassing places
4821 condition code setters on CFG edges using insert_insn_on_edge. This
4822 function is required to check that our data flow analysis is still
4823 valid prior to commit_edge_insertions. */
4825 static bool
4826 reg_killed_on_edge (reg, e)
4827 rtx reg;
4828 edge e;
4830 rtx insn;
4832 for (insn = e->insns; insn; insn = NEXT_INSN (insn))
4833 if (INSN_P (insn) && reg_set_p (reg, insn))
4834 return true;
4836 return false;
4839 /* Subroutine of bypass_conditional_jumps that attempts to bypass the given
4840 basic block BB which has more than one predecessor. If not NULL, SETCC
4841 is the first instruction of BB, which is immediately followed by JUMP_INSN
4842 JUMP. Otherwise, SETCC is NULL, and JUMP is the first insn of BB.
4843 Returns nonzero if a change was made.
4845 During the jump bypassing pass, we may place copies of SETCC instructions
4846 on CFG edges. The following routine must be careful to pay attention to
4847 these inserted insns when performing its transformations. */
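/* For example (register number arbitrary): if a predecessor P has
   (set (reg 100) (const_int 0)) available at its end, and BB consists only
   of a jump on (eq (reg 100) (const_int 0)), substituting the constant
   folds the condition to a known value and the edge P->BB is redirected
   straight to the corresponding target, bypassing BB entirely.  */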
4849 static int
4850 bypass_block (bb, setcc, jump)
4851 basic_block bb;
4852 rtx setcc, jump;
4854 rtx insn, note;
4855 edge e, enext, edest;
4856 int i, change;
4857 int may_be_loop_header;
4859 insn = (setcc != NULL) ? setcc : jump;
4861 /* Determine set of register uses in INSN. */
4862 reg_use_count = 0;
4863 note_uses (&PATTERN (insn), find_used_regs, NULL);
4864 note = find_reg_equal_equiv_note (insn);
4865 if (note)
4866 find_used_regs (&XEXP (note, 0), NULL);
4868 may_be_loop_header = false;
4869 for (e = bb->pred; e; e = e->pred_next)
4870 if (e->flags & EDGE_DFS_BACK)
4872 may_be_loop_header = true;
4873 break;
4876 change = 0;
4877 for (e = bb->pred; e; e = enext)
4879 enext = e->pred_next;
4880 if (e->flags & EDGE_COMPLEX)
4881 continue;
4883 /* We can't redirect edges from new basic blocks. */
4884 if (e->src->index >= bypass_last_basic_block)
4885 continue;
4887 /* The irreducible loops created by redirecting edges entering the
4888 loop from outside would decrease the effectiveness of some of the following
4889 optimizations, so prevent this. */
4890 if (may_be_loop_header
4891 && !(e->flags & EDGE_DFS_BACK))
4892 continue;
4894 for (i = 0; i < reg_use_count; i++)
4896 struct reg_use *reg_used = &reg_use_table[i];
4897 unsigned int regno = REGNO (reg_used->reg_rtx);
4898 basic_block dest, old_dest;
4899 struct expr *set;
4900 rtx src, new;
4902 if (regno >= max_gcse_regno)
4903 continue;
4905 set = find_bypass_set (regno, e->src->index);
4907 if (! set)
4908 continue;
4910 /* Check the data flow is valid after edge insertions. */
4911 if (e->insns && reg_killed_on_edge (reg_used->reg_rtx, e))
4912 continue;
4914 src = SET_SRC (pc_set (jump));
4916 if (setcc != NULL)
4917 src = simplify_replace_rtx (src,
4918 SET_DEST (PATTERN (setcc)),
4919 SET_SRC (PATTERN (setcc)));
4921 new = simplify_replace_rtx (src, reg_used->reg_rtx,
4922 SET_SRC (set->expr));
4924 /* Jump bypassing may have already placed instructions on
4925 edges of the CFG. We can't bypass an outgoing edge that
4926 has instructions associated with it, as these insns won't
4927 get executed if the incoming edge is redirected. */
4929 if (new == pc_rtx)
4931 edest = FALLTHRU_EDGE (bb);
4932 dest = edest->insns ? NULL : edest->dest;
4934 else if (GET_CODE (new) == LABEL_REF)
4936 dest = BLOCK_FOR_INSN (XEXP (new, 0));
4937 /* Don't bypass edges containing instructions. */
4938 for (edest = bb->succ; edest; edest = edest->succ_next)
4939 if (edest->dest == dest && edest->insns)
4941 dest = NULL;
4942 break;
4945 else
4946 dest = NULL;
4948 old_dest = e->dest;
4949 if (dest != NULL
4950 && dest != old_dest
4951 && dest != EXIT_BLOCK_PTR)
4953 redirect_edge_and_branch_force (e, dest);
4955 /* Copy the register setter to the redirected edge.
4956 Don't copy CC0 setters, as CC0 is dead after jump. */
4957 if (setcc)
4959 rtx pat = PATTERN (setcc);
4960 if (!CC0_P (SET_DEST (pat)))
4961 insert_insn_on_edge (copy_insn (pat), e);
4964 if (gcse_file != NULL)
4966 fprintf (gcse_file, "JUMP-BYPASS: Proved reg %d in jump_insn %d equals constant ",
4967 regno, INSN_UID (jump));
4968 print_rtl (gcse_file, SET_SRC (set->expr));
4969 fprintf (gcse_file, "\nBypass edge from %d->%d to %d\n",
4970 e->src->index, old_dest->index, dest->index);
4972 change = 1;
4973 break;
4977 return change;
4980 /* Find basic blocks with more than one predecessor that only contain a
4981 single conditional jump. If the result of the comparison is known at
4982 compile-time from any incoming edge, redirect that edge to the
4983 appropriate target. Returns nonzero if a change was made.
4985 This function is now mis-named, because we also handle indirect jumps. */
4987 static int
4988 bypass_conditional_jumps ()
4990 basic_block bb;
4991 int changed;
4992 rtx setcc;
4993 rtx insn;
4994 rtx dest;
4996 /* Note we start at block 1. */
4997 if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
4998 return 0;
5000 bypass_last_basic_block = last_basic_block;
5001 mark_dfs_back_edges ();
5003 changed = 0;
5004 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb->next_bb,
5005 EXIT_BLOCK_PTR, next_bb)
5007 /* Check for more than one predecessor. */
5008 if (bb->pred && bb->pred->pred_next)
5010 setcc = NULL_RTX;
5011 for (insn = bb->head;
5012 insn != NULL && insn != NEXT_INSN (bb->end);
5013 insn = NEXT_INSN (insn))
5014 if (GET_CODE (insn) == INSN)
5016 if (setcc)
5017 break;
5018 if (GET_CODE (PATTERN (insn)) != SET)
5019 break;
5021 dest = SET_DEST (PATTERN (insn));
5022 if (REG_P (dest) || CC0_P (dest))
5023 setcc = insn;
5024 else
5025 break;
5027 else if (GET_CODE (insn) == JUMP_INSN)
5029 if ((any_condjump_p (insn) || computed_jump_p (insn))
5030 && onlyjump_p (insn))
5031 changed |= bypass_block (bb, setcc, insn);
5032 break;
5034 else if (INSN_P (insn))
5035 break;
5039 /* If we bypassed any register setting insns, we inserted a
5040 copy on the redirected edge. These need to be committed. */
5041 if (changed)
5042 commit_edge_insertions();
5044 return changed;
5047 /* Compute PRE+LCM working variables. */
5049 /* Local properties of expressions. */
5050 /* Nonzero for expressions that are transparent in the block. */
5051 static sbitmap *transp;
5053 /* Nonzero for expressions that are transparent at the end of the block.
5054 This is only zero for expressions killed by an abnormal critical edge
5055 created by a call. */
5056 static sbitmap *transpout;
5058 /* Nonzero for expressions that are computed (available) in the block. */
5059 static sbitmap *comp;
5061 /* Nonzero for expressions that are locally anticipatable in the block. */
5062 static sbitmap *antloc;
5064 /* Nonzero for expressions where this block is an optimal computation
5065 point. */
5066 static sbitmap *pre_optimal;
5068 /* Nonzero for expressions which are redundant in a particular block. */
5069 static sbitmap *pre_redundant;
5071 /* Nonzero for expressions which should be inserted on a specific edge. */
5072 static sbitmap *pre_insert_map;
5074 /* Nonzero for expressions which should be deleted in a specific block. */
5075 static sbitmap *pre_delete_map;
5077 /* Contains the edge_list returned by pre_edge_lcm. */
5078 static struct edge_list *edge_list;
5080 /* Redundant insns. */
5081 static sbitmap pre_redundant_insns;
5083 /* Allocate vars used for PRE analysis. */
5085 static void
5086 alloc_pre_mem (n_blocks, n_exprs)
5087 int n_blocks, n_exprs;
5089 transp = sbitmap_vector_alloc (n_blocks, n_exprs);
5090 comp = sbitmap_vector_alloc (n_blocks, n_exprs);
5091 antloc = sbitmap_vector_alloc (n_blocks, n_exprs);
5093 pre_optimal = NULL;
5094 pre_redundant = NULL;
5095 pre_insert_map = NULL;
5096 pre_delete_map = NULL;
5097 ae_in = NULL;
5098 ae_out = NULL;
5099 ae_kill = sbitmap_vector_alloc (n_blocks, n_exprs);
5101 /* pre_insert and pre_delete are allocated later. */
5104 /* Free vars used for PRE analysis. */
5106 static void
5107 free_pre_mem ()
5109 sbitmap_vector_free (transp);
5110 sbitmap_vector_free (comp);
5112 /* ANTLOC and AE_KILL are freed just after pre_lcm finishes. */
5114 if (pre_optimal)
5115 sbitmap_vector_free (pre_optimal);
5116 if (pre_redundant)
5117 sbitmap_vector_free (pre_redundant);
5118 if (pre_insert_map)
5119 sbitmap_vector_free (pre_insert_map);
5120 if (pre_delete_map)
5121 sbitmap_vector_free (pre_delete_map);
5122 if (ae_in)
5123 sbitmap_vector_free (ae_in);
5124 if (ae_out)
5125 sbitmap_vector_free (ae_out);
5127 transp = comp = NULL;
5128 pre_optimal = pre_redundant = pre_insert_map = pre_delete_map = NULL;
5129 ae_in = ae_out = NULL;
5132 /* Top level routine to do the dataflow analysis needed by PRE. */
5134 static void
5135 compute_pre_data ()
5137 sbitmap trapping_expr;
5138 basic_block bb;
5139 unsigned int ui;
5141 compute_local_properties (transp, comp, antloc, &expr_hash_table);
5142 sbitmap_vector_zero (ae_kill, last_basic_block);
5144 /* Collect expressions which might trap. */
5145 trapping_expr = sbitmap_alloc (expr_hash_table.n_elems);
5146 sbitmap_zero (trapping_expr);
5147 for (ui = 0; ui < expr_hash_table.size; ui++)
5149 struct expr *e;
5150 for (e = expr_hash_table.table[ui]; e != NULL; e = e->next_same_hash)
5151 if (may_trap_p (e->expr))
5152 SET_BIT (trapping_expr, e->bitmap_index);
5155 /* Compute ae_kill for each basic block using:
5157 ~(TRANSP | COMP)
5159 This is significantly faster than compute_ae_kill. */
5161 FOR_EACH_BB (bb)
5163 edge e;
5165 /* If the current block is the destination of an abnormal edge, we
5166 kill all trapping expressions because we won't be able to properly
5167 place the instruction on the edge. So make them neither
5168 anticipatable nor transparent. This is fairly conservative. */
5169 for (e = bb->pred; e ; e = e->pred_next)
5170 if (e->flags & EDGE_ABNORMAL)
5172 sbitmap_difference (antloc[bb->index], antloc[bb->index], trapping_expr);
5173 sbitmap_difference (transp[bb->index], transp[bb->index], trapping_expr);
5174 break;
5177 sbitmap_a_or_b (ae_kill[bb->index], transp[bb->index], comp[bb->index]);
5178 sbitmap_not (ae_kill[bb->index], ae_kill[bb->index]);
5181 edge_list = pre_edge_lcm (gcse_file, expr_hash_table.n_elems, transp, comp, antloc,
5182 ae_kill, &pre_insert_map, &pre_delete_map);
5183 sbitmap_vector_free (antloc);
5184 antloc = NULL;
5185 sbitmap_vector_free (ae_kill);
5186 ae_kill = NULL;
5187 sbitmap_free (trapping_expr);
5190 /* PRE utilities */
5192 /* Return nonzero if an occurrence of expression EXPR in OCCR_BB would reach
5193 block BB.
5195 VISITED is a pointer to a working buffer for tracking which BB's have
5196 been visited. It is NULL for the top-level call.
5198 We treat reaching expressions that go through blocks containing the same
5199 reaching expression as "not reaching". E.g. if EXPR is generated in blocks
5200 2 and 3, INSN is in block 4, and 2->3->4, we treat the expression in block
5201 2 as not reaching. The intent is to improve the probability of finding
5202 only one reaching expression and to reduce register lifetimes by picking
5203 the closest such expression. */
5205 static int
5206 pre_expr_reaches_here_p_work (occr_bb, expr, bb, visited)
5207 basic_block occr_bb;
5208 struct expr *expr;
5209 basic_block bb;
5210 char *visited;
5212 edge pred;
5214 for (pred = bb->pred; pred != NULL; pred = pred->pred_next)
5216 basic_block pred_bb = pred->src;
5218 if (pred->src == ENTRY_BLOCK_PTR
5219 /* Has this predecessor already been visited? */
5220 || visited[pred_bb->index])
5221 ;/* Nothing to do. */
5223 /* Does this predecessor generate this expression? */
5224 else if (TEST_BIT (comp[pred_bb->index], expr->bitmap_index))
5226 /* Is this the occurrence we're looking for?
5227 Note that there's only one generating occurrence per block
5228 so we just need to check the block number. */
5229 if (occr_bb == pred_bb)
5230 return 1;
5232 visited[pred_bb->index] = 1;
5234 /* Ignore this predecessor if it kills the expression. */
5235 else if (! TEST_BIT (transp[pred_bb->index], expr->bitmap_index))
5236 visited[pred_bb->index] = 1;
5238 /* Neither gen nor kill. */
5239 else
5241 visited[pred_bb->index] = 1;
5242 if (pre_expr_reaches_here_p_work (occr_bb, expr, pred_bb, visited))
5243 return 1;
5247 /* All paths have been checked. */
5248 return 0;
5251 /* The wrapper for pre_expr_reaches_here_p_work that ensures that any
5252 memory allocated for that function is returned. */
5254 static int
5255 pre_expr_reaches_here_p (occr_bb, expr, bb)
5256 basic_block occr_bb;
5257 struct expr *expr;
5258 basic_block bb;
5260 int rval;
5261 char *visited = (char *) xcalloc (last_basic_block, 1);
5263 rval = pre_expr_reaches_here_p_work (occr_bb, expr, bb, visited);
5265 free (visited);
5266 return rval;
5270 /* Given an expr, generate RTL which we can insert at the end of a BB,
5271 or on an edge. Set the block number of any insns generated to
5272 the value of BB. */
5274 static rtx
5275 process_insert_insn (expr)
5276 struct expr *expr;
5278 rtx reg = expr->reaching_reg;
5279 rtx exp = copy_rtx (expr->expr);
5280 rtx pat;
5282 start_sequence ();
5284 /* If the expression is something that's an operand, like a constant,
5285 just copy it to a register. */
5286 if (general_operand (exp, GET_MODE (reg)))
5287 emit_move_insn (reg, exp);
5289 /* Otherwise, make a new insn to compute this expression and make sure the
5290 insn will be recognized (this also adds any needed CLOBBERs). Copy the
5291 expression to make sure we don't have any sharing issues. */
5292 else if (insn_invalid_p (emit_insn (gen_rtx_SET (VOIDmode, reg, exp))))
5293 abort ();
5295 pat = get_insns ();
5296 end_sequence ();
5298 return pat;
5301 /* Add EXPR to the end of basic block BB.
5303 This is used by both the PRE and code hoisting.
5305 For PRE, we want to verify that the expr is either transparent
5306 or locally anticipatable in the target block. This check makes
5307 no sense for code hoisting. */
5309 static void
5310 insert_insn_end_bb (expr, bb, pre)
5311 struct expr *expr;
5312 basic_block bb;
5313 int pre;
5315 rtx insn = bb->end;
5316 rtx new_insn;
5317 rtx reg = expr->reaching_reg;
5318 int regno = REGNO (reg);
5319 rtx pat, pat_end;
5321 pat = process_insert_insn (expr);
5322 if (pat == NULL_RTX || ! INSN_P (pat))
5323 abort ();
5325 pat_end = pat;
5326 while (NEXT_INSN (pat_end) != NULL_RTX)
5327 pat_end = NEXT_INSN (pat_end);
5329 /* If the last insn is a jump, insert EXPR in front [taking care to
5330 handle cc0, etc. properly]. Similarly we need to take care of trapping
5331 instructions in the presence of non-call exceptions. */
5333 if (GET_CODE (insn) == JUMP_INSN
5334 || (GET_CODE (insn) == INSN
5335 && (bb->succ->succ_next || (bb->succ->flags & EDGE_ABNORMAL))))
5337 #ifdef HAVE_cc0
5338 rtx note;
5339 #endif
5340 /* It should always be the case that we can put these instructions
5341 anywhere in the basic block when performing PRE optimizations.
5342 Check this. */
5343 if (GET_CODE (insn) == INSN && pre
5344 && !TEST_BIT (antloc[bb->index], expr->bitmap_index)
5345 && !TEST_BIT (transp[bb->index], expr->bitmap_index))
5346 abort ();
5348 /* If this is a jump table, then we can't insert stuff here. Since
5349 we know the previous real insn must be the tablejump, we insert
5350 the new instruction just before the tablejump. */
5351 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
5352 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
5353 insn = prev_real_insn (insn);
5355 #ifdef HAVE_cc0
5356 /* FIXME: 'twould be nice to call prev_cc0_setter here but it aborts
5357 if cc0 isn't set. */
5358 note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
5359 if (note)
5360 insn = XEXP (note, 0);
5361 else
5363 rtx maybe_cc0_setter = prev_nonnote_insn (insn);
5364 if (maybe_cc0_setter
5365 && INSN_P (maybe_cc0_setter)
5366 && sets_cc0_p (PATTERN (maybe_cc0_setter)))
5367 insn = maybe_cc0_setter;
5369 #endif
5370 /* FIXME: What if something in cc0/jump uses value set in new insn? */
5371 new_insn = emit_insn_before (pat, insn);
5374 /* Likewise if the last insn is a call, as will happen in the presence
5375 of exception handling. */
5376 else if (GET_CODE (insn) == CALL_INSN
5377 && (bb->succ->succ_next || (bb->succ->flags & EDGE_ABNORMAL)))
5379 /* Keeping in mind SMALL_REGISTER_CLASSES and parameters in registers,
5380 we search backward and place the instructions before the first
5381 parameter is loaded. Do this for everyone for consistency and a
5382 presumption that we'll get better code elsewhere as well.
5384 It should always be the case that we can put these instructions
5385 anywhere in the basic block when performing PRE optimizations.
5386 Check this. */
5388 if (pre
5389 && !TEST_BIT (antloc[bb->index], expr->bitmap_index)
5390 && !TEST_BIT (transp[bb->index], expr->bitmap_index))
5391 abort ();
5393 /* Since different machines initialize their parameter registers
5394 in different orders, assume nothing. Collect the set of all
5395 parameter registers. */
5396 insn = find_first_parameter_load (insn, bb->head);
5398 /* If we found all the parameter loads, then we want to insert
5399 before the first parameter load.
5401 If we did not find all the parameter loads, then we might have
5402 stopped on the head of the block, which could be a CODE_LABEL.
5403 If we inserted before the CODE_LABEL, then we would be putting
5404 the insn in the wrong basic block. In that case, put the insn
5405 after the CODE_LABEL. Also, respect NOTE_INSN_BASIC_BLOCK. */
5406 while (GET_CODE (insn) == CODE_LABEL
5407 || NOTE_INSN_BASIC_BLOCK_P (insn))
5408 insn = NEXT_INSN (insn);
5410 new_insn = emit_insn_before (pat, insn);
5412 else
5413 new_insn = emit_insn_after (pat, insn);
5415 while (1)
5417 if (INSN_P (pat))
5419 add_label_notes (PATTERN (pat), new_insn);
5420 note_stores (PATTERN (pat), record_set_info, pat);
5422 if (pat == pat_end)
5423 break;
5424 pat = NEXT_INSN (pat);
5427 gcse_create_count++;
5429 if (gcse_file)
5431 fprintf (gcse_file, "PRE/HOIST: end of bb %d, insn %d, ",
5432 bb->index, INSN_UID (new_insn));
5433 fprintf (gcse_file, "copying expression %d to reg %d\n",
5434 expr->bitmap_index, regno);
5438 /* Insert partially redundant expressions on edges in the CFG to make
5439 the expressions fully redundant. */
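/* For example, if pre_edge_lcm decided that expression E must be inserted
   on edge (A,B) to make a later occurrence fully redundant, the bit for E
   is set in pre_insert_map for that edge; we then emit "reaching_reg = E"
   either on the edge itself or, for abnormal edges, at the end of the
   predecessor block A via insert_insn_end_bb.  */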
5441 static int
5442 pre_edge_insert (edge_list, index_map)
5443 struct edge_list *edge_list;
5444 struct expr **index_map;
5446 int e, i, j, num_edges, set_size, did_insert = 0;
5447 sbitmap *inserted;
5449 /* Where PRE_INSERT_MAP is nonzero, we add the expression on that edge
5450 if it reaches any of the deleted expressions. */
5452 set_size = pre_insert_map[0]->size;
5453 num_edges = NUM_EDGES (edge_list);
5454 inserted = sbitmap_vector_alloc (num_edges, expr_hash_table.n_elems);
5455 sbitmap_vector_zero (inserted, num_edges);
5457 for (e = 0; e < num_edges; e++)
5459 int indx;
5460 basic_block bb = INDEX_EDGE_PRED_BB (edge_list, e);
5462 for (i = indx = 0; i < set_size; i++, indx += SBITMAP_ELT_BITS)
5464 SBITMAP_ELT_TYPE insert = pre_insert_map[e]->elms[i];
5466 for (j = indx; insert && j < (int) expr_hash_table.n_elems; j++, insert >>= 1)
5467 if ((insert & 1) != 0 && index_map[j]->reaching_reg != NULL_RTX)
5469 struct expr *expr = index_map[j];
5470 struct occr *occr;
5472 /* Now look at each deleted occurrence of this expression. */
5473 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
5475 if (! occr->deleted_p)
5476 continue;
5478 /* Insert this expression on this edge if it would
5479 reach the deleted occurrence in BB. */
5480 if (!TEST_BIT (inserted[e], j))
5482 rtx insn;
5483 edge eg = INDEX_EDGE (edge_list, e);
5485 /* We can't insert anything on an abnormal and
5486 critical edge, so we insert the insn at the end of
5487 the previous block. There are several alternatives
5488 detailed in Morgan's book, P277 (sec 10.5), for
5489 handling this situation. This one is easiest for
5490 now. */
5492 if ((eg->flags & EDGE_ABNORMAL) == EDGE_ABNORMAL)
5493 insert_insn_end_bb (index_map[j], bb, 0);
5494 else
5496 insn = process_insert_insn (index_map[j]);
5497 insert_insn_on_edge (insn, eg);
5500 if (gcse_file)
5502 fprintf (gcse_file, "PRE/HOIST: edge (%d,%d), ",
5503 bb->index,
5504 INDEX_EDGE_SUCC_BB (edge_list, e)->index);
5505 fprintf (gcse_file, "copy expression %d\n",
5506 expr->bitmap_index);
5509 update_ld_motion_stores (expr);
5510 SET_BIT (inserted[e], j);
5511 did_insert = 1;
5512 gcse_create_count++;
5519 sbitmap_vector_free (inserted);
5520 return did_insert;
5523 /* Copy the result of INSN to REG. INDX is the expression number. */
5525 static void
5526 pre_insert_copy_insn (expr, insn)
5527 struct expr *expr;
5528 rtx insn;
5530 rtx reg = expr->reaching_reg;
5531 int regno = REGNO (reg);
5532 int indx = expr->bitmap_index;
5533 rtx set = single_set (insn);
5534 rtx new_insn;
5536 if (!set)
5537 abort ();
5539 new_insn = emit_insn_after (gen_move_insn (reg, copy_rtx (SET_DEST (set))), insn);
5541 /* Keep register set table up to date. */
5542 record_one_set (regno, new_insn);
5544 gcse_create_count++;
5546 if (gcse_file)
5547 fprintf (gcse_file,
5548 "PRE: bb %d, insn %d, copy expression %d in insn %d to reg %d\n",
5549 BLOCK_NUM (insn), INSN_UID (new_insn), indx,
5550 INSN_UID (insn), regno);
5551 update_ld_motion_stores (expr);
5554 /* Copy available expressions that reach the redundant expression
5555 to `reaching_reg'. */
5557 static void
5558 pre_insert_copies ()
5560 unsigned int i;
5561 struct expr *expr;
5562 struct occr *occr;
5563 struct occr *avail;
5565 /* For each available expression in the table, copy the result to
5566 `reaching_reg' if the expression reaches a deleted one.
5568 ??? The current algorithm is rather brute force.
5569 Need to do some profiling. */
5571 for (i = 0; i < expr_hash_table.size; i++)
5572 for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash)
5574 /* If the basic block isn't reachable, PPOUT will be TRUE. However,
5575 we don't want to insert a copy here because the expression may not
5576 really be redundant. So only insert an insn if the expression was
5577 deleted. This test also avoids further processing if the
5578 expression wasn't deleted anywhere. */
5579 if (expr->reaching_reg == NULL)
5580 continue;
5582 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
5584 if (! occr->deleted_p)
5585 continue;
5587 for (avail = expr->avail_occr; avail != NULL; avail = avail->next)
5589 rtx insn = avail->insn;
5591 /* No need to handle this one if handled already. */
5592 if (avail->copied_p)
5593 continue;
5595 /* Don't handle this one if it's a redundant one. */
5596 if (TEST_BIT (pre_redundant_insns, INSN_CUID (insn)))
5597 continue;
5599 /* Or if the expression doesn't reach the deleted one. */
5600 if (! pre_expr_reaches_here_p (BLOCK_FOR_INSN (avail->insn),
5601 expr,
5602 BLOCK_FOR_INSN (occr->insn)))
5603 continue;
5605 /* Copy the result of avail to reaching_reg. */
5606 pre_insert_copy_insn (expr, insn);
5607 avail->copied_p = 1;
5613 /* Emit move from SRC to DEST noting the equivalence with expression computed
5614 in INSN. */
5615 static rtx
5616 gcse_emit_move_after (src, dest, insn)
5617 rtx src, dest, insn;
5619 rtx new;
5620 rtx set = single_set (insn), set2;
5621 rtx note;
5622 rtx eqv;
5624 /* This should never fail since we're creating a reg->reg copy
5625 we've verified to be valid. */
5627 new = emit_insn_after (gen_move_insn (dest, src), insn);
5629 /* Note the equivalence for local CSE pass. */
5630 set2 = single_set (new);
5631 if (!set2 || !rtx_equal_p (SET_DEST (set2), dest))
5632 return new;
5633 if ((note = find_reg_equal_equiv_note (insn)))
5634 eqv = XEXP (note, 0);
5635 else
5636 eqv = SET_SRC (set);
5638 set_unique_reg_note (new, REG_EQUAL, copy_insn_1 (eqv));
5640 return new;
5643 /* Delete redundant computations.
5644 Deletion is done by changing the insn to copy the `reaching_reg' of
5645 the expression into the result of the SET. It is left to later passes
5646 (cprop, cse2, flow, combine, regmove) to propagate the copy or eliminate it.
5648 Returns nonzero if a change is made. */
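/* For example (register numbers arbitrary): an occurrence
   (set (reg 105) (plus (reg 100) (const_int 4))) whose bit is set in
   pre_delete_map is replaced by the copy (set (reg 105) (reg 200)), where
   (reg 200) is expr->reaching_reg, the pseudo that holds the value of the
   expression on every path.  */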
5650 static int
5651 pre_delete ()
5653 unsigned int i;
5654 int changed;
5655 struct expr *expr;
5656 struct occr *occr;
5658 changed = 0;
5659 for (i = 0; i < expr_hash_table.size; i++)
5660 for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash)
5662 int indx = expr->bitmap_index;
5664 /* We only need to search antic_occr since we require
5665 ANTLOC != 0. */
5667 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
5669 rtx insn = occr->insn;
5670 rtx set;
5671 basic_block bb = BLOCK_FOR_INSN (insn);
5673 if (TEST_BIT (pre_delete_map[bb->index], indx))
5675 set = single_set (insn);
5676 if (! set)
5677 abort ();
5679 /* Create a pseudo-reg to store the result of reaching
5680 expressions into. Get the mode for the new pseudo from
5681 the mode of the original destination pseudo. */
5682 if (expr->reaching_reg == NULL)
5683 expr->reaching_reg
5684 = gen_reg_rtx (GET_MODE (SET_DEST (set)));
5686 gcse_emit_move_after (expr->reaching_reg, SET_DEST (set), insn);
5687 delete_insn (insn);
5688 occr->deleted_p = 1;
5689 SET_BIT (pre_redundant_insns, INSN_CUID (insn));
5690 changed = 1;
5691 gcse_subst_count++;
5693 if (gcse_file)
5695 fprintf (gcse_file,
5696 "PRE: redundant insn %d (expression %d) in ",
5697 INSN_UID (insn), indx);
5698 fprintf (gcse_file, "bb %d, reaching reg is %d\n",
5699 bb->index, REGNO (expr->reaching_reg));
5705 return changed;
5708 /* Perform GCSE optimizations using PRE.
5709 This is called by one_pre_gcse_pass after all the dataflow analysis
5710 has been done.
5712 This is based on the original Morel-Renvoise paper, Fred Chow's thesis, and
5713 lazy code motion from Knoop, Ruthing and Steffen as described in Advanced
5714 Compiler Design and Implementation.
5716 ??? A new pseudo reg is created to hold the reaching expression. The nice
5717 thing about the classical approach is that it would try to use an existing
5718 reg. If the register can't be adequately optimized [i.e. we introduce
5719 reload problems], one could add a pass here to propagate the new register
5720 through the block.
5722 ??? We don't handle single sets in PARALLELs because we're [currently] not
5723 able to copy the rest of the parallel when we insert copies to create full
5724 redundancies from partial redundancies. However, there's no reason why we
5725 can't handle PARALLELs in the cases where there are no partial
5726 redundancies. */
5728 static int
5729 pre_gcse ()
5731 unsigned int i;
5732 int did_insert, changed;
5733 struct expr **index_map;
5734 struct expr *expr;
5736 /* Compute a mapping from expression number (`bitmap_index') to
5737 hash table entry. */
5739 index_map = (struct expr **) xcalloc (expr_hash_table.n_elems, sizeof (struct expr *));
5740 for (i = 0; i < expr_hash_table.size; i++)
5741 for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash)
5742 index_map[expr->bitmap_index] = expr;
5744 /* Reset bitmap used to track which insns are redundant. */
5745 pre_redundant_insns = sbitmap_alloc (max_cuid);
5746 sbitmap_zero (pre_redundant_insns);
5748 /* Delete the redundant insns first so that
5749 - we know what register to use for the new insns and for the other
5750 ones with reaching expressions
5751 - we know which insns are redundant when we go to create copies */
5753 changed = pre_delete ();
5755 did_insert = pre_edge_insert (edge_list, index_map);
5757 /* In other places with reaching expressions, copy the expression to the
5758 specially allocated pseudo-reg that reaches the redundant expr. */
5759 pre_insert_copies ();
5760 if (did_insert)
5762 commit_edge_insertions ();
5763 changed = 1;
5766 free (index_map);
5767 sbitmap_free (pre_redundant_insns);
5768 return changed;
5771 /* Top level routine to perform one PRE GCSE pass.
5773 Return nonzero if a change was made. */
5775 static int
5776 one_pre_gcse_pass (pass)
5777 int pass;
5779 int changed = 0;
5781 gcse_subst_count = 0;
5782 gcse_create_count = 0;
5784 alloc_hash_table (max_cuid, &expr_hash_table, 0);
5785 add_noreturn_fake_exit_edges ();
5786 if (flag_gcse_lm)
5787 compute_ld_motion_mems ();
5789 compute_hash_table (&expr_hash_table);
5790 trim_ld_motion_mems ();
5791 if (gcse_file)
5792 dump_hash_table (gcse_file, "Expression", &expr_hash_table);
5794 if (expr_hash_table.n_elems > 0)
5796 alloc_pre_mem (last_basic_block, expr_hash_table.n_elems);
5797 compute_pre_data ();
5798 changed |= pre_gcse ();
5799 free_edge_list (edge_list);
5800 free_pre_mem ();
5803 free_ldst_mems ();
5804 remove_fake_edges ();
5805 free_hash_table (&expr_hash_table);
5807 if (gcse_file)
5809 fprintf (gcse_file, "\nPRE GCSE of %s, pass %d: %d bytes needed, ",
5810 current_function_name, pass, bytes_used);
5811 fprintf (gcse_file, "%d substs, %d insns created\n",
5812 gcse_subst_count, gcse_create_count);
5815 return changed;
5818 /* If X contains any LABEL_REF's, add REG_LABEL notes for them to INSN.
5819 If notes are added to an insn which references a CODE_LABEL, the
5820 LABEL_NUSES count is incremented. We have to add REG_LABEL notes,
5821 because the following loop optimization pass requires them. */
5823 /* ??? This is very similar to the loop.c add_label_notes function. We
5824 could probably share code here. */
5826 /* ??? If there was a jump optimization pass after gcse and before loop,
5827 then we would not need to do this here, because jump would add the
5828 necessary REG_LABEL notes. */
5830 static void
5831 add_label_notes (x, insn)
5832 rtx x;
5833 rtx insn;
5835 enum rtx_code code = GET_CODE (x);
5836 int i, j;
5837 const char *fmt;
5839 if (code == LABEL_REF && !LABEL_REF_NONLOCAL_P (x))
5841 /* This code used to ignore labels that referred to dispatch tables to
5842 avoid flow generating (slightly) worse code.
5844 We no longer ignore such label references (see LABEL_REF handling in
5845 mark_jump_label for additional information). */
5847 REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_LABEL, XEXP (x, 0),
5848 REG_NOTES (insn));
5849 if (LABEL_P (XEXP (x, 0)))
5850 LABEL_NUSES (XEXP (x, 0))++;
5851 return;
5854 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
5856 if (fmt[i] == 'e')
5857 add_label_notes (XEXP (x, i), insn);
5858 else if (fmt[i] == 'E')
5859 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
5860 add_label_notes (XVECEXP (x, i, j), insn);
5864 /* Compute transparent outgoing information for each block.
5866 An expression is transparent to an edge unless it is killed by
5867 the edge itself. This can only happen with abnormal control flow,
5868 when the edge is traversed through a call. This happens with
5869 non-local labels and exceptions.
5871 This would not be necessary if we split the edge. While this is
5872 normally impossible for abnormal critical edges, with some effort
5873 it should be possible with exception handling, since we still have
5874 control over which handler should be invoked. But due to increased
5875 EH table sizes, this may not be worthwhile. */
5877 static void
5878 compute_transpout ()
5880 basic_block bb;
5881 unsigned int i;
5882 struct expr *expr;
5884 sbitmap_vector_ones (transpout, last_basic_block);
5886 FOR_EACH_BB (bb)
5888 /* Note that flow inserted a nop at the end of basic blocks that
5889 end in call instructions for reasons other than abnormal
5890 control flow. */
5891 if (GET_CODE (bb->end) != CALL_INSN)
5892 continue;
5894 for (i = 0; i < expr_hash_table.size; i++)
5895 for (expr = expr_hash_table.table[i]; expr ; expr = expr->next_same_hash)
5896 if (GET_CODE (expr->expr) == MEM)
5898 if (GET_CODE (XEXP (expr->expr, 0)) == SYMBOL_REF
5899 && CONSTANT_POOL_ADDRESS_P (XEXP (expr->expr, 0)))
5900 continue;
5902 /* ??? Optimally, we would use interprocedural alias
5903 analysis to determine if this mem is actually killed
5904 by this call. */
5905 RESET_BIT (transpout[bb->index], expr->bitmap_index);
5910 /* Removal of useless null pointer checks */
5912 /* Called via note_stores. X is set by SETTER. If X is a register we must
5913 invalidate nonnull_local and set nonnull_killed. DATA is really a
5914 `null_pointer_info *'.
5916 We ignore hard registers. */
5918 static void
5919 invalidate_nonnull_info (x, setter, data)
5920 rtx x;
5921 rtx setter ATTRIBUTE_UNUSED;
5922 void *data;
5924 unsigned int regno;
5925 struct null_pointer_info *npi = (struct null_pointer_info *) data;
5927 while (GET_CODE (x) == SUBREG)
5928 x = SUBREG_REG (x);
5930 /* Ignore anything that is not a register or is a hard register. */
5931 if (GET_CODE (x) != REG
5932 || REGNO (x) < npi->min_reg
5933 || REGNO (x) >= npi->max_reg)
5934 return;
5936 regno = REGNO (x) - npi->min_reg;
5938 RESET_BIT (npi->nonnull_local[npi->current_block->index], regno);
5939 SET_BIT (npi->nonnull_killed[npi->current_block->index], regno);
5942 /* Do null-pointer check elimination for the registers indicated in
5943 NPI. NONNULL_AVIN and NONNULL_AVOUT are pre-allocated sbitmaps;
5944 they are not our responsibility to free. */
5946 static int
5947 delete_null_pointer_checks_1 (block_reg, nonnull_avin,
5948 nonnull_avout, npi)
5949 unsigned int *block_reg;
5950 sbitmap *nonnull_avin;
5951 sbitmap *nonnull_avout;
5952 struct null_pointer_info *npi;
5954 basic_block bb, current_block;
5955 sbitmap *nonnull_local = npi->nonnull_local;
5956 sbitmap *nonnull_killed = npi->nonnull_killed;
5957 int something_changed = 0;
5959 /* Compute local properties, nonnull and killed. A register will have
5960 the nonnull property if at the end of the current block its value is
5961 known to be nonnull. The killed property indicates that somewhere in
5962 the block any information we had about the register is killed.
5964 Note that a register can have both properties in a single block. That
5965 indicates that it's killed, then later in the block a new value is
5966 computed. */
5967 sbitmap_vector_zero (nonnull_local, last_basic_block);
5968 sbitmap_vector_zero (nonnull_killed, last_basic_block);
5970 FOR_EACH_BB (current_block)
5972 rtx insn, stop_insn;
5974 /* Set the current block for invalidate_nonnull_info. */
5975 npi->current_block = current_block;
5977 /* Scan each insn in the basic block looking for memory references and
5978 register sets. */
5979 stop_insn = NEXT_INSN (current_block->end);
5980 for (insn = current_block->head;
5981 insn != stop_insn;
5982 insn = NEXT_INSN (insn))
5984 rtx set;
5985 rtx reg;
5987 /* Ignore anything that is not a normal insn. */
5988 if (! INSN_P (insn))
5989 continue;
5991 /* Basically ignore anything that is not a simple SET. We do have
5992 to make sure to invalidate nonnull_local and set nonnull_killed
5993 for such insns though. */
5994 set = single_set (insn);
5995 if (!set)
5997 note_stores (PATTERN (insn), invalidate_nonnull_info, npi);
5998 continue;
6001 /* See if we've got a usable memory load. We handle it first
6002 in case it uses its address register as a dest (which kills
6003 the nonnull property). */
6004 if (GET_CODE (SET_SRC (set)) == MEM
6005 && GET_CODE ((reg = XEXP (SET_SRC (set), 0))) == REG
6006 && REGNO (reg) >= npi->min_reg
6007 && REGNO (reg) < npi->max_reg)
6008 SET_BIT (nonnull_local[current_block->index],
6009 REGNO (reg) - npi->min_reg);
6011 /* Now invalidate stuff clobbered by this insn. */
6012 note_stores (PATTERN (insn), invalidate_nonnull_info, npi);
6014 /* And handle stores; we do these last since any sets in INSN can
6015 not kill the nonnull property if it is derived from a MEM
6016 appearing in a SET_DEST. */
6017 if (GET_CODE (SET_DEST (set)) == MEM
6018 && GET_CODE ((reg = XEXP (SET_DEST (set), 0))) == REG
6019 && REGNO (reg) >= npi->min_reg
6020 && REGNO (reg) < npi->max_reg)
6021 SET_BIT (nonnull_local[current_block->index],
6022 REGNO (reg) - npi->min_reg);
6026 /* Now compute global properties based on the local properties. This
6027 is a classic global availability algorithm. */
6028 compute_available (nonnull_local, nonnull_killed,
6029 nonnull_avout, nonnull_avin);
6031 /* Now look at each bb and see if it ends with a compare of a value
6032 against zero. */
6033 FOR_EACH_BB (bb)
6035 rtx last_insn = bb->end;
6036 rtx condition, earliest;
6037 int compare_and_branch;
6039 /* Since MIN_REG is always at least FIRST_PSEUDO_REGISTER, and
6040 since BLOCK_REG[BB] is zero if this block did not end with a
6041 comparison against zero, this condition works. */
6042 if (block_reg[bb->index] < npi->min_reg
6043 || block_reg[bb->index] >= npi->max_reg)
6044 continue;
6046 /* LAST_INSN is a conditional jump. Get its condition. */
6047 condition = get_condition (last_insn, &earliest);
6049 /* If we can't determine the condition then skip. */
6050 if (! condition)
6051 continue;
6053 /* Is the register known to have a nonzero value? */
6054 if (!TEST_BIT (nonnull_avout[bb->index], block_reg[bb->index] - npi->min_reg))
6055 continue;
6057 /* Try to compute whether the compare/branch at the end of this block is one or
6058 two instructions. */
6059 if (earliest == last_insn)
6060 compare_and_branch = 1;
6061 else if (earliest == prev_nonnote_insn (last_insn))
6062 compare_and_branch = 2;
6063 else
6064 continue;
6066 /* We know the register in this comparison is nonnull at exit from
6067 this block. We can optimize this comparison. */
6068 if (GET_CODE (condition) == NE)
6070 rtx new_jump;
6072 new_jump = emit_jump_insn_after (gen_jump (JUMP_LABEL (last_insn)),
6073 last_insn);
6074 JUMP_LABEL (new_jump) = JUMP_LABEL (last_insn);
6075 LABEL_NUSES (JUMP_LABEL (new_jump))++;
6076 emit_barrier_after (new_jump);
6079 something_changed = 1;
6080 delete_insn (last_insn);
6081 if (compare_and_branch == 2)
6082 delete_insn (earliest);
6083 purge_dead_edges (bb);
6085 /* Don't check this block again. (Note that BLOCK_END is
6086 invalid here; we deleted the last instruction in the
6087 block.) */
6088 block_reg[bb->index] = 0;
6091 return something_changed;
6094 /* Find EQ/NE comparisons against zero which can be (indirectly) evaluated
6095 at compile time.
6097 This is conceptually similar to global constant/copy propagation and
6098 classic global CSE (it even uses the same dataflow equations as cprop).
6100 If a register is used as a memory address with the form (mem (reg)), then we
6101 know that REG can not be zero at that point in the program. Any instruction
6102 which sets REG "kills" this property.
6104 So, if every path leading to a conditional branch has an available memory
6105 reference of that form, then we know the register can not have the value
6106 zero at the conditional branch.
6108 So we merely need to compute the local properties and propagate that data
6109 around the cfg, then optimize where possible.
6111 We run this pass two times. Once before CSE, then again after CSE. This
6112 has proven to be the most profitable approach. It is rare for new
6113 optimization opportunities of this nature to appear after the first CSE
6114 pass.
6116 This could probably be integrated with global cprop with a little work. */
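/* Editorial sketch, not part of the original sources (the identifiers are
   invented for illustration):

     int foo (int *p)
     {
       int x = *p;
       if (p == 0)
         return -1;
       return x;
     }

   The load through P puts a (mem (reg P)) on the only path reaching the
   comparison, so P is known to be nonnull at the branch and the compare and
   branch can be deleted, exactly as described above.  */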
int
6119 delete_null_pointer_checks (f)
6120 rtx f ATTRIBUTE_UNUSED;
6122 sbitmap *nonnull_avin, *nonnull_avout;
6123 unsigned int *block_reg;
6124 basic_block bb;
6125 int reg;
6126 int regs_per_pass;
6127 int max_reg;
6128 struct null_pointer_info npi;
6129 int something_changed = 0;
6131 /* If we have only a single block, then there's nothing to do. */
6132 if (n_basic_blocks <= 1)
6133 return 0;
6135 /* Trying to perform global optimizations on flow graphs which have
6136 a high connectivity will take a long time and is unlikely to be
6137 particularly useful.
6139 In normal circumstances a cfg should have about twice as many edges
6140 as blocks. But we do not want to punish small functions which have
6141 a couple switch statements. So we require a relatively large number
6142 of basic blocks and the ratio of edges to blocks to be high. */
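/* As a concrete (editorial) reading of the test below: a function with 1500
   basic blocks and 30000 edges (20 edges per block) is skipped, while one
   with 900 blocks is always processed regardless of its edge density.  */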
6143 if (n_basic_blocks > 1000 && n_edges / n_basic_blocks >= 20)
6144 return 0;
6146 /* We need four bitmaps, each with a bit for each register in each
6147 basic block. */
6148 max_reg = max_reg_num ();
6149 regs_per_pass = get_bitmap_width (4, last_basic_block, max_reg);
6151 /* Allocate bitmaps to hold local and global properties. */
6152 npi.nonnull_local = sbitmap_vector_alloc (last_basic_block, regs_per_pass);
6153 npi.nonnull_killed = sbitmap_vector_alloc (last_basic_block, regs_per_pass);
6154 nonnull_avin = sbitmap_vector_alloc (last_basic_block, regs_per_pass);
6155 nonnull_avout = sbitmap_vector_alloc (last_basic_block, regs_per_pass);
6157 /* Go through the basic blocks, seeing whether or not each block
6158 ends with a conditional branch whose condition is a comparison
6159 against zero. Record the register compared in BLOCK_REG. */
6160 block_reg = (unsigned int *) xcalloc (last_basic_block, sizeof (int));
6161 FOR_EACH_BB (bb)
6163 rtx last_insn = bb->end;
6164 rtx condition, earliest, reg;
6166 /* We only want conditional branches. */
6167 if (GET_CODE (last_insn) != JUMP_INSN
6168 || !any_condjump_p (last_insn)
6169 || !onlyjump_p (last_insn))
6170 continue;
6172 /* LAST_INSN is a conditional jump. Get its condition. */
6173 condition = get_condition (last_insn, &earliest);
6175 /* If we were unable to get the condition, or it is not an equality
6176 comparison against zero, then there's nothing we can do. */
6177 if (!condition
6178 || (GET_CODE (condition) != NE && GET_CODE (condition) != EQ)
6179 || GET_CODE (XEXP (condition, 1)) != CONST_INT
6180 || (XEXP (condition, 1)
6181 != CONST0_RTX (GET_MODE (XEXP (condition, 0)))))
6182 continue;
6184 /* We must be checking a register against zero. */
6185 reg = XEXP (condition, 0);
6186 if (GET_CODE (reg) != REG)
6187 continue;
6189 block_reg[bb->index] = REGNO (reg);
6192 /* Go through the algorithm for each block of registers. */
6193 for (reg = FIRST_PSEUDO_REGISTER; reg < max_reg; reg += regs_per_pass)
6195 npi.min_reg = reg;
6196 npi.max_reg = MIN (reg + regs_per_pass, max_reg);
6197 something_changed |= delete_null_pointer_checks_1 (block_reg,
6198 nonnull_avin,
6199 nonnull_avout,
6200 &npi);
6203 /* Free the table of registers compared at the end of every block. */
6204 free (block_reg);
6206 /* Free bitmaps. */
6207 sbitmap_vector_free (npi.nonnull_local);
6208 sbitmap_vector_free (npi.nonnull_killed);
6209 sbitmap_vector_free (nonnull_avin);
6210 sbitmap_vector_free (nonnull_avout);
6212 return something_changed;
6215 /* Code Hoisting variables and subroutines. */
6217 /* Very busy expressions. */
6218 static sbitmap *hoist_vbein;
6219 static sbitmap *hoist_vbeout;
6221 /* Hoistable expressions. */
6222 static sbitmap *hoist_exprs;
6224 /* Dominator bitmaps. */
6225 dominance_info dominators;
6227 /* ??? We could compute post dominators and run this algorithm in
6228 reverse to perform tail merging, doing so would probably be
6229 more effective than the tail merging code in jump.c.
6231 It's unclear if tail merging could be run in parallel with
6232 code hoisting. It would be nice. */
6234 /* Allocate vars used for code hoisting analysis. */
6236 static void
6237 alloc_code_hoist_mem (n_blocks, n_exprs)
6238 int n_blocks, n_exprs;
6240 antloc = sbitmap_vector_alloc (n_blocks, n_exprs);
6241 transp = sbitmap_vector_alloc (n_blocks, n_exprs);
6242 comp = sbitmap_vector_alloc (n_blocks, n_exprs);
6244 hoist_vbein = sbitmap_vector_alloc (n_blocks, n_exprs);
6245 hoist_vbeout = sbitmap_vector_alloc (n_blocks, n_exprs);
6246 hoist_exprs = sbitmap_vector_alloc (n_blocks, n_exprs);
6247 transpout = sbitmap_vector_alloc (n_blocks, n_exprs);
6250 /* Free vars used for code hoisting analysis. */
6252 static void
6253 free_code_hoist_mem ()
6255 sbitmap_vector_free (antloc);
6256 sbitmap_vector_free (transp);
6257 sbitmap_vector_free (comp);
6259 sbitmap_vector_free (hoist_vbein);
6260 sbitmap_vector_free (hoist_vbeout);
6261 sbitmap_vector_free (hoist_exprs);
6262 sbitmap_vector_free (transpout);
6264 free_dominance_info (dominators);
6267 /* Compute the very busy expressions at entry/exit from each block.
6269 An expression is very busy if all paths from a given point
6270 compute the expression. */
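/* Editorial gloss, not part of the original sources: the fixed point computed
   by the loop below solves, for each block BB,

     hoist_vbein[BB]  = antloc[BB] | (hoist_vbeout[BB] & transp[BB])
     hoist_vbeout[BB] = intersection over the successors S of BB of hoist_vbein[S]

   with hoist_vbeout left empty for the block laid out immediately before the
   exit block.  */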
6272 static void
6273 compute_code_hoist_vbeinout ()
6275 int changed, passes;
6276 basic_block bb;
6278 sbitmap_vector_zero (hoist_vbeout, last_basic_block);
6279 sbitmap_vector_zero (hoist_vbein, last_basic_block);
6281 passes = 0;
6282 changed = 1;
6284 while (changed)
6286 changed = 0;
6288 /* We scan the blocks in the reverse order to speed up
6289 the convergence. */
6290 FOR_EACH_BB_REVERSE (bb)
6292 changed |= sbitmap_a_or_b_and_c_cg (hoist_vbein[bb->index], antloc[bb->index],
6293 hoist_vbeout[bb->index], transp[bb->index]);
6294 if (bb->next_bb != EXIT_BLOCK_PTR)
6295 sbitmap_intersection_of_succs (hoist_vbeout[bb->index], hoist_vbein, bb->index);
6298 passes++;
6301 if (gcse_file)
6302 fprintf (gcse_file, "hoisting vbeinout computation: %d passes\n", passes);
6305 /* Top level routine to do the dataflow analysis needed by code hoisting. */
6307 static void
6308 compute_code_hoist_data ()
6310 compute_local_properties (transp, comp, antloc, &expr_hash_table);
6311 compute_transpout ();
6312 compute_code_hoist_vbeinout ();
6313 dominators = calculate_dominance_info (CDI_DOMINATORS);
6314 if (gcse_file)
6315 fprintf (gcse_file, "\n");
6318 /* Determine if the expression identified by EXPR_INDEX would
6319 reach BB unimpaired if it were placed at the end of EXPR_BB.
6321 It's unclear exactly what Muchnick meant by "unimpaired". It seems
6322 to me that the expression must either be computed or transparent in
6323 *every* block in the path(s) from EXPR_BB to BB. Any other definition
6324 would allow the expression to be hoisted out of loops, even if
6325 the expression wasn't a loop invariant.
6327 Contrast this to reachability for PRE where an expression is
6328 considered reachable if *any* path reaches instead of *all*
6329 paths. */
6331 static int
6332 hoist_expr_reaches_here_p (expr_bb, expr_index, bb, visited)
6333 basic_block expr_bb;
6334 int expr_index;
6335 basic_block bb;
6336 char *visited;
6338 edge pred;
6339 int visited_allocated_locally = 0;
6342 if (visited == NULL)
6344 visited_allocated_locally = 1;
6345 visited = xcalloc (last_basic_block, 1);
6348 for (pred = bb->pred; pred != NULL; pred = pred->pred_next)
6350 basic_block pred_bb = pred->src;
6352 if (pred->src == ENTRY_BLOCK_PTR)
6353 break;
6354 else if (pred_bb == expr_bb)
6355 continue;
6356 else if (visited[pred_bb->index])
6357 continue;
6359 /* Does this predecessor generate this expression? */
6360 else if (TEST_BIT (comp[pred_bb->index], expr_index))
6361 break;
6362 else if (! TEST_BIT (transp[pred_bb->index], expr_index))
6363 break;
6365 /* Not killed. */
6366 else
6368 visited[pred_bb->index] = 1;
6369 if (! hoist_expr_reaches_here_p (expr_bb, expr_index,
6370 pred_bb, visited))
6371 break;
6374 if (visited_allocated_locally)
6375 free (visited);
6377 return (pred == NULL);
6380 /* Actually perform code hoisting. */
6382 static void
6383 hoist_code ()
6385 basic_block bb, dominated;
6386 basic_block *domby;
6387 unsigned int domby_len;
6388 unsigned int i,j;
6389 struct expr **index_map;
6390 struct expr *expr;
6392 sbitmap_vector_zero (hoist_exprs, last_basic_block);
6394 /* Compute a mapping from expression number (`bitmap_index') to
6395 hash table entry. */
6397 index_map = (struct expr **) xcalloc (expr_hash_table.n_elems, sizeof (struct expr *));
6398 for (i = 0; i < expr_hash_table.size; i++)
6399 for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash)
6400 index_map[expr->bitmap_index] = expr;
6402 /* Walk over each basic block looking for potentially hoistable
6403 expressions; nothing gets hoisted from the entry block. */
6404 FOR_EACH_BB (bb)
6406 int found = 0;
6407 int insn_inserted_p;
6409 domby_len = get_dominated_by (dominators, bb, &domby);
6410 /* Examine each expression that is very busy at the exit of this
6411 block. These are the potentially hoistable expressions. */
6412 for (i = 0; i < hoist_vbeout[bb->index]->n_bits; i++)
6414 int hoistable = 0;
6416 if (TEST_BIT (hoist_vbeout[bb->index], i)
6417 && TEST_BIT (transpout[bb->index], i))
6419 /* We've found a potentially hoistable expression, now
6420 we look at every block BB dominates to see if it
6421 computes the expression. */
6422 for (j = 0; j < domby_len; j++)
6424 dominated = domby[j];
6425 /* Ignore self dominance. */
6426 if (bb == dominated)
6427 continue;
6428 /* We've found a dominated block, now see if it computes
6429 the busy expression and whether or not moving that
6430 expression to the "beginning" of that block is safe. */
6431 if (!TEST_BIT (antloc[dominated->index], i))
6432 continue;
6434 /* Note if the expression would reach the dominated block
6435 unimpaired if it were placed at the end of BB.
6437 Keep track of how many times this expression is hoistable
6438 from a dominated block into BB. */
6439 if (hoist_expr_reaches_here_p (bb, i, dominated, NULL))
6440 hoistable++;
6443 /* If we found more than one hoistable occurrence of this
6444 expression, then note it in the bitmap of expressions to
6445 hoist. It makes no sense to hoist things which are computed
6446 in only one BB, and doing so tends to pessimize register
6447 allocation. One could increase this value to try harder
6448 to avoid any possible code expansion due to register
6449 allocation issues; however, experiments have shown that
6450 the vast majority of hoistable expressions are only movable
6451 from two successors, so raising this threshold is likely
6452 to nullify any benefit we get from code hoisting. */
6453 if (hoistable > 1)
6455 SET_BIT (hoist_exprs[bb->index], i);
6456 found = 1;
6460 /* If we found nothing to hoist, then quit now. */
6461 if (! found)
6463 free (domby);
6464 continue;
6467 /* Loop over all the hoistable expressions. */
6468 for (i = 0; i < hoist_exprs[bb->index]->n_bits; i++)
6470 /* We want to insert the expression into BB only once, so
6471 note when we've inserted it. */
6472 insn_inserted_p = 0;
6474 /* These tests should be the same as the tests above. */
6475 if (TEST_BIT (hoist_vbeout[bb->index], i))
6477 /* We've found a potentially hoistable expression, now
6478 we look at every block BB dominates to see if it
6479 computes the expression. */
6480 for (j = 0; j < domby_len; j++)
6482 dominated = domby[j];
6483 /* Ignore self dominance. */
6484 if (bb == dominated)
6485 continue;
6487 /* We've found a dominated block, now see if it computes
6488 the busy expression and whether or not moving that
6489 expression to the "beginning" of that block is safe. */
6490 if (!TEST_BIT (antloc[dominated->index], i))
6491 continue;
6493 /* The expression is computed in the dominated block and
6494 it would be safe to compute it at the start of the
6495 dominated block. Now we have to determine if the
6496 expression would reach the dominated block if it was
6497 placed at the end of BB. */
6498 if (hoist_expr_reaches_here_p (bb, i, dominated, NULL))
6500 struct expr *expr = index_map[i];
6501 struct occr *occr = expr->antic_occr;
6502 rtx insn;
6503 rtx set;
6505 /* Find the right occurrence of this expression. */
6506 while (occr && BLOCK_FOR_INSN (occr->insn) != dominated)
6507 occr = occr->next;
6509 /* Should never happen. */
6510 if (!occr)
6511 abort ();
6513 insn = occr->insn;
6515 set = single_set (insn);
6516 if (! set)
6517 abort ();
6519 /* Create a pseudo-reg to store the result of reaching
6520 expressions into. Get the mode for the new pseudo
6521 from the mode of the original destination pseudo. */
6522 if (expr->reaching_reg == NULL)
6523 expr->reaching_reg
6524 = gen_reg_rtx (GET_MODE (SET_DEST (set)));
6526 gcse_emit_move_after (expr->reaching_reg, SET_DEST (set), insn);
6527 delete_insn (insn);
6528 occr->deleted_p = 1;
6529 if (!insn_inserted_p)
6531 insert_insn_end_bb (index_map[i], bb, 0);
6532 insn_inserted_p = 1;
6538 free (domby);
6541 free (index_map);
6544 /* Top level routine to perform one code hoisting (aka unification) pass.
6546 Return nonzero if a change was made. */
6548 static int
6549 one_code_hoisting_pass ()
6551 int changed = 0;
6553 alloc_hash_table (max_cuid, &expr_hash_table, 0);
6554 compute_hash_table (&expr_hash_table);
6555 if (gcse_file)
6556 dump_hash_table (gcse_file, "Code Hoisting Expressions", &expr_hash_table);
6558 if (expr_hash_table.n_elems > 0)
6560 alloc_code_hoist_mem (last_basic_block, expr_hash_table.n_elems);
6561 compute_code_hoist_data ();
6562 hoist_code ();
6563 free_code_hoist_mem ();
6566 free_hash_table (&expr_hash_table);
6568 return changed;
6571 /* Here we provide the things required to do store motion towards
6572 the exit. In order for this to be effective, gcse also needed to
6573 be taught how to move a load when it is killed only by a store to itself.
6575 int i;
6576 float a[10];
6578 void foo(float scale)
6580 for (i=0; i<10; i++)
6581 a[i] *= scale;
6584 'i' is both loaded and stored to in the loop. Normally, gcse cannot move
6585 the load out since it's live around the loop, and is stored at the bottom
6586 of the loop.
6588 The 'Load Motion' referred to and implemented in this file is
6589 an enhancement to gcse which, when using edge based lcm, recognizes
6590 this situation and allows gcse to move the load out of the loop.
6592 Once gcse has hoisted the load, store motion can then push this
6593 load towards the exit, and we end up with no loads or stores of 'i'
6594 in the loop. */
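/* Editorial sketch, not part of the original sources: the combined effect of
   load motion and store motion on the example above is roughly

     void foo (float scale)
     {
       int i_reg;
       for (i_reg = 0; i_reg < 10; i_reg++)
         a[i_reg] *= scale;
       i = i_reg;
     }

   where 'i_reg' (an invented name) stands for the reaching register: 'i'
   lives in that register for the duration of the loop and is stored back
   once on the way to the exit, so the loop body contains no loads or stores
   of 'i'.  */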
6596 /* This will search the ldst list for a matching expression. If it
6597 doesn't find one, we create one and initialize it. */
6599 static struct ls_expr *
6600 ldst_entry (x)
6601 rtx x;
6603 struct ls_expr * ptr;
6605 for (ptr = first_ls_expr(); ptr != NULL; ptr = next_ls_expr (ptr))
6606 if (expr_equiv_p (ptr->pattern, x))
6607 break;
6609 if (!ptr)
6611 ptr = (struct ls_expr *) xmalloc (sizeof (struct ls_expr));
6613 ptr->next = pre_ldst_mems;
6614 ptr->expr = NULL;
6615 ptr->pattern = x;
6616 ptr->pattern_regs = NULL_RTX;
6617 ptr->loads = NULL_RTX;
6618 ptr->stores = NULL_RTX;
6619 ptr->reaching_reg = NULL_RTX;
6620 ptr->invalid = 0;
6621 ptr->index = 0;
6622 ptr->hash_index = 0;
6623 pre_ldst_mems = ptr;
6626 return ptr;
6629 /* Free up an individual ldst entry. */
6631 static void
6632 free_ldst_entry (ptr)
6633 struct ls_expr * ptr;
6635 free_INSN_LIST_list (& ptr->loads);
6636 free_INSN_LIST_list (& ptr->stores);
6638 free (ptr);
6641 /* Free up all memory associated with the ldst list. */
6643 static void
6644 free_ldst_mems ()
6646 while (pre_ldst_mems)
6648 struct ls_expr * tmp = pre_ldst_mems;
6650 pre_ldst_mems = pre_ldst_mems->next;
6652 free_ldst_entry (tmp);
6655 pre_ldst_mems = NULL;
6658 /* Dump debugging info about the ldst list. */
6660 static void
6661 print_ldst_list (file)
6662 FILE * file;
6664 struct ls_expr * ptr;
6666 fprintf (file, "LDST list: \n");
6668 for (ptr = first_ls_expr(); ptr != NULL; ptr = next_ls_expr (ptr))
6670 fprintf (file, " Pattern (%3d): ", ptr->index);
6672 print_rtl (file, ptr->pattern);
6674 fprintf (file, "\n Loads : ");
6676 if (ptr->loads)
6677 print_rtl (file, ptr->loads);
6678 else
6679 fprintf (file, "(nil)");
6681 fprintf (file, "\n Stores : ");
6683 if (ptr->stores)
6684 print_rtl (file, ptr->stores);
6685 else
6686 fprintf (file, "(nil)");
6688 fprintf (file, "\n\n");
6691 fprintf (file, "\n");
6694 /* Return the matching entry if X is in the list of ldst only expressions, otherwise NULL. */
6696 static struct ls_expr *
6697 find_rtx_in_ldst (x)
6698 rtx x;
6700 struct ls_expr * ptr;
6702 for (ptr = pre_ldst_mems; ptr != NULL; ptr = ptr->next)
6703 if (expr_equiv_p (ptr->pattern, x) && ! ptr->invalid)
6704 return ptr;
6706 return NULL;
6709 /* Assign each element of the list of mems a monotonically increasing value. */
6711 static int
6712 enumerate_ldsts ()
6714 struct ls_expr * ptr;
6715 int n = 0;
6717 for (ptr = pre_ldst_mems; ptr != NULL; ptr = ptr->next)
6718 ptr->index = n++;
6720 return n;
6723 /* Return first item in the list. */
6725 static inline struct ls_expr *
6726 first_ls_expr ()
6728 return pre_ldst_mems;
6731 /* Return the next item in the list after the specified one. */
6733 static inline struct ls_expr *
6734 next_ls_expr (ptr)
6735 struct ls_expr * ptr;
6737 return ptr->next;
6740 /* Load Motion for loads which only kill themselves. */
6742 /* Return true if x is a simple MEM operation, with no registers or
6743 side effects. These are the types of loads we consider for the
6744 ld_motion list, otherwise we let the usual aliasing take care of it. */
6746 static int
6747 simple_mem (x)
6748 rtx x;
6750 if (GET_CODE (x) != MEM)
6751 return 0;
6753 if (MEM_VOLATILE_P (x))
6754 return 0;
6756 if (GET_MODE (x) == BLKmode)
6757 return 0;
6759 /* If we are handling exceptions, we must be careful with memory references
6760 that may trap. If we are not, the behavior is undefined, so we may just
6761 continue. */
6762 if (flag_non_call_exceptions && may_trap_p (x))
6763 return 0;
6765 if (side_effects_p (x))
6766 return 0;
6768 /* Do not consider function arguments passed on stack. */
6769 if (reg_mentioned_p (stack_pointer_rtx, x))
6770 return 0;
6772 if (flag_float_store && FLOAT_MODE_P (GET_MODE (x)))
6773 return 0;
6775 return 1;
6778 /* Make sure there isn't a buried reference in this pattern anywhere.
6779 If there is, invalidate the entry for it since we're not capable
6780 of fixing it up just yet. We have to be sure we know about ALL
6781 loads since the aliasing code will allow all entries in the
6782 ld_motion list to not-alias themselves. If we miss a load, we will get
6783 the wrong value since gcse might common it and we won't know to
6784 fix it up. */
6786 static void
6787 invalidate_any_buried_refs (x)
6788 rtx x;
6790 const char * fmt;
6791 int i, j;
6792 struct ls_expr * ptr;
6794 /* Invalidate it in the list. */
6795 if (GET_CODE (x) == MEM && simple_mem (x))
6797 ptr = ldst_entry (x);
6798 ptr->invalid = 1;
6801 /* Recursively process the insn. */
6802 fmt = GET_RTX_FORMAT (GET_CODE (x));
6804 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
6806 if (fmt[i] == 'e')
6807 invalidate_any_buried_refs (XEXP (x, i));
6808 else if (fmt[i] == 'E')
6809 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
6810 invalidate_any_buried_refs (XVECEXP (x, i, j));
6814 /* Find all the 'simple' MEMs which are used in LOADs and STORES. 'Simple'
6815 here means MEM loads and stores to symbols, with no side effects
6816 and no registers in the expression. For a MEM destination, we also
6817 check that the insn is still valid if we replace the destination with a
6818 REG, as is done in update_ld_motion_stores. If there are any uses/defs
6819 which don't match these criteria, they are invalidated and trimmed out
6820 later. */
6822 static void
6823 compute_ld_motion_mems ()
6825 struct ls_expr * ptr;
6826 basic_block bb;
6827 rtx insn;
6829 pre_ldst_mems = NULL;
6831 FOR_EACH_BB (bb)
6833 for (insn = bb->head;
6834 insn && insn != NEXT_INSN (bb->end);
6835 insn = NEXT_INSN (insn))
6837 if (INSN_P (insn))
6839 if (GET_CODE (PATTERN (insn)) == SET)
6841 rtx src = SET_SRC (PATTERN (insn));
6842 rtx dest = SET_DEST (PATTERN (insn));
6844 /* Check for a simple LOAD... */
6845 if (GET_CODE (src) == MEM && simple_mem (src))
6847 ptr = ldst_entry (src);
6848 if (GET_CODE (dest) == REG)
6849 ptr->loads = alloc_INSN_LIST (insn, ptr->loads);
6850 else
6851 ptr->invalid = 1;
6853 else
6855 /* Make sure there isn't a buried load somewhere. */
6856 invalidate_any_buried_refs (src);
6859 /* Check for stores. Don't worry about aliased ones, they
6860 will block any movement we might do later. We only care
6861 about this exact pattern since those are the only
6862 circumstances in which we will ignore the aliasing info. */
6863 if (GET_CODE (dest) == MEM && simple_mem (dest))
6865 ptr = ldst_entry (dest);
6867 if (GET_CODE (src) != MEM
6868 && GET_CODE (src) != ASM_OPERANDS
6869 /* Check for REG manually since want_to_gcse_p
6870 returns 0 for all REGs. */
6871 && (REG_P (src) || want_to_gcse_p (src)))
6872 ptr->stores = alloc_INSN_LIST (insn, ptr->stores);
6873 else
6874 ptr->invalid = 1;
6877 else
6878 invalidate_any_buried_refs (PATTERN (insn));
6884 /* Remove any references that have been either invalidated or are not in the
6885 expression list for pre gcse. */
6887 static void
6888 trim_ld_motion_mems ()
6890 struct ls_expr * last = NULL;
6891 struct ls_expr * ptr = first_ls_expr ();
6893 while (ptr != NULL)
6895 int del = ptr->invalid;
6896 struct expr * expr = NULL;
6898 /* Delete if entry has been made invalid. */
6899 if (!del)
6901 unsigned int i;
6903 del = 1;
6904 /* Delete if we cannot find this mem in the expression list. */
6905 for (i = 0; i < expr_hash_table.size && del; i++)
6907 for (expr = expr_hash_table.table[i];
6908 expr != NULL;
6909 expr = expr->next_same_hash)
6910 if (expr_equiv_p (expr->expr, ptr->pattern))
6912 del = 0;
6913 break;
6918 if (del)
6920 if (last != NULL)
6922 last->next = ptr->next;
6923 free_ldst_entry (ptr);
6924 ptr = last->next;
6926 else
6928 pre_ldst_mems = pre_ldst_mems->next;
6929 free_ldst_entry (ptr);
6930 ptr = pre_ldst_mems;
6933 else
6935 /* Set the expression field if we are keeping it. */
6936 last = ptr;
6937 ptr->expr = expr;
6938 ptr = ptr->next;
6942 /* Show the world what we've found. */
6943 if (gcse_file && pre_ldst_mems != NULL)
6944 print_ldst_list (gcse_file);
6947 /* This routine will take an expression which we are replacing with
6948 a reaching register, and update any stores that are needed if
6949 that expression is in the ld_motion list. Stores are updated by
6950 copying their SRC to the reaching register, and then storing
6951 the reaching register into the store location. This keeps the
6952 correct value in the reaching register for the loads. */
6954 static void
6955 update_ld_motion_stores (expr)
6956 struct expr * expr;
6958 struct ls_expr * mem_ptr;
6960 if ((mem_ptr = find_rtx_in_ldst (expr->expr)))
6962 /* We can try to find just the REACHED stores, but it shouldn't
6963 matter to set the reaching reg everywhere... some might be
6964 dead and should be eliminated later. */
6966 /* We replace (set mem expr) with (set reg expr) (set mem reg)
6967 where reg is the reaching reg used in the load. We checked in
6968 compute_ld_motion_mems that we can replace (set mem expr) with
6969 (set reg expr) in that insn. */
6970 rtx list = mem_ptr->stores;
6972 for ( ; list != NULL_RTX; list = XEXP (list, 1))
6974 rtx insn = XEXP (list, 0);
6975 rtx pat = PATTERN (insn);
6976 rtx src = SET_SRC (pat);
6977 rtx reg = expr->reaching_reg;
6978 rtx copy, new;
6980 /* If we've already copied it, continue. */
6981 if (expr->reaching_reg == src)
6982 continue;
6984 if (gcse_file)
6986 fprintf (gcse_file, "PRE: store updated with reaching reg ");
6987 print_rtl (gcse_file, expr->reaching_reg);
6988 fprintf (gcse_file, ":\n ");
6989 print_inline_rtx (gcse_file, insn, 8);
6990 fprintf (gcse_file, "\n");
6993 copy = gen_move_insn ( reg, copy_rtx (SET_SRC (pat)));
6994 new = emit_insn_before (copy, insn);
6995 record_one_set (REGNO (reg), new);
6996 SET_SRC (pat) = reg;
6998 /* un-recognize this pattern since it's probably different now. */
6999 INSN_CODE (insn) = -1;
7000 gcse_create_count++;
7005 /* Store motion code. */
7007 #define ANTIC_STORE_LIST(x) ((x)->loads)
7008 #define AVAIL_STORE_LIST(x) ((x)->stores)
7009 #define LAST_AVAIL_CHECK_FAILURE(x) ((x)->reaching_reg)
7011 /* This is used to communicate the target bitvector we want to use in the
7012 reg_set_info routine when called via the note_stores mechanism. */
7013 static int * regvec;
7015 /* And current insn, for the same routine. */
7016 static rtx compute_store_table_current_insn;
7018 /* Used in computing the reverse edge graph bit vectors. */
7019 static sbitmap * st_antloc;
7021 /* Global holding the number of store expressions we are dealing with. */
7022 static int num_stores;
7024 /* Check to see if we need to mark a register set. Called from note_stores. */
7026 static void
7027 reg_set_info (dest, setter, data)
7028 rtx dest, setter ATTRIBUTE_UNUSED;
7029 void * data ATTRIBUTE_UNUSED;
7031 if (GET_CODE (dest) == SUBREG)
7032 dest = SUBREG_REG (dest);
7034 if (GET_CODE (dest) == REG)
7035 regvec[REGNO (dest)] = INSN_UID (compute_store_table_current_insn);
7038 /* Return false if some of the registers in list X are killed
7039 due to a register set recorded in REGS_SET. */
7041 static bool
7042 store_ops_ok (x, regs_set)
7043 rtx x;
7044 int *regs_set;
7046 rtx reg;
7048 for (; x; x = XEXP (x, 1))
7050 reg = XEXP (x, 0);
7051 if (regs_set[REGNO(reg)])
7052 return false;
7055 return true;
7058 /* Returns a list of registers mentioned in X. */
7059 static rtx
7060 extract_mentioned_regs (x)
7061 rtx x;
7063 return extract_mentioned_regs_helper (x, NULL_RTX);
7066 /* Helper for extract_mentioned_regs; ACCUM is used to accumulate used
7067 registers. */
7068 static rtx
7069 extract_mentioned_regs_helper (x, accum)
7070 rtx x;
7071 rtx accum;
7073 int i;
7074 enum rtx_code code;
7075 const char * fmt;
7077 /* Repeat is used to turn tail-recursion into iteration. */
7078 repeat:
7080 if (x == 0)
7081 return accum;
7083 code = GET_CODE (x);
7084 switch (code)
7086 case REG:
7087 return alloc_EXPR_LIST (0, x, accum);
7089 case MEM:
7090 x = XEXP (x, 0);
7091 goto repeat;
7093 case PRE_DEC:
7094 case PRE_INC:
7095 case POST_DEC:
7096 case POST_INC:
7097 /* We do not run this function with arguments having side effects. */
7098 abort ();
7100 case PC:
7101 case CC0: /*FIXME*/
7102 case CONST:
7103 case CONST_INT:
7104 case CONST_DOUBLE:
7105 case CONST_VECTOR:
7106 case SYMBOL_REF:
7107 case LABEL_REF:
7108 case ADDR_VEC:
7109 case ADDR_DIFF_VEC:
7110 return accum;
7112 default:
7113 break;
7116 i = GET_RTX_LENGTH (code) - 1;
7117 fmt = GET_RTX_FORMAT (code);
7119 for (; i >= 0; i--)
7121 if (fmt[i] == 'e')
7123 rtx tem = XEXP (x, i);
7125 /* If we are about to do the last recursive call
7126 needed at this level, change it into iteration. */
7127 if (i == 0)
7129 x = tem;
7130 goto repeat;
7133 accum = extract_mentioned_regs_helper (tem, accum);
7135 else if (fmt[i] == 'E')
7137 int j;
7139 for (j = 0; j < XVECLEN (x, i); j++)
7140 accum = extract_mentioned_regs_helper (XVECEXP (x, i, j), accum);
7144 return accum;
7147 /* Determine whether INSN is a MEM store pattern that we will consider moving.
7148 REGS_SET_BEFORE is bitmap of registers set before (and including) the
7149 current insn, REGS_SET_AFTER is bitmap of registers set after (and
7150 including) the insn in this basic block. We must be passing through BB from
7151 head to end, as we are using this fact to speed things up.
7153 The results are stored this way:
7155 -- the first anticipatable expression is added into ANTIC_STORE_LIST
7156 -- if the processed expression is not anticipatable, NULL_RTX is added
7157 there instead, so that we can use it as indicator that no further
7158 expression of this type may be anticipatable
7159 -- if the expression is available, it is added as head of AVAIL_STORE_LIST;
7160 consequently, all of them but this head are dead and may be deleted.
7161 -- if the expression is not available, the insn that causes it to fail to be
7162 available is stored in reaching_reg.
7164 Things are complicated a bit by the fact that there may already be stores
7165 to the same MEM from other blocks; also the caller must take care of the
7166 necessary cleanup of the temporary markers after the end of the basic block. */
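/* Editorial sketch of the bookkeeping described above, with invented names
   and assuming A and B are registers and nothing else in the block touches
   *P or P:

     *p = a;     (S1)
     *p = b;     (S2)

   S1 is the first anticipatable store in the block, so it is the insn that
   ANTIC_STORE_LIST records; both stores land on AVAIL_STORE_LIST with S2,
   the later one, at its head, and S1 behind it is the dead copy that may be
   deleted.  */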
7169 static void
7170 find_moveable_store (insn, regs_set_before, regs_set_after)
7171 rtx insn;
7172 int *regs_set_before;
7173 int *regs_set_after;
7175 struct ls_expr * ptr;
7176 rtx dest, set, tmp;
7177 int check_anticipatable, check_available;
7178 basic_block bb = BLOCK_FOR_INSN (insn);
7180 set = single_set (insn);
7181 if (!set)
7182 return;
7184 dest = SET_DEST (set);
7186 if (GET_CODE (dest) != MEM || MEM_VOLATILE_P (dest)
7187 || GET_MODE (dest) == BLKmode)
7188 return;
7190 if (side_effects_p (dest))
7191 return;
7193 /* If we are handling exceptions, we must be careful with memory references
7194 that may trap. If we are not, the behavior is undefined, so we may just
7195 continue. */
7196 if (flag_non_call_exceptions && may_trap_p (dest))
7197 return;
7199 ptr = ldst_entry (dest);
7200 if (!ptr->pattern_regs)
7201 ptr->pattern_regs = extract_mentioned_regs (dest);
7203 /* Do not check for anticipatability if we either found one anticipatable
7204 store already, or tested for one and found out that it was killed. */
7205 check_anticipatable = 0;
7206 if (!ANTIC_STORE_LIST (ptr))
7207 check_anticipatable = 1;
7208 else
7210 tmp = XEXP (ANTIC_STORE_LIST (ptr), 0);
7211 if (tmp != NULL_RTX
7212 && BLOCK_FOR_INSN (tmp) != bb)
7213 check_anticipatable = 1;
7215 if (check_anticipatable)
7217 if (store_killed_before (dest, ptr->pattern_regs, insn, bb, regs_set_before))
7218 tmp = NULL_RTX;
7219 else
7220 tmp = insn;
7221 ANTIC_STORE_LIST (ptr) = alloc_INSN_LIST (tmp,
7222 ANTIC_STORE_LIST (ptr));
7225 /* It is not necessary to check whether the store is available if we did
7226 it successfully before; if we failed before, do not bother to check
7227 until we reach the insn that caused us to fail. */
7228 check_available = 0;
7229 if (!AVAIL_STORE_LIST (ptr))
7230 check_available = 1;
7231 else
7233 tmp = XEXP (AVAIL_STORE_LIST (ptr), 0);
7234 if (BLOCK_FOR_INSN (tmp) != bb)
7235 check_available = 1;
7237 if (check_available)
7239 /* Check that we have already reached the insn at which the check
7240 failed last time. */
7241 if (LAST_AVAIL_CHECK_FAILURE (ptr))
7243 for (tmp = bb->end;
7244 tmp != insn && tmp != LAST_AVAIL_CHECK_FAILURE (ptr);
7245 tmp = PREV_INSN (tmp))
7246 continue;
7247 if (tmp == insn)
7248 check_available = 0;
7250 else
7251 check_available = store_killed_after (dest, ptr->pattern_regs, insn,
7252 bb, regs_set_after,
7253 &LAST_AVAIL_CHECK_FAILURE (ptr));
7255 if (!check_available)
7256 AVAIL_STORE_LIST (ptr) = alloc_INSN_LIST (insn, AVAIL_STORE_LIST (ptr));
7259 /* Find available and anticipatable stores. */
7261 static int
7262 compute_store_table ()
7264 int ret;
7265 basic_block bb;
7266 unsigned regno;
7267 rtx insn, pat, tmp;
7268 int *last_set_in, *already_set;
7269 struct ls_expr * ptr, **prev_next_ptr_ptr;
7271 max_gcse_regno = max_reg_num ();
7273 reg_set_in_block = (sbitmap *) sbitmap_vector_alloc (last_basic_block,
7274 max_gcse_regno);
7275 sbitmap_vector_zero (reg_set_in_block, last_basic_block);
7276 pre_ldst_mems = 0;
7277 last_set_in = xmalloc (sizeof (int) * max_gcse_regno);
7278 already_set = xmalloc (sizeof (int) * max_gcse_regno);
7280 /* Find all the stores we care about. */
7281 FOR_EACH_BB (bb)
7283 /* First compute the registers set in this block. */
7284 memset (last_set_in, 0, sizeof (int) * max_gcse_regno);
7285 regvec = last_set_in;
7287 for (insn = bb->head;
7288 insn != NEXT_INSN (bb->end);
7289 insn = NEXT_INSN (insn))
7291 if (! INSN_P (insn))
7292 continue;
7294 if (GET_CODE (insn) == CALL_INSN)
7296 bool clobbers_all = false;
7297 #ifdef NON_SAVING_SETJMP
7298 if (NON_SAVING_SETJMP
7299 && find_reg_note (insn, REG_SETJMP, NULL_RTX))
7300 clobbers_all = true;
7301 #endif
7303 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7304 if (clobbers_all
7305 || TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
7306 last_set_in[regno] = INSN_UID (insn);
7309 pat = PATTERN (insn);
7310 compute_store_table_current_insn = insn;
7311 note_stores (pat, reg_set_info, NULL);
7314 /* Record the set registers. */
7315 for (regno = 0; regno < max_gcse_regno; regno++)
7316 if (last_set_in[regno])
7317 SET_BIT (reg_set_in_block[bb->index], regno);
7319 /* Now find the stores. */
7320 memset (already_set, 0, sizeof (int) * max_gcse_regno);
7321 regvec = already_set;
7322 for (insn = bb->head;
7323 insn != NEXT_INSN (bb->end);
7324 insn = NEXT_INSN (insn))
7326 if (! INSN_P (insn))
7327 continue;
7329 if (GET_CODE (insn) == CALL_INSN)
7331 bool clobbers_all = false;
7332 #ifdef NON_SAVING_SETJMP
7333 if (NON_SAVING_SETJMP
7334 && find_reg_note (insn, REG_SETJMP, NULL_RTX))
7335 clobbers_all = true;
7336 #endif
7338 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7339 if (clobbers_all
7340 || TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
7341 already_set[regno] = 1;
7344 pat = PATTERN (insn);
7345 note_stores (pat, reg_set_info, NULL);
7347 /* Now that we've marked regs, look for stores. */
7348 find_moveable_store (insn, already_set, last_set_in);
7350 /* Unmark regs that are no longer set. */
7351 for (regno = 0; regno < max_gcse_regno; regno++)
7352 if (last_set_in[regno] == INSN_UID (insn))
7353 last_set_in[regno] = 0;
7356 /* Clear temporary marks. */
7357 for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
7359 LAST_AVAIL_CHECK_FAILURE(ptr) = NULL_RTX;
7360 if (ANTIC_STORE_LIST (ptr)
7361 && (tmp = XEXP (ANTIC_STORE_LIST (ptr), 0)) == NULL_RTX)
7362 ANTIC_STORE_LIST (ptr) = XEXP (ANTIC_STORE_LIST (ptr), 1);
7366 /* Remove the stores that are not available anywhere, as there will
7367 be no opportunity to optimize them. */
7368 for (ptr = pre_ldst_mems, prev_next_ptr_ptr = &pre_ldst_mems;
7369 ptr != NULL;
7370 ptr = *prev_next_ptr_ptr)
7372 if (!AVAIL_STORE_LIST (ptr))
7374 *prev_next_ptr_ptr = ptr->next;
7375 free_ldst_entry (ptr);
7377 else
7378 prev_next_ptr_ptr = &ptr->next;
7381 ret = enumerate_ldsts ();
7383 if (gcse_file)
7385 fprintf (gcse_file, "ST_avail and ST_antic (shown under loads..)\n");
7386 print_ldst_list (gcse_file);
7389 free (last_set_in);
7390 free (already_set);
7391 return ret;
7394 /* Check to see if the load X is aliased with STORE_PATTERN. */
7396 static bool
7397 load_kills_store (x, store_pattern)
7398 rtx x, store_pattern;
7400 if (true_dependence (x, GET_MODE (x), store_pattern, rtx_addr_varies_p))
7401 return true;
7402 return false;
7405 /* Go through the entire insn X, looking for any loads which might alias
7406 STORE_PATTERN. Return true if found. */
7408 static bool
7409 find_loads (x, store_pattern)
7410 rtx x, store_pattern;
7412 const char * fmt;
7413 int i, j;
7414 int ret = false;
7416 if (!x)
7417 return false;
7419 if (GET_CODE (x) == SET)
7420 x = SET_SRC (x);
7422 if (GET_CODE (x) == MEM)
7424 if (load_kills_store (x, store_pattern))
7425 return true;
7428 /* Recursively process the insn. */
7429 fmt = GET_RTX_FORMAT (GET_CODE (x));
7431 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0 && !ret; i--)
7433 if (fmt[i] == 'e')
7434 ret |= find_loads (XEXP (x, i), store_pattern);
7435 else if (fmt[i] == 'E')
7436 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
7437 ret |= find_loads (XVECEXP (x, i, j), store_pattern);
7439 return ret;
7442 /* Check if INSN kills the store pattern X (is aliased with it).
7443 Return true if it does. */
7445 static bool
7446 store_killed_in_insn (x, x_regs, insn)
7447 rtx x, x_regs, insn;
7449 rtx reg, base;
7451 if (!INSN_P (insn))
7452 return false;
7454 if (GET_CODE (insn) == CALL_INSN)
7456 /* A normal or pure call might read from the pattern,
7457 but a const call will not. */
7458 if (! CONST_OR_PURE_CALL_P (insn) || pure_call_p (insn))
7459 return true;
7461 /* But even a const call reads its parameters. Check whether the
7462 base of some of the registers used in the mem is the stack pointer. */
7463 for (reg = x_regs; reg; reg = XEXP (reg, 1))
7465 base = find_base_term (XEXP (reg, 0));
7466 if (!base
7467 || (GET_CODE (base) == ADDRESS
7468 && GET_MODE (base) == Pmode
7469 && XEXP (base, 0) == stack_pointer_rtx))
7470 return true;
7473 return false;
7476 if (GET_CODE (PATTERN (insn)) == SET)
7478 rtx pat = PATTERN (insn);
7479 /* Check for memory stores to aliased objects. */
7480 if (GET_CODE (SET_DEST (pat)) == MEM && !expr_equiv_p (SET_DEST (pat), x))
7481 /* Pretend it's a load and check for aliasing. */
7482 if (find_loads (SET_DEST (pat), x))
7483 return true;
7484 return find_loads (SET_SRC (pat), x);
7486 else
7487 return find_loads (PATTERN (insn), x);
7490 /* Returns true if the expression X is loaded or clobbered on or after INSN
7491 within basic block BB. REGS_SET_AFTER is a bitmap of registers set in
7492 or after the insn. X_REGS is the list of registers mentioned in X. If the
7493 store is killed, return in FAIL_INSN the last insn in which that happens. */
7495 static bool
7496 store_killed_after (x, x_regs, insn, bb, regs_set_after, fail_insn)
7497 rtx x, x_regs, insn;
7498 basic_block bb;
7499 int *regs_set_after;
7500 rtx *fail_insn;
7502 rtx last = bb->end, act;
7504 if (!store_ops_ok (x_regs, regs_set_after))
7506 /* We do not know where it will happen. */
7507 if (fail_insn)
7508 *fail_insn = NULL_RTX;
7509 return true;
7512 /* Scan from the end, so that fail_insn is determined correctly. */
7513 for (act = last; act != PREV_INSN (insn); act = PREV_INSN (act))
7514 if (store_killed_in_insn (x, x_regs, act))
7516 if (fail_insn)
7517 *fail_insn = act;
7518 return true;
7521 return false;
7524 /* Returns true if the expression X is loaded or clobbered on or before INSN
7525 within basic block BB. X_REGS is the list of registers mentioned in X.
7526 REGS_SET_BEFORE is a bitmap of registers set before or in this insn. */
7527 static bool
7528 store_killed_before (x, x_regs, insn, bb, regs_set_before)
7529 rtx x, x_regs, insn;
7530 basic_block bb;
7531 int *regs_set_before;
7533 rtx first = bb->head;
7535 if (!store_ops_ok (x_regs, regs_set_before))
7536 return true;
7538 for ( ; insn != PREV_INSN (first); insn = PREV_INSN (insn))
7539 if (store_killed_in_insn (x, x_regs, insn))
7540 return true;
7542 return false;
7545 /* Fill in the available, anticipatable, transparent and kill vectors (AE_GEN,
7546 ST_ANTLOC, TRANSP, AE_KILL), based on the lists of available and anticipatable stores. */
7547 static void
7548 build_store_vectors ()
7550 basic_block bb;
7551 int *regs_set_in_block;
7552 rtx insn, st;
7553 struct ls_expr * ptr;
7554 unsigned regno;
7556 /* Build the gen_vector. This is any store in the table which is not killed
7557 by aliasing later in its block. */
7558 ae_gen = (sbitmap *) sbitmap_vector_alloc (last_basic_block, num_stores);
7559 sbitmap_vector_zero (ae_gen, last_basic_block);
7561 st_antloc = (sbitmap *) sbitmap_vector_alloc (last_basic_block, num_stores);
7562 sbitmap_vector_zero (st_antloc, last_basic_block);
7564 for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
7566 for (st = AVAIL_STORE_LIST (ptr); st != NULL; st = XEXP (st, 1))
7568 insn = XEXP (st, 0);
7569 bb = BLOCK_FOR_INSN (insn);
7571 /* If we've already seen an available expression in this block,
7572 we can delete this one (It occurs earlier in the block). We'll
7573 copy the SRC expression to an unused register in case there
7574 are any side effects. */
7575 if (TEST_BIT (ae_gen[bb->index], ptr->index))
7577 rtx r = gen_reg_rtx (GET_MODE (ptr->pattern));
7578 if (gcse_file)
7579 fprintf (gcse_file, "Removing redundant store:\n");
7580 replace_store_insn (r, XEXP (st, 0), bb);
7581 continue;
7583 SET_BIT (ae_gen[bb->index], ptr->index);
7586 for (st = ANTIC_STORE_LIST (ptr); st != NULL; st = XEXP (st, 1))
7588 insn = XEXP (st, 0);
7589 bb = BLOCK_FOR_INSN (insn);
7590 SET_BIT (st_antloc[bb->index], ptr->index);
7594 ae_kill = (sbitmap *) sbitmap_vector_alloc (last_basic_block, num_stores);
7595 sbitmap_vector_zero (ae_kill, last_basic_block);
7597 transp = (sbitmap *) sbitmap_vector_alloc (last_basic_block, num_stores);
7598 sbitmap_vector_zero (transp, last_basic_block);
7599 regs_set_in_block = xmalloc (sizeof (int) * max_gcse_regno);
7601 FOR_EACH_BB (bb)
7603 for (regno = 0; regno < max_gcse_regno; regno++)
7604 regs_set_in_block[regno] = TEST_BIT (reg_set_in_block[bb->index], regno);
7606 for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
7608 if (store_killed_after (ptr->pattern, ptr->pattern_regs, bb->head,
7609 bb, regs_set_in_block, NULL))
7611 /* It should not be necessary to consider the expression
7612 killed if it is both anticipatable and available. */
7613 if (!TEST_BIT (st_antloc[bb->index], ptr->index)
7614 || !TEST_BIT (ae_gen[bb->index], ptr->index))
7615 SET_BIT (ae_kill[bb->index], ptr->index);
7617 else
7618 SET_BIT (transp[bb->index], ptr->index);
7622 free (regs_set_in_block);
7624 if (gcse_file)
7626 dump_sbitmap_vector (gcse_file, "st_antloc", "", st_antloc, last_basic_block);
7627 dump_sbitmap_vector (gcse_file, "st_kill", "", ae_kill, last_basic_block);
7628 dump_sbitmap_vector (gcse_file, "Transpt", "", transp, last_basic_block);
7629 dump_sbitmap_vector (gcse_file, "st_avloc", "", ae_gen, last_basic_block);
7633 /* Insert an instruction at the beginning of a basic block, and update
7634 the BLOCK_HEAD if needed. */
7636 static void
7637 insert_insn_start_bb (insn, bb)
7638 rtx insn;
7639 basic_block bb;
7641 /* Insert at start of successor block. */
7642 rtx prev = PREV_INSN (bb->head);
7643 rtx before = bb->head;
7644 while (before != 0)
7646 if (GET_CODE (before) != CODE_LABEL
7647 && (GET_CODE (before) != NOTE
7648 || NOTE_LINE_NUMBER (before) != NOTE_INSN_BASIC_BLOCK))
7649 break;
7650 prev = before;
7651 if (prev == bb->end)
7652 break;
7653 before = NEXT_INSN (before);
7656 insn = emit_insn_after (insn, prev);
7658 if (gcse_file)
7660 fprintf (gcse_file, "STORE_MOTION insert store at start of BB %d:\n",
7661 bb->index);
7662 print_inline_rtx (gcse_file, insn, 6);
7663 fprintf (gcse_file, "\n");
7667 /* This routine will insert a store on an edge. EXPR is the ldst entry for
7668 the memory reference, and E is the edge to insert it on. Returns nonzero
7669 if an edge insertion was performed. */
7671 static int
7672 insert_store (expr, e)
7673 struct ls_expr * expr;
7674 edge e;
7676 rtx reg, insn;
7677 basic_block bb;
7678 edge tmp;
7680 /* We did all the deletes before this insert, so if we didn't delete a
7681 store, then we haven't set the reaching reg yet either. */
7682 if (expr->reaching_reg == NULL_RTX)
7683 return 0;
7685 reg = expr->reaching_reg;
7686 insn = gen_move_insn (copy_rtx (expr->pattern), reg);
7688 /* If we are inserting this expression on ALL predecessor edges of a BB,
7689 insert it at the start of the BB, and reset the insert bits on the other
7690 edges so we don't try to insert it on the other edges. */
7691 bb = e->dest;
7692 for (tmp = e->dest->pred; tmp ; tmp = tmp->pred_next)
7694 int index = EDGE_INDEX (edge_list, tmp->src, tmp->dest);
7695 if (index == EDGE_INDEX_NO_EDGE)
7696 abort ();
7697 if (! TEST_BIT (pre_insert_map[index], expr->index))
7698 break;
7701 /* If tmp is NULL, we found an insertion on every edge, blank the
7702 insertion vector for these edges, and insert at the start of the BB. */
7703 if (!tmp && bb != EXIT_BLOCK_PTR)
7705 for (tmp = e->dest->pred; tmp ; tmp = tmp->pred_next)
7707 int index = EDGE_INDEX (edge_list, tmp->src, tmp->dest);
7708 RESET_BIT (pre_insert_map[index], expr->index);
7710 insert_insn_start_bb (insn, bb);
7711 return 0;
7714 /* We can't insert on this edge, so we'll insert at the head of the
7715 successor's block. See Morgan, sec 10.5. */
7716 if ((e->flags & EDGE_ABNORMAL) == EDGE_ABNORMAL)
7718 insert_insn_start_bb (insn, bb);
7719 return 0;
7722 insert_insn_on_edge (insn, e);
7724 if (gcse_file)
7726 fprintf (gcse_file, "STORE_MOTION insert insn on edge (%d, %d):\n",
7727 e->src->index, e->dest->index);
7728 print_inline_rtx (gcse_file, insn, 6);
7729 fprintf (gcse_file, "\n");
7732 return 1;
7735 /* This routine will replace a store with a SET to a specified register. */
7737 static void
7738 replace_store_insn (reg, del, bb)
7739 rtx reg, del;
7740 basic_block bb;
7742 rtx insn;
7744 insn = gen_move_insn (reg, SET_SRC (single_set (del)));
7745 insn = emit_insn_after (insn, del);
7747 if (gcse_file)
7749 fprintf (gcse_file,
7750 "STORE_MOTION delete insn in BB %d:\n ", bb->index);
7751 print_inline_rtx (gcse_file, del, 6);
7752 fprintf (gcse_file, "\nSTORE MOTION replaced with insn:\n ");
7753 print_inline_rtx (gcse_file, insn, 6);
7754 fprintf (gcse_file, "\n");
7757 delete_insn (del);
7761 /* Delete a store, but copy the value that would have been stored into
7762 the reaching_reg for later storing. */
7764 static void
7765 delete_store (expr, bb)
7766 struct ls_expr * expr;
7767 basic_block bb;
7769 rtx reg, i, del;
7771 if (expr->reaching_reg == NULL_RTX)
7772 expr->reaching_reg = gen_reg_rtx (GET_MODE (expr->pattern));
7774 reg = expr->reaching_reg;
7776 for (i = AVAIL_STORE_LIST (expr); i; i = XEXP (i, 1))
7778 del = XEXP (i, 0);
7779 if (BLOCK_FOR_INSN (del) == bb)
7781 /* We know there is only one since we deleted redundant
7782 ones during the available computation. */
7783 replace_store_insn (reg, del, bb);
7784 break;
7789 /* Free memory used by store motion. */
7791 static void
7792 free_store_memory ()
7794 free_ldst_mems ();
7796 if (ae_gen)
7797 sbitmap_vector_free (ae_gen);
7798 if (ae_kill)
7799 sbitmap_vector_free (ae_kill);
7800 if (transp)
7801 sbitmap_vector_free (transp);
7802 if (st_antloc)
7803 sbitmap_vector_free (st_antloc);
7804 if (pre_insert_map)
7805 sbitmap_vector_free (pre_insert_map);
7806 if (pre_delete_map)
7807 sbitmap_vector_free (pre_delete_map);
7808 if (reg_set_in_block)
7809 sbitmap_vector_free (reg_set_in_block);
7811 ae_gen = ae_kill = transp = st_antloc = NULL;
7812 pre_insert_map = pre_delete_map = reg_set_in_block = NULL;
7815 /* Perform store motion. Much like gcse, except we move expressions the
7816 other way by looking at the flowgraph in reverse. */
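/* Illustrative sketch (not part of the original sources): given

       for (i = 0; i < n; i++)
         {
           t += a[i];
           *p = t;
         }

   and assuming *p cannot alias a[], the store to *p is performed on
   every iteration and nothing inside the loop reads *p, so delete_store
   rewrites "*p = t" as a copy into the reaching register and
   insert_store places a single "*p = reaching_reg" on the loop exit
   edge, sinking the store out of the loop.  */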
7818 static void
7819 store_motion ()
7821 basic_block bb;
7822 int x;
7823 struct ls_expr * ptr;
7824 int update_flow = 0;
7826 if (gcse_file)
7828 fprintf (gcse_file, "before store motion\n");
7829 print_rtl (gcse_file, get_insns ());
7832 init_alias_analysis ();
7834 /* Find all the available and anticipatable stores. */
7835 num_stores = compute_store_table ();
7836 if (num_stores == 0)
7838 sbitmap_vector_free (reg_set_in_block);
7839 end_alias_analysis ();
7840 return;
7843 /* Now compute kill & transp vectors. */
7844 build_store_vectors ();
7845 add_noreturn_fake_exit_edges ();
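/* Blocks ending in noreturn calls have no successors; the fake edges to
   the exit block give every block a path to EXIT, which keeps the
   (reverse) LCM dataflow problem below well behaved.  */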
7847 edge_list = pre_edge_rev_lcm (gcse_file, num_stores, transp, ae_gen,
7848 st_antloc, ae_kill, &pre_insert_map,
7849 &pre_delete_map);
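/* On return, pre_insert_map is indexed by edge (through edge_list) and
   pre_delete_map is indexed by basic block, which is how the two loops
   below consume them.  */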
7851 /* Now delete the stores that have become redundant and insert the new stores that are needed. */
7852 for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
7854 FOR_EACH_BB (bb)
7855 if (TEST_BIT (pre_delete_map[bb->index], ptr->index))
7856 delete_store (ptr, bb);
7858 for (x = 0; x < NUM_EDGES (edge_list); x++)
7859 if (TEST_BIT (pre_insert_map[x], ptr->index))
7860 update_flow |= insert_store (ptr, INDEX_EDGE (edge_list, x));
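/* insert_store returns nonzero only when it queued the store on an edge
   proper (rather than emitting it at the start of a block), so the edge
   insertions are committed only when at least one edge really needs it.  */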
7863 if (update_flow)
7864 commit_edge_insertions ();
7866 free_store_memory ();
7867 free_edge_list (edge_list);
7868 remove_fake_edges ();
7869 end_alias_analysis ();
7873 /* Entry point for jump bypassing optimization pass. */
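/* Illustrative sketch (not part of the original sources): if a block B
   ends in "if (x == 0) goto L1; else goto L2;" and copy/constant
   propagation shows that x is 0 on one particular incoming edge A->B,
   that edge can be redirected straight to L1, bypassing B's conditional
   jump.  */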
7875 int
7876 bypass_jumps (file)
7877 FILE *file;
7879 int changed;
7881 /* We do not construct an accurate cfg in functions which call
7882 setjmp, so just punt to be safe. */
7883 if (current_function_calls_setjmp)
7884 return 0;
7886 /* For calling dump_foo fns from gdb. */
7887 debug_stderr = stderr;
7888 gcse_file = file;
7890 /* Record the largest register number in use; the dataflow bitmaps
7891 allocated below are sized from this. */
7892 max_gcse_regno = max_reg_num ();
7894 if (file)
7895 dump_flow_info (file);
7897 /* Return if there's nothing to do. */
7898 if (n_basic_blocks <= 1)
7899 return 0;
7901 /* Trying to perform global optimizations on flow graphs which have
7902 a high connectivity will take a long time and is unlikely to be
7903 particularly useful.
7905 In normal circumstances a cfg should have about twice as many edges
7906 as blocks. But we do not want to punish small functions which have
7907 a couple of switch statements. So we require both a relatively large
7908 number of basic blocks and a high ratio of edges to blocks. */
7909 if (n_basic_blocks > 1000 && n_edges / n_basic_blocks >= 20)
7911 if (warn_disabled_optimization)
7912 warning ("BYPASS disabled: %d > 1000 basic blocks and %d >= 20 edges/basic block",
7913 n_basic_blocks, n_edges / n_basic_blocks);
7914 return 0;
7917 /* If allocating memory for the cprop bitmaps would take up too much
7918 storage, it's better just to disable the optimization. */
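/* The product below approximates the bytes needed for one register
   bitmap per basic block, which is roughly what the dataflow sets for
   copy/constant propagation will cost.  */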
7919 if ((n_basic_blocks
7920 * SBITMAP_SET_SIZE (max_gcse_regno)
7921 * sizeof (SBITMAP_ELT_TYPE)) > MAX_GCSE_MEMORY)
7923 if (warn_disabled_optimization)
7924 warning ("GCSE disabled: %d basic blocks and %d registers",
7925 n_basic_blocks, max_gcse_regno);
7927 return 0;
7930 gcc_obstack_init (&gcse_obstack);
7931 bytes_used = 0;
7933 /* We need alias. */
7934 init_alias_analysis ();
7936 /* Record where pseudo-registers are set. This data is kept accurate
7937 during each pass. ??? We could also record hard-reg information here
7938 [since it's unchanging]; however, it is currently done during hash table
7939 computation.
7941 It may be tempting to compute MEM set information here too, but MEM sets
7942 will be subject to code motion one day and thus we need to compute
7943 information about memory sets when we build the hash tables. */
7945 alloc_reg_set_mem (max_gcse_regno);
7946 compute_sets (get_insns ());
7948 max_gcse_regno = max_reg_num ();
7949 alloc_gcse_mem (get_insns ());
7950 changed = one_cprop_pass (1, 1, 1);
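/* A single constant/copy propagation pass with the jump-related
   transformations enabled; see one_cprop_pass for what each of the
   three flags selects.  */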
7951 free_gcse_mem ();
7953 if (file)
7955 fprintf (file, "BYPASS of %s: %d basic blocks, ",
7956 current_function_name, n_basic_blocks);
7957 fprintf (file, "%d bytes\n\n", bytes_used);
7960 obstack_free (&gcse_obstack, NULL);
7961 free_reg_set_mem ();
7963 /* We are finished with alias. */
7964 end_alias_analysis ();
7965 allocate_reg_info (max_reg_num (), FALSE, FALSE);
7967 return changed;
7970 #include "gt-gcse.h"