1 /* Global common subexpression elimination/Partial redundancy elimination
2 and global constant/copy propagation for GNU compiler.
3 Copyright (C) 1997, 1998, 1999, 2000, 2001, 2002
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 2, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the Free
20 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
21 02111-1307, USA. */
23 /* TODO
24 - reordering of memory allocation and freeing to be more space efficient
25 - do rough calc of how many regs are needed in each block, and a rough
26 calc of how many regs are available in each class and use that to
27 throttle back the code in cases where RTX_COST is minimal.
28 - a store to the same address as a load does not kill the load if the
29 source of the store is also the destination of the load. Handling this
30 allows more load motion, particularly out of loops.
31 - ability to realloc sbitmap vectors would allow one initial computation
32 of reg_set_in_block with only subsequent additions, rather than
33 recomputing it for each pass
37 /* References searched while implementing this.
39 Compilers: Principles, Techniques, and Tools
40 Aho, Sethi, Ullman
41 Addison-Wesley, 1988
43 Global Optimization by Suppression of Partial Redundancies
44 E. Morel, C. Renvoise
45 Communications of the ACM, Vol. 22, Num. 2, Feb. 1979
47 A Portable Machine-Independent Global Optimizer - Design and Measurements
48 Frederick Chow
49 Stanford Ph.D. thesis, Dec. 1983
51 A Fast Algorithm for Code Movement Optimization
52 D.M. Dhamdhere
53 SIGPLAN Notices, Vol. 23, Num. 10, Oct. 1988
55 A Solution to a Problem with Morel and Renvoise's
56 Global Optimization by Suppression of Partial Redundancies
57 K-H Drechsler, M.P. Stadel
58 ACM TOPLAS, Vol. 10, Num. 4, Oct. 1988
60 Practical Adaptation of the Global Optimization
61 Algorithm of Morel and Renvoise
62 D.M. Dhamdhere
63 ACM TOPLAS, Vol. 13, Num. 2. Apr. 1991
65 Efficiently Computing Static Single Assignment Form and the Control
66 Dependence Graph
67 R. Cytron, J. Ferrante, B.K. Rosen, M.N. Wegman, and F.K. Zadeck
68 ACM TOPLAS, Vol. 13, Num. 4, Oct. 1991
70 Lazy Code Motion
71 J. Knoop, O. Ruthing, B. Steffen
72 ACM SIGPLAN Notices Vol. 27, Num. 7, Jul. 1992, '92 Conference on PLDI
74 What's In a Region? Or Computing Control Dependence Regions in Near-Linear
75 Time for Reducible Control Flow
76 Thomas Ball
77 ACM Letters on Programming Languages and Systems,
78 Vol. 2, Num. 1-4, Mar-Dec 1993
80 An Efficient Representation for Sparse Sets
81 Preston Briggs, Linda Torczon
82 ACM Letters on Programming Languages and Systems,
83 Vol. 2, Num. 1-4, Mar-Dec 1993
85 A Variation of Knoop, Ruthing, and Steffen's Lazy Code Motion
86 K-H Drechsler, M.P. Stadel
87 ACM SIGPLAN Notices, Vol. 28, Num. 5, May 1993
89 Partial Dead Code Elimination
90 J. Knoop, O. Ruthing, B. Steffen
91 ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994
93 Effective Partial Redundancy Elimination
94 P. Briggs, K.D. Cooper
95 ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994
97 The Program Structure Tree: Computing Control Regions in Linear Time
98 R. Johnson, D. Pearson, K. Pingali
99 ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994
101 Optimal Code Motion: Theory and Practice
102 J. Knoop, O. Ruthing, B. Steffen
103 ACM TOPLAS, Vol. 16, Num. 4, Jul. 1994
105 The power of assignment motion
106 J. Knoop, O. Ruthing, B. Steffen
107 ACM SIGPLAN Notices Vol. 30, Num. 6, Jun. 1995, '95 Conference on PLDI
109 Global code motion / global value numbering
110 C. Click
111 ACM SIGPLAN Notices Vol. 30, Num. 6, Jun. 1995, '95 Conference on PLDI
113 Value Driven Redundancy Elimination
114 L.T. Simpson
115 Rice University Ph.D. thesis, Apr. 1996
117 Value Numbering
118 L.T. Simpson
119 Massively Scalar Compiler Project, Rice University, Sep. 1996
121 High Performance Compilers for Parallel Computing
122 Michael Wolfe
123 Addison-Wesley, 1996
125 Advanced Compiler Design and Implementation
126 Steven Muchnick
127 Morgan Kaufmann, 1997
129 Building an Optimizing Compiler
130 Robert Morgan
131 Digital Press, 1998
133 People wishing to speed up the code here should read:
134 Elimination Algorithms for Data Flow Analysis
135 B.G. Ryder, M.C. Paull
136 ACM Computing Surveys, Vol. 18, Num. 3, Sep. 1986
138 How to Analyze Large Programs Efficiently and Informatively
139 D.M. Dhamdhere, B.K. Rosen, F.K. Zadeck
140 ACM SIGPLAN Notices Vol. 27, Num. 7, Jul. 1992, '92 Conference on PLDI
142 People wishing to do something different can find various possibilities
143 in the above papers and elsewhere.
146 #include "config.h"
147 #include "system.h"
148 #include "coretypes.h"
149 #include "tm.h"
150 #include "toplev.h"
152 #include "rtl.h"
153 #include "tm_p.h"
154 #include "regs.h"
155 #include "hard-reg-set.h"
156 #include "flags.h"
157 #include "real.h"
158 #include "insn-config.h"
159 #include "recog.h"
160 #include "basic-block.h"
161 #include "output.h"
162 #include "function.h"
163 #include "expr.h"
164 #include "except.h"
165 #include "ggc.h"
166 #include "params.h"
167 #include "cselib.h"
169 #include "obstack.h"
171 /* Propagate flow information through back edges and thus enable PRE's
172 moving loop invariant calculations out of loops.
174 Originally this tended to create worse overall code, but several
175 improvements during the development of PRE seem to have made following
176 back edges generally a win.
178 Note much of the loop invariant code motion done here would normally
179 be done by loop.c, which has more heuristics for when to move invariants
180 out of loops. At some point we might need to move some of those
181 heuristics into gcse.c. */
183 /* We support GCSE via Partial Redundancy Elimination. PRE optimizations
184 are a superset of those done by GCSE.
186 We perform the following steps:
188 1) Compute basic block information.
190 2) Compute table of places where registers are set.
192 3) Perform copy/constant propagation.
194 4) Perform global cse.
196 5) Perform another pass of copy/constant propagation.
198 Two passes of copy/constant propagation are done because the first one
199 enables more GCSE and the second one helps to clean up the copies that
200 GCSE creates. This is needed more for PRE than for Classic because Classic
201 GCSE will try to use an existing register containing the common
202 subexpression rather than create a new one. This is harder to do for PRE
203 because of the code motion (which Classic GCSE doesn't do).
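
   For instance (register numbers hypothetical), one effect of a cprop pass is

     r7 = 4                        r7 = 4
     r8 = r7 + r2     becomes      r8 = 4 + r2

   and such substitutions can make computations in different blocks textually
   identical, which is what enables the extra GCSE.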
205 Expressions we are interested in GCSE-ing are of the form
206 (set (pseudo-reg) (expression)).
207 Function want_to_gcse_p says what these are.
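
   For example (register numbers hypothetical), a typical candidate is the
   SET_SRC of an insn such as

     (set (reg:SI 100) (plus:SI (reg:SI 101) (const_int 4)))

   whereas plain register copies and constant loads, e.g.
   (set (reg:SI 100) (reg:SI 101)), are left to copy/constant propagation.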
209 PRE handles moving invariant expressions out of loops (by treating them as
210 partially redundant).
212 Eventually it would be nice to replace cse.c/gcse.c with SSA (static single
213 assignment) based GVN (global value numbering). L. T. Simpson's paper
214 (Rice University) on value numbering is a useful reference for this.
216 **********************
218 We used to support multiple passes but there are diminishing returns in
219 doing so. The first pass usually makes 90% of the changes that are doable.
220 A second pass can make a few more changes made possible by the first pass.
221 Experiments show any further passes don't make enough changes to justify
222 the expense.
224 A study of spec92 using an unlimited number of passes:
225 [1 pass] = 1208 substitutions, [2] = 577, [3] = 202, [4] = 192, [5] = 83,
226 [6] = 34, [7] = 17, [8] = 9, [9] = 4, [10] = 4, [11] = 2,
227 [12] = 2, [13] = 1, [15] = 1, [16] = 2, [41] = 1
229 It was found that doing copy propagation between each pass enables further
230 substitutions.
232 PRE is quite expensive in complicated functions because the DFA can take
233 a while to converge. Hence we only perform one pass. The parameter max-gcse-passes can
234 be modified if one wants to experiment.
236 **********************
238 The steps for PRE are:
240 1) Build the hash table of expressions we wish to GCSE (expr_hash_table).
242 2) Perform the data flow analysis for PRE.
244 3) Delete the redundant instructions
246 4) Insert the required copies [if any] that make the partially
247 redundant instructions fully redundant.
249 5) For other reaching expressions, insert an instruction to copy the value
250 to a newly created pseudo that will reach the redundant instruction.
252 The deletion is done first so that when we do insertions we
253 know which pseudo reg to use.
255 Various papers have argued that PRE DFA is expensive (O(n^2)) and others
256 argue it is not. The number of iterations for the algorithm to converge
257 is typically 2-4 so I don't view it as that expensive (relatively speaking).
259 PRE GCSE depends heavily on the second CSE pass to clean up the copies
260 we create. To make an expression reach the place where it's redundant,
261 the result of the expression is copied to a new register, and the redundant
262 expression is deleted by replacing it with this new register. Classic GCSE
263 doesn't have this problem as much as it computes the reaching defs of
264 each register in each block and thus can try to use an existing register.
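
   As a hypothetical illustration of steps 3-5 (block and register numbers
   made up), suppose r2 + r3 is computed in block B1 and again in block B3,
   which is reached both from B1 and from a block B2 that does not compute it:

     before PRE                      after PRE

     B1: r5 = r2 + r3                B1: r5 = r2 + r3
                                         r9 = r5            [copy, step 5]
     B2: ...                         B2: r9 = r2 + r3       [insertion, step 4]
     B3: r7 = r2 + r3                B3: r7 = r9            [deletion, step 3]

   Once B2 computes the value into the new pseudo r9, the computation in B3
   is fully redundant and becomes a copy; the later cprop pass cleans up the
   copies this creates.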
266 **********************
268 A fair bit of simplicity comes from creating small functions for simple
269 tasks, even when the function is only called in one place. This may
270 measurably slow things down [or may not] by creating more function call
271 overhead than is necessary. The source is laid out so that it's trivial
272 to make the affected functions inline, so that one can measure what speedup,
273 if any, can be achieved, and maybe later, when things settle, things can
274 be rearranged.
276 Help stamp out big monolithic functions! */
278 /* GCSE global vars. */
280 /* -dG dump file. */
281 static FILE *gcse_file;
283 /* Note whether or not we should run jump optimization after gcse. We
284 want to do this for two cases.
286 * If we changed any jumps via cprop.
288 * If we added any labels via edge splitting. */
290 static int run_jump_opt_after_gcse;
292 /* Bitmaps are normally not included in debugging dumps.
293 However it's useful to be able to print them from GDB.
294 We could create special functions for this, but it's simpler to
295 just allow passing stderr to the dump_foo fns. Since stderr can
296 be a macro, we store a copy here. */
297 static FILE *debug_stderr;
299 /* An obstack for our working variables. */
300 static struct obstack gcse_obstack;
302 /* Nonzero for each mode that supports (set (reg) (reg)).
303 This is trivially true for integer and floating point values.
304 It may or may not be true for condition codes. */
305 static char can_copy_p[(int) NUM_MACHINE_MODES];
307 /* Nonzero if can_copy_p has been initialized. */
308 static int can_copy_init_p;
310 struct reg_use {rtx reg_rtx; };
312 /* Hash table of expressions. */
314 struct expr
316 /* The expression (SET_SRC for expressions, PATTERN for assignments). */
317 rtx expr;
318 /* Index in the available expression bitmaps. */
319 int bitmap_index;
320 /* Next entry with the same hash. */
321 struct expr *next_same_hash;
322 /* List of anticipatable occurrences in basic blocks in the function.
323 An "anticipatable occurrence" is one that is the first occurrence in the
324 basic block, the operands are not modified in the basic block prior
325 to the occurrence and the output is not used between the start of
326 the block and the occurrence. */
327 struct occr *antic_occr;
328 /* List of available occurrences in basic blocks in the function.
329 An "available occurrence" is one that is the last occurrence in the
330 basic block and the operands are not modified by following statements in
331 the basic block [including this insn]. */
332 struct occr *avail_occr;
333 /* Non-null if the computation is PRE redundant.
334 The value is the newly created pseudo-reg to record a copy of the
335 expression in all the places that reach the redundant copy. */
336 rtx reaching_reg;
339 /* Occurrence of an expression.
340 There is one per basic block. If a pattern appears more than once the
341 last appearance is used [or first for anticipatable expressions]. */
343 struct occr
345 /* Next occurrence of this expression. */
346 struct occr *next;
347 /* The insn that computes the expression. */
348 rtx insn;
349 /* Nonzero if this [anticipatable] occurrence has been deleted. */
350 char deleted_p;
351 /* Nonzero if this [available] occurrence has been copied to
352 reaching_reg. */
353 /* ??? This is mutually exclusive with deleted_p, so they could share
354 the same byte. */
355 char copied_p;
358 /* Expression and copy propagation hash tables.
359 Each hash table is an array of buckets.
360 ??? It is known that if it were an array of entries, structure elements
361 `next_same_hash' and `bitmap_index' wouldn't be necessary. However, it is
362 not clear whether in the final analysis a sufficient amount of memory would
363 be saved as the size of the available expression bitmaps would be larger
364 [one could build a mapping table without holes afterwards though].
365 Someday I'll perform the computation and figure it out. */
367 struct hash_table
369 /* The table itself.
370 This is an array of `expr_hash_table_size' elements. */
371 struct expr **table;
373 /* Size of the hash table, in elements. */
374 unsigned int size;
376 /* Number of hash table elements. */
377 unsigned int n_elems;
379 /* Whether the table is the expression hash table or the copy propagation one. */
380 int set_p;
383 /* Expression hash table. */
384 static struct hash_table expr_hash_table;
386 /* Copy propagation hash table. */
387 static struct hash_table set_hash_table;
389 /* Mapping of uids to cuids.
390 Only real insns get cuids. */
391 static int *uid_cuid;
393 /* Highest UID in UID_CUID. */
394 static int max_uid;
396 /* Get the cuid of an insn. */
397 #ifdef ENABLE_CHECKING
398 #define INSN_CUID(INSN) (INSN_UID (INSN) > max_uid ? (abort (), 0) : uid_cuid[INSN_UID (INSN)])
399 #else
400 #define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])
401 #endif
403 /* Number of cuids. */
404 static int max_cuid;
406 /* Mapping of cuids to insns. */
407 static rtx *cuid_insn;
409 /* Get insn from cuid. */
410 #define CUID_INSN(CUID) (cuid_insn[CUID])
412 /* Maximum register number in function prior to doing gcse + 1.
413 Registers created during this pass have regno >= max_gcse_regno.
414 This is named with "gcse" to not collide with global of same name. */
415 static unsigned int max_gcse_regno;
417 /* Table of registers that are modified.
419 For each register, each element is a list of places where the pseudo-reg
420 is set.
422 For simplicity, GCSE is done on sets of pseudo-regs only. PRE GCSE only
423 requires knowledge of which blocks kill which regs [and thus could use
424 a bitmap instead of the lists `reg_set_table' uses].
426 `reg_set_table' could be turned into an array of bitmaps (num-bbs x
427 num-regs) [however perhaps it may be useful to keep the data as is]. One
428 advantage of recording things this way is that `reg_set_table' is fairly
429 sparse with respect to pseudo regs but for hard regs could be fairly dense
430 [relatively speaking]. And recording sets of pseudo-regs in lists speeds
431 up functions like compute_transp since in the case of pseudo-regs we only
432 need to iterate over the number of times a pseudo-reg is set, not over the
433 number of basic blocks [clearly there is a bit of a slow down in the cases
434 where a pseudo is set more than once in a block, however it is believed
435 that the net effect is to speed things up]. This isn't done for hard-regs
436 because recording call-clobbered hard-regs in `reg_set_table' at each
437 function call can consume a fair bit of memory, and iterating over
438 hard-regs stored this way in compute_transp will be more expensive. */
440 typedef struct reg_set
442 /* The next setting of this register. */
443 struct reg_set *next;
444 /* The insn where it was set. */
445 rtx insn;
446 } reg_set;
448 static reg_set **reg_set_table;
450 /* Size of `reg_set_table'.
451 The table starts out at max_gcse_regno + slop, and is enlarged as
452 necessary. */
453 static int reg_set_table_size;
455 /* Amount to grow `reg_set_table' by when it's full. */
456 #define REG_SET_TABLE_SLOP 100
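
/* As a sketch of how the table above is consumed (for example by
   compute_transp), walking every recorded setting of pseudo-register REGNO
   looks roughly like:

     struct reg_set *r;
     for (r = reg_set_table[REGNO]; r != NULL; r = r->next)
       ... examine r->insn or BLOCK_NUM (r->insn) ...                        */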
458 /* This is a list of expressions which are MEMs and will be used by load
459 or store motion.
460 Load motion tracks MEMs which aren't killed by
461 anything except themselves (i.e., loads and stores to a single location).
462 We can then allow movement of these MEM refs with a little special
463 allowance. (all stores copy the same value to the reaching reg used
464 for the loads). This means all values used to store into memory must have
465 no side effects so we can re-issue the setter value.
466 Store Motion uses this structure as an expression table to track stores
467 which look interesting, and might be moveable towards the exit block. */
469 struct ls_expr
471 struct expr * expr; /* Gcse expression reference for LM. */
472 rtx pattern; /* Pattern of this mem. */
473 rtx loads; /* INSN list of loads seen. */
474 rtx stores; /* INSN list of stores seen. */
475 struct ls_expr * next; /* Next in the list. */
476 int invalid; /* Invalid for some reason. */
477 int index; /* If it maps to a bitmap index. */
478 int hash_index; /* Index when in a hash table. */
479 rtx reaching_reg; /* Register to use when re-writing. */
482 /* Head of the list of load/store memory refs. */
483 static struct ls_expr * pre_ldst_mems = NULL;
485 /* Bitmap containing one bit for each register in the program.
486 Used when performing GCSE to track which registers have been set since
487 the start of the basic block. */
488 static regset reg_set_bitmap;
490 /* For each block, a bitmap of registers set in the block.
491 This is used by expr_killed_p and compute_transp.
492 It is computed during hash table computation and not by compute_sets
493 as it includes registers added since the last pass (or between cprop and
494 gcse) and it's currently not easy to realloc sbitmap vectors. */
495 static sbitmap *reg_set_in_block;
497 /* Array, indexed by basic block number for a list of insns which modify
498 memory within that block. */
499 static rtx * modify_mem_list;
500 bitmap modify_mem_list_set;
502 /* This array parallels modify_mem_list, but is kept canonicalized. */
503 static rtx * canon_modify_mem_list;
504 bitmap canon_modify_mem_list_set;
505 /* Various variables for statistics gathering. */
507 /* Memory used in a pass.
508 This isn't intended to be absolutely precise. Its intent is only
509 to keep an eye on memory usage. */
510 static int bytes_used;
512 /* GCSE substitutions made. */
513 static int gcse_subst_count;
514 /* Number of copy instructions created. */
515 static int gcse_create_count;
516 /* Number of constants propagated. */
517 static int const_prop_count;
518 /* Number of copies propagated. */
519 static int copy_prop_count;
521 /* These variables are used by classic GCSE.
522 Normally they'd be defined a bit later, but `rd_gen' needs to
523 be declared sooner. */
525 /* Each block has a bitmap of each type.
526 The length of each block's bitmap is:
528 max_cuid - for reaching definitions
529 n_exprs - for available expressions
531 Thus we view the bitmaps as 2 dimensional arrays. i.e.
532 rd_kill[block_num][cuid_num]
533 ae_kill[block_num][expr_num] */
535 /* For reaching defs */
536 static sbitmap *rd_kill, *rd_gen, *reaching_defs, *rd_out;
538 /* for available exprs */
539 static sbitmap *ae_kill, *ae_gen, *ae_in, *ae_out;
541 /* Objects of this type are passed around by the null-pointer check
542 removal routines. */
543 struct null_pointer_info
545 /* The basic block being processed. */
546 basic_block current_block;
547 /* The first register to be handled in this pass. */
548 unsigned int min_reg;
549 /* One greater than the last register to be handled in this pass. */
550 unsigned int max_reg;
551 sbitmap *nonnull_local;
552 sbitmap *nonnull_killed;
555 static void compute_can_copy PARAMS ((void));
556 static char *gmalloc PARAMS ((unsigned int));
557 static char *grealloc PARAMS ((char *, unsigned int));
558 static char *gcse_alloc PARAMS ((unsigned long));
559 static void alloc_gcse_mem PARAMS ((rtx));
560 static void free_gcse_mem PARAMS ((void));
561 static void alloc_reg_set_mem PARAMS ((int));
562 static void free_reg_set_mem PARAMS ((void));
563 static int get_bitmap_width PARAMS ((int, int, int));
564 static void record_one_set PARAMS ((int, rtx));
565 static void record_set_info PARAMS ((rtx, rtx, void *));
566 static void compute_sets PARAMS ((rtx));
567 static void hash_scan_insn PARAMS ((rtx, struct hash_table *, int));
568 static void hash_scan_set PARAMS ((rtx, rtx, struct hash_table *));
569 static void hash_scan_clobber PARAMS ((rtx, rtx, struct hash_table *));
570 static void hash_scan_call PARAMS ((rtx, rtx, struct hash_table *));
571 static int want_to_gcse_p PARAMS ((rtx));
572 static int oprs_unchanged_p PARAMS ((rtx, rtx, int));
573 static int oprs_anticipatable_p PARAMS ((rtx, rtx));
574 static int oprs_available_p PARAMS ((rtx, rtx));
575 static void insert_expr_in_table PARAMS ((rtx, enum machine_mode, rtx,
576 int, int, struct hash_table *));
577 static void insert_set_in_table PARAMS ((rtx, rtx, struct hash_table *));
578 static unsigned int hash_expr PARAMS ((rtx, enum machine_mode, int *, int));
579 static unsigned int hash_expr_1 PARAMS ((rtx, enum machine_mode, int *));
580 static unsigned int hash_string_1 PARAMS ((const char *));
581 static unsigned int hash_set PARAMS ((int, int));
582 static int expr_equiv_p PARAMS ((rtx, rtx));
583 static void record_last_reg_set_info PARAMS ((rtx, int));
584 static void record_last_mem_set_info PARAMS ((rtx));
585 static void record_last_set_info PARAMS ((rtx, rtx, void *));
586 static void compute_hash_table PARAMS ((struct hash_table *));
587 static void alloc_hash_table PARAMS ((int, struct hash_table *, int));
588 static void free_hash_table PARAMS ((struct hash_table *));
589 static void compute_hash_table_work PARAMS ((struct hash_table *));
590 static void dump_hash_table PARAMS ((FILE *, const char *,
591 struct hash_table *));
592 static struct expr *lookup_expr PARAMS ((rtx, struct hash_table *));
593 static struct expr *lookup_set PARAMS ((unsigned int, rtx, struct hash_table *));
594 static struct expr *next_set PARAMS ((unsigned int, struct expr *));
595 static void reset_opr_set_tables PARAMS ((void));
596 static int oprs_not_set_p PARAMS ((rtx, rtx));
597 static void mark_call PARAMS ((rtx));
598 static void mark_set PARAMS ((rtx, rtx));
599 static void mark_clobber PARAMS ((rtx, rtx));
600 static void mark_oprs_set PARAMS ((rtx));
601 static void alloc_cprop_mem PARAMS ((int, int));
602 static void free_cprop_mem PARAMS ((void));
603 static void compute_transp PARAMS ((rtx, int, sbitmap *, int));
604 static void compute_transpout PARAMS ((void));
605 static void compute_local_properties PARAMS ((sbitmap *, sbitmap *, sbitmap *,
606 struct hash_table *));
607 static void compute_cprop_data PARAMS ((void));
608 static void find_used_regs PARAMS ((rtx *, void *));
609 static int try_replace_reg PARAMS ((rtx, rtx, rtx));
610 static struct expr *find_avail_set PARAMS ((int, rtx));
611 static int cprop_jump PARAMS ((basic_block, rtx, rtx, rtx, rtx));
612 static void mems_conflict_for_gcse_p PARAMS ((rtx, rtx, void *));
613 static int load_killed_in_block_p PARAMS ((basic_block, int, rtx, int));
614 static void canon_list_insert PARAMS ((rtx, rtx, void *));
615 static int cprop_insn PARAMS ((rtx, int));
616 static int cprop PARAMS ((int));
617 static int one_cprop_pass PARAMS ((int, int));
618 static bool constprop_register PARAMS ((rtx, rtx, rtx, int));
619 static struct expr *find_bypass_set PARAMS ((int, int));
620 static int bypass_block PARAMS ((basic_block, rtx, rtx));
621 static int bypass_conditional_jumps PARAMS ((void));
622 static void alloc_pre_mem PARAMS ((int, int));
623 static void free_pre_mem PARAMS ((void));
624 static void compute_pre_data PARAMS ((void));
625 static int pre_expr_reaches_here_p PARAMS ((basic_block, struct expr *,
626 basic_block));
627 static void insert_insn_end_bb PARAMS ((struct expr *, basic_block, int));
628 static void pre_insert_copy_insn PARAMS ((struct expr *, rtx));
629 static void pre_insert_copies PARAMS ((void));
630 static int pre_delete PARAMS ((void));
631 static int pre_gcse PARAMS ((void));
632 static int one_pre_gcse_pass PARAMS ((int));
633 static void add_label_notes PARAMS ((rtx, rtx));
634 static void alloc_code_hoist_mem PARAMS ((int, int));
635 static void free_code_hoist_mem PARAMS ((void));
636 static void compute_code_hoist_vbeinout PARAMS ((void));
637 static void compute_code_hoist_data PARAMS ((void));
638 static int hoist_expr_reaches_here_p PARAMS ((basic_block, int, basic_block,
639 char *));
640 static void hoist_code PARAMS ((void));
641 static int one_code_hoisting_pass PARAMS ((void));
642 static void alloc_rd_mem PARAMS ((int, int));
643 static void free_rd_mem PARAMS ((void));
644 static void handle_rd_kill_set PARAMS ((rtx, int, basic_block));
645 static void compute_kill_rd PARAMS ((void));
646 static void compute_rd PARAMS ((void));
647 static void alloc_avail_expr_mem PARAMS ((int, int));
648 static void free_avail_expr_mem PARAMS ((void));
649 static void compute_ae_gen PARAMS ((struct hash_table *));
650 static int expr_killed_p PARAMS ((rtx, basic_block));
651 static void compute_ae_kill PARAMS ((sbitmap *, sbitmap *, struct hash_table *));
652 static int expr_reaches_here_p PARAMS ((struct occr *, struct expr *,
653 basic_block, int));
654 static rtx computing_insn PARAMS ((struct expr *, rtx));
655 static int def_reaches_here_p PARAMS ((rtx, rtx));
656 static int can_disregard_other_sets PARAMS ((struct reg_set **, rtx, int));
657 static int handle_avail_expr PARAMS ((rtx, struct expr *));
658 static int classic_gcse PARAMS ((void));
659 static int one_classic_gcse_pass PARAMS ((int));
660 static void invalidate_nonnull_info PARAMS ((rtx, rtx, void *));
661 static int delete_null_pointer_checks_1 PARAMS ((unsigned int *,
662 sbitmap *, sbitmap *,
663 struct null_pointer_info *));
664 static rtx process_insert_insn PARAMS ((struct expr *));
665 static int pre_edge_insert PARAMS ((struct edge_list *, struct expr **));
666 static int expr_reaches_here_p_work PARAMS ((struct occr *, struct expr *,
667 basic_block, int, char *));
668 static int pre_expr_reaches_here_p_work PARAMS ((basic_block, struct expr *,
669 basic_block, char *));
670 static struct ls_expr * ldst_entry PARAMS ((rtx));
671 static void free_ldst_entry PARAMS ((struct ls_expr *));
672 static void free_ldst_mems PARAMS ((void));
673 static void print_ldst_list PARAMS ((FILE *));
674 static struct ls_expr * find_rtx_in_ldst PARAMS ((rtx));
675 static int enumerate_ldsts PARAMS ((void));
676 static inline struct ls_expr * first_ls_expr PARAMS ((void));
677 static inline struct ls_expr * next_ls_expr PARAMS ((struct ls_expr *));
678 static int simple_mem PARAMS ((rtx));
679 static void invalidate_any_buried_refs PARAMS ((rtx));
680 static void compute_ld_motion_mems PARAMS ((void));
681 static void trim_ld_motion_mems PARAMS ((void));
682 static void update_ld_motion_stores PARAMS ((struct expr *));
683 static void reg_set_info PARAMS ((rtx, rtx, void *));
684 static int store_ops_ok PARAMS ((rtx, basic_block));
685 static void find_moveable_store PARAMS ((rtx));
686 static int compute_store_table PARAMS ((void));
687 static int load_kills_store PARAMS ((rtx, rtx));
688 static int find_loads PARAMS ((rtx, rtx));
689 static int store_killed_in_insn PARAMS ((rtx, rtx));
690 static int store_killed_after PARAMS ((rtx, rtx, basic_block));
691 static int store_killed_before PARAMS ((rtx, rtx, basic_block));
692 static void build_store_vectors PARAMS ((void));
693 static void insert_insn_start_bb PARAMS ((rtx, basic_block));
694 static int insert_store PARAMS ((struct ls_expr *, edge));
695 static void replace_store_insn PARAMS ((rtx, rtx, basic_block));
696 static void delete_store PARAMS ((struct ls_expr *,
697 basic_block));
698 static void free_store_memory PARAMS ((void));
699 static void store_motion PARAMS ((void));
700 static void free_insn_expr_list_list PARAMS ((rtx *));
701 static void clear_modify_mem_tables PARAMS ((void));
702 static void free_modify_mem_tables PARAMS ((void));
703 static rtx gcse_emit_move_after PARAMS ((rtx, rtx, rtx));
704 static bool do_local_cprop PARAMS ((rtx, rtx, int, rtx*));
705 static bool adjust_libcall_notes PARAMS ((rtx, rtx, rtx, rtx*));
706 static void local_cprop_pass PARAMS ((int));
708 /* Entry point for global common subexpression elimination.
709 F is the first instruction in the function. */
712 gcse_main (f, file)
713 rtx f;
714 FILE *file;
716 int changed, pass;
717 /* Bytes used at start of pass. */
718 int initial_bytes_used;
719 /* Maximum number of bytes used by a pass. */
720 int max_pass_bytes;
721 /* Point to release obstack data from for each pass. */
722 char *gcse_obstack_bottom;
724 /* We do not construct an accurate cfg in functions which call
725 setjmp, so just punt to be safe. */
726 if (current_function_calls_setjmp)
727 return 0;
729 /* Assume that we do not need to run jump optimizations after gcse. */
730 run_jump_opt_after_gcse = 0;
732 /* For calling dump_foo fns from gdb. */
733 debug_stderr = stderr;
734 gcse_file = file;
736 /* Identify the basic block information for this function, including
737 successors and predecessors. */
738 max_gcse_regno = max_reg_num ();
740 if (file)
741 dump_flow_info (file);
743 /* Return if there's nothing to do. */
744 if (n_basic_blocks <= 1)
745 return 0;
747 /* Trying to perform global optimizations on flow graphs which have
748 a high connectivity will take a long time and is unlikely to be
749 particularly useful.
751 In normal circumstances a cfg should have about twice as many edges
752 as blocks. But we do not want to punish small functions which have
753 a couple switch statements. So we require a relatively large number
754 of basic blocks and the ratio of edges to blocks to be high. */
755 if (n_basic_blocks > 1000 && n_edges / n_basic_blocks >= 20)
757 if (warn_disabled_optimization)
758 warning ("GCSE disabled: %d > 1000 basic blocks and %d >= 20 edges/basic block",
759 n_basic_blocks, n_edges / n_basic_blocks);
760 return 0;
763 /* If allocating memory for the cprop bitmap would take up too much
764 storage it's better just to disable the optimization. */
765 if ((n_basic_blocks
766 * SBITMAP_SET_SIZE (max_gcse_regno)
767 * sizeof (SBITMAP_ELT_TYPE)) > MAX_GCSE_MEMORY)
769 if (warn_disabled_optimization)
770 warning ("GCSE disabled: %d basic blocks and %d registers",
771 n_basic_blocks, max_gcse_regno);
773 return 0;
776 /* See what modes support reg/reg copy operations. */
777 if (! can_copy_init_p)
779 compute_can_copy ();
780 can_copy_init_p = 1;
783 gcc_obstack_init (&gcse_obstack);
784 bytes_used = 0;
786 /* We need alias. */
787 init_alias_analysis ();
788 /* Record where pseudo-registers are set. This data is kept accurate
789 during each pass. ??? We could also record hard-reg information here
790 [since it's unchanging], however it is currently done during hash table
791 computation.
793 It may be tempting to compute MEM set information here too, but MEM sets
794 will be subject to code motion one day and thus we need to compute
795 information about memory sets when we build the hash tables. */
797 alloc_reg_set_mem (max_gcse_regno);
798 compute_sets (f);
800 pass = 0;
801 initial_bytes_used = bytes_used;
802 max_pass_bytes = 0;
803 gcse_obstack_bottom = gcse_alloc (1);
804 changed = 1;
805 while (changed && pass < MAX_GCSE_PASSES)
807 changed = 0;
808 if (file)
809 fprintf (file, "GCSE pass %d\n\n", pass + 1);
811 /* Initialize bytes_used to the space for the pred/succ lists,
812 and the reg_set_table data. */
813 bytes_used = initial_bytes_used;
815 /* Each pass may create new registers, so recalculate each time. */
816 max_gcse_regno = max_reg_num ();
818 alloc_gcse_mem (f);
820 /* Don't allow constant propagation to modify jumps
821 during this pass. */
822 changed = one_cprop_pass (pass + 1, 0);
824 if (optimize_size)
825 changed |= one_classic_gcse_pass (pass + 1);
826 else
828 changed |= one_pre_gcse_pass (pass + 1);
829 /* We may have just created new basic blocks. Release and
830 recompute various things which are sized on the number of
831 basic blocks. */
832 if (changed)
834 free_modify_mem_tables ();
835 modify_mem_list
836 = (rtx *) gmalloc (last_basic_block * sizeof (rtx));
837 canon_modify_mem_list
838 = (rtx *) gmalloc (last_basic_block * sizeof (rtx));
839 memset ((char *) modify_mem_list, 0, last_basic_block * sizeof (rtx));
840 memset ((char *) canon_modify_mem_list, 0, last_basic_block * sizeof (rtx));
842 free_reg_set_mem ();
843 alloc_reg_set_mem (max_reg_num ());
844 compute_sets (f);
845 run_jump_opt_after_gcse = 1;
848 if (max_pass_bytes < bytes_used)
849 max_pass_bytes = bytes_used;
851 /* Free up memory, then reallocate for code hoisting. We can
852 not re-use the existing allocated memory because the tables
853 will not have info for the insns or registers created by
854 partial redundancy elimination. */
855 free_gcse_mem ();
857 /* It does not make sense to run code hoisting unless we are optimizing
858 for code size -- it rarely makes programs faster, and can make
859 them bigger if we did partial redundancy elimination (when optimizing
860 for space, we use a classic gcse algorithm instead of partial
861 redundancy algorithms). */
862 if (optimize_size)
864 max_gcse_regno = max_reg_num ();
865 alloc_gcse_mem (f);
866 changed |= one_code_hoisting_pass ();
867 free_gcse_mem ();
869 if (max_pass_bytes < bytes_used)
870 max_pass_bytes = bytes_used;
873 if (file)
875 fprintf (file, "\n");
876 fflush (file);
879 obstack_free (&gcse_obstack, gcse_obstack_bottom);
880 pass++;
883 /* Do one last pass of copy propagation, including cprop into
884 conditional jumps. */
886 max_gcse_regno = max_reg_num ();
887 alloc_gcse_mem (f);
888 /* This time, go ahead and allow cprop to alter jumps. */
889 one_cprop_pass (pass + 1, 1);
890 free_gcse_mem ();
892 if (file)
894 fprintf (file, "GCSE of %s: %d basic blocks, ",
895 current_function_name, n_basic_blocks);
896 fprintf (file, "%d pass%s, %d bytes\n\n",
897 pass, pass > 1 ? "es" : "", max_pass_bytes);
900 obstack_free (&gcse_obstack, NULL);
901 free_reg_set_mem ();
902 /* We are finished with alias. */
903 end_alias_analysis ();
904 allocate_reg_info (max_reg_num (), FALSE, FALSE);
906 /* Store motion disabled until it is fixed. */
907 if (0 && !optimize_size && flag_gcse_sm)
908 store_motion ();
910 return run_jump_opt_after_gcse;
913 /* Misc. utilities. */
915 /* Compute which modes support reg/reg copy operations. */
917 static void
918 compute_can_copy ()
920 int i;
921 #ifndef AVOID_CCMODE_COPIES
922 rtx reg, insn;
923 #endif
924 memset (can_copy_p, 0, NUM_MACHINE_MODES);
926 start_sequence ();
927 for (i = 0; i < NUM_MACHINE_MODES; i++)
928 if (GET_MODE_CLASS (i) == MODE_CC)
930 #ifdef AVOID_CCMODE_COPIES
931 can_copy_p[i] = 0;
932 #else
933 reg = gen_rtx_REG ((enum machine_mode) i, LAST_VIRTUAL_REGISTER + 1);
934 insn = emit_insn (gen_rtx_SET (VOIDmode, reg, reg));
935 if (recog (PATTERN (insn), insn, NULL) >= 0)
936 can_copy_p[i] = 1;
937 #endif
939 else
940 can_copy_p[i] = 1;
942 end_sequence ();
945 /* Cover function to xmalloc to record bytes allocated. */
947 static char *
948 gmalloc (size)
949 unsigned int size;
951 bytes_used += size;
952 return xmalloc (size);
955 /* Cover function to xrealloc.
956 We don't record the additional size since we don't know it.
957 It won't affect memory usage stats much anyway. */
959 static char *
960 grealloc (ptr, size)
961 char *ptr;
962 unsigned int size;
964 return xrealloc (ptr, size);
967 /* Cover function to obstack_alloc. */
969 static char *
970 gcse_alloc (size)
971 unsigned long size;
973 bytes_used += size;
974 return (char *) obstack_alloc (&gcse_obstack, size);
977 /* Allocate memory for the cuid mapping array,
978 and reg/memory set tracking tables.
980 This is called at the start of each pass. */
982 static void
983 alloc_gcse_mem (f)
984 rtx f;
986 int i, n;
987 rtx insn;
989 /* Find the largest UID and create a mapping from UIDs to CUIDs.
990 CUIDs are like UIDs except they increase monotonically, have no gaps,
991 and only apply to real insns. */
993 max_uid = get_max_uid ();
994 n = (max_uid + 1) * sizeof (int);
995 uid_cuid = (int *) gmalloc (n);
996 memset ((char *) uid_cuid, 0, n);
997 for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
999 if (INSN_P (insn))
1000 uid_cuid[INSN_UID (insn)] = i++;
1001 else
1002 uid_cuid[INSN_UID (insn)] = i;
1005 /* Create a table mapping cuids to insns. */
1007 max_cuid = i;
1008 n = (max_cuid + 1) * sizeof (rtx);
1009 cuid_insn = (rtx *) gmalloc (n);
1010 memset ((char *) cuid_insn, 0, n);
1011 for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
1012 if (INSN_P (insn))
1013 CUID_INSN (i++) = insn;
1015 /* Allocate vars to track sets of regs. */
1016 reg_set_bitmap = BITMAP_XMALLOC ();
1018 /* Allocate vars to track sets of regs, memory per block. */
1019 reg_set_in_block = (sbitmap *) sbitmap_vector_alloc (last_basic_block,
1020 max_gcse_regno);
1021 /* Allocate array to keep a list of insns which modify memory in each
1022 basic block. */
1023 modify_mem_list = (rtx *) gmalloc (last_basic_block * sizeof (rtx));
1024 canon_modify_mem_list = (rtx *) gmalloc (last_basic_block * sizeof (rtx));
1025 memset ((char *) modify_mem_list, 0, last_basic_block * sizeof (rtx));
1026 memset ((char *) canon_modify_mem_list, 0, last_basic_block * sizeof (rtx));
1027 modify_mem_list_set = BITMAP_XMALLOC ();
1028 canon_modify_mem_list_set = BITMAP_XMALLOC ();
1031 /* Free memory allocated by alloc_gcse_mem. */
1033 static void
1034 free_gcse_mem ()
1036 free (uid_cuid);
1037 free (cuid_insn);
1039 BITMAP_XFREE (reg_set_bitmap);
1041 sbitmap_vector_free (reg_set_in_block);
1042 free_modify_mem_tables ();
1043 BITMAP_XFREE (modify_mem_list_set);
1044 BITMAP_XFREE (canon_modify_mem_list_set);
1047 /* Many of the global optimization algorithms work by solving dataflow
1048 equations for various expressions. Initially, some local value is
1049 computed for each expression in each block. Then, the values across the
1050 various blocks are combined (by following flow graph edges) to arrive at
1051 global values. Conceptually, each set of equations is independent. We
1052 may therefore solve all the equations in parallel, solve them one at a
1053 time, or pick any intermediate approach.
1055 When you're going to need N two-dimensional bitmaps, each X (say, the
1056 number of blocks) by Y (say, the number of expressions), call this
1057 function. It's not important what X and Y represent; only that Y
1058 correspond to the things that can be done in parallel. This function will
1059 return an appropriate chunking factor C; you should solve C sets of
1060 equations in parallel. By going through this function, we can easily
1061 trade space against time; by solving fewer equations in parallel we use
1062 less space. */
1064 static int
1065 get_bitmap_width (n, x, y)
1066 int n;
1067 int x;
1068 int y;
1070 /* It's not really worth figuring out *exactly* how much memory will
1071 be used by a particular choice. The important thing is to get
1072 something approximately right. */
1073 size_t max_bitmap_memory = 10 * 1024 * 1024;
1075 /* The number of bytes we'd use for a single column of minimum
1076 width. */
1077 size_t column_size = n * x * sizeof (SBITMAP_ELT_TYPE);
1079 /* Often, it's reasonable just to solve all the equations in
1080 parallel. */
1081 if (column_size * SBITMAP_SET_SIZE (y) <= max_bitmap_memory)
1082 return y;
1084 /* Otherwise, pick the largest width we can, without going over the
1085 limit. */
1086 return SBITMAP_ELT_BITS * ((max_bitmap_memory + column_size - 1)
1087 / column_size);
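
/* A usage sketch (caller and helper names hypothetical): a pass that needs
   four blocks-by-registers bitmaps can ask for a chunking factor and then
   process the registers in chunks of that size:

     regs_per_pass = get_bitmap_width (4, n_basic_blocks, max_reg);
     for (reg = 0; reg < max_reg; reg += regs_per_pass)
       solve_equations_for_regs (reg, MIN (reg + regs_per_pass, max_reg));   */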
1090 /* Compute the local properties of each recorded expression.
1092 Local properties are those that are defined by the block, irrespective of
1093 other blocks.
1095 An expression is transparent in a block if its operands are not modified
1096 in the block.
1098 An expression is computed (locally available) in a block if it is computed
1099 at least once and the expression would contain the same value if the
1100 computation was moved to the end of the block.
1102 An expression is locally anticipatable in a block if it is computed at
1103 least once and the expression would contain the same value if the computation
1104 was moved to the beginning of the block.
1106 We call this routine for cprop, pre and code hoisting. They all compute
1107 basically the same information and thus can easily share this code.
1109 TRANSP, COMP, and ANTLOC are destination sbitmaps for recording local
1110 properties. If NULL, then it is not necessary to compute or record that
1111 particular property.
1113 TABLE controls which hash table to look at. If it is set hash table,
1114 additionally, TRANSP is computed as ~TRANSP, since this is really cprop's
1115 ABSALTERED. */
1117 static void
1118 compute_local_properties (transp, comp, antloc, table)
1119 sbitmap *transp;
1120 sbitmap *comp;
1121 sbitmap *antloc;
1122 struct hash_table *table;
1124 unsigned int i;
1126 /* Initialize any bitmaps that were passed in. */
1127 if (transp)
1129 if (table->set_p)
1130 sbitmap_vector_zero (transp, last_basic_block);
1131 else
1132 sbitmap_vector_ones (transp, last_basic_block);
1135 if (comp)
1136 sbitmap_vector_zero (comp, last_basic_block);
1137 if (antloc)
1138 sbitmap_vector_zero (antloc, last_basic_block);
1140 for (i = 0; i < table->size; i++)
1142 struct expr *expr;
1144 for (expr = table->table[i]; expr != NULL; expr = expr->next_same_hash)
1146 int indx = expr->bitmap_index;
1147 struct occr *occr;
1149 /* The expression is transparent in this block if it is not killed.
1150 We start by assuming all are transparent [none are killed], and
1151 then reset the bits for those that are. */
1152 if (transp)
1153 compute_transp (expr->expr, indx, transp, table->set_p);
1155 /* The occurrences recorded in antic_occr are exactly those that
1156 we want to set to nonzero in ANTLOC. */
1157 if (antloc)
1158 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
1160 SET_BIT (antloc[BLOCK_NUM (occr->insn)], indx);
1162 /* While we're scanning the table, this is a good place to
1163 initialize this. */
1164 occr->deleted_p = 0;
1167 /* The occurrences recorded in avail_occr are exactly those that
1168 we want to set to nonzero in COMP. */
1169 if (comp)
1170 for (occr = expr->avail_occr; occr != NULL; occr = occr->next)
1172 SET_BIT (comp[BLOCK_NUM (occr->insn)], indx);
1174 /* While we're scanning the table, this is a good place to
1175 initialize this. */
1176 occr->copied_p = 0;
1179 /* While we're scanning the table, this is a good place to
1180 initialize this. */
1181 expr->reaching_reg = 0;
1186 /* Register set information.
1188 `reg_set_table' records where each register is set or otherwise
1189 modified. */
1191 static struct obstack reg_set_obstack;
1193 static void
1194 alloc_reg_set_mem (n_regs)
1195 int n_regs;
1197 unsigned int n;
1199 reg_set_table_size = n_regs + REG_SET_TABLE_SLOP;
1200 n = reg_set_table_size * sizeof (struct reg_set *);
1201 reg_set_table = (struct reg_set **) gmalloc (n);
1202 memset ((char *) reg_set_table, 0, n);
1204 gcc_obstack_init (&reg_set_obstack);
1207 static void
1208 free_reg_set_mem ()
1210 free (reg_set_table);
1211 obstack_free (&reg_set_obstack, NULL);
1214 /* Record REGNO in the reg_set table. */
1216 static void
1217 record_one_set (regno, insn)
1218 int regno;
1219 rtx insn;
1221 /* Allocate a new reg_set element and link it onto the list. */
1222 struct reg_set *new_reg_info;
1224 /* If the table isn't big enough, enlarge it. */
1225 if (regno >= reg_set_table_size)
1227 int new_size = regno + REG_SET_TABLE_SLOP;
1229 reg_set_table
1230 = (struct reg_set **) grealloc ((char *) reg_set_table,
1231 new_size * sizeof (struct reg_set *));
1232 memset ((char *) (reg_set_table + reg_set_table_size), 0,
1233 (new_size - reg_set_table_size) * sizeof (struct reg_set *));
1234 reg_set_table_size = new_size;
1237 new_reg_info = (struct reg_set *) obstack_alloc (&reg_set_obstack,
1238 sizeof (struct reg_set));
1239 bytes_used += sizeof (struct reg_set);
1240 new_reg_info->insn = insn;
1241 new_reg_info->next = reg_set_table[regno];
1242 reg_set_table[regno] = new_reg_info;
1245 /* Called from compute_sets via note_stores to handle one SET or CLOBBER in
1246 an insn. The DATA is really the instruction in which the SET is
1247 occurring. */
1249 static void
1250 record_set_info (dest, setter, data)
1251 rtx dest, setter ATTRIBUTE_UNUSED;
1252 void *data;
1254 rtx record_set_insn = (rtx) data;
1256 if (GET_CODE (dest) == REG && REGNO (dest) >= FIRST_PSEUDO_REGISTER)
1257 record_one_set (REGNO (dest), record_set_insn);
1260 /* Scan the function and record each set of each pseudo-register.
1262 This is called once, at the start of the gcse pass. See the comments for
1263 `reg_set_table' for further documentation. */
1265 static void
1266 compute_sets (f)
1267 rtx f;
1269 rtx insn;
1271 for (insn = f; insn != 0; insn = NEXT_INSN (insn))
1272 if (INSN_P (insn))
1273 note_stores (PATTERN (insn), record_set_info, insn);
1276 /* Hash table support. */
1278 struct reg_avail_info
1280 basic_block last_bb;
1281 int first_set;
1282 int last_set;
1285 static struct reg_avail_info *reg_avail_info;
1286 static basic_block current_bb;
1289 /* See whether X, the source of a set, is something we want to consider for
1290 GCSE. */
1292 static GTY(()) rtx test_insn;
1293 static int
1294 want_to_gcse_p (x)
1295 rtx x;
1297 int num_clobbers = 0;
1298 int icode;
1300 switch (GET_CODE (x))
1302 case REG:
1303 case SUBREG:
1304 case CONST_INT:
1305 case CONST_DOUBLE:
1306 case CONST_VECTOR:
1307 case CALL:
1308 return 0;
1310 default:
1311 break;
1314 /* If this is a valid operand, we are OK. If it's VOIDmode, we aren't. */
1315 if (general_operand (x, GET_MODE (x)))
1316 return 1;
1317 else if (GET_MODE (x) == VOIDmode)
1318 return 0;
1320 /* Otherwise, check if we can make a valid insn from it. First initialize
1321 our test insn if we haven't already. */
1322 if (test_insn == 0)
1324 test_insn
1325 = make_insn_raw (gen_rtx_SET (VOIDmode,
1326 gen_rtx_REG (word_mode,
1327 FIRST_PSEUDO_REGISTER * 2),
1328 const0_rtx));
1329 NEXT_INSN (test_insn) = PREV_INSN (test_insn) = 0;
1332 /* Now make an insn like the one we would make when GCSE'ing and see if
1333 valid. */
1334 PUT_MODE (SET_DEST (PATTERN (test_insn)), GET_MODE (x));
1335 SET_SRC (PATTERN (test_insn)) = x;
1336 return ((icode = recog (PATTERN (test_insn), test_insn, &num_clobbers)) >= 0
1337 && (num_clobbers == 0 || ! added_clobbers_hard_reg_p (icode)));
1340 /* Return nonzero if the operands of expression X are unchanged from the
1341 start of INSN's basic block up to but not including INSN (if AVAIL_P == 0),
1342 or from INSN to the end of INSN's basic block (if AVAIL_P != 0). */
1344 static int
1345 oprs_unchanged_p (x, insn, avail_p)
1346 rtx x, insn;
1347 int avail_p;
1349 int i, j;
1350 enum rtx_code code;
1351 const char *fmt;
1353 if (x == 0)
1354 return 1;
1356 code = GET_CODE (x);
1357 switch (code)
1359 case REG:
1361 struct reg_avail_info *info = &reg_avail_info[REGNO (x)];
1363 if (info->last_bb != current_bb)
1364 return 1;
1365 if (avail_p)
1366 return info->last_set < INSN_CUID (insn);
1367 else
1368 return info->first_set >= INSN_CUID (insn);
1371 case MEM:
1372 if (load_killed_in_block_p (current_bb, INSN_CUID (insn),
1373 x, avail_p))
1374 return 0;
1375 else
1376 return oprs_unchanged_p (XEXP (x, 0), insn, avail_p);
1378 case PRE_DEC:
1379 case PRE_INC:
1380 case POST_DEC:
1381 case POST_INC:
1382 case PRE_MODIFY:
1383 case POST_MODIFY:
1384 return 0;
1386 case PC:
1387 case CC0: /*FIXME*/
1388 case CONST:
1389 case CONST_INT:
1390 case CONST_DOUBLE:
1391 case CONST_VECTOR:
1392 case SYMBOL_REF:
1393 case LABEL_REF:
1394 case ADDR_VEC:
1395 case ADDR_DIFF_VEC:
1396 return 1;
1398 default:
1399 break;
1402 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
1404 if (fmt[i] == 'e')
1406 /* If we are about to do the last recursive call needed at this
1407 level, change it into iteration. This function is called enough
1408 to be worth it. */
1409 if (i == 0)
1410 return oprs_unchanged_p (XEXP (x, i), insn, avail_p);
1412 else if (! oprs_unchanged_p (XEXP (x, i), insn, avail_p))
1413 return 0;
1415 else if (fmt[i] == 'E')
1416 for (j = 0; j < XVECLEN (x, i); j++)
1417 if (! oprs_unchanged_p (XVECEXP (x, i, j), insn, avail_p))
1418 return 0;
1421 return 1;
1424 /* Used for communication between mems_conflict_for_gcse_p and
1425 load_killed_in_block_p. Nonzero if mems_conflict_for_gcse_p finds a
1426 conflict between two memory references. */
1427 static int gcse_mems_conflict_p;
1429 /* Used for communication between mems_conflict_for_gcse_p and
1430 load_killed_in_block_p. A memory reference for a load instruction,
1431 mems_conflict_for_gcse_p will see if a memory store conflicts with
1432 this memory load. */
1433 static rtx gcse_mem_operand;
1435 /* DEST is the output of an instruction. If it is a memory reference, and
1436 possibly conflicts with the load found in gcse_mem_operand, then set
1437 gcse_mems_conflict_p to a nonzero value. */
1439 static void
1440 mems_conflict_for_gcse_p (dest, setter, data)
1441 rtx dest, setter ATTRIBUTE_UNUSED;
1442 void *data ATTRIBUTE_UNUSED;
1444 while (GET_CODE (dest) == SUBREG
1445 || GET_CODE (dest) == ZERO_EXTRACT
1446 || GET_CODE (dest) == SIGN_EXTRACT
1447 || GET_CODE (dest) == STRICT_LOW_PART)
1448 dest = XEXP (dest, 0);
1450 /* If DEST is not a MEM, then it will not conflict with the load. Note
1451 that function calls are assumed to clobber memory, but are handled
1452 elsewhere. */
1453 if (GET_CODE (dest) != MEM)
1454 return;
1456 /* If we are setting a MEM in our list of specially recognized MEMs,
1457 don't mark as killed this time. */
1459 if (dest == gcse_mem_operand && pre_ldst_mems != NULL)
1461 if (!find_rtx_in_ldst (dest))
1462 gcse_mems_conflict_p = 1;
1463 return;
1466 if (true_dependence (dest, GET_MODE (dest), gcse_mem_operand,
1467 rtx_addr_varies_p))
1468 gcse_mems_conflict_p = 1;
1471 /* Return nonzero if the expression in X (a memory reference) is killed
1472 in block BB before or after the insn with the CUID in UID_LIMIT.
1473 AVAIL_P is nonzero for kills after UID_LIMIT, and zero for kills
1474 before UID_LIMIT.
1476 To check the entire block, set UID_LIMIT to max_uid + 1 and
1477 AVAIL_P to 0. */
1479 static int
1480 load_killed_in_block_p (bb, uid_limit, x, avail_p)
1481 basic_block bb;
1482 int uid_limit;
1483 rtx x;
1484 int avail_p;
1486 rtx list_entry = modify_mem_list[bb->index];
1487 while (list_entry)
1489 rtx setter;
1490 /* Ignore entries in the list that do not apply. */
1491 if ((avail_p
1492 && INSN_CUID (XEXP (list_entry, 0)) < uid_limit)
1493 || (! avail_p
1494 && INSN_CUID (XEXP (list_entry, 0)) > uid_limit))
1496 list_entry = XEXP (list_entry, 1);
1497 continue;
1500 setter = XEXP (list_entry, 0);
1502 /* If SETTER is a call everything is clobbered. Note that calls
1503 to pure functions are never put on the list, so we need not
1504 worry about them. */
1505 if (GET_CODE (setter) == CALL_INSN)
1506 return 1;
1508 /* SETTER must be an INSN of some kind that sets memory. Call
1509 note_stores to examine each hunk of memory that is modified.
1511 The note_stores interface is pretty limited, so we have to
1512 communicate via global variables. Yuk. */
1513 gcse_mem_operand = x;
1514 gcse_mems_conflict_p = 0;
1515 note_stores (PATTERN (setter), mems_conflict_for_gcse_p, NULL);
1516 if (gcse_mems_conflict_p)
1517 return 1;
1518 list_entry = XEXP (list_entry, 1);
1520 return 0;
1523 /* Return nonzero if the operands of expression X are unchanged from
1524 the start of INSN's basic block up to but not including INSN. */
1526 static int
1527 oprs_anticipatable_p (x, insn)
1528 rtx x, insn;
1530 return oprs_unchanged_p (x, insn, 0);
1533 /* Return nonzero if the operands of expression X are unchanged from
1534 INSN to the end of INSN's basic block. */
1536 static int
1537 oprs_available_p (x, insn)
1538 rtx x, insn;
1540 return oprs_unchanged_p (x, insn, 1);
1543 /* Hash expression X.
1545 MODE is only used if X is a CONST_INT. DO_NOT_RECORD_P is a boolean
1546 indicating if a volatile operand is found or if the expression contains
1547 something we don't want to insert in the table.
1549 ??? One might want to merge this with canon_hash. Later. */
1551 static unsigned int
1552 hash_expr (x, mode, do_not_record_p, hash_table_size)
1553 rtx x;
1554 enum machine_mode mode;
1555 int *do_not_record_p;
1556 int hash_table_size;
1558 unsigned int hash;
1560 *do_not_record_p = 0;
1562 hash = hash_expr_1 (x, mode, do_not_record_p);
1563 return hash % hash_table_size;
1566 /* Hash a string. Just add its bytes up. */
1568 static inline unsigned
1569 hash_string_1 (ps)
1570 const char *ps;
1572 unsigned hash = 0;
1573 const unsigned char *p = (const unsigned char *) ps;
1575 if (p)
1576 while (*p)
1577 hash += *p++;
1579 return hash;
1582 /* Subroutine of hash_expr to do the actual work. */
1584 static unsigned int
1585 hash_expr_1 (x, mode, do_not_record_p)
1586 rtx x;
1587 enum machine_mode mode;
1588 int *do_not_record_p;
1590 int i, j;
1591 unsigned hash = 0;
1592 enum rtx_code code;
1593 const char *fmt;
1595 /* Used to turn recursion into iteration. We can't rely on GCC's
1596 tail-recursion elimination since we need to keep accumulating values
1597 in HASH. */
1599 if (x == 0)
1600 return hash;
1602 repeat:
1603 code = GET_CODE (x);
1604 switch (code)
1606 case REG:
1607 hash += ((unsigned int) REG << 7) + REGNO (x);
1608 return hash;
1610 case CONST_INT:
1611 hash += (((unsigned int) CONST_INT << 7) + (unsigned int) mode
1612 + (unsigned int) INTVAL (x));
1613 return hash;
1615 case CONST_DOUBLE:
1616 /* This is like the general case, except that it only counts
1617 the integers representing the constant. */
1618 hash += (unsigned int) code + (unsigned int) GET_MODE (x);
1619 if (GET_MODE (x) != VOIDmode)
1620 for (i = 2; i < GET_RTX_LENGTH (CONST_DOUBLE); i++)
1621 hash += (unsigned int) XWINT (x, i);
1622 else
1623 hash += ((unsigned int) CONST_DOUBLE_LOW (x)
1624 + (unsigned int) CONST_DOUBLE_HIGH (x));
1625 return hash;
1627 case CONST_VECTOR:
1629 int units;
1630 rtx elt;
1632 units = CONST_VECTOR_NUNITS (x);
1634 for (i = 0; i < units; ++i)
1636 elt = CONST_VECTOR_ELT (x, i);
1637 hash += hash_expr_1 (elt, GET_MODE (elt), do_not_record_p);
1640 return hash;
1643 /* Assume there is only one rtx object for any given label. */
1644 case LABEL_REF:
1645 /* We don't hash on the address of the CODE_LABEL to avoid bootstrap
1646 differences and differences between each stage's debugging dumps. */
1647 hash += (((unsigned int) LABEL_REF << 7)
1648 + CODE_LABEL_NUMBER (XEXP (x, 0)));
1649 return hash;
1651 case SYMBOL_REF:
1653 /* Don't hash on the symbol's address to avoid bootstrap differences.
1654 Different hash values may cause expressions to be recorded in
1655 different orders and thus different registers to be used in the
1656 final assembler. This also avoids differences in the dump files
1657 between various stages. */
1658 unsigned int h = 0;
1659 const unsigned char *p = (const unsigned char *) XSTR (x, 0);
1661 while (*p)
1662 h += (h << 7) + *p++; /* ??? revisit */
1664 hash += ((unsigned int) SYMBOL_REF << 7) + h;
1665 return hash;
1668 case MEM:
1669 if (MEM_VOLATILE_P (x))
1671 *do_not_record_p = 1;
1672 return 0;
1675 hash += (unsigned int) MEM;
1676 /* We used the alias set for hashing, but this is not good, since the alias
1677 set may differ in -fprofile-arcs and -fbranch-probabilities compilations,
1678 causing the profiles to fail to match. */
1679 x = XEXP (x, 0);
1680 goto repeat;
1682 case PRE_DEC:
1683 case PRE_INC:
1684 case POST_DEC:
1685 case POST_INC:
1686 case PC:
1687 case CC0:
1688 case CALL:
1689 case UNSPEC_VOLATILE:
1690 *do_not_record_p = 1;
1691 return 0;
1693 case ASM_OPERANDS:
1694 if (MEM_VOLATILE_P (x))
1696 *do_not_record_p = 1;
1697 return 0;
1699 else
1701 /* We don't want to take the filename and line into account. */
1702 hash += (unsigned) code + (unsigned) GET_MODE (x)
1703 + hash_string_1 (ASM_OPERANDS_TEMPLATE (x))
1704 + hash_string_1 (ASM_OPERANDS_OUTPUT_CONSTRAINT (x))
1705 + (unsigned) ASM_OPERANDS_OUTPUT_IDX (x);
1707 if (ASM_OPERANDS_INPUT_LENGTH (x))
1709 for (i = 1; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
1711 hash += (hash_expr_1 (ASM_OPERANDS_INPUT (x, i),
1712 GET_MODE (ASM_OPERANDS_INPUT (x, i)),
1713 do_not_record_p)
1714 + hash_string_1 (ASM_OPERANDS_INPUT_CONSTRAINT
1715 (x, i)));
1718 hash += hash_string_1 (ASM_OPERANDS_INPUT_CONSTRAINT (x, 0));
1719 x = ASM_OPERANDS_INPUT (x, 0);
1720 mode = GET_MODE (x);
1721 goto repeat;
1723 return hash;
1726 default:
1727 break;
1730 hash += (unsigned) code + (unsigned) GET_MODE (x);
1731 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
1733 if (fmt[i] == 'e')
1735 /* If we are about to do the last recursive call
1736 needed at this level, change it into iteration.
1737 This function is called enough to be worth it. */
1738 if (i == 0)
1740 x = XEXP (x, i);
1741 goto repeat;
1744 hash += hash_expr_1 (XEXP (x, i), 0, do_not_record_p);
1745 if (*do_not_record_p)
1746 return 0;
1749 else if (fmt[i] == 'E')
1750 for (j = 0; j < XVECLEN (x, i); j++)
1752 hash += hash_expr_1 (XVECEXP (x, i, j), 0, do_not_record_p);
1753 if (*do_not_record_p)
1754 return 0;
1757 else if (fmt[i] == 's')
1758 hash += hash_string_1 (XSTR (x, i));
1759 else if (fmt[i] == 'i')
1760 hash += (unsigned int) XINT (x, i);
1761 else
1762 abort ();
1765 return hash;
1768 /* Hash a set of register REGNO.
1770 Sets are hashed on the register that is set. This simplifies the PRE copy
1771 propagation code.
1773 ??? May need to make things more elaborate. Later, as necessary. */
1775 static unsigned int
1776 hash_set (regno, hash_table_size)
1777 int regno;
1778 int hash_table_size;
1780 unsigned int hash;
1782 hash = regno;
1783 return hash % hash_table_size;
1786 /* Return nonzero if exp1 is equivalent to exp2.
1787 ??? Borrowed from cse.c. Might want to remerge with cse.c. Later. */
1789 static int
1790 expr_equiv_p (x, y)
1791 rtx x, y;
1793 int i, j;
1794 enum rtx_code code;
1795 const char *fmt;
1797 if (x == y)
1798 return 1;
1800 if (x == 0 || y == 0)
1801 return x == y;
1803 code = GET_CODE (x);
1804 if (code != GET_CODE (y))
1805 return 0;
1807 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
1808 if (GET_MODE (x) != GET_MODE (y))
1809 return 0;
1811 switch (code)
1813 case PC:
1814 case CC0:
1815 return x == y;
1817 case CONST_INT:
1818 return INTVAL (x) == INTVAL (y);
1820 case LABEL_REF:
1821 return XEXP (x, 0) == XEXP (y, 0);
1823 case SYMBOL_REF:
1824 return XSTR (x, 0) == XSTR (y, 0);
1826 case REG:
1827 return REGNO (x) == REGNO (y);
1829 case MEM:
1830 /* Can't merge two expressions in different alias sets, since we can
1831 decide that the expression is transparent in a block when it isn't,
1832 due to it being set with a different alias set. */
1833 if (MEM_ALIAS_SET (x) != MEM_ALIAS_SET (y))
1834 return 0;
1835 break;
1837 /* For commutative operations, check both orders. */
1838 case PLUS:
1839 case MULT:
1840 case AND:
1841 case IOR:
1842 case XOR:
1843 case NE:
1844 case EQ:
1845 return ((expr_equiv_p (XEXP (x, 0), XEXP (y, 0))
1846 && expr_equiv_p (XEXP (x, 1), XEXP (y, 1)))
1847 || (expr_equiv_p (XEXP (x, 0), XEXP (y, 1))
1848 && expr_equiv_p (XEXP (x, 1), XEXP (y, 0))));
1850 case ASM_OPERANDS:
1851 /* We don't use the generic code below because we want to
1852 disregard filename and line numbers. */
1854 /* A volatile asm isn't equivalent to any other. */
1855 if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
1856 return 0;
1858 if (GET_MODE (x) != GET_MODE (y)
1859 || strcmp (ASM_OPERANDS_TEMPLATE (x), ASM_OPERANDS_TEMPLATE (y))
1860 || strcmp (ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
1861 ASM_OPERANDS_OUTPUT_CONSTRAINT (y))
1862 || ASM_OPERANDS_OUTPUT_IDX (x) != ASM_OPERANDS_OUTPUT_IDX (y)
1863 || ASM_OPERANDS_INPUT_LENGTH (x) != ASM_OPERANDS_INPUT_LENGTH (y))
1864 return 0;
1866 if (ASM_OPERANDS_INPUT_LENGTH (x))
1868 for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
1869 if (! expr_equiv_p (ASM_OPERANDS_INPUT (x, i),
1870 ASM_OPERANDS_INPUT (y, i))
1871 || strcmp (ASM_OPERANDS_INPUT_CONSTRAINT (x, i),
1872 ASM_OPERANDS_INPUT_CONSTRAINT (y, i)))
1873 return 0;
1876 return 1;
1878 default:
1879 break;
1882 /* Compare the elements. If any pair of corresponding elements
1883 fail to match, return 0 for the whole thing. */
1885 fmt = GET_RTX_FORMAT (code);
1886 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1888 switch (fmt[i])
1890 case 'e':
1891 if (! expr_equiv_p (XEXP (x, i), XEXP (y, i)))
1892 return 0;
1893 break;
1895 case 'E':
1896 if (XVECLEN (x, i) != XVECLEN (y, i))
1897 return 0;
1898 for (j = 0; j < XVECLEN (x, i); j++)
1899 if (! expr_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j)))
1900 return 0;
1901 break;
1903 case 's':
1904 if (strcmp (XSTR (x, i), XSTR (y, i)))
1905 return 0;
1906 break;
1908 case 'i':
1909 if (XINT (x, i) != XINT (y, i))
1910 return 0;
1911 break;
1913 case 'w':
1914 if (XWINT (x, i) != XWINT (y, i))
1915 return 0;
1916 break;
1918 case '0':
1919 break;
1921 default:
1922 abort ();
1926 return 1;
1929 /* Insert expression X in INSN in the hash TABLE.
1930 If it is already present, record it as the last occurrence in INSN's
1931 basic block.
1933 MODE is the mode of the value X is being stored into.
1934 It is only used if X is a CONST_INT.
1936 ANTIC_P is nonzero if X is an anticipatable expression.
1937 AVAIL_P is nonzero if X is an available expression. */
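/* Note that within a single basic block at most one occurrence of each
   kind is kept per expression: the first occurrence in the block for the
   anticipatable list and the last occurrence in the block for the
   available list.  */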
1939 static void
1940 insert_expr_in_table (x, mode, insn, antic_p, avail_p, table)
1941 rtx x;
1942 enum machine_mode mode;
1943 rtx insn;
1944 int antic_p, avail_p;
1945 struct hash_table *table;
1947 int found, do_not_record_p;
1948 unsigned int hash;
1949 struct expr *cur_expr, *last_expr = NULL;
1950 struct occr *antic_occr, *avail_occr;
1951 struct occr *last_occr = NULL;
1953 hash = hash_expr (x, mode, &do_not_record_p, table->size);
1955 /* Do not insert expression in table if it contains volatile operands,
1956 or if hash_expr determines the expression is something we don't want
1957 to or can't handle. */
1958 if (do_not_record_p)
1959 return;
1961 cur_expr = table->table[hash];
1962 found = 0;
1964 while (cur_expr && 0 == (found = expr_equiv_p (cur_expr->expr, x)))
1966 /* If the expression isn't found, save a pointer to the end of
1967 the list. */
1968 last_expr = cur_expr;
1969 cur_expr = cur_expr->next_same_hash;
1972 if (! found)
1974 cur_expr = (struct expr *) gcse_alloc (sizeof (struct expr));
1975 bytes_used += sizeof (struct expr);
1976 if (table->table[hash] == NULL)
1977 /* This is the first pattern that hashed to this index. */
1978 table->table[hash] = cur_expr;
1979 else
1980 /* Add EXPR to end of this hash chain. */
1981 last_expr->next_same_hash = cur_expr;
1983 /* Set the fields of the expr element. */
1984 cur_expr->expr = x;
1985 cur_expr->bitmap_index = table->n_elems++;
1986 cur_expr->next_same_hash = NULL;
1987 cur_expr->antic_occr = NULL;
1988 cur_expr->avail_occr = NULL;
1991 /* Now record the occurrence(s). */
1992 if (antic_p)
1994 antic_occr = cur_expr->antic_occr;
1996 /* Search for another occurrence in the same basic block. */
1997 while (antic_occr && BLOCK_NUM (antic_occr->insn) != BLOCK_NUM (insn))
1999 /* If an occurrence isn't found, save a pointer to the end of
2000 the list. */
2001 last_occr = antic_occr;
2002 antic_occr = antic_occr->next;
2005 if (antic_occr)
2006 /* Found another instance of the expression in the same basic block.
2007 Prefer the currently recorded one. We want the first one in the
2008 block and the block is scanned from start to end. */
2009 ; /* nothing to do */
2010 else
2012 /* First occurrence of this expression in this basic block. */
2013 antic_occr = (struct occr *) gcse_alloc (sizeof (struct occr));
2014 bytes_used += sizeof (struct occr);
2015 /* First occurrence of this expression in any block? */
2016 if (cur_expr->antic_occr == NULL)
2017 cur_expr->antic_occr = antic_occr;
2018 else
2019 last_occr->next = antic_occr;
2021 antic_occr->insn = insn;
2022 antic_occr->next = NULL;
2026 if (avail_p)
2028 avail_occr = cur_expr->avail_occr;
2030 /* Search for another occurrence in the same basic block. */
2031 while (avail_occr && BLOCK_NUM (avail_occr->insn) != BLOCK_NUM (insn))
2033 /* If an occurrence isn't found, save a pointer to the end of
2034 the list. */
2035 last_occr = avail_occr;
2036 avail_occr = avail_occr->next;
2039 if (avail_occr)
2040 /* Found another instance of the expression in the same basic block.
2041 Prefer this occurrence to the currently recorded one. We want
2042 the last one in the block and the block is scanned from start
2043 to end. */
2044 avail_occr->insn = insn;
2045 else
2047 /* First occurrence of this expression in this basic block. */
2048 avail_occr = (struct occr *) gcse_alloc (sizeof (struct occr));
2049 bytes_used += sizeof (struct occr);
2051 /* First occurrence of this expression in any block? */
2052 if (cur_expr->avail_occr == NULL)
2053 cur_expr->avail_occr = avail_occr;
2054 else
2055 last_occr->next = avail_occr;
2057 avail_occr->insn = insn;
2058 avail_occr->next = NULL;
2063 /* Insert pattern X in INSN in the hash table.
2064 X is a SET of a reg to either another reg or a constant.
2065 If it is already present, record it as the last occurrence in INSN's
2066 basic block. */
2068 static void
2069 insert_set_in_table (x, insn, table)
2070 rtx x;
2071 rtx insn;
2072 struct hash_table *table;
2074 int found;
2075 unsigned int hash;
2076 struct expr *cur_expr, *last_expr = NULL;
2077 struct occr *cur_occr, *last_occr = NULL;
2079 if (GET_CODE (x) != SET
2080 || GET_CODE (SET_DEST (x)) != REG)
2081 abort ();
2083 hash = hash_set (REGNO (SET_DEST (x)), table->size);
2085 cur_expr = table->table[hash];
2086 found = 0;
2088 while (cur_expr && 0 == (found = expr_equiv_p (cur_expr->expr, x)))
2090 /* If the expression isn't found, save a pointer to the end of
2091 the list. */
2092 last_expr = cur_expr;
2093 cur_expr = cur_expr->next_same_hash;
2096 if (! found)
2098 cur_expr = (struct expr *) gcse_alloc (sizeof (struct expr));
2099 bytes_used += sizeof (struct expr);
2100 if (table->table[hash] == NULL)
2101 /* This is the first pattern that hashed to this index. */
2102 table->table[hash] = cur_expr;
2103 else
2104 /* Add EXPR to end of this hash chain. */
2105 last_expr->next_same_hash = cur_expr;
2107 /* Set the fields of the expr element.
2108 We must copy X because it can be modified when copy propagation is
2109 performed on its operands. */
2110 cur_expr->expr = copy_rtx (x);
2111 cur_expr->bitmap_index = table->n_elems++;
2112 cur_expr->next_same_hash = NULL;
2113 cur_expr->antic_occr = NULL;
2114 cur_expr->avail_occr = NULL;
2117 /* Now record the occurrence. */
2118 cur_occr = cur_expr->avail_occr;
2120 /* Search for another occurrence in the same basic block. */
2121 while (cur_occr && BLOCK_NUM (cur_occr->insn) != BLOCK_NUM (insn))
2123 /* If an occurrence isn't found, save a pointer to the end of
2124 the list. */
2125 last_occr = cur_occr;
2126 cur_occr = cur_occr->next;
2129 if (cur_occr)
2130 /* Found another instance of the expression in the same basic block.
2131 Prefer this occurrence to the currently recorded one. We want the
2132 last one in the block and the block is scanned from start to end. */
2133 cur_occr->insn = insn;
2134 else
2136 /* First occurrence of this expression in this basic block. */
2137 cur_occr = (struct occr *) gcse_alloc (sizeof (struct occr));
2138 bytes_used += sizeof (struct occr);
2140 /* First occurrence of this expression in any block? */
2141 if (cur_expr->avail_occr == NULL)
2142 cur_expr->avail_occr = cur_occr;
2143 else
2144 last_occr->next = cur_occr;
2146 cur_occr->insn = insn;
2147 cur_occr->next = NULL;
2151 /* Scan pattern PAT of INSN and add an entry to the hash TABLE (set or
2152 expression one). */
2154 static void
2155 hash_scan_set (pat, insn, table)
2156 rtx pat, insn;
2157 struct hash_table *table;
2159 rtx src = SET_SRC (pat);
2160 rtx dest = SET_DEST (pat);
2161 rtx note;
2163 if (GET_CODE (src) == CALL)
2164 hash_scan_call (src, insn, table);
2166 else if (GET_CODE (dest) == REG)
2168 unsigned int regno = REGNO (dest);
2169 rtx tmp;
2171 /* If this is a single set and we are doing constant propagation,
2172 see if a REG_NOTE shows this equivalent to a constant. */
2173 if (table->set_p && (note = find_reg_equal_equiv_note (insn)) != 0
2174 && CONSTANT_P (XEXP (note, 0)))
2175 src = XEXP (note, 0), pat = gen_rtx_SET (VOIDmode, dest, src);
2177 /* Only record sets of pseudo-regs in the hash table. */
2178 if (! table->set_p
2179 && regno >= FIRST_PSEUDO_REGISTER
2180 /* Don't GCSE something if we can't do a reg/reg copy. */
2181 && can_copy_p [GET_MODE (dest)]
2182 /* GCSE commonly inserts instructions after the insn. We can't
2183 do that easily for EH_REGION notes so disable GCSE on these
2184 for now. */
2185 && !find_reg_note (insn, REG_EH_REGION, NULL_RTX)
2186 /* Is SET_SRC something we want to gcse? */
2187 && want_to_gcse_p (src)
2188 /* Don't CSE a nop. */
2189 && ! set_noop_p (pat)
2190 /* Don't GCSE if it has attached REG_EQUIV note.
2191 At this point, only function parameters should have
2192 REG_EQUIV notes, and if the argument slot is used somewhere
2193 explicitly, it means the address of the parameter has been taken,
2194 so we should not extend the lifetime of the pseudo. */
2195 && ((note = find_reg_note (insn, REG_EQUIV, NULL_RTX)) == 0
2196 || GET_CODE (XEXP (note, 0)) != MEM))
2198 /* An expression is not anticipatable if its operands are
2199 modified before this insn or if this is not the only SET in
2200 this insn. */
2201 int antic_p = oprs_anticipatable_p (src, insn) && single_set (insn);
2202 /* An expression is not available if its operands are
2203 subsequently modified, including this insn. It's also not
2204 available if this is a branch, because we can't insert
2205 a set after the branch. */
2206 int avail_p = (oprs_available_p (src, insn)
2207 && ! JUMP_P (insn));
2209 insert_expr_in_table (src, GET_MODE (dest), insn, antic_p, avail_p, table);
2212 /* Record sets for constant/copy propagation. */
2213 else if (table->set_p
2214 && regno >= FIRST_PSEUDO_REGISTER
2215 && ((GET_CODE (src) == REG
2216 && REGNO (src) >= FIRST_PSEUDO_REGISTER
2217 && can_copy_p [GET_MODE (dest)]
2218 && REGNO (src) != regno)
2219 || CONSTANT_P (src))
2220 /* A copy is not available if its src or dest is subsequently
2221 modified. Here we want to search from INSN+1 on, but
2222 oprs_available_p searches from INSN on. */
2223 && (insn == BLOCK_END (BLOCK_NUM (insn))
2224 || ((tmp = next_nonnote_insn (insn)) != NULL_RTX
2225 && oprs_available_p (pat, tmp))))
2226 insert_set_in_table (pat, insn, table);
2230 static void
2231 hash_scan_clobber (x, insn, table)
2232 rtx x ATTRIBUTE_UNUSED, insn ATTRIBUTE_UNUSED;
2233 struct hash_table *table ATTRIBUTE_UNUSED;
2235 /* Currently nothing to do. */
2238 static void
2239 hash_scan_call (x, insn, table)
2240 rtx x ATTRIBUTE_UNUSED, insn ATTRIBUTE_UNUSED;
2241 struct hash_table *table ATTRIBUTE_UNUSED;
2243 /* Currently nothing to do. */
2246 /* Process INSN and add hash table entries as appropriate.
2248 Only available expressions that set a single pseudo-reg are recorded.
2250 Single sets in a PARALLEL could be handled, but it's an extra complication
2251 that isn't dealt with right now. The trick is handling the CLOBBERs that
2252 are also in the PARALLEL. Later.
2254 If SET_P is nonzero, this is for the assignment hash table,
2255 otherwise it is for the expression hash table.
2256 If IN_LIBCALL_BLOCK is nonzero, we are in a libcall block, and should
2257 not record any expressions. */
2259 static void
2260 hash_scan_insn (insn, table, in_libcall_block)
2261 rtx insn;
2262 struct hash_table *table;
2263 int in_libcall_block;
2265 rtx pat = PATTERN (insn);
2266 int i;
2268 if (in_libcall_block)
2269 return;
2271 /* Pick out the sets of INSN and for other forms of instructions record
2272 what's been modified. */
2274 if (GET_CODE (pat) == SET)
2275 hash_scan_set (pat, insn, table);
2276 else if (GET_CODE (pat) == PARALLEL)
2277 for (i = 0; i < XVECLEN (pat, 0); i++)
2279 rtx x = XVECEXP (pat, 0, i);
2281 if (GET_CODE (x) == SET)
2282 hash_scan_set (x, insn, table);
2283 else if (GET_CODE (x) == CLOBBER)
2284 hash_scan_clobber (x, insn, table);
2285 else if (GET_CODE (x) == CALL)
2286 hash_scan_call (x, insn, table);
2289 else if (GET_CODE (pat) == CLOBBER)
2290 hash_scan_clobber (pat, insn, table);
2291 else if (GET_CODE (pat) == CALL)
2292 hash_scan_call (pat, insn, table);
2295 static void
2296 dump_hash_table (file, name, table)
2297 FILE *file;
2298 const char *name;
2299 struct hash_table *table;
2301 int i;
2302 /* Flattened out table, so it's printed in proper order. */
2303 struct expr **flat_table;
2304 unsigned int *hash_val;
2305 struct expr *expr;
2307 flat_table
2308 = (struct expr **) xcalloc (table->n_elems, sizeof (struct expr *));
2309 hash_val = (unsigned int *) xmalloc (table->n_elems * sizeof (unsigned int));
2311 for (i = 0; i < (int) table->size; i++)
2312 for (expr = table->table[i]; expr != NULL; expr = expr->next_same_hash)
2314 flat_table[expr->bitmap_index] = expr;
2315 hash_val[expr->bitmap_index] = i;
2318 fprintf (file, "%s hash table (%d buckets, %d entries)\n",
2319 name, table->size, table->n_elems);
2321 for (i = 0; i < (int) table->n_elems; i++)
2322 if (flat_table[i] != 0)
2324 expr = flat_table[i];
2325 fprintf (file, "Index %d (hash value %d)\n ",
2326 expr->bitmap_index, hash_val[i]);
2327 print_rtl (file, expr->expr);
2328 fprintf (file, "\n");
2331 fprintf (file, "\n");
2333 free (flat_table);
2334 free (hash_val);
2337 /* Record register first/last/block set information for REGNO in INSN.
2339 first_set records the first place in the block where the register
2340 is set and is used to compute "anticipatability".
2342 last_set records the last place in the block where the register
2343 is set and is used to compute "availability".
2345 last_bb records the block for which first_set and last_set are
2346 valid, as a quick test to invalidate them.
2348 reg_set_in_block records whether the register is set in the block
2349 and is used to compute "transparency". */
2351 static void
2352 record_last_reg_set_info (insn, regno)
2353 rtx insn;
2354 int regno;
2356 struct reg_avail_info *info = &reg_avail_info[regno];
2357 int cuid = INSN_CUID (insn);
2359 info->last_set = cuid;
2360 if (info->last_bb != current_bb)
2362 info->last_bb = current_bb;
2363 info->first_set = cuid;
2364 SET_BIT (reg_set_in_block[current_bb->index], regno);
2369 /* Record all of the canonicalized MEMs of record_last_mem_set_info's insn.
2370 Note we store a pair of elements in the list, so they have to be
2371 taken off pairwise. */
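/* Each recorded store thus contributes two consecutive nodes to
   canon_modify_mem_list[bb]: the MEM itself followed by its canonicalized
   address (the address is pushed first, so the MEM ends up in front).  */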
2373 static void
2374 canon_list_insert (dest, unused1, v_insn)
2375 rtx dest ATTRIBUTE_UNUSED;
2376 rtx unused1 ATTRIBUTE_UNUSED;
2377 void * v_insn;
2379 rtx dest_addr, insn;
2380 int bb;
2382 while (GET_CODE (dest) == SUBREG
2383 || GET_CODE (dest) == ZERO_EXTRACT
2384 || GET_CODE (dest) == SIGN_EXTRACT
2385 || GET_CODE (dest) == STRICT_LOW_PART)
2386 dest = XEXP (dest, 0);
2388 /* If DEST is not a MEM, then it will not conflict with a load. Note
2389 that function calls are assumed to clobber memory, but are handled
2390 elsewhere. */
2392 if (GET_CODE (dest) != MEM)
2393 return;
2395 dest_addr = get_addr (XEXP (dest, 0));
2396 dest_addr = canon_rtx (dest_addr);
2397 insn = (rtx) v_insn;
2398 bb = BLOCK_NUM (insn);
2400 canon_modify_mem_list[bb] =
2401 alloc_EXPR_LIST (VOIDmode, dest_addr, canon_modify_mem_list[bb]);
2402 canon_modify_mem_list[bb] =
2403 alloc_EXPR_LIST (VOIDmode, dest, canon_modify_mem_list[bb]);
2404 bitmap_set_bit (canon_modify_mem_list_set, bb);
2407 /* Record memory modification information for INSN. We do not actually care
2408 about the memory location(s) that are set, or even how they are set (consider
2409 a CALL_INSN). We merely need to record which insns modify memory. */
2411 static void
2412 record_last_mem_set_info (insn)
2413 rtx insn;
2415 int bb = BLOCK_NUM (insn);
2417 /* load_killed_in_block_p will handle the case of calls clobbering
2418 everything. */
2419 modify_mem_list[bb] = alloc_INSN_LIST (insn, modify_mem_list[bb]);
2420 bitmap_set_bit (modify_mem_list_set, bb);
2422 if (GET_CODE (insn) == CALL_INSN)
2424 /* Note that traversals of this list (other than for freeing)
2425 will break after encountering a CALL_INSN. So, there's no
2426 need to insert a pair of items, as canon_list_insert does. */
2427 canon_modify_mem_list[bb] =
2428 alloc_INSN_LIST (insn, canon_modify_mem_list[bb]);
2429 bitmap_set_bit (canon_modify_mem_list_set, bb);
2431 else
2432 note_stores (PATTERN (insn), canon_list_insert, (void*) insn);
2435 /* Called from compute_hash_table via note_stores to handle one
2436 SET or CLOBBER in an insn. DATA is really the instruction in which
2437 the SET is taking place. */
2439 static void
2440 record_last_set_info (dest, setter, data)
2441 rtx dest, setter ATTRIBUTE_UNUSED;
2442 void *data;
2444 rtx last_set_insn = (rtx) data;
2446 if (GET_CODE (dest) == SUBREG)
2447 dest = SUBREG_REG (dest);
2449 if (GET_CODE (dest) == REG)
2450 record_last_reg_set_info (last_set_insn, REGNO (dest));
2451 else if (GET_CODE (dest) == MEM
2452 /* Ignore pushes, they clobber nothing. */
2453 && ! push_operand (dest, GET_MODE (dest)))
2454 record_last_mem_set_info (last_set_insn);
2457 /* Top level function to create an expression or assignment hash table.
2459 Expression entries are placed in the hash table if
2460 - they are of the form (set (pseudo-reg) src),
2461 - src is something we want to perform GCSE on,
2462 - none of the operands are subsequently modified in the block
2464 Assignment entries are placed in the hash table if
2465 - they are of the form (set (pseudo-reg) src),
2466 - src is something we want to perform const/copy propagation on,
2467 - none of the operands or target are subsequently modified in the block
2469 Currently src must be a pseudo-reg or a const_int.
2471 F is the first insn.
2472 TABLE is the table computed. */
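/* The work is done in two passes over each block: the first pass records
   where each register and memory is first and last set; the second pass
   scans the insns again and enters expressions or assignments into TABLE
   using that information.  */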
2474 static void
2475 compute_hash_table_work (table)
2476 struct hash_table *table;
2478 unsigned int i;
2480 /* While we compute the hash table we also compute a bit array of which
2481 registers are set in which blocks.
2482 ??? This isn't needed during const/copy propagation, but it's cheap to
2483 compute. Later. */
2484 sbitmap_vector_zero (reg_set_in_block, last_basic_block);
2486 /* Re-cache any INSN_LIST nodes we have allocated. */
2487 clear_modify_mem_tables ();
2488 /* Some working arrays used to track first and last set in each block. */
2489 reg_avail_info = (struct reg_avail_info*)
2490 gmalloc (max_gcse_regno * sizeof (struct reg_avail_info));
2492 for (i = 0; i < max_gcse_regno; ++i)
2493 reg_avail_info[i].last_bb = NULL;
2495 FOR_EACH_BB (current_bb)
2497 rtx insn;
2498 unsigned int regno;
2499 int in_libcall_block;
2501 /* First pass over the instructions records information used to
2502 determine when registers and memory are first and last set.
2503 ??? hard-reg reg_set_in_block computation
2504 could be moved to compute_sets since they currently don't change. */
2506 for (insn = current_bb->head;
2507 insn && insn != NEXT_INSN (current_bb->end);
2508 insn = NEXT_INSN (insn))
2510 if (! INSN_P (insn))
2511 continue;
2513 if (GET_CODE (insn) == CALL_INSN)
2515 bool clobbers_all = false;
2516 #ifdef NON_SAVING_SETJMP
2517 if (NON_SAVING_SETJMP
2518 && find_reg_note (insn, REG_SETJMP, NULL_RTX))
2519 clobbers_all = true;
2520 #endif
2522 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2523 if (clobbers_all
2524 || TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
2525 record_last_reg_set_info (insn, regno);
2527 mark_call (insn);
2530 note_stores (PATTERN (insn), record_last_set_info, insn);
2533 /* The next pass builds the hash table. */
2535 for (insn = current_bb->head, in_libcall_block = 0;
2536 insn && insn != NEXT_INSN (current_bb->end);
2537 insn = NEXT_INSN (insn))
2538 if (INSN_P (insn))
2540 if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
2541 in_libcall_block = 1;
2542 else if (table->set_p && find_reg_note (insn, REG_RETVAL, NULL_RTX))
2543 in_libcall_block = 0;
2544 hash_scan_insn (insn, table, in_libcall_block);
2545 if (!table->set_p && find_reg_note (insn, REG_RETVAL, NULL_RTX))
2546 in_libcall_block = 0;
2550 free (reg_avail_info);
2551 reg_avail_info = NULL;
2554 /* Allocate space for the set/expr hash TABLE.
2555 N_INSNS is the number of instructions in the function.
2556 It is used to determine the number of buckets to use.
2557 SET_P determines whether a set or an expression table will
2558 be created. */
2560 static void
2561 alloc_hash_table (n_insns, table, set_p)
2562 int n_insns;
2563 struct hash_table *table;
2564 int set_p;
2566 int n;
2568 table->size = n_insns / 4;
2569 if (table->size < 11)
2570 table->size = 11;
2572 /* Attempt to maintain efficient use of hash table.
2573 Making it an odd number is simplest for now.
2574 ??? Later take some measurements. */
2575 table->size |= 1;
2576 n = table->size * sizeof (struct expr *);
2577 table->table = (struct expr **) gmalloc (n);
2578 table->set_p = set_p;
2581 /* Free things allocated by alloc_hash_table. */
2583 static void
2584 free_hash_table (table)
2585 struct hash_table *table;
2587 free (table->table);
2590 /* Compute the hash TABLE for doing copy/const propagation or
2591 expression hashing. */
2593 static void
2594 compute_hash_table (table)
2595 struct hash_table *table;
2597 /* Initialize count of number of entries in hash table. */
2598 table->n_elems = 0;
2599 memset ((char *) table->table, 0,
2600 table->size * sizeof (struct expr *));
2602 compute_hash_table_work (table);
2605 /* Expression tracking support. */
2607 /* Lookup pattern PAT in the expression TABLE.
2608 The result is a pointer to the table entry, or NULL if not found. */
2610 static struct expr *
2611 lookup_expr (pat, table)
2612 rtx pat;
2613 struct hash_table *table;
2615 int do_not_record_p;
2616 unsigned int hash = hash_expr (pat, GET_MODE (pat), &do_not_record_p,
2617 table->size);
2618 struct expr *expr;
2620 if (do_not_record_p)
2621 return NULL;
2623 expr = table->table[hash];
2625 while (expr && ! expr_equiv_p (expr->expr, pat))
2626 expr = expr->next_same_hash;
2628 return expr;
2631 /* Lookup REGNO in the set TABLE. If PAT is non-NULL look for the entry that
2632 matches it, otherwise return the first entry for REGNO. The result is a
2633 pointer to the table entry, or NULL if not found. */
2635 static struct expr *
2636 lookup_set (regno, pat, table)
2637 unsigned int regno;
2638 rtx pat;
2639 struct hash_table *table;
2641 unsigned int hash = hash_set (regno, table->size);
2642 struct expr *expr;
2644 expr = table->table[hash];
2646 if (pat)
2648 while (expr && ! expr_equiv_p (expr->expr, pat))
2649 expr = expr->next_same_hash;
2651 else
2653 while (expr && REGNO (SET_DEST (expr->expr)) != regno)
2654 expr = expr->next_same_hash;
2657 return expr;
2660 /* Return the next entry for REGNO in list EXPR. */
2662 static struct expr *
2663 next_set (regno, expr)
2664 unsigned int regno;
2665 struct expr *expr;
2668 expr = expr->next_same_hash;
2669 while (expr && REGNO (SET_DEST (expr->expr)) != regno);
2671 return expr;
2674 /* Like free_INSN_LIST_list or free_EXPR_LIST_list, except that the node
2675 types may be mixed. */
2677 static void
2678 free_insn_expr_list_list (listp)
2679 rtx *listp;
2681 rtx list, next;
2683 for (list = *listp; list ; list = next)
2685 next = XEXP (list, 1);
2686 if (GET_CODE (list) == EXPR_LIST)
2687 free_EXPR_LIST_node (list);
2688 else
2689 free_INSN_LIST_node (list);
2692 *listp = NULL;
2695 /* Clear canon_modify_mem_list and modify_mem_list tables. */
2696 static void
2697 clear_modify_mem_tables ()
2699 int i;
2701 EXECUTE_IF_SET_IN_BITMAP
2702 (modify_mem_list_set, 0, i, free_INSN_LIST_list (modify_mem_list + i));
2703 bitmap_clear (modify_mem_list_set);
2705 EXECUTE_IF_SET_IN_BITMAP
2706 (canon_modify_mem_list_set, 0, i,
2707 free_insn_expr_list_list (canon_modify_mem_list + i));
2708 bitmap_clear (canon_modify_mem_list_set);
2711 /* Release memory used by modify_mem_list and canon_modify_mem_list. */
2713 static void
2714 free_modify_mem_tables ()
2716 clear_modify_mem_tables ();
2717 free (modify_mem_list);
2718 free (canon_modify_mem_list);
2719 modify_mem_list = 0;
2720 canon_modify_mem_list = 0;
2723 /* Reset tables used to keep track of what's still available [since the
2724 start of the block]. */
2726 static void
2727 reset_opr_set_tables ()
2729 /* Maintain a bitmap of which regs have been set since beginning of
2730 the block. */
2731 CLEAR_REG_SET (reg_set_bitmap);
2733 /* Also keep a record of the last instruction to modify memory.
2734 For now this is very trivial; we only record whether any memory
2735 location has been modified. */
2736 clear_modify_mem_tables ();
2739 /* Return nonzero if the operands of X are not set before INSN in
2740 INSN's basic block. */
2742 static int
2743 oprs_not_set_p (x, insn)
2744 rtx x, insn;
2746 int i, j;
2747 enum rtx_code code;
2748 const char *fmt;
2750 if (x == 0)
2751 return 1;
2753 code = GET_CODE (x);
2754 switch (code)
2756 case PC:
2757 case CC0:
2758 case CONST:
2759 case CONST_INT:
2760 case CONST_DOUBLE:
2761 case CONST_VECTOR:
2762 case SYMBOL_REF:
2763 case LABEL_REF:
2764 case ADDR_VEC:
2765 case ADDR_DIFF_VEC:
2766 return 1;
2768 case MEM:
2769 if (load_killed_in_block_p (BLOCK_FOR_INSN (insn),
2770 INSN_CUID (insn), x, 0))
2771 return 0;
2772 else
2773 return oprs_not_set_p (XEXP (x, 0), insn);
2775 case REG:
2776 return ! REGNO_REG_SET_P (reg_set_bitmap, REGNO (x));
2778 default:
2779 break;
2782 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
2784 if (fmt[i] == 'e')
2786 /* If we are about to do the last recursive call
2787 needed at this level, change it into iteration.
2788 This function is called enough to be worth it. */
2789 if (i == 0)
2790 return oprs_not_set_p (XEXP (x, i), insn);
2792 if (! oprs_not_set_p (XEXP (x, i), insn))
2793 return 0;
2795 else if (fmt[i] == 'E')
2796 for (j = 0; j < XVECLEN (x, i); j++)
2797 if (! oprs_not_set_p (XVECEXP (x, i, j), insn))
2798 return 0;
2801 return 1;
2804 /* Mark things set by a CALL. */
2806 static void
2807 mark_call (insn)
2808 rtx insn;
2810 if (! CONST_OR_PURE_CALL_P (insn))
2811 record_last_mem_set_info (insn);
2814 /* Mark things set by a SET. */
2816 static void
2817 mark_set (pat, insn)
2818 rtx pat, insn;
2820 rtx dest = SET_DEST (pat);
2822 while (GET_CODE (dest) == SUBREG
2823 || GET_CODE (dest) == ZERO_EXTRACT
2824 || GET_CODE (dest) == SIGN_EXTRACT
2825 || GET_CODE (dest) == STRICT_LOW_PART)
2826 dest = XEXP (dest, 0);
2828 if (GET_CODE (dest) == REG)
2829 SET_REGNO_REG_SET (reg_set_bitmap, REGNO (dest));
2830 else if (GET_CODE (dest) == MEM)
2831 record_last_mem_set_info (insn);
2833 if (GET_CODE (SET_SRC (pat)) == CALL)
2834 mark_call (insn);
2837 /* Record things set by a CLOBBER. */
2839 static void
2840 mark_clobber (pat, insn)
2841 rtx pat, insn;
2843 rtx clob = XEXP (pat, 0);
2845 while (GET_CODE (clob) == SUBREG || GET_CODE (clob) == STRICT_LOW_PART)
2846 clob = XEXP (clob, 0);
2848 if (GET_CODE (clob) == REG)
2849 SET_REGNO_REG_SET (reg_set_bitmap, REGNO (clob));
2850 else
2851 record_last_mem_set_info (insn);
2854 /* Record things set by INSN.
2855 This data is used by oprs_not_set_p. */
2857 static void
2858 mark_oprs_set (insn)
2859 rtx insn;
2861 rtx pat = PATTERN (insn);
2862 int i;
2864 if (GET_CODE (pat) == SET)
2865 mark_set (pat, insn);
2866 else if (GET_CODE (pat) == PARALLEL)
2867 for (i = 0; i < XVECLEN (pat, 0); i++)
2869 rtx x = XVECEXP (pat, 0, i);
2871 if (GET_CODE (x) == SET)
2872 mark_set (x, insn);
2873 else if (GET_CODE (x) == CLOBBER)
2874 mark_clobber (x, insn);
2875 else if (GET_CODE (x) == CALL)
2876 mark_call (insn);
2879 else if (GET_CODE (pat) == CLOBBER)
2880 mark_clobber (pat, insn);
2881 else if (GET_CODE (pat) == CALL)
2882 mark_call (insn);
2886 /* Classic GCSE reaching definition support. */
2888 /* Allocate reaching def variables. */
2890 static void
2891 alloc_rd_mem (n_blocks, n_insns)
2892 int n_blocks, n_insns;
2894 rd_kill = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_insns);
2895 sbitmap_vector_zero (rd_kill, n_blocks);
2897 rd_gen = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_insns);
2898 sbitmap_vector_zero (rd_gen, n_blocks);
2900 reaching_defs = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_insns);
2901 sbitmap_vector_zero (reaching_defs, n_blocks);
2903 rd_out = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_insns);
2904 sbitmap_vector_zero (rd_out, n_blocks);
2907 /* Free reaching def variables. */
2909 static void
2910 free_rd_mem ()
2912 sbitmap_vector_free (rd_kill);
2913 sbitmap_vector_free (rd_gen);
2914 sbitmap_vector_free (reaching_defs);
2915 sbitmap_vector_free (rd_out);
2918 /* Add INSN to the kills of BB. REGNO, set in BB, is killed by INSN. */
2920 static void
2921 handle_rd_kill_set (insn, regno, bb)
2922 rtx insn;
2923 int regno;
2924 basic_block bb;
2926 struct reg_set *this_reg;
2928 for (this_reg = reg_set_table[regno]; this_reg; this_reg = this_reg ->next)
2929 if (BLOCK_NUM (this_reg->insn) != BLOCK_NUM (insn))
2930 SET_BIT (rd_kill[bb->index], INSN_CUID (this_reg->insn));
2933 /* Compute the set of kill's for reaching definitions. */
2935 static void
2936 compute_kill_rd ()
2938 int cuid;
2939 unsigned int regno;
2940 int i;
2941 basic_block bb;
2943 /* For each block
2944 For each set bit in `gen' of the block (i.e. each insn which
2945 generates a definition in the block)
2946 Call the reg set by the insn corresponding to that bit regx
2947 Look at the linked list starting at reg_set_table[regx]
2948 For each setting of regx in the linked list, which is not in
2949 this block
2950 Set the bit in `kill' corresponding to that insn. */
2951 FOR_EACH_BB (bb)
2952 for (cuid = 0; cuid < max_cuid; cuid++)
2953 if (TEST_BIT (rd_gen[bb->index], cuid))
2955 rtx insn = CUID_INSN (cuid);
2956 rtx pat = PATTERN (insn);
2958 if (GET_CODE (insn) == CALL_INSN)
2960 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2961 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
2962 handle_rd_kill_set (insn, regno, bb);
2965 if (GET_CODE (pat) == PARALLEL)
2967 for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
2969 enum rtx_code code = GET_CODE (XVECEXP (pat, 0, i));
2971 if ((code == SET || code == CLOBBER)
2972 && GET_CODE (XEXP (XVECEXP (pat, 0, i), 0)) == REG)
2973 handle_rd_kill_set (insn,
2974 REGNO (XEXP (XVECEXP (pat, 0, i), 0)),
2975 bb);
2978 else if (GET_CODE (pat) == SET && GET_CODE (SET_DEST (pat)) == REG)
2979 /* Each setting of this register outside of this block
2980 must be marked in the set of kills in this block. */
2981 handle_rd_kill_set (insn, REGNO (SET_DEST (pat)), bb);
2985 /* Compute the reaching definitions as in
2986 Compilers Principles, Techniques, and Tools. Aho, Sethi, Ullman,
2987 Chapter 10. It is the same algorithm as used for computing available
2988 expressions but applied to the gens and kills of reaching definitions. */
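/* In dataflow terms, the iteration below solves, for each block B,

     in[B]  = union of out[P] over all predecessors P of B
     out[B] = gen[B] union (in[B] - kill[B])

   repeating until no out[] set changes.  */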
2990 static void
2991 compute_rd ()
2993 int changed, passes;
2994 basic_block bb;
2996 FOR_EACH_BB (bb)
2997 sbitmap_copy (rd_out[bb->index] /*dst*/, rd_gen[bb->index] /*src*/);
2999 passes = 0;
3000 changed = 1;
3001 while (changed)
3003 changed = 0;
3004 FOR_EACH_BB (bb)
3006 sbitmap_union_of_preds (reaching_defs[bb->index], rd_out, bb->index);
3007 changed |= sbitmap_union_of_diff_cg (rd_out[bb->index], rd_gen[bb->index],
3008 reaching_defs[bb->index], rd_kill[bb->index]);
3010 passes++;
3013 if (gcse_file)
3014 fprintf (gcse_file, "reaching def computation: %d passes\n", passes);
3017 /* Classic GCSE available expression support. */
3019 /* Allocate memory for available expression computation. */
3021 static void
3022 alloc_avail_expr_mem (n_blocks, n_exprs)
3023 int n_blocks, n_exprs;
3025 ae_kill = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_exprs);
3026 sbitmap_vector_zero (ae_kill, n_blocks);
3028 ae_gen = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_exprs);
3029 sbitmap_vector_zero (ae_gen, n_blocks);
3031 ae_in = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_exprs);
3032 sbitmap_vector_zero (ae_in, n_blocks);
3034 ae_out = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_exprs);
3035 sbitmap_vector_zero (ae_out, n_blocks);
3038 static void
3039 free_avail_expr_mem ()
3041 sbitmap_vector_free (ae_kill);
3042 sbitmap_vector_free (ae_gen);
3043 sbitmap_vector_free (ae_in);
3044 sbitmap_vector_free (ae_out);
3047 /* Compute the set of available expressions generated in each basic block. */
3049 static void
3050 compute_ae_gen (expr_hash_table)
3051 struct hash_table *expr_hash_table;
3053 unsigned int i;
3054 struct expr *expr;
3055 struct occr *occr;
3057 /* For each recorded occurrence of each expression, set ae_gen[bb][expr].
3058 This is all we have to do because an expression is not recorded if it
3059 is not available, and the only expressions we want to work with are the
3060 ones that are recorded. */
3061 for (i = 0; i < expr_hash_table->size; i++)
3062 for (expr = expr_hash_table->table[i]; expr != 0; expr = expr->next_same_hash)
3063 for (occr = expr->avail_occr; occr != 0; occr = occr->next)
3064 SET_BIT (ae_gen[BLOCK_NUM (occr->insn)], expr->bitmap_index);
3067 /* Return nonzero if expression X is killed in BB. */
3069 static int
3070 expr_killed_p (x, bb)
3071 rtx x;
3072 basic_block bb;
3074 int i, j;
3075 enum rtx_code code;
3076 const char *fmt;
3078 if (x == 0)
3079 return 1;
3081 code = GET_CODE (x);
3082 switch (code)
3084 case REG:
3085 return TEST_BIT (reg_set_in_block[bb->index], REGNO (x));
3087 case MEM:
3088 if (load_killed_in_block_p (bb, get_max_uid () + 1, x, 0))
3089 return 1;
3090 else
3091 return expr_killed_p (XEXP (x, 0), bb);
3093 case PC:
3094 case CC0: /*FIXME*/
3095 case CONST:
3096 case CONST_INT:
3097 case CONST_DOUBLE:
3098 case CONST_VECTOR:
3099 case SYMBOL_REF:
3100 case LABEL_REF:
3101 case ADDR_VEC:
3102 case ADDR_DIFF_VEC:
3103 return 0;
3105 default:
3106 break;
3109 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
3111 if (fmt[i] == 'e')
3113 /* If we are about to do the last recursive call
3114 needed at this level, change it into iteration.
3115 This function is called enough to be worth it. */
3116 if (i == 0)
3117 return expr_killed_p (XEXP (x, i), bb);
3118 else if (expr_killed_p (XEXP (x, i), bb))
3119 return 1;
3121 else if (fmt[i] == 'E')
3122 for (j = 0; j < XVECLEN (x, i); j++)
3123 if (expr_killed_p (XVECEXP (x, i, j), bb))
3124 return 1;
3127 return 0;
3130 /* Compute the set of available expressions killed in each basic block. */
3132 static void
3133 compute_ae_kill (ae_gen, ae_kill, expr_hash_table)
3134 sbitmap *ae_gen, *ae_kill;
3135 struct hash_table *expr_hash_table;
3137 basic_block bb;
3138 unsigned int i;
3139 struct expr *expr;
3141 FOR_EACH_BB (bb)
3142 for (i = 0; i < expr_hash_table->size; i++)
3143 for (expr = expr_hash_table->table[i]; expr; expr = expr->next_same_hash)
3145 /* Skip EXPR if generated in this block. */
3146 if (TEST_BIT (ae_gen[bb->index], expr->bitmap_index))
3147 continue;
3149 if (expr_killed_p (expr->expr, bb))
3150 SET_BIT (ae_kill[bb->index], expr->bitmap_index);
3154 /* Actually perform the Classic GCSE optimizations. */
3156 /* Return nonzero if occurrence OCCR of expression EXPR reaches block BB.
3158 CHECK_SELF_LOOP is nonzero if we should consider a block reaching itself
3159 as a positive reach. We want to do this when there are two computations
3160 of the expression in the block.
3162 VISITED is a pointer to a working buffer for tracking which BB's have
3163 been visited. It is NULL for the top-level call.
3165 We treat reaching expressions that go through blocks containing the same
3166 reaching expression as "not reaching". E.g. if EXPR is generated in blocks
3167 2 and 3, INSN is in block 4, and 2->3->4, we treat the expression in block
3168 2 as not reaching. The intent is to improve the probability of finding
3169 only one reaching expression and to reduce register lifetimes by picking
3170 the closest such expression. */
3172 static int
3173 expr_reaches_here_p_work (occr, expr, bb, check_self_loop, visited)
3174 struct occr *occr;
3175 struct expr *expr;
3176 basic_block bb;
3177 int check_self_loop;
3178 char *visited;
3180 edge pred;
3182 for (pred = bb->pred; pred != NULL; pred = pred->pred_next)
3184 basic_block pred_bb = pred->src;
3186 if (visited[pred_bb->index])
3187 /* This predecessor has already been visited. Nothing to do. */
3189 else if (pred_bb == bb)
3191 /* BB loops on itself. */
3192 if (check_self_loop
3193 && TEST_BIT (ae_gen[pred_bb->index], expr->bitmap_index)
3194 && BLOCK_NUM (occr->insn) == pred_bb->index)
3195 return 1;
3197 visited[pred_bb->index] = 1;
3200 /* Ignore this predecessor if it kills the expression. */
3201 else if (TEST_BIT (ae_kill[pred_bb->index], expr->bitmap_index))
3202 visited[pred_bb->index] = 1;
3204 /* Does this predecessor generate this expression? */
3205 else if (TEST_BIT (ae_gen[pred_bb->index], expr->bitmap_index))
3207 /* Is this the occurrence we're looking for?
3208 Note that there's only one generating occurrence per block
3209 so we just need to check the block number. */
3210 if (BLOCK_NUM (occr->insn) == pred_bb->index)
3211 return 1;
3213 visited[pred_bb->index] = 1;
3216 /* Neither gen nor kill. */
3217 else
3219 visited[pred_bb->index] = 1;
3220 if (expr_reaches_here_p_work (occr, expr, pred_bb, check_self_loop,
3221 visited))
3223 return 1;
3227 /* All paths have been checked. */
3228 return 0;
3231 /* This wrapper for expr_reaches_here_p_work() ensures that any
3232 memory allocated for that function is freed. */
3234 static int
3235 expr_reaches_here_p (occr, expr, bb, check_self_loop)
3236 struct occr *occr;
3237 struct expr *expr;
3238 basic_block bb;
3239 int check_self_loop;
3241 int rval;
3242 char *visited = (char *) xcalloc (last_basic_block, 1);
3244 rval = expr_reaches_here_p_work (occr, expr, bb, check_self_loop, visited);
3246 free (visited);
3247 return rval;
3250 /* Return the instruction that computes EXPR that reaches INSN's basic block.
3251 If there is more than one such instruction, return NULL.
3253 Called only by handle_avail_expr. */
3255 static rtx
3256 computing_insn (expr, insn)
3257 struct expr *expr;
3258 rtx insn;
3260 basic_block bb = BLOCK_FOR_INSN (insn);
3262 if (expr->avail_occr->next == NULL)
3264 if (BLOCK_FOR_INSN (expr->avail_occr->insn) == bb)
3265 /* The available expression is actually itself
3266 (i.e. a loop in the flow graph) so do nothing. */
3267 return NULL;
3269 /* (FIXME) This is the case where we found a pattern that was created
3270 by a substitution that took place. */
3271 return expr->avail_occr->insn;
3273 else
3275 /* Pattern is computed more than once.
3276 Search backwards from this insn to see how many of these
3277 computations actually reach this insn. */
3278 struct occr *occr;
3279 rtx insn_computes_expr = NULL;
3280 int can_reach = 0;
3282 for (occr = expr->avail_occr; occr != NULL; occr = occr->next)
3284 if (BLOCK_FOR_INSN (occr->insn) == bb)
3286 /* The expression is generated in this block.
3287 The only time we care about this is when the expression
3288 is generated later in the block [and thus there's a loop].
3289 We let the normal cse pass handle the other cases. */
3290 if (INSN_CUID (insn) < INSN_CUID (occr->insn)
3291 && expr_reaches_here_p (occr, expr, bb, 1))
3293 can_reach++;
3294 if (can_reach > 1)
3295 return NULL;
3297 insn_computes_expr = occr->insn;
3300 else if (expr_reaches_here_p (occr, expr, bb, 0))
3302 can_reach++;
3303 if (can_reach > 1)
3304 return NULL;
3306 insn_computes_expr = occr->insn;
3310 if (insn_computes_expr == NULL)
3311 abort ();
3313 return insn_computes_expr;
3317 /* Return nonzero if the definition in DEF_INSN can reach INSN.
3318 Only called by can_disregard_other_sets. */
3320 static int
3321 def_reaches_here_p (insn, def_insn)
3322 rtx insn, def_insn;
3324 rtx reg;
3326 if (TEST_BIT (reaching_defs[BLOCK_NUM (insn)], INSN_CUID (def_insn)))
3327 return 1;
3329 if (BLOCK_NUM (insn) == BLOCK_NUM (def_insn))
3331 if (INSN_CUID (def_insn) < INSN_CUID (insn))
3333 if (GET_CODE (PATTERN (def_insn)) == PARALLEL)
3334 return 1;
3335 else if (GET_CODE (PATTERN (def_insn)) == CLOBBER)
3336 reg = XEXP (PATTERN (def_insn), 0);
3337 else if (GET_CODE (PATTERN (def_insn)) == SET)
3338 reg = SET_DEST (PATTERN (def_insn));
3339 else
3340 abort ();
3342 return ! reg_set_between_p (reg, NEXT_INSN (def_insn), insn);
3344 else
3345 return 0;
3348 return 0;
3351 /* Return nonzero if *ADDR_THIS_REG can only have one value at INSN. The
3352 value returned is the number of definitions that reach INSN. Returning a
3353 value of zero means that [maybe] more than one definition reaches INSN and
3354 the caller can't perform whatever optimization it is trying; i.e., it is
3355 always safe to return zero. */
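/* If any reaching definition is found, *ADDR_THIS_REG is updated as a
   side effect to point at the last one examined, so the caller may
   inspect that insn directly.  */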
3357 static int
3358 can_disregard_other_sets (addr_this_reg, insn, for_combine)
3359 struct reg_set **addr_this_reg;
3360 rtx insn;
3361 int for_combine;
3363 int number_of_reaching_defs = 0;
3364 struct reg_set *this_reg;
3366 for (this_reg = *addr_this_reg; this_reg != 0; this_reg = this_reg->next)
3367 if (def_reaches_here_p (insn, this_reg->insn))
3369 number_of_reaching_defs++;
3370 /* Ignore parallels for now. */
3371 if (GET_CODE (PATTERN (this_reg->insn)) == PARALLEL)
3372 return 0;
3374 if (!for_combine
3375 && (GET_CODE (PATTERN (this_reg->insn)) == CLOBBER
3376 || ! rtx_equal_p (SET_SRC (PATTERN (this_reg->insn)),
3377 SET_SRC (PATTERN (insn)))))
3378 /* A setting of the reg to a different value reaches INSN. */
3379 return 0;
3381 if (number_of_reaching_defs > 1)
3383 /* If the value the register is being set to in this setting is
3384 equal to the previous value the register was set to, and this
3385 setting reaches the insn we are trying to do the substitution
3386 on, then we are OK. */
3387 if (GET_CODE (PATTERN (this_reg->insn)) == CLOBBER)
3388 return 0;
3389 else if (! rtx_equal_p (SET_SRC (PATTERN (this_reg->insn)),
3390 SET_SRC (PATTERN (insn))))
3391 return 0;
3394 *addr_this_reg = this_reg;
3397 return number_of_reaching_defs;
3400 /* Expression computed by insn is available and the substitution is legal,
3401 so try to perform the substitution.
3403 The result is nonzero if any changes were made. */
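/* For example, if the only computation of (plus (reg 1) (reg 2)) that
   reaches an insn (set (reg 200) (plus (reg 1) (reg 2))) is an earlier
   insn (set (reg 100) (plus (reg 1) (reg 2))), and reg 100 is set only
   once, the source of the later insn is simply replaced with (reg 100).  */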
3405 static int
3406 handle_avail_expr (insn, expr)
3407 rtx insn;
3408 struct expr *expr;
3410 rtx pat, insn_computes_expr, expr_set;
3411 rtx to;
3412 struct reg_set *this_reg;
3413 int found_setting, use_src;
3414 int changed = 0;
3416 /* We only handle the case where one computation of the expression
3417 reaches this instruction. */
3418 insn_computes_expr = computing_insn (expr, insn);
3419 if (insn_computes_expr == NULL)
3420 return 0;
3421 expr_set = single_set (insn_computes_expr);
3422 if (!expr_set)
3423 abort ();
3425 found_setting = 0;
3426 use_src = 0;
3428 /* At this point we know only one computation of EXPR outside of this
3429 block reaches this insn. Now try to find a register that the
3430 expression is computed into. */
3431 if (GET_CODE (SET_SRC (expr_set)) == REG)
3433 /* This is the case when the available expression that reaches
3434 here has already been handled as an available expression. */
3435 unsigned int regnum_for_replacing
3436 = REGNO (SET_SRC (expr_set));
3438 /* If the register was created by GCSE we can't use `reg_set_table';
3439 however, we know it's set only once. */
3440 if (regnum_for_replacing >= max_gcse_regno
3441 /* If the register the expression is computed into is set only once,
3442 or only one set reaches this insn, we can use it. */
3443 || (((this_reg = reg_set_table[regnum_for_replacing]),
3444 this_reg->next == NULL)
3445 || can_disregard_other_sets (&this_reg, insn, 0)))
3447 use_src = 1;
3448 found_setting = 1;
3452 if (!found_setting)
3454 unsigned int regnum_for_replacing
3455 = REGNO (SET_DEST (expr_set));
3457 /* This shouldn't happen. */
3458 if (regnum_for_replacing >= max_gcse_regno)
3459 abort ();
3461 this_reg = reg_set_table[regnum_for_replacing];
3463 /* If the register the expression is computed into is set only once,
3464 or only one set reaches this insn, use it. */
3465 if (this_reg->next == NULL
3466 || can_disregard_other_sets (&this_reg, insn, 0))
3467 found_setting = 1;
3470 if (found_setting)
3472 pat = PATTERN (insn);
3473 if (use_src)
3474 to = SET_SRC (expr_set);
3475 else
3476 to = SET_DEST (expr_set);
3477 changed = validate_change (insn, &SET_SRC (pat), to, 0);
3479 /* We should be able to ignore the return code from validate_change but
3480 to play it safe we check. */
3481 if (changed)
3483 gcse_subst_count++;
3484 if (gcse_file != NULL)
3486 fprintf (gcse_file, "GCSE: Replacing the source in insn %d with",
3487 INSN_UID (insn));
3488 fprintf (gcse_file, " reg %d %s insn %d\n",
3489 REGNO (to), use_src ? "from" : "set in",
3490 INSN_UID (insn_computes_expr));
3495 /* The register that the expr is computed into is set more than once. */
3496 else if (1 /*expensive_op(this_pattrn->op) && do_expensive_gcse)*/)
3498 /* Insert an insn after insnx that copies the reg set in insnx
3499 into a new pseudo register; call this new register REGN.
3500 From insnb until the end of the basic block, or until REGB is set,
3501 replace all uses of REGB with REGN. */
3502 rtx new_insn;
3504 to = gen_reg_rtx (GET_MODE (SET_DEST (expr_set)));
3506 /* Generate the new insn. */
3507 /* ??? If the change fails, we return 0, even though we created
3508 an insn. I think this is ok. */
3509 new_insn
3510 = emit_insn_after (gen_rtx_SET (VOIDmode, to,
3511 SET_DEST (expr_set)),
3512 insn_computes_expr);
3514 /* Keep register set table up to date. */
3515 record_one_set (REGNO (to), new_insn);
3517 gcse_create_count++;
3518 if (gcse_file != NULL)
3520 fprintf (gcse_file, "GCSE: Creating insn %d to copy value of reg %d",
3521 INSN_UID (NEXT_INSN (insn_computes_expr)),
3522 REGNO (SET_SRC (PATTERN (NEXT_INSN (insn_computes_expr)))));
3523 fprintf (gcse_file, ", computed in insn %d,\n",
3524 INSN_UID (insn_computes_expr));
3525 fprintf (gcse_file, " into newly allocated reg %d\n",
3526 REGNO (to));
3529 pat = PATTERN (insn);
3531 /* Do register replacement for INSN. */
3532 changed = validate_change (insn, &SET_SRC (pat),
3533 SET_DEST (PATTERN
3534 (NEXT_INSN (insn_computes_expr))),
3537 /* We should be able to ignore the return code from validate_change but
3538 to play it safe we check. */
3539 if (changed)
3541 gcse_subst_count++;
3542 if (gcse_file != NULL)
3544 fprintf (gcse_file,
3545 "GCSE: Replacing the source in insn %d with reg %d ",
3546 INSN_UID (insn),
3547 REGNO (SET_DEST (PATTERN (NEXT_INSN
3548 (insn_computes_expr)))));
3549 fprintf (gcse_file, "set in insn %d\n",
3550 INSN_UID (insn_computes_expr));
3555 return changed;
3558 /* Perform classic GCSE. This is called by one_classic_gcse_pass after all
3559 the dataflow analysis has been done.
3561 The result is nonzero if a change was made. */
3563 static int
3564 classic_gcse ()
3566 int changed;
3567 rtx insn;
3568 basic_block bb;
3570 /* Note we start at block 1. */
3572 if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
3573 return 0;
3575 changed = 0;
3576 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb->next_bb, EXIT_BLOCK_PTR, next_bb)
3578 /* Reset tables used to keep track of what's still valid [since the
3579 start of the block]. */
3580 reset_opr_set_tables ();
3582 for (insn = bb->head;
3583 insn != NULL && insn != NEXT_INSN (bb->end);
3584 insn = NEXT_INSN (insn))
3586 /* Is insn of form (set (pseudo-reg) ...)? */
3587 if (GET_CODE (insn) == INSN
3588 && GET_CODE (PATTERN (insn)) == SET
3589 && GET_CODE (SET_DEST (PATTERN (insn))) == REG
3590 && REGNO (SET_DEST (PATTERN (insn))) >= FIRST_PSEUDO_REGISTER)
3592 rtx pat = PATTERN (insn);
3593 rtx src = SET_SRC (pat);
3594 struct expr *expr;
3596 if (want_to_gcse_p (src)
3597 /* Is the expression recorded? */
3598 && ((expr = lookup_expr (src, &expr_hash_table)) != NULL)
3599 /* Is the expression available [at the start of the
3600 block]? */
3601 && TEST_BIT (ae_in[bb->index], expr->bitmap_index)
3602 /* Are the operands unchanged since the start of the
3603 block? */
3604 && oprs_not_set_p (src, insn))
3605 changed |= handle_avail_expr (insn, expr);
3608 /* Keep track of everything modified by this insn. */
3609 /* ??? Need to be careful w.r.t. mods done to INSN. */
3610 if (INSN_P (insn))
3611 mark_oprs_set (insn);
3615 return changed;
3618 /* Top level routine to perform one classic GCSE pass.
3620 Return nonzero if a change was made. */
3622 static int
3623 one_classic_gcse_pass (pass)
3624 int pass;
3626 int changed = 0;
3628 gcse_subst_count = 0;
3629 gcse_create_count = 0;
3631 alloc_hash_table (max_cuid, &expr_hash_table, 0);
3632 alloc_rd_mem (last_basic_block, max_cuid);
3633 compute_hash_table (&expr_hash_table);
3634 if (gcse_file)
3635 dump_hash_table (gcse_file, "Expression", &expr_hash_table);
3637 if (expr_hash_table.n_elems > 0)
3639 compute_kill_rd ();
3640 compute_rd ();
3641 alloc_avail_expr_mem (last_basic_block, expr_hash_table.n_elems);
3642 compute_ae_gen (&expr_hash_table);
3643 compute_ae_kill (ae_gen, ae_kill, &expr_hash_table);
3644 compute_available (ae_gen, ae_kill, ae_out, ae_in);
3645 changed = classic_gcse ();
3646 free_avail_expr_mem ();
3649 free_rd_mem ();
3650 free_hash_table (&expr_hash_table);
3652 if (gcse_file)
3654 fprintf (gcse_file, "\n");
3655 fprintf (gcse_file, "GCSE of %s, pass %d: %d bytes needed, %d substs,",
3656 current_function_name, pass, bytes_used, gcse_subst_count);
3657 fprintf (gcse_file, "%d insns created\n", gcse_create_count);
3660 return changed;
3663 /* Compute copy/constant propagation working variables. */
3665 /* Local properties of assignments. */
3666 static sbitmap *cprop_pavloc;
3667 static sbitmap *cprop_absaltered;
3669 /* Global properties of assignments (computed from the local properties). */
3670 static sbitmap *cprop_avin;
3671 static sbitmap *cprop_avout;
3673 /* Allocate vars used for copy/const propagation. N_BLOCKS is the number of
3674 basic blocks. N_SETS is the number of sets. */
3676 static void
3677 alloc_cprop_mem (n_blocks, n_sets)
3678 int n_blocks, n_sets;
3680 cprop_pavloc = sbitmap_vector_alloc (n_blocks, n_sets);
3681 cprop_absaltered = sbitmap_vector_alloc (n_blocks, n_sets);
3683 cprop_avin = sbitmap_vector_alloc (n_blocks, n_sets);
3684 cprop_avout = sbitmap_vector_alloc (n_blocks, n_sets);
3687 /* Free vars used by copy/const propagation. */
3689 static void
3690 free_cprop_mem ()
3692 sbitmap_vector_free (cprop_pavloc);
3693 sbitmap_vector_free (cprop_absaltered);
3694 sbitmap_vector_free (cprop_avin);
3695 sbitmap_vector_free (cprop_avout);
3698 /* For each block, compute whether X is transparent. X is either an
3699 expression or an assignment [though we don't care which, for this context
3700 an assignment is treated as an expression]. For each block where an
3701 element of X is modified, set (SET_P == 1) or reset (SET_P == 0) the INDX
3702 bit in BMAP. */
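/* For example, an expression mentioning (reg 105) is not transparent in
   any block that contains a set of (reg 105), and an expression mentioning
   a MEM is not transparent in any block containing a store that may alias
   that MEM or a CALL_INSN.  */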
3704 static void
3705 compute_transp (x, indx, bmap, set_p)
3706 rtx x;
3707 int indx;
3708 sbitmap *bmap;
3709 int set_p;
3711 int i, j;
3712 basic_block bb;
3713 enum rtx_code code;
3714 reg_set *r;
3715 const char *fmt;
3717 /* repeat is used to turn tail-recursion into iteration since GCC
3718 can't do it when there's no return value. */
3719 repeat:
3721 if (x == 0)
3722 return;
3724 code = GET_CODE (x);
3725 switch (code)
3727 case REG:
3728 if (set_p)
3730 if (REGNO (x) < FIRST_PSEUDO_REGISTER)
3732 FOR_EACH_BB (bb)
3733 if (TEST_BIT (reg_set_in_block[bb->index], REGNO (x)))
3734 SET_BIT (bmap[bb->index], indx);
3736 else
3738 for (r = reg_set_table[REGNO (x)]; r != NULL; r = r->next)
3739 SET_BIT (bmap[BLOCK_NUM (r->insn)], indx);
3742 else
3744 if (REGNO (x) < FIRST_PSEUDO_REGISTER)
3746 FOR_EACH_BB (bb)
3747 if (TEST_BIT (reg_set_in_block[bb->index], REGNO (x)))
3748 RESET_BIT (bmap[bb->index], indx);
3750 else
3752 for (r = reg_set_table[REGNO (x)]; r != NULL; r = r->next)
3753 RESET_BIT (bmap[BLOCK_NUM (r->insn)], indx);
3757 return;
3759 case MEM:
3760 FOR_EACH_BB (bb)
3762 rtx list_entry = canon_modify_mem_list[bb->index];
3764 while (list_entry)
3766 rtx dest, dest_addr;
3768 if (GET_CODE (XEXP (list_entry, 0)) == CALL_INSN)
3770 if (set_p)
3771 SET_BIT (bmap[bb->index], indx);
3772 else
3773 RESET_BIT (bmap[bb->index], indx);
3774 break;
3776 /* LIST_ENTRY must be an INSN of some kind that sets memory.
3777 Examine each hunk of memory that is modified. */
3779 dest = XEXP (list_entry, 0);
3780 list_entry = XEXP (list_entry, 1);
3781 dest_addr = XEXP (list_entry, 0);
3783 if (canon_true_dependence (dest, GET_MODE (dest), dest_addr,
3784 x, rtx_addr_varies_p))
3786 if (set_p)
3787 SET_BIT (bmap[bb->index], indx);
3788 else
3789 RESET_BIT (bmap[bb->index], indx);
3790 break;
3792 list_entry = XEXP (list_entry, 1);
3796 x = XEXP (x, 0);
3797 goto repeat;
3799 case PC:
3800 case CC0: /*FIXME*/
3801 case CONST:
3802 case CONST_INT:
3803 case CONST_DOUBLE:
3804 case CONST_VECTOR:
3805 case SYMBOL_REF:
3806 case LABEL_REF:
3807 case ADDR_VEC:
3808 case ADDR_DIFF_VEC:
3809 return;
3811 default:
3812 break;
3815 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
3817 if (fmt[i] == 'e')
3819 /* If we are about to do the last recursive call
3820 needed at this level, change it into iteration.
3821 This function is called enough to be worth it. */
3822 if (i == 0)
3824 x = XEXP (x, i);
3825 goto repeat;
3828 compute_transp (XEXP (x, i), indx, bmap, set_p);
3830 else if (fmt[i] == 'E')
3831 for (j = 0; j < XVECLEN (x, i); j++)
3832 compute_transp (XVECEXP (x, i, j), indx, bmap, set_p);
3836 /* Top level routine to do the dataflow analysis needed by copy/const
3837 propagation. */
3839 static void
3840 compute_cprop_data ()
3842 compute_local_properties (cprop_absaltered, cprop_pavloc, NULL, &set_hash_table);
3843 compute_available (cprop_pavloc, cprop_absaltered,
3844 cprop_avout, cprop_avin);
3847 /* Copy/constant propagation. */
3849 /* Maximum number of register uses in an insn that we handle. */
3850 #define MAX_USES 8
3852 /* Table of uses found in an insn.
3853 Allocated statically to avoid alloc/free complexity and overhead. */
3854 static struct reg_use reg_use_table[MAX_USES];
3856 /* Index into `reg_use_table' while building it. */
3857 static int reg_use_count;
3859 /* Set up a list of register numbers used in INSN. The found uses are stored
3860 in `reg_use_table'. `reg_use_count' is initialized to zero before entry,
3861 and contains the number of uses in the table upon exit.
3863 ??? If a register appears multiple times we will record it multiple times.
3864 This doesn't hurt anything but it will slow things down. */
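/* For example, given (set (reg 10) (plus (reg 11) (reg 12))),
   note_uses hands us the source operand and reg_use_table ends up
   holding reg 11 and reg 12; the destination register is not a use and
   is not recorded.  (Register numbers are illustrative only.)  */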
3866 static void
3867 find_used_regs (xptr, data)
3868 rtx *xptr;
3869 void *data ATTRIBUTE_UNUSED;
3871 int i, j;
3872 enum rtx_code code;
3873 const char *fmt;
3874 rtx x = *xptr;
3876 /* repeat is used to turn tail-recursion into iteration since GCC
3877 can't do it when there's no return value. */
3878 repeat:
3879 if (x == 0)
3880 return;
3882 code = GET_CODE (x);
3883 if (REG_P (x))
3885 if (reg_use_count == MAX_USES)
3886 return;
3888 reg_use_table[reg_use_count].reg_rtx = x;
3889 reg_use_count++;
3892 /* Recursively scan the operands of this expression. */
3894 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
3896 if (fmt[i] == 'e')
3898 /* If we are about to do the last recursive call
3899 needed at this level, change it into iteration.
3900 This function is called enough to be worth it. */
3901 if (i == 0)
3903 x = XEXP (x, 0);
3904 goto repeat;
3907 find_used_regs (&XEXP (x, i), data);
3909 else if (fmt[i] == 'E')
3910 for (j = 0; j < XVECLEN (x, i); j++)
3911 find_used_regs (&XVECEXP (x, i, j), data);
3915 /* Try to replace all non-SET_DEST occurrences of FROM in INSN with TO.
3916 Returns nonzero if successful. */
3918 static int
3919 try_replace_reg (from, to, insn)
3920 rtx from, to, insn;
3922 rtx note = find_reg_equal_equiv_note (insn);
3923 rtx src = 0;
3924 int success = 0;
3925 rtx set = single_set (insn);
3927 validate_replace_src_group (from, to, insn);
3928 if (num_changes_pending () && apply_change_group ())
3929 success = 1;
3931 if (!success && set && reg_mentioned_p (from, SET_SRC (set)))
3933 /* If above failed and this is a single set, try to simplify the source of
3934 the set given our substitution. We could perhaps try this for multiple
3935 SETs, but it probably won't buy us anything. */
3936 src = simplify_replace_rtx (SET_SRC (set), from, to);
3938 if (!rtx_equal_p (src, SET_SRC (set))
3939 && validate_change (insn, &SET_SRC (set), src, 0))
3940 success = 1;
3942 /* If we've failed to do replacement, have a single SET, and don't already
3943 have a note, add a REG_EQUAL note to not lose information. */
3944 if (!success && note == 0 && set != 0)
3945 note = set_unique_reg_note (insn, REG_EQUAL, copy_rtx (src));
3948 /* If there is already a NOTE, update the expression in it with our
3949 replacement. */
3950 else if (note != 0)
3951 XEXP (note, 0) = simplify_replace_rtx (XEXP (note, 0), from, to);
3953 /* REG_EQUAL may get simplified into a register.
3954 We don't allow that. Remove that note. This code ought
3955 not to happen, because previous code ought to synthesize
3956 a reg-reg move, but be on the safe side. */
3957 if (note && REG_P (XEXP (note, 0)))
3958 remove_note (insn, note);
3960 return success;
3963 /* Find a set of REGNOs that are available on entry to INSN's block. Returns
3964 NULL if no such set is found. */
3966 static struct expr *
3967 find_avail_set (regno, insn)
3968 int regno;
3969 rtx insn;
3971 /* SET1 contains the last set found that can be returned to the caller for
3972 use in a substitution. */
3973 struct expr *set1 = 0;
3975 /* Loops are not possible here. To get a loop we would need two sets
3976 available at the start of the block containing INSN, i.e. we would
3977 need two sets like this available at the start of the block:
3979 (set (reg X) (reg Y))
3980 (set (reg Y) (reg X))
3982 This cannot happen since the set of (reg Y) would have killed the
3983 set of (reg X) making it unavailable at the start of this block. */
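/* As an illustration, if both

      (set (reg 20) (reg 15))
      (set (reg 15) (const_int 0))

   are available at the start of INSN's block, a query for reg 20
   follows the chain through reg 15 and returns the constant set,
   provided the operands are still unchanged at INSN.  (Register
   numbers are made up for the example.)  */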
3984 while (1)
3986 rtx src;
3987 struct expr *set = lookup_set (regno, NULL_RTX, &set_hash_table);
3989 /* Find a set that is available at the start of the block
3990 which contains INSN. */
3991 while (set)
3993 if (TEST_BIT (cprop_avin[BLOCK_NUM (insn)], set->bitmap_index))
3994 break;
3995 set = next_set (regno, set);
3998 /* If no available set was found we've reached the end of the
3999 (possibly empty) copy chain. */
4000 if (set == 0)
4001 break;
4003 if (GET_CODE (set->expr) != SET)
4004 abort ();
4006 src = SET_SRC (set->expr);
4008 /* We know the set is available.
4009 Now check that SRC is ANTLOC (i.e. none of the source operands
4010 have changed since the start of the block).
4012 If the source operand changed, we may still use it for the next
4013 iteration of this loop, but we may not use it for substitutions. */
4015 if (CONSTANT_P (src) || oprs_not_set_p (src, insn))
4016 set1 = set;
4018 /* If the source of the set is anything except a register, then
4019 we have reached the end of the copy chain. */
4020 if (GET_CODE (src) != REG)
4021 break;
4023 /* Follow the copy chain, i.e. start another iteration of the loop
4024 and see if we have an available copy into SRC. */
4025 regno = REGNO (src);
4028 /* SET1 holds the last set that was available and anticipatable at
4029 INSN. */
4030 return set1;
4033 /* Subroutine of cprop_insn that tries to propagate constants into
4034 JUMP_INSNS. JUMP must be a conditional jump. If SETCC is non-NULL
4035 it is the instruction that immediately precedes JUMP, and must be a
4036 single SET of a register. FROM is what we will try to replace,
4037 SRC is the constant we will try to substitute for it. Returns nonzero
4038 if a change was made. */
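/* For instance, with SETCC = (set (reg 25) (eq (reg 3) (const_int 0)))
   and JUMP branching on reg 25, substituting a known constant for
   reg 3 may let the condition fold: the jump then either becomes
   unconditional or simplifies to a no-op and is deleted.  (Register
   numbers are illustrative.)  */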
4040 static int
4041 cprop_jump (bb, setcc, jump, from, src)
4042 basic_block bb;
4043 rtx setcc;
4044 rtx jump;
4045 rtx from;
4046 rtx src;
4048 rtx new, new_set;
4049 rtx set = pc_set (jump);
4051 /* First substitute in the INSN condition as the SET_SRC of the JUMP,
4052 then substitute that given values in this expanded JUMP. */
4053 if (setcc != NULL
4054 && !modified_between_p (from, setcc, jump)
4055 && !modified_between_p (src, setcc, jump))
4057 rtx setcc_set = single_set (setcc);
4058 new_set = simplify_replace_rtx (SET_SRC (set),
4059 SET_DEST (setcc_set),
4060 SET_SRC (setcc_set));
4062 else
4063 new_set = set;
4065 new = simplify_replace_rtx (new_set, from, src);
4067 /* If no simplification can be made, then try the next
4068 register. */
4069 if (rtx_equal_p (new, new_set) || rtx_equal_p (new, SET_SRC (set)))
4070 return 0;
4072 /* If this is now a no-op delete it, otherwise this must be a valid insn. */
4073 if (new == pc_rtx)
4074 delete_insn (jump);
4075 else
4077 /* Ensure that the value computed inside the jump insn is equivalent
4078 to the one computed by setcc. */
4079 if (setcc
4080 && modified_in_p (new, setcc))
4081 return 0;
4082 if (! validate_change (jump, &SET_SRC (set), new, 0))
4083 return 0;
4085 /* If this has turned into an unconditional jump,
4086 then put a barrier after it so that the unreachable
4087 code will be deleted. */
4088 if (GET_CODE (SET_SRC (set)) == LABEL_REF)
4089 emit_barrier_after (jump);
4092 #ifdef HAVE_cc0
4093 /* Delete the cc0 setter. */
4094 if (setcc != NULL && CC0_P (SET_DEST (single_set (setcc))))
4095 delete_insn (setcc);
4096 #endif
4098 run_jump_opt_after_gcse = 1;
4100 const_prop_count++;
4101 if (gcse_file != NULL)
4103 fprintf (gcse_file,
4104 "CONST-PROP: Replacing reg %d in jump_insn %d with constant ",
4105 REGNO (from), INSN_UID (jump));
4106 print_rtl (gcse_file, src);
4107 fprintf (gcse_file, "\n");
4109 purge_dead_edges (bb);
4111 return 1;
4114 static bool
4115 constprop_register (insn, from, to, alter_jumps)
4116 rtx insn;
4117 rtx from;
4118 rtx to;
4119 int alter_jumps;
4121 rtx sset;
4123 /* Check for reg or cc0 setting instructions followed by
4124 conditional branch instructions first. */
4125 if (alter_jumps
4126 && (sset = single_set (insn)) != NULL
4127 && any_condjump_p (NEXT_INSN (insn)) && onlyjump_p (NEXT_INSN (insn)))
4129 rtx dest = SET_DEST (sset);
4130 if ((REG_P (dest) || CC0_P (dest))
4131 && cprop_jump (BLOCK_FOR_INSN (insn), insn, NEXT_INSN (insn), from, to))
4132 return 1;
4135 /* Handle normal insns next. */
4136 if (GET_CODE (insn) == INSN
4137 && try_replace_reg (from, to, insn))
4138 return 1;
4140 /* Try to propagate a CONST_INT into a conditional jump.
4141 We're pretty specific about what we will handle in this
4142 code; we can extend this as necessary over time.
4144 Right now the insn in question must look like
4145 (set (pc) (if_then_else ...)) */
4146 else if (alter_jumps && any_condjump_p (insn) && onlyjump_p (insn))
4147 return cprop_jump (BLOCK_FOR_INSN (insn), NULL, insn, from, to);
4148 return 0;
4151 /* Perform constant and copy propagation on INSN.
4152 The result is nonzero if a change was made. */
4154 static int
4155 cprop_insn (insn, alter_jumps)
4156 rtx insn;
4157 int alter_jumps;
4159 struct reg_use *reg_used;
4160 int changed = 0;
4161 rtx note;
4163 if (!INSN_P (insn))
4164 return 0;
4166 reg_use_count = 0;
4167 note_uses (&PATTERN (insn), find_used_regs, NULL);
4169 note = find_reg_equal_equiv_note (insn);
4171 /* We may win even when propagating constants into notes. */
4172 if (note)
4173 find_used_regs (&XEXP (note, 0), NULL);
4175 for (reg_used = &reg_use_table[0]; reg_use_count > 0;
4176 reg_used++, reg_use_count--)
4178 unsigned int regno = REGNO (reg_used->reg_rtx);
4179 rtx pat, src;
4180 struct expr *set;
4182 /* Ignore registers created by GCSE.
4183 We do this because ... */
4184 if (regno >= max_gcse_regno)
4185 continue;
4187 /* If the register has already been set in this block, there's
4188 nothing we can do. */
4189 if (! oprs_not_set_p (reg_used->reg_rtx, insn))
4190 continue;
4192 /* Find an assignment that sets reg_used and is available
4193 at the start of the block. */
4194 set = find_avail_set (regno, insn);
4195 if (! set)
4196 continue;
4198 pat = set->expr;
4199 /* ??? We might be able to handle PARALLELs. Later. */
4200 if (GET_CODE (pat) != SET)
4201 abort ();
4203 src = SET_SRC (pat);
4205 /* Constant propagation. */
4206 if (CONSTANT_P (src))
4208 if (constprop_register (insn, reg_used->reg_rtx, src, alter_jumps))
4210 changed = 1;
4211 const_prop_count++;
4212 if (gcse_file != NULL)
4214 fprintf (gcse_file, "GLOBAL CONST-PROP: Replacing reg %d in ", regno);
4215 fprintf (gcse_file, "insn %d with constant ", INSN_UID (insn));
4216 print_rtl (gcse_file, src);
4217 fprintf (gcse_file, "\n");
4221 else if (GET_CODE (src) == REG
4222 && REGNO (src) >= FIRST_PSEUDO_REGISTER
4223 && REGNO (src) != regno)
4225 if (try_replace_reg (reg_used->reg_rtx, src, insn))
4227 changed = 1;
4228 copy_prop_count++;
4229 if (gcse_file != NULL)
4231 fprintf (gcse_file, "GLOBAL COPY-PROP: Replacing reg %d in insn %d",
4232 regno, INSN_UID (insn));
4233 fprintf (gcse_file, " with reg %d\n", REGNO (src));
4236 /* The original insn setting reg_used may or may not now be
4237 deletable. We leave the deletion to flow. */
4238 /* FIXME: If it turns out that the insn isn't deletable,
4239 then we may have unnecessarily extended register lifetimes
4240 and made things worse. */
4245 return changed;
4248 /* LIBCALL_SP is a zero-terminated array of insns at the end of a libcall;
4249 their REG_EQUAL notes need updating. */
4251 static bool
4252 do_local_cprop (x, insn, alter_jumps, libcall_sp)
4253 rtx x;
4254 rtx insn;
4255 int alter_jumps;
4256 rtx *libcall_sp;
4258 rtx newreg = NULL, newcnst = NULL;
4260 /* Rule out USE instructions and ASM statements as we don't want to
4261 change the hard registers mentioned. */
4262 if (GET_CODE (x) == REG
4263 && (REGNO (x) >= FIRST_PSEUDO_REGISTER
4264 || (GET_CODE (PATTERN (insn)) != USE
4265 && asm_noperands (PATTERN (insn)) < 0)))
4267 cselib_val *val = cselib_lookup (x, GET_MODE (x), 0);
4268 struct elt_loc_list *l;
4270 if (!val)
4271 return false;
4272 for (l = val->locs; l; l = l->next)
4274 rtx this_rtx = l->loc;
4275 rtx note;
4277 if (CONSTANT_P (this_rtx))
4278 newcnst = this_rtx;
4279 if (REG_P (this_rtx) && REGNO (this_rtx) >= FIRST_PSEUDO_REGISTER
4280 /* Don't copy propagate if it has an attached REG_EQUIV note.
4281 At this point only function parameters should have
4282 REG_EQUIV notes, and if the argument slot is used somewhere
4283 explicitly, it means the address of the parameter has been taken,
4284 so we should not extend the lifetime of the pseudo. */
4285 && (!(note = find_reg_note (l->setting_insn, REG_EQUIV, NULL_RTX))
4286 || GET_CODE (XEXP (note, 0)) != MEM))
4287 newreg = this_rtx;
4289 if (newcnst && constprop_register (insn, x, newcnst, alter_jumps))
4291 /* If we find a case where we can't make the retval REG_EQUAL notes
4292 match the new register, we either have to abandon this replacement
4293 or fix delete_trivially_dead_insns to preserve the setting insn,
4294 or make it delete the REG_EQUAL note, and fix up all passes that
4295 require the REG_EQUAL note there. */
4296 if (!adjust_libcall_notes (x, newcnst, insn, libcall_sp))
4297 abort ();
4298 if (gcse_file != NULL)
4300 fprintf (gcse_file, "LOCAL CONST-PROP: Replacing reg %d in ",
4301 REGNO (x));
4302 fprintf (gcse_file, "insn %d with constant ",
4303 INSN_UID (insn));
4304 print_rtl (gcse_file, newcnst);
4305 fprintf (gcse_file, "\n");
4307 const_prop_count++;
4308 return true;
4310 else if (newreg && newreg != x && try_replace_reg (x, newreg, insn))
4312 adjust_libcall_notes (x, newreg, insn, libcall_sp);
4313 if (gcse_file != NULL)
4315 fprintf (gcse_file,
4316 "LOCAL COPY-PROP: Replacing reg %d in insn %d",
4317 REGNO (x), INSN_UID (insn));
4318 fprintf (gcse_file, " with reg %d\n", REGNO (newreg));
4320 copy_prop_count++;
4321 return true;
4324 return false;
4327 /* LIBCALL_SP is a zero-terminated array of insns at the end of a libcall;
4328 their REG_EQUAL notes need updating to reflect that OLDREG has been
4329 replaced with NEWVAL in INSN. Return true if all substitutions could
4330 be made. */
4331 static bool
4332 adjust_libcall_notes (oldreg, newval, insn, libcall_sp)
4333 rtx oldreg, newval, insn, *libcall_sp;
4335 rtx end;
4337 while ((end = *libcall_sp++))
4339 rtx note = find_reg_equal_equiv_note (end);
4341 if (! note)
4342 continue;
4344 if (REG_P (newval))
4346 if (reg_set_between_p (newval, PREV_INSN (insn), end))
4350 note = find_reg_equal_equiv_note (end);
4351 if (! note)
4352 continue;
4353 if (reg_mentioned_p (newval, XEXP (note, 0)))
4354 return false;
4356 while ((end = *libcall_sp++));
4357 return true;
4360 XEXP (note, 0) = replace_rtx (XEXP (note, 0), oldreg, newval);
4361 insn = end;
4363 return true;
4366 #define MAX_NESTED_LIBCALLS 9
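/* Perform a local copy/constant propagation pass over the whole insn
   stream.  We walk every insn, maintain a small stack of enclosing
   libcalls (REG_LIBCALL/REG_RETVAL notes) so their REG_EQUAL notes can
   be kept consistent, and use cselib values to substitute constants or
   equivalent pseudos into each register use found by find_used_regs.  */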
4368 static void
4369 local_cprop_pass (alter_jumps)
4370 int alter_jumps;
4372 rtx insn;
4373 struct reg_use *reg_used;
4374 rtx libcall_stack[MAX_NESTED_LIBCALLS + 1], *libcall_sp;
4376 cselib_init ();
4377 libcall_sp = &libcall_stack[MAX_NESTED_LIBCALLS];
4378 *libcall_sp = 0;
4379 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4381 if (INSN_P (insn))
4383 rtx note = find_reg_note (insn, REG_LIBCALL, NULL_RTX);
4385 if (note)
4387 if (libcall_sp == libcall_stack)
4388 abort ();
4389 *--libcall_sp = XEXP (note, 0);
4391 note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
4392 if (note)
4393 libcall_sp++;
4394 note = find_reg_equal_equiv_note (insn);
4397 reg_use_count = 0;
4398 note_uses (&PATTERN (insn), find_used_regs, NULL);
4399 if (note)
4400 find_used_regs (&XEXP (note, 0), NULL);
4402 for (reg_used = &reg_use_table[0]; reg_use_count > 0;
4403 reg_used++, reg_use_count--)
4404 if (do_local_cprop (reg_used->reg_rtx, insn, alter_jumps,
4405 libcall_sp))
4406 break;
4408 while (reg_use_count);
4410 cselib_process_insn (insn);
4412 cselib_finish ();
4415 /* Forward propagate copies. This includes copies and constants. Return
4416 nonzero if a change was made. */
4418 static int
4419 cprop (alter_jumps)
4420 int alter_jumps;
4422 int changed;
4423 basic_block bb;
4424 rtx insn;
4426 /* Note we start at block 1. */
4427 if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
4429 if (gcse_file != NULL)
4430 fprintf (gcse_file, "\n");
4431 return 0;
4434 changed = 0;
4435 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb->next_bb, EXIT_BLOCK_PTR, next_bb)
4437 /* Reset tables used to keep track of what's still valid [since the
4438 start of the block]. */
4439 reset_opr_set_tables ();
4441 for (insn = bb->head;
4442 insn != NULL && insn != NEXT_INSN (bb->end);
4443 insn = NEXT_INSN (insn))
4444 if (INSN_P (insn))
4446 changed |= cprop_insn (insn, alter_jumps);
4448 /* Keep track of everything modified by this insn. */
4449 /* ??? Need to be careful w.r.t. mods done to INSN. Don't
4450 call mark_oprs_set if we turned the insn into a NOTE. */
4451 if (GET_CODE (insn) != NOTE)
4452 mark_oprs_set (insn);
4456 if (gcse_file != NULL)
4457 fprintf (gcse_file, "\n");
4459 return changed;
4462 /* Perform one copy/constant propagation pass.
4463 F is the first insn in the function.
4464 PASS is the pass count. */
4466 static int
4467 one_cprop_pass (pass, alter_jumps)
4468 int pass;
4469 int alter_jumps;
4471 int changed = 0;
4473 const_prop_count = 0;
4474 copy_prop_count = 0;
4476 local_cprop_pass (alter_jumps);
4478 alloc_hash_table (max_cuid, &set_hash_table, 1);
4479 compute_hash_table (&set_hash_table);
4480 if (gcse_file)
4481 dump_hash_table (gcse_file, "SET", &set_hash_table);
4482 if (set_hash_table.n_elems > 0)
4484 alloc_cprop_mem (last_basic_block, set_hash_table.n_elems);
4485 compute_cprop_data ();
4486 changed = cprop (alter_jumps);
4487 if (alter_jumps)
4488 changed |= bypass_conditional_jumps ();
4489 free_cprop_mem ();
4492 free_hash_table (&set_hash_table);
4494 if (gcse_file)
4496 fprintf (gcse_file, "CPROP of %s, pass %d: %d bytes needed, ",
4497 current_function_name, pass, bytes_used);
4498 fprintf (gcse_file, "%d const props, %d copy props\n\n",
4499 const_prop_count, copy_prop_count);
4502 return changed;
4505 /* Bypass conditional jumps. */
4507 /* Find a set of REGNO to a constant that is available at the end of basic
4508 block BB. Returns NULL if no such set is found. Based heavily upon
4509 find_avail_set. */
4511 static struct expr *
4512 find_bypass_set (regno, bb)
4513 int regno;
4514 int bb;
4516 struct expr *result = 0;
4518 for (;;)
4520 rtx src;
4521 struct expr *set = lookup_set (regno, NULL_RTX, &set_hash_table);
4523 while (set)
4525 if (TEST_BIT (cprop_avout[bb], set->bitmap_index))
4526 break;
4527 set = next_set (regno, set);
4530 if (set == 0)
4531 break;
4533 if (GET_CODE (set->expr) != SET)
4534 abort ();
4536 src = SET_SRC (set->expr);
4537 if (CONSTANT_P (src))
4538 result = set;
4540 if (GET_CODE (src) != REG)
4541 break;
4543 regno = REGNO (src);
4545 return result;
4549 /* Subroutine of bypass_conditional_jumps that attempts to bypass the given
4550 basic block BB which has more than one predecessor. If not NULL, SETCC
4551 is the first instruction of BB, which is immediately followed by JUMP_INSN
4552 JUMP. Otherwise, SETCC is NULL, and JUMP is the first insn of BB.
4553 Returns nonzero if a change was made. */
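/* For example, if BB contains only

      (set (pc) (if_then_else (eq (reg 40) (const_int 0))
                              (label_ref L) (pc)))

   and one incoming edge provides (set (reg 40) (const_int 0)), the
   outcome is known on that edge and it can be redirected straight to
   the branch target.  (Register number and label are illustrative.)  */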
4555 static int
4556 bypass_block (bb, setcc, jump)
4557 basic_block bb;
4558 rtx setcc, jump;
4560 rtx insn, note;
4561 edge e, enext;
4562 int i, change;
4564 insn = (setcc != NULL) ? setcc : jump;
4566 /* Determine set of register uses in INSN. */
4567 reg_use_count = 0;
4568 note_uses (&PATTERN (insn), find_used_regs, NULL);
4569 note = find_reg_equal_equiv_note (insn);
4570 if (note)
4571 find_used_regs (&XEXP (note, 0), NULL);
4573 change = 0;
4574 for (e = bb->pred; e; e = enext)
4576 enext = e->pred_next;
4577 for (i = 0; i < reg_use_count; i++)
4579 struct reg_use *reg_used = &reg_use_table[i];
4580 unsigned int regno = REGNO (reg_used->reg_rtx);
4581 basic_block dest, old_dest;
4582 struct expr *set;
4583 rtx src, new;
4585 if (regno >= max_gcse_regno)
4586 continue;
4588 set = find_bypass_set (regno, e->src->index);
4590 if (! set)
4591 continue;
4593 src = SET_SRC (pc_set (jump));
4595 if (setcc != NULL)
4596 src = simplify_replace_rtx (src,
4597 SET_DEST (PATTERN (setcc)),
4598 SET_SRC (PATTERN (setcc)));
4600 new = simplify_replace_rtx (src, reg_used->reg_rtx,
4601 SET_SRC (set->expr));
4603 if (new == pc_rtx)
4604 dest = FALLTHRU_EDGE (bb)->dest;
4605 else if (GET_CODE (new) == LABEL_REF)
4606 dest = BRANCH_EDGE (bb)->dest;
4607 else
4608 dest = NULL;
4610 /* Once basic block indices are stable, we should be able
4611 to use redirect_edge_and_branch_force instead. */
4612 old_dest = e->dest;
4613 if (dest != NULL && dest != old_dest
4614 && redirect_edge_and_branch (e, dest))
4616 /* Copy the register setter to the redirected edge.
4617 Don't copy CC0 setters, as CC0 is dead after jump. */
4618 if (setcc)
4620 rtx pat = PATTERN (setcc);
4621 if (!CC0_P (SET_DEST (pat)))
4622 insert_insn_on_edge (copy_insn (pat), e);
4625 if (gcse_file != NULL)
4627 fprintf (gcse_file, "JUMP-BYPASS: Proved reg %d in jump_insn %d equals constant ",
4628 regno, INSN_UID (jump));
4629 print_rtl (gcse_file, SET_SRC (set->expr));
4630 fprintf (gcse_file, "\nBypass edge from %d->%d to %d\n",
4631 e->src->index, old_dest->index, dest->index);
4633 change = 1;
4634 break;
4638 return change;
4641 /* Find basic blocks with more than one predecessor that only contain a
4642 single conditional jump. If the result of the comparison is known at
4643 compile-time from any incoming edge, redirect that edge to the
4644 appropriate target. Returns nonzero if a change was made. */
4646 static int
4647 bypass_conditional_jumps ()
4649 basic_block bb;
4650 int changed;
4651 rtx setcc;
4652 rtx insn;
4653 rtx dest;
4655 /* Note we start at block 1. */
4656 if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
4657 return 0;
4659 changed = 0;
4660 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb->next_bb,
4661 EXIT_BLOCK_PTR, next_bb)
4663 /* Check for more than one predecessor. */
4664 if (bb->pred && bb->pred->pred_next)
4666 setcc = NULL_RTX;
4667 for (insn = bb->head;
4668 insn != NULL && insn != NEXT_INSN (bb->end);
4669 insn = NEXT_INSN (insn))
4670 if (GET_CODE (insn) == INSN)
4672 if (setcc)
4673 break;
4674 if (GET_CODE (PATTERN (insn)) != SET)
4675 break;
4677 dest = SET_DEST (PATTERN (insn));
4678 if (REG_P (dest) || CC0_P (dest))
4679 setcc = insn;
4680 else
4681 break;
4683 else if (GET_CODE (insn) == JUMP_INSN)
4685 if (any_condjump_p (insn) && onlyjump_p (insn))
4686 changed |= bypass_block (bb, setcc, insn);
4687 break;
4689 else if (INSN_P (insn))
4690 break;
4694 /* If we bypassed any register setting insns, we inserted a
4695 copy on the redirected edge. These need to be committed. */
4696 if (changed)
4697 commit_edge_insertions ();
4699 return changed;
4702 /* Compute PRE+LCM working variables. */
4704 /* Local properties of expressions. */
4705 /* Nonzero for expressions that are transparent in the block. */
4706 static sbitmap *transp;
4708 /* Nonzero for expressions that are transparent at the end of the block.
4709 This is only zero for expressions killed by an abnormal critical edge
4710 created by a call. */
4711 static sbitmap *transpout;
4713 /* Nonzero for expressions that are computed (available) in the block. */
4714 static sbitmap *comp;
4716 /* Nonzero for expressions that are locally anticipatable in the block. */
4717 static sbitmap *antloc;
4719 /* Nonzero for expressions where this block is an optimal computation
4720 point. */
4721 static sbitmap *pre_optimal;
4723 /* Nonzero for expressions which are redundant in a particular block. */
4724 static sbitmap *pre_redundant;
4726 /* Nonzero for expressions which should be inserted on a specific edge. */
4727 static sbitmap *pre_insert_map;
4729 /* Nonzero for expressions which should be deleted in a specific block. */
4730 static sbitmap *pre_delete_map;
4732 /* Contains the edge_list returned by pre_edge_lcm. */
4733 static struct edge_list *edge_list;
4735 /* Redundant insns. */
4736 static sbitmap pre_redundant_insns;
4738 /* Allocate vars used for PRE analysis. */
4740 static void
4741 alloc_pre_mem (n_blocks, n_exprs)
4742 int n_blocks, n_exprs;
4744 transp = sbitmap_vector_alloc (n_blocks, n_exprs);
4745 comp = sbitmap_vector_alloc (n_blocks, n_exprs);
4746 antloc = sbitmap_vector_alloc (n_blocks, n_exprs);
4748 pre_optimal = NULL;
4749 pre_redundant = NULL;
4750 pre_insert_map = NULL;
4751 pre_delete_map = NULL;
4752 ae_in = NULL;
4753 ae_out = NULL;
4754 ae_kill = sbitmap_vector_alloc (n_blocks, n_exprs);
4756 /* pre_insert and pre_delete are allocated later. */
4759 /* Free vars used for PRE analysis. */
4761 static void
4762 free_pre_mem ()
4764 sbitmap_vector_free (transp);
4765 sbitmap_vector_free (comp);
4767 /* ANTLOC and AE_KILL are freed just after pre_lcm finishes. */
4769 if (pre_optimal)
4770 sbitmap_vector_free (pre_optimal);
4771 if (pre_redundant)
4772 sbitmap_vector_free (pre_redundant);
4773 if (pre_insert_map)
4774 sbitmap_vector_free (pre_insert_map);
4775 if (pre_delete_map)
4776 sbitmap_vector_free (pre_delete_map);
4777 if (ae_in)
4778 sbitmap_vector_free (ae_in);
4779 if (ae_out)
4780 sbitmap_vector_free (ae_out);
4782 transp = comp = NULL;
4783 pre_optimal = pre_redundant = pre_insert_map = pre_delete_map = NULL;
4784 ae_in = ae_out = NULL;
4787 /* Top level routine to do the dataflow analysis needed by PRE. */
4789 static void
4790 compute_pre_data ()
4792 sbitmap trapping_expr;
4793 basic_block bb;
4794 unsigned int ui;
4796 compute_local_properties (transp, comp, antloc, &expr_hash_table);
4797 sbitmap_vector_zero (ae_kill, last_basic_block);
4799 /* Collect expressions which might trap. */
4800 trapping_expr = sbitmap_alloc (expr_hash_table.n_elems);
4801 sbitmap_zero (trapping_expr);
4802 for (ui = 0; ui < expr_hash_table.size; ui++)
4804 struct expr *e;
4805 for (e = expr_hash_table.table[ui]; e != NULL; e = e->next_same_hash)
4806 if (may_trap_p (e->expr))
4807 SET_BIT (trapping_expr, e->bitmap_index);
4810 /* Compute ae_kill for each basic block using:
4812 ~(TRANSP | COMP)
4814 This is significantly faster than compute_ae_kill. */
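/* Put differently: an expression is killed in a block exactly when it
   is neither transparent there (one of its operands is modified) nor
   computed there, since such a block cannot carry the expression's
   value through to its successors.  */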
4816 FOR_EACH_BB (bb)
4818 edge e;
4820 /* If the current block is the destination of an abnormal edge, we
4821 kill all trapping expressions because we won't be able to properly
4822 place the instruction on the edge. So make them neither
4823 anticipatable nor transparent. This is fairly conservative. */
4824 for (e = bb->pred; e ; e = e->pred_next)
4825 if (e->flags & EDGE_ABNORMAL)
4827 sbitmap_difference (antloc[bb->index], antloc[bb->index], trapping_expr);
4828 sbitmap_difference (transp[bb->index], transp[bb->index], trapping_expr);
4829 break;
4832 sbitmap_a_or_b (ae_kill[bb->index], transp[bb->index], comp[bb->index]);
4833 sbitmap_not (ae_kill[bb->index], ae_kill[bb->index]);
4836 edge_list = pre_edge_lcm (gcse_file, expr_hash_table.n_elems, transp, comp, antloc,
4837 ae_kill, &pre_insert_map, &pre_delete_map);
4838 sbitmap_vector_free (antloc);
4839 antloc = NULL;
4840 sbitmap_vector_free (ae_kill);
4841 ae_kill = NULL;
4842 sbitmap_free (trapping_expr);
4845 /* PRE utilities */
4847 /* Return nonzero if an occurrence of expression EXPR in OCCR_BB would reach
4848 block BB.
4850 VISITED is a pointer to a working buffer for tracking which BB's have
4851 been visited. It is NULL for the top-level call.
4853 We treat reaching expressions that go through blocks containing the same
4854 reaching expression as "not reaching". E.g. if EXPR is generated in blocks
4855 2 and 3, INSN is in block 4, and 2->3->4, we treat the expression in block
4856 2 as not reaching. The intent is to improve the probability of finding
4857 only one reaching expression and to reduce register lifetimes by picking
4858 the closest such expression. */
4860 static int
4861 pre_expr_reaches_here_p_work (occr_bb, expr, bb, visited)
4862 basic_block occr_bb;
4863 struct expr *expr;
4864 basic_block bb;
4865 char *visited;
4867 edge pred;
4869 for (pred = bb->pred; pred != NULL; pred = pred->pred_next)
4871 basic_block pred_bb = pred->src;
4873 if (pred->src == ENTRY_BLOCK_PTR
4874 /* Has this predecessor already been visited? */
4875 || visited[pred_bb->index])
4876 ;/* Nothing to do. */
4878 /* Does this predecessor generate this expression? */
4879 else if (TEST_BIT (comp[pred_bb->index], expr->bitmap_index))
4881 /* Is this the occurrence we're looking for?
4882 Note that there's only one generating occurrence per block
4883 so we just need to check the block number. */
4884 if (occr_bb == pred_bb)
4885 return 1;
4887 visited[pred_bb->index] = 1;
4889 /* Ignore this predecessor if it kills the expression. */
4890 else if (! TEST_BIT (transp[pred_bb->index], expr->bitmap_index))
4891 visited[pred_bb->index] = 1;
4893 /* Neither gen nor kill. */
4894 else
4896 visited[pred_bb->index] = 1;
4897 if (pre_expr_reaches_here_p_work (occr_bb, expr, pred_bb, visited))
4898 return 1;
4902 /* All paths have been checked. */
4903 return 0;
4906 /* The wrapper for pre_expr_reaches_here_p_work that ensures that any
4907 memory allocated for that function is freed. */
4909 static int
4910 pre_expr_reaches_here_p (occr_bb, expr, bb)
4911 basic_block occr_bb;
4912 struct expr *expr;
4913 basic_block bb;
4915 int rval;
4916 char *visited = (char *) xcalloc (last_basic_block, 1);
4918 rval = pre_expr_reaches_here_p_work (occr_bb, expr, bb, visited);
4920 free (visited);
4921 return rval;
4925 /* Given an expr, generate RTL which we can insert at the end of a BB,
4926 or on an edge. Set the block number of any insns generated to
4927 the value of BB. */
4929 static rtx
4930 process_insert_insn (expr)
4931 struct expr *expr;
4933 rtx reg = expr->reaching_reg;
4934 rtx exp = copy_rtx (expr->expr);
4935 rtx pat;
4937 start_sequence ();
4939 /* If the expression is something that's an operand, like a constant,
4940 just copy it to a register. */
4941 if (general_operand (exp, GET_MODE (reg)))
4942 emit_move_insn (reg, exp);
4944 /* Otherwise, make a new insn to compute this expression and make sure the
4945 insn will be recognized (this also adds any needed CLOBBERs). Copy the
4946 expression to make sure we don't have any sharing issues. */
4947 else if (insn_invalid_p (emit_insn (gen_rtx_SET (VOIDmode, reg, exp))))
4948 abort ();
4950 pat = get_insns ();
4951 end_sequence ();
4953 return pat;
4956 /* Add EXPR to the end of basic block BB.
4958 This is used by both PRE and code hoisting.
4960 For PRE, we want to verify that the expr is either transparent
4961 or locally anticipatable in the target block. This check makes
4962 no sense for code hoisting. */
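/* Roughly three placements are used below: if BB ends in a jump, the
   new insn goes just before it (minding cc0 setters and jump tables);
   if BB ends in a call with abnormal or multiple successors, it goes
   before the first parameter load; otherwise it is simply emitted
   after the last insn of the block.  */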
4964 static void
4965 insert_insn_end_bb (expr, bb, pre)
4966 struct expr *expr;
4967 basic_block bb;
4968 int pre;
4970 rtx insn = bb->end;
4971 rtx new_insn;
4972 rtx reg = expr->reaching_reg;
4973 int regno = REGNO (reg);
4974 rtx pat, pat_end;
4976 pat = process_insert_insn (expr);
4977 if (pat == NULL_RTX || ! INSN_P (pat))
4978 abort ();
4980 pat_end = pat;
4981 while (NEXT_INSN (pat_end) != NULL_RTX)
4982 pat_end = NEXT_INSN (pat_end);
4984 /* If the last insn is a jump, insert EXPR in front [taking care to
4985 handle cc0, etc. properly]. Similarly, we need to handle trapping
4986 instructions in the presence of non-call exceptions.
4988 if (GET_CODE (insn) == JUMP_INSN
4989 || (GET_CODE (insn) == INSN
4990 && (bb->succ->succ_next || (bb->succ->flags & EDGE_ABNORMAL))))
4992 #ifdef HAVE_cc0
4993 rtx note;
4994 #endif
4995 /* It should always be the case that we can put these instructions
4996 anywhere in the basic block when performing PRE optimizations.
4997 Check this. */
4998 if (GET_CODE (insn) == INSN && pre
4999 && !TEST_BIT (antloc[bb->index], expr->bitmap_index)
5000 && !TEST_BIT (transp[bb->index], expr->bitmap_index))
5001 abort ();
5003 /* If this is a jump table, then we can't insert stuff here. Since
5004 we know the previous real insn must be the tablejump, we insert
5005 the new instruction just before the tablejump. */
5006 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
5007 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
5008 insn = prev_real_insn (insn);
5010 #ifdef HAVE_cc0
5011 /* FIXME: 'twould be nice to call prev_cc0_setter here but it aborts
5012 if cc0 isn't set. */
5013 note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
5014 if (note)
5015 insn = XEXP (note, 0);
5016 else
5018 rtx maybe_cc0_setter = prev_nonnote_insn (insn);
5019 if (maybe_cc0_setter
5020 && INSN_P (maybe_cc0_setter)
5021 && sets_cc0_p (PATTERN (maybe_cc0_setter)))
5022 insn = maybe_cc0_setter;
5024 #endif
5025 /* FIXME: What if something in cc0/jump uses value set in new insn? */
5026 new_insn = emit_insn_before (pat, insn);
5029 /* Likewise if the last insn is a call, as will happen in the presence
5030 of exception handling. */
5031 else if (GET_CODE (insn) == CALL_INSN
5032 && (bb->succ->succ_next || (bb->succ->flags & EDGE_ABNORMAL)))
5034 /* Keeping in mind SMALL_REGISTER_CLASSES and parameters in registers,
5035 we search backward and place the instructions before the first
5036 parameter is loaded. Do this for everyone for consistency and a
5037 presumption that we'll get better code elsewhere as well.
5039 It should always be the case that we can put these instructions
5040 anywhere in the basic block when performing PRE optimizations.
5041 Check this. */
5043 if (pre
5044 && !TEST_BIT (antloc[bb->index], expr->bitmap_index)
5045 && !TEST_BIT (transp[bb->index], expr->bitmap_index))
5046 abort ();
5048 /* Since different machines initialize their parameter registers
5049 in different orders, assume nothing. Collect the set of all
5050 parameter registers. */
5051 insn = find_first_parameter_load (insn, bb->head);
5053 /* If we found all the parameter loads, then we want to insert
5054 before the first parameter load.
5056 If we did not find all the parameter loads, then we might have
5057 stopped on the head of the block, which could be a CODE_LABEL.
5058 If we inserted before the CODE_LABEL, then we would be putting
5059 the insn in the wrong basic block. In that case, put the insn
5060 after the CODE_LABEL. Also, respect NOTE_INSN_BASIC_BLOCK. */
5061 while (GET_CODE (insn) == CODE_LABEL
5062 || NOTE_INSN_BASIC_BLOCK_P (insn))
5063 insn = NEXT_INSN (insn);
5065 new_insn = emit_insn_before (pat, insn);
5067 else
5068 new_insn = emit_insn_after (pat, insn);
5070 while (1)
5072 if (INSN_P (pat))
5074 add_label_notes (PATTERN (pat), new_insn);
5075 note_stores (PATTERN (pat), record_set_info, pat);
5077 if (pat == pat_end)
5078 break;
5079 pat = NEXT_INSN (pat);
5082 gcse_create_count++;
5084 if (gcse_file)
5086 fprintf (gcse_file, "PRE/HOIST: end of bb %d, insn %d, ",
5087 bb->index, INSN_UID (new_insn));
5088 fprintf (gcse_file, "copying expression %d to reg %d\n",
5089 expr->bitmap_index, regno);
5093 /* Insert partially redundant expressions on edges in the CFG to make
5094 the expressions fully redundant. */
5096 static int
5097 pre_edge_insert (edge_list, index_map)
5098 struct edge_list *edge_list;
5099 struct expr **index_map;
5101 int e, i, j, num_edges, set_size, did_insert = 0;
5102 sbitmap *inserted;
5104 /* Where PRE_INSERT_MAP is nonzero, we add the expression on that edge
5105 if it reaches any of the deleted expressions. */
5107 set_size = pre_insert_map[0]->size;
5108 num_edges = NUM_EDGES (edge_list);
5109 inserted = sbitmap_vector_alloc (num_edges, expr_hash_table.n_elems);
5110 sbitmap_vector_zero (inserted, num_edges);
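/* In the loops below each edge's PRE_INSERT_MAP is scanned one sbitmap
   word at a time: INDX tracks the expression index of bit zero of the
   current word, and the inner loop shifts INSERT right so that
   (insert & 1) tests one expression per iteration.  */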
5112 for (e = 0; e < num_edges; e++)
5114 int indx;
5115 basic_block bb = INDEX_EDGE_PRED_BB (edge_list, e);
5117 for (i = indx = 0; i < set_size; i++, indx += SBITMAP_ELT_BITS)
5119 SBITMAP_ELT_TYPE insert = pre_insert_map[e]->elms[i];
5121 for (j = indx; insert && j < (int) expr_hash_table.n_elems; j++, insert >>= 1)
5122 if ((insert & 1) != 0 && index_map[j]->reaching_reg != NULL_RTX)
5124 struct expr *expr = index_map[j];
5125 struct occr *occr;
5127 /* Now look at each deleted occurrence of this expression. */
5128 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
5130 if (! occr->deleted_p)
5131 continue;
5133 /* Insert this expression on this edge if it would
5134 reach the deleted occurrence in BB. */
5135 if (!TEST_BIT (inserted[e], j))
5137 rtx insn;
5138 edge eg = INDEX_EDGE (edge_list, e);
5140 /* We can't insert anything on an abnormal and
5141 critical edge, so we insert the insn at the end of
5142 the previous block. There are several alternatives
5143 detailed in Morgan's book, p. 277 (sec. 10.5), for
5144 handling this situation. This one is easiest for
5145 now. */
5147 if ((eg->flags & EDGE_ABNORMAL) == EDGE_ABNORMAL)
5148 insert_insn_end_bb (index_map[j], bb, 0);
5149 else
5151 insn = process_insert_insn (index_map[j]);
5152 insert_insn_on_edge (insn, eg);
5155 if (gcse_file)
5157 fprintf (gcse_file, "PRE/HOIST: edge (%d,%d), ",
5158 bb->index,
5159 INDEX_EDGE_SUCC_BB (edge_list, e)->index);
5160 fprintf (gcse_file, "copy expression %d\n",
5161 expr->bitmap_index);
5164 update_ld_motion_stores (expr);
5165 SET_BIT (inserted[e], j);
5166 did_insert = 1;
5167 gcse_create_count++;
5174 sbitmap_vector_free (inserted);
5175 return did_insert;
5178 /* Copy the result of INSN to REG. INDX is the expression number. */
5180 static void
5181 pre_insert_copy_insn (expr, insn)
5182 struct expr *expr;
5183 rtx insn;
5185 rtx reg = expr->reaching_reg;
5186 int regno = REGNO (reg);
5187 int indx = expr->bitmap_index;
5188 rtx set = single_set (insn);
5189 rtx new_insn;
5191 if (!set)
5192 abort ();
5194 new_insn = emit_insn_after (gen_move_insn (reg, SET_DEST (set)), insn);
5196 /* Keep register set table up to date. */
5197 record_one_set (regno, new_insn);
5199 gcse_create_count++;
5201 if (gcse_file)
5202 fprintf (gcse_file,
5203 "PRE: bb %d, insn %d, copy expression %d in insn %d to reg %d\n",
5204 BLOCK_NUM (insn), INSN_UID (new_insn), indx,
5205 INSN_UID (insn), regno);
5206 update_ld_motion_stores (expr);
5209 /* Copy available expressions that reach the redundant expression
5210 to `reaching_reg'. */
5212 static void
5213 pre_insert_copies ()
5215 unsigned int i;
5216 struct expr *expr;
5217 struct occr *occr;
5218 struct occr *avail;
5220 /* For each available expression in the table, copy the result to
5221 `reaching_reg' if the expression reaches a deleted one.
5223 ??? The current algorithm is rather brute force.
5224 Need to do some profiling. */
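/* The nested loops below visit each expression that was given a
   reaching register, each deleted anticipatable occurrence of it, and
   each available occurrence reaching that deletion point; for every
   such available occurrence a copy into `reaching_reg' is emitted at
   most once.  */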
5226 for (i = 0; i < expr_hash_table.size; i++)
5227 for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash)
5229 /* If the basic block isn't reachable, PPOUT will be TRUE. However,
5230 we don't want to insert a copy here because the expression may not
5231 really be redundant. So only insert an insn if the expression was
5232 deleted. This test also avoids further processing if the
5233 expression wasn't deleted anywhere. */
5234 if (expr->reaching_reg == NULL)
5235 continue;
5237 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
5239 if (! occr->deleted_p)
5240 continue;
5242 for (avail = expr->avail_occr; avail != NULL; avail = avail->next)
5244 rtx insn = avail->insn;
5246 /* No need to handle this one if handled already. */
5247 if (avail->copied_p)
5248 continue;
5250 /* Don't handle this one if it's a redundant one. */
5251 if (TEST_BIT (pre_redundant_insns, INSN_CUID (insn)))
5252 continue;
5254 /* Or if the expression doesn't reach the deleted one. */
5255 if (! pre_expr_reaches_here_p (BLOCK_FOR_INSN (avail->insn),
5256 expr,
5257 BLOCK_FOR_INSN (occr->insn)))
5258 continue;
5260 /* Copy the result of avail to reaching_reg. */
5261 pre_insert_copy_insn (expr, insn);
5262 avail->copied_p = 1;
5268 /* Emit move from SRC to DEST noting the equivalence with expression computed
5269 in INSN. */
5270 static rtx
5271 gcse_emit_move_after (src, dest, insn)
5272 rtx src, dest, insn;
5274 rtx new;
5275 rtx set = single_set (insn), set2;
5276 rtx note;
5277 rtx eqv;
5279 /* This should never fail since we're creating a reg->reg copy
5280 we've verified to be valid. */
5282 new = emit_insn_after (gen_move_insn (dest, src), insn);
5284 /* Note the equivalence for local CSE pass. */
5285 set2 = single_set (new);
5286 if (!set2 || !rtx_equal_p (SET_DEST (set2), dest))
5287 return new;
5288 if ((note = find_reg_equal_equiv_note (insn)))
5289 eqv = XEXP (note, 0);
5290 else
5291 eqv = SET_SRC (set);
5293 set_unique_reg_note (new, REG_EQUAL, copy_insn_1 (eqv));
5295 return new;
5298 /* Delete redundant computations.
5299 Deletion is done by changing the insn to copy the `reaching_reg' of
5300 the expression into the result of the SET. It is left to later passes
5301 (cprop, cse2, flow, combine, regmove) to propagate the copy or eliminate it.
5303 Returns nonzero if a change is made. */
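/* For example, a fully redundant

      (set (reg 30) (plus (reg 4) (reg 5)))

   is rewritten as the copy (set (reg 30) (reg 99)), where reg 99 is
   the pseudo allocated as the expression's reaching register.
   (Register numbers are illustrative.)  */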
5305 static int
5306 pre_delete ()
5308 unsigned int i;
5309 int changed;
5310 struct expr *expr;
5311 struct occr *occr;
5313 changed = 0;
5314 for (i = 0; i < expr_hash_table.size; i++)
5315 for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash)
5317 int indx = expr->bitmap_index;
5319 /* We only need to search antic_occr since we require
5320 ANTLOC != 0. */
5322 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
5324 rtx insn = occr->insn;
5325 rtx set;
5326 basic_block bb = BLOCK_FOR_INSN (insn);
5328 if (TEST_BIT (pre_delete_map[bb->index], indx))
5330 set = single_set (insn);
5331 if (! set)
5332 abort ();
5334 /* Create a pseudo-reg to store the result of reaching
5335 expressions into. Get the mode for the new pseudo from
5336 the mode of the original destination pseudo. */
5337 if (expr->reaching_reg == NULL)
5338 expr->reaching_reg
5339 = gen_reg_rtx (GET_MODE (SET_DEST (set)));
5341 gcse_emit_move_after (expr->reaching_reg, SET_DEST (set), insn);
5342 delete_insn (insn);
5343 occr->deleted_p = 1;
5344 SET_BIT (pre_redundant_insns, INSN_CUID (insn));
5345 changed = 1;
5346 gcse_subst_count++;
5348 if (gcse_file)
5350 fprintf (gcse_file,
5351 "PRE: redundant insn %d (expression %d) in ",
5352 INSN_UID (insn), indx);
5353 fprintf (gcse_file, "bb %d, reaching reg is %d\n",
5354 bb->index, REGNO (expr->reaching_reg));
5360 return changed;
5363 /* Perform GCSE optimizations using PRE.
5364 This is called by one_pre_gcse_pass after all the dataflow analysis
5365 has been done.
5367 This is based on the original Morel-Renvoise paper, Fred Chow's thesis, and
5368 lazy code motion from Knoop, Ruthing and Steffen as described in Advanced
5369 Compiler Design and Implementation.
5371 ??? A new pseudo reg is created to hold the reaching expression. The nice
5372 thing about the classical approach is that it would try to use an existing
5373 reg. If the register can't be adequately optimized [i.e. we introduce
5374 reload problems], one could add a pass here to propagate the new register
5375 through the block.
5377 ??? We don't handle single sets in PARALLELs because we're [currently] not
5378 able to copy the rest of the parallel when we insert copies to create full
5379 redundancies from partial redundancies. However, there's no reason why we
5380 can't handle PARALLELs in the cases where there are no partial
5381 redundancies. */
5383 static int
5384 pre_gcse ()
5386 unsigned int i;
5387 int did_insert, changed;
5388 struct expr **index_map;
5389 struct expr *expr;
5391 /* Compute a mapping from expression number (`bitmap_index') to
5392 hash table entry. */
5394 index_map = (struct expr **) xcalloc (expr_hash_table.n_elems, sizeof (struct expr *));
5395 for (i = 0; i < expr_hash_table.size; i++)
5396 for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash)
5397 index_map[expr->bitmap_index] = expr;
5399 /* Reset bitmap used to track which insns are redundant. */
5400 pre_redundant_insns = sbitmap_alloc (max_cuid);
5401 sbitmap_zero (pre_redundant_insns);
5403 /* Delete the redundant insns first so that
5404 - we know what register to use for the new insns and for the other
5405 ones with reaching expressions
5406 - we know which insns are redundant when we go to create copies */
5408 changed = pre_delete ();
5410 did_insert = pre_edge_insert (edge_list, index_map);
5412 /* In other places with reaching expressions, copy the expression to the
5413 specially allocated pseudo-reg that reaches the redundant expr. */
5414 pre_insert_copies ();
5415 if (did_insert)
5417 commit_edge_insertions ();
5418 changed = 1;
5421 free (index_map);
5422 sbitmap_free (pre_redundant_insns);
5423 return changed;
5426 /* Top level routine to perform one PRE GCSE pass.
5428 Return nonzero if a change was made. */
5430 static int
5431 one_pre_gcse_pass (pass)
5432 int pass;
5434 int changed = 0;
5436 gcse_subst_count = 0;
5437 gcse_create_count = 0;
5439 alloc_hash_table (max_cuid, &expr_hash_table, 0);
5440 add_noreturn_fake_exit_edges ();
5441 if (flag_gcse_lm)
5442 compute_ld_motion_mems ();
5444 compute_hash_table (&expr_hash_table);
5445 trim_ld_motion_mems ();
5446 if (gcse_file)
5447 dump_hash_table (gcse_file, "Expression", &expr_hash_table);
5449 if (expr_hash_table.n_elems > 0)
5451 alloc_pre_mem (last_basic_block, expr_hash_table.n_elems);
5452 compute_pre_data ();
5453 changed |= pre_gcse ();
5454 free_edge_list (edge_list);
5455 free_pre_mem ();
5458 free_ldst_mems ();
5459 remove_fake_edges ();
5460 free_hash_table (&expr_hash_table);
5462 if (gcse_file)
5464 fprintf (gcse_file, "\nPRE GCSE of %s, pass %d: %d bytes needed, ",
5465 current_function_name, pass, bytes_used);
5466 fprintf (gcse_file, "%d substs, %d insns created\n",
5467 gcse_subst_count, gcse_create_count);
5470 return changed;
5473 /* If X contains any LABEL_REF's, add REG_LABEL notes for them to INSN.
5474 If notes are added to an insn which references a CODE_LABEL, the
5475 LABEL_NUSES count is incremented. We have to add REG_LABEL notes,
5476 because the following loop optimization pass requires them. */
5478 /* ??? This is very similar to the loop.c add_label_notes function. We
5479 could probably share code here. */
5481 /* ??? If there was a jump optimization pass after gcse and before loop,
5482 then we would not need to do this here, because jump would add the
5483 necessary REG_LABEL notes. */
5485 static void
5486 add_label_notes (x, insn)
5487 rtx x;
5488 rtx insn;
5490 enum rtx_code code = GET_CODE (x);
5491 int i, j;
5492 const char *fmt;
5494 if (code == LABEL_REF && !LABEL_REF_NONLOCAL_P (x))
5496 /* This code used to ignore labels that referred to dispatch tables to
5497 avoid flow generating (slightly) worse code.
5499 We no longer ignore such label references (see LABEL_REF handling in
5500 mark_jump_label for additional information). */
5502 REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_LABEL, XEXP (x, 0),
5503 REG_NOTES (insn));
5504 if (LABEL_P (XEXP (x, 0)))
5505 LABEL_NUSES (XEXP (x, 0))++;
5506 return;
5509 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
5511 if (fmt[i] == 'e')
5512 add_label_notes (XEXP (x, i), insn);
5513 else if (fmt[i] == 'E')
5514 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
5515 add_label_notes (XVECEXP (x, i, j), insn);
5519 /* Compute transparent outgoing information for each block.
5521 An expression is transparent to an edge unless it is killed by
5522 the edge itself. This can only happen with abnormal control flow,
5523 when the edge is traversed through a call. This happens with
5524 non-local labels and exceptions.
5526 This would not be necessary if we split the edge. While this is
5527 normally impossible for abnormal critical edges, with some effort
5528 it should be possible with exception handling, since we still have
5529 control over which handler should be invoked. But due to increased
5530 EH table sizes, this may not be worthwhile. */
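/* Concretely, only blocks ending in a CALL_INSN can kill an expression
   on an outgoing edge, and then only MEM expressions are affected;
   references into the constant pool are known not to be clobbered by
   the call and remain transparent.  */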
5532 static void
5533 compute_transpout ()
5535 basic_block bb;
5536 unsigned int i;
5537 struct expr *expr;
5539 sbitmap_vector_ones (transpout, last_basic_block);
5541 FOR_EACH_BB (bb)
5543 /* Note that flow inserted a nop at the end of basic blocks that
5544 end in call instructions for reasons other than abnormal
5545 control flow. */
5546 if (GET_CODE (bb->end) != CALL_INSN)
5547 continue;
5549 for (i = 0; i < expr_hash_table.size; i++)
5550 for (expr = expr_hash_table.table[i]; expr ; expr = expr->next_same_hash)
5551 if (GET_CODE (expr->expr) == MEM)
5553 if (GET_CODE (XEXP (expr->expr, 0)) == SYMBOL_REF
5554 && CONSTANT_POOL_ADDRESS_P (XEXP (expr->expr, 0)))
5555 continue;
5557 /* ??? Optimally, we would use interprocedural alias
5558 analysis to determine if this mem is actually killed
5559 by this call. */
5560 RESET_BIT (transpout[bb->index], expr->bitmap_index);
5565 /* Removal of useless null pointer checks */
5567 /* Called via note_stores. X is set by SETTER. If X is a register we must
5568 invalidate nonnull_local and set nonnull_killed. DATA is really a
5569 `null_pointer_info *'.
5571 We ignore hard registers. */
5573 static void
5574 invalidate_nonnull_info (x, setter, data)
5575 rtx x;
5576 rtx setter ATTRIBUTE_UNUSED;
5577 void *data;
5579 unsigned int regno;
5580 struct null_pointer_info *npi = (struct null_pointer_info *) data;
5582 while (GET_CODE (x) == SUBREG)
5583 x = SUBREG_REG (x);
5585 /* Ignore anything that is not a register or is a hard register. */
5586 if (GET_CODE (x) != REG
5587 || REGNO (x) < npi->min_reg
5588 || REGNO (x) >= npi->max_reg)
5589 return;
5591 regno = REGNO (x) - npi->min_reg;
5593 RESET_BIT (npi->nonnull_local[npi->current_block->index], regno);
5594 SET_BIT (npi->nonnull_killed[npi->current_block->index], regno);
5597 /* Do null-pointer check elimination for the registers indicated in
5598 NPI. NONNULL_AVIN and NONNULL_AVOUT are pre-allocated sbitmaps;
5599 they are not our responsibility to free. */
5601 static int
5602 delete_null_pointer_checks_1 (block_reg, nonnull_avin,
5603 nonnull_avout, npi)
5604 unsigned int *block_reg;
5605 sbitmap *nonnull_avin;
5606 sbitmap *nonnull_avout;
5607 struct null_pointer_info *npi;
5609 basic_block bb, current_block;
5610 sbitmap *nonnull_local = npi->nonnull_local;
5611 sbitmap *nonnull_killed = npi->nonnull_killed;
5612 int something_changed = 0;
5614 /* Compute local properties, nonnull and killed. A register will have
5615 the nonnull property if at the end of the current block its value is
5616 known to be nonnull. The killed property indicates that somewhere in
5617 the block any information we had about the register is killed.
5619 Note that a register can have both properties in a single block. That
5620 indicates that it's killed, then later in the block a new value is
5621 computed. */
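/* For example, in a block containing

      (set (reg 9) (mem (reg 11)))
      ...
      (set (reg 11) (plus (reg 11) (const_int 4)))

   the load proves reg 11 nonnull, but the later assignment discards
   that knowledge: nonnull_killed is set and nonnull_local is cleared
   again.  (Register numbers are illustrative.)  */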
5622 sbitmap_vector_zero (nonnull_local, last_basic_block);
5623 sbitmap_vector_zero (nonnull_killed, last_basic_block);
5625 FOR_EACH_BB (current_block)
5627 rtx insn, stop_insn;
5629 /* Set the current block for invalidate_nonnull_info. */
5630 npi->current_block = current_block;
5632 /* Scan each insn in the basic block looking for memory references and
5633 register sets. */
5634 stop_insn = NEXT_INSN (current_block->end);
5635 for (insn = current_block->head;
5636 insn != stop_insn;
5637 insn = NEXT_INSN (insn))
5639 rtx set;
5640 rtx reg;
5642 /* Ignore anything that is not a normal insn. */
5643 if (! INSN_P (insn))
5644 continue;
5646 /* Basically ignore anything that is not a simple SET. We do have
5647 to make sure to invalidate nonnull_local and set nonnull_killed
5648 for such insns though. */
5649 set = single_set (insn);
5650 if (!set)
5652 note_stores (PATTERN (insn), invalidate_nonnull_info, npi);
5653 continue;
5656 /* See if we've got a usable memory load. We handle it first
5657 in case it uses its address register as a dest (which kills
5658 the nonnull property). */
5659 if (GET_CODE (SET_SRC (set)) == MEM
5660 && GET_CODE ((reg = XEXP (SET_SRC (set), 0))) == REG
5661 && REGNO (reg) >= npi->min_reg
5662 && REGNO (reg) < npi->max_reg)
5663 SET_BIT (nonnull_local[current_block->index],
5664 REGNO (reg) - npi->min_reg);
5666 /* Now invalidate stuff clobbered by this insn. */
5667 note_stores (PATTERN (insn), invalidate_nonnull_info, npi);
5669 /* And handle stores; we do these last since any sets in INSN
5670 cannot kill the nonnull property if it is derived from a MEM
5671 appearing in a SET_DEST. */
5672 if (GET_CODE (SET_DEST (set)) == MEM
5673 && GET_CODE ((reg = XEXP (SET_DEST (set), 0))) == REG
5674 && REGNO (reg) >= npi->min_reg
5675 && REGNO (reg) < npi->max_reg)
5676 SET_BIT (nonnull_local[current_block->index],
5677 REGNO (reg) - npi->min_reg);
5681 /* Now compute global properties based on the local properties. This
5682 is a classic global availability algorithm. */
5683 compute_available (nonnull_local, nonnull_killed,
5684 nonnull_avout, nonnull_avin);
5686 /* Now look at each bb and see if it ends with a compare of a value
5687 against zero. */
5688 FOR_EACH_BB (bb)
5690 rtx last_insn = bb->end;
5691 rtx condition, earliest;
5692 int compare_and_branch;
5694 /* Since MIN_REG is always at least FIRST_PSEUDO_REGISTER, and
5695 since BLOCK_REG[BB] is zero if this block did not end with a
5696 comparison against zero, this condition works. */
5697 if (block_reg[bb->index] < npi->min_reg
5698 || block_reg[bb->index] >= npi->max_reg)
5699 continue;
5701 /* LAST_INSN is a conditional jump. Get its condition. */
5702 condition = get_condition (last_insn, &earliest);
5704 /* If we can't determine the condition then skip. */
5705 if (! condition)
5706 continue;
5708 /* Is the register known to have a nonzero value? */
5709 if (!TEST_BIT (nonnull_avout[bb->index], block_reg[bb->index] - npi->min_reg))
5710 continue;
5712 /* Try to compute whether the compare/branch at the end of the block
5713 is one or two instructions. */
5714 if (earliest == last_insn)
5715 compare_and_branch = 1;
5716 else if (earliest == prev_nonnote_insn (last_insn))
5717 compare_and_branch = 2;
5718 else
5719 continue;
5721 /* We know the register in this comparison is nonnull at exit from
5722 this block. We can optimize this comparison. */
5723 if (GET_CODE (condition) == NE)
5725 rtx new_jump;
5727 new_jump = emit_jump_insn_after (gen_jump (JUMP_LABEL (last_insn)),
5728 last_insn);
5729 JUMP_LABEL (new_jump) = JUMP_LABEL (last_insn);
5730 LABEL_NUSES (JUMP_LABEL (new_jump))++;
5731 emit_barrier_after (new_jump);
5734 something_changed = 1;
5735 delete_insn (last_insn);
5736 if (compare_and_branch == 2)
5737 delete_insn (earliest);
5738 purge_dead_edges (bb);
5740 /* Don't check this block again. (Note that BLOCK_END is
5741 invalid here; we deleted the last instruction in the
5742 block.) */
5743 block_reg[bb->index] = 0;
5746 return something_changed;
5749 /* Find EQ/NE comparisons against zero which can be (indirectly) evaluated
5750 at compile time.
5752 This is conceptually similar to global constant/copy propagation and
5753 classic global CSE (it even uses the same dataflow equations as cprop).
5755 If a register is used as memory address with the form (mem (reg)), then we
5756 know that REG cannot be zero at that point in the program. Any instruction
5757 which sets REG "kills" this property.
5759 So, if every path leading to a conditional branch has an available memory
5760 reference of that form, then we know the register can not have the value
5761 zero at the conditional branch.
5763 So we merely need to compute the local properies and propagate that data
5764 around the cfg, then optimize where possible.
5766 We run this pass two times. Once before CSE, then again after CSE. This
5767 has proven to be the most profitable approach. It is rare for new
5768 optimization opportunities of this nature to appear after the first CSE
5769 pass.
5771 This could probably be integrated with global cprop with a little work. */
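/* Purely illustrative example (hypothetical register numbers): given

     (set (reg 100) (mem (reg 90)))                        ; dereference
     ...
     (set (pc) (if_then_else (ne (reg 90) (const_int 0)) ...))

   if such a dereference is available on every path to the branch, the NE
   test is known to be true, so the conditional jump can be replaced by an
   unconditional one (and an EQ branch could simply be deleted).  */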
5774 delete_null_pointer_checks (f)
5775 rtx f ATTRIBUTE_UNUSED;
5777 sbitmap *nonnull_avin, *nonnull_avout;
5778 unsigned int *block_reg;
5779 basic_block bb;
5780 int reg;
5781 int regs_per_pass;
5782 int max_reg;
5783 struct null_pointer_info npi;
5784 int something_changed = 0;
5786 /* If we have only a single block, then there's nothing to do. */
5787 if (n_basic_blocks <= 1)
5788 return 0;
5790 /* Trying to perform global optimizations on flow graphs which have
5791 a high connectivity will take a long time and is unlikely to be
5792 particularly useful.
5794 In normal circumstances a cfg should have about twice as many edges
5795 as blocks. But we do not want to punish small functions which have
5796 a couple of switch statements. So we require a relatively large number
5797 of basic blocks and the ratio of edges to blocks to be high. */
5798 if (n_basic_blocks > 1000 && n_edges / n_basic_blocks >= 20)
5799 return 0;
5801 /* We need four bitmaps, each with a bit for each register in each
5802 basic block. */
5803 max_reg = max_reg_num ();
5804 regs_per_pass = get_bitmap_width (4, last_basic_block, max_reg);
5806 /* Allocate bitmaps to hold local and global properties. */
5807 npi.nonnull_local = sbitmap_vector_alloc (last_basic_block, regs_per_pass);
5808 npi.nonnull_killed = sbitmap_vector_alloc (last_basic_block, regs_per_pass);
5809 nonnull_avin = sbitmap_vector_alloc (last_basic_block, regs_per_pass);
5810 nonnull_avout = sbitmap_vector_alloc (last_basic_block, regs_per_pass);
5812 /* Go through the basic blocks, seeing whether or not each block
5813 ends with a conditional branch whose condition is a comparison
5814 against zero. Record the register compared in BLOCK_REG. */
5815 block_reg = (unsigned int *) xcalloc (last_basic_block, sizeof (int));
5816 FOR_EACH_BB (bb)
5818 rtx last_insn = bb->end;
5819 rtx condition, earliest, reg;
5821 /* We only want conditional branches. */
5822 if (GET_CODE (last_insn) != JUMP_INSN
5823 || !any_condjump_p (last_insn)
5824 || !onlyjump_p (last_insn))
5825 continue;
5827 /* LAST_INSN is a conditional jump. Get its condition. */
5828 condition = get_condition (last_insn, &earliest);
5830 /* If we were unable to get the condition, or it is not an equality
5831 comparison against zero then there's nothing we can do. */
5832 if (!condition
5833 || (GET_CODE (condition) != NE && GET_CODE (condition) != EQ)
5834 || GET_CODE (XEXP (condition, 1)) != CONST_INT
5835 || (XEXP (condition, 1)
5836 != CONST0_RTX (GET_MODE (XEXP (condition, 0)))))
5837 continue;
5839 /* We must be checking a register against zero. */
5840 reg = XEXP (condition, 0);
5841 if (GET_CODE (reg) != REG)
5842 continue;
5844 block_reg[bb->index] = REGNO (reg);
5847 /* Go through the algorithm for each block of registers. */
5848 for (reg = FIRST_PSEUDO_REGISTER; reg < max_reg; reg += regs_per_pass)
5850 npi.min_reg = reg;
5851 npi.max_reg = MIN (reg + regs_per_pass, max_reg);
5852 something_changed |= delete_null_pointer_checks_1 (block_reg,
5853 nonnull_avin,
5854 nonnull_avout,
5855 &npi);
5858 /* Free the table of registers compared at the end of every block. */
5859 free (block_reg);
5861 /* Free bitmaps. */
5862 sbitmap_vector_free (npi.nonnull_local);
5863 sbitmap_vector_free (npi.nonnull_killed);
5864 sbitmap_vector_free (nonnull_avin);
5865 sbitmap_vector_free (nonnull_avout);
5867 return something_changed;
5870 /* Code Hoisting variables and subroutines. */
5872 /* Very busy expressions. */
5873 static sbitmap *hoist_vbein;
5874 static sbitmap *hoist_vbeout;
5876 /* Hoistable expressions. */
5877 static sbitmap *hoist_exprs;
5879 /* Dominator bitmaps. */
5880 dominance_info dominators;
5882 /* ??? We could compute post dominators and run this algorithm in
5883 reverse to perform tail merging; doing so would probably be
5884 more effective than the tail merging code in jump.c.
5886 It's unclear if tail merging could be run in parallel with
5887 code hoisting. It would be nice. */
5889 /* Allocate vars used for code hoisting analysis. */
5891 static void
5892 alloc_code_hoist_mem (n_blocks, n_exprs)
5893 int n_blocks, n_exprs;
5895 antloc = sbitmap_vector_alloc (n_blocks, n_exprs);
5896 transp = sbitmap_vector_alloc (n_blocks, n_exprs);
5897 comp = sbitmap_vector_alloc (n_blocks, n_exprs);
5899 hoist_vbein = sbitmap_vector_alloc (n_blocks, n_exprs);
5900 hoist_vbeout = sbitmap_vector_alloc (n_blocks, n_exprs);
5901 hoist_exprs = sbitmap_vector_alloc (n_blocks, n_exprs);
5902 transpout = sbitmap_vector_alloc (n_blocks, n_exprs);
5905 /* Free vars used for code hoisting analysis. */
5907 static void
5908 free_code_hoist_mem ()
5910 sbitmap_vector_free (antloc);
5911 sbitmap_vector_free (transp);
5912 sbitmap_vector_free (comp);
5914 sbitmap_vector_free (hoist_vbein);
5915 sbitmap_vector_free (hoist_vbeout);
5916 sbitmap_vector_free (hoist_exprs);
5917 sbitmap_vector_free (transpout);
5919 free_dominance_info (dominators);
5922 /* Compute the very busy expressions at entry/exit from each block.
5924 An expression is very busy if all paths from a given point
5925 compute the expression. */
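/* In dataflow terms, the iteration below solves

     VBEIN(BB)  = ANTLOC(BB) | (VBEOUT(BB) & TRANSP(BB))
     VBEOUT(BB) = intersection of VBEIN over the successors of BB,
                  left empty when BB's next_bb is the exit block.  */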
5927 static void
5928 compute_code_hoist_vbeinout ()
5930 int changed, passes;
5931 basic_block bb;
5933 sbitmap_vector_zero (hoist_vbeout, last_basic_block);
5934 sbitmap_vector_zero (hoist_vbein, last_basic_block);
5936 passes = 0;
5937 changed = 1;
5939 while (changed)
5941 changed = 0;
5943 /* We scan the blocks in the reverse order to speed up
5944 the convergence. */
5945 FOR_EACH_BB_REVERSE (bb)
5947 changed |= sbitmap_a_or_b_and_c_cg (hoist_vbein[bb->index], antloc[bb->index],
5948 hoist_vbeout[bb->index], transp[bb->index]);
5949 if (bb->next_bb != EXIT_BLOCK_PTR)
5950 sbitmap_intersection_of_succs (hoist_vbeout[bb->index], hoist_vbein, bb->index);
5953 passes++;
5956 if (gcse_file)
5957 fprintf (gcse_file, "hoisting vbeinout computation: %d passes\n", passes);
5960 /* Top level routine to do the dataflow analysis needed by code hoisting. */
5962 static void
5963 compute_code_hoist_data ()
5965 compute_local_properties (transp, comp, antloc, &expr_hash_table);
5966 compute_transpout ();
5967 compute_code_hoist_vbeinout ();
5968 dominators = calculate_dominance_info (CDI_DOMINATORS);
5969 if (gcse_file)
5970 fprintf (gcse_file, "\n");
5973 /* Determine if the expression identified by EXPR_INDEX would
5974 reach BB unimpaired if it was placed at the end of EXPR_BB.
5976 It's unclear exactly what Muchnick meant by "unimpaired". It seems
5977 to me that the expression must either be computed or transparent in
5978 *every* block in the path(s) from EXPR_BB to BB. Any other definition
5979 would allow the expression to be hoisted out of loops, even if
5980 the expression wasn't a loop invariant.
5982 Contrast this to reachability for PRE, where an expression is
5983 considered reachable if it reaches along *any* path instead of
5984 along *all* paths. */
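/* In terms of the walk below: we run backwards over the predecessors of
   BB and succeed only if every path back to EXPR_BB goes through blocks
   where the expression is transparent; reaching the entry block, a block
   that computes the expression, or a block where it is killed fails for
   that path.  */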
5986 static int
5987 hoist_expr_reaches_here_p (expr_bb, expr_index, bb, visited)
5988 basic_block expr_bb;
5989 int expr_index;
5990 basic_block bb;
5991 char *visited;
5993 edge pred;
5994 int visited_allocated_locally = 0;
5997 if (visited == NULL)
5999 visited_allocated_locally = 1;
6000 visited = xcalloc (last_basic_block, 1);
6003 for (pred = bb->pred; pred != NULL; pred = pred->pred_next)
6005 basic_block pred_bb = pred->src;
6007 if (pred->src == ENTRY_BLOCK_PTR)
6008 break;
6009 else if (pred_bb == expr_bb)
6010 continue;
6011 else if (visited[pred_bb->index])
6012 continue;
6014 /* Does this predecessor generate this expression? */
6015 else if (TEST_BIT (comp[pred_bb->index], expr_index))
6016 break;
6017 else if (! TEST_BIT (transp[pred_bb->index], expr_index))
6018 break;
6020 /* Not killed. */
6021 else
6023 visited[pred_bb->index] = 1;
6024 if (! hoist_expr_reaches_here_p (expr_bb, expr_index,
6025 pred_bb, visited))
6026 break;
6029 if (visited_allocated_locally)
6030 free (visited);
6032 return (pred == NULL);
6035 /* Actually perform code hoisting. */
6037 static void
6038 hoist_code ()
6040 basic_block bb, dominated;
6041 basic_block *domby;
6042 unsigned int domby_len;
6043 unsigned int i,j;
6044 struct expr **index_map;
6045 struct expr *expr;
6047 sbitmap_vector_zero (hoist_exprs, last_basic_block);
6049 /* Compute a mapping from expression number (`bitmap_index') to
6050 hash table entry. */
6052 index_map = (struct expr **) xcalloc (expr_hash_table.n_elems, sizeof (struct expr *));
6053 for (i = 0; i < expr_hash_table.size; i++)
6054 for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash)
6055 index_map[expr->bitmap_index] = expr;
6057 /* Walk over each basic block looking for potentially hoistable
6058 expressions; nothing gets hoisted from the entry block. */
6059 FOR_EACH_BB (bb)
6061 int found = 0;
6062 int insn_inserted_p;
6064 domby_len = get_dominated_by (dominators, bb, &domby);
6065 /* Examine each expression that is very busy at the exit of this
6066 block. These are the potentially hoistable expressions. */
6067 for (i = 0; i < hoist_vbeout[bb->index]->n_bits; i++)
6069 int hoistable = 0;
6071 if (TEST_BIT (hoist_vbeout[bb->index], i)
6072 && TEST_BIT (transpout[bb->index], i))
6074 /* We've found a potentially hoistable expression, now
6075 we look at every block BB dominates to see if it
6076 computes the expression. */
6077 for (j = 0; j < domby_len; j++)
6079 dominated = domby[j];
6080 /* Ignore self dominance. */
6081 if (bb == dominated)
6082 continue;
6083 /* We've found a dominated block, now see if it computes
6084 the busy expression and whether or not moving that
6085 expression to the "beginning" of that block is safe. */
6086 if (!TEST_BIT (antloc[dominated->index], i))
6087 continue;
6089 /* Note if the expression would reach the dominated block
6090 unimpaired if it was placed at the end of BB.
6092 Keep track of how many times this expression is hoistable
6093 from a dominated block into BB. */
6094 if (hoist_expr_reaches_here_p (bb, i, dominated, NULL))
6095 hoistable++;
6098 /* If we found more than one hoistable occurrence of this
6099 expression, then note it in the bitmap of expressions to
6100 hoist. It makes no sense to hoist things which are computed
6101 in only one BB, and doing so tends to pessimize register
6102 allocation. One could increase this value to try harder
6103 to avoid any possible code expansion due to register
6104 allocation issues; however experiments have shown that
6105 the vast majority of hoistable expressions are only movable
6106 from two successors, so raising this threshold is likely
6107 to nullify any benefit we get from code hoisting. */
6108 if (hoistable > 1)
6110 SET_BIT (hoist_exprs[bb->index], i);
6111 found = 1;
6115 /* If we found nothing to hoist, then quit now. */
6116 if (! found)
6118 free (domby);
6119 continue;
6122 /* Loop over all the hoistable expressions. */
6123 for (i = 0; i < hoist_exprs[bb->index]->n_bits; i++)
6125 /* We want to insert the expression into BB only once, so
6126 note when we've inserted it. */
6127 insn_inserted_p = 0;
6129 /* These tests should be the same as the tests above. */
6130 if (TEST_BIT (hoist_vbeout[bb->index], i))
6132 /* We've found a potentially hoistable expression, now
6133 we look at every block BB dominates to see if it
6134 computes the expression. */
6135 for (j = 0; j < domby_len; j++)
6137 dominated = domby[j];
6138 /* Ignore self dominance. */
6139 if (bb == dominated)
6140 continue;
6142 /* We've found a dominated block, now see if it computes
6143 the busy expression and whether or not moving that
6144 expression to the "beginning" of that block is safe. */
6145 if (!TEST_BIT (antloc[dominated->index], i))
6146 continue;
6148 /* The expression is computed in the dominated block and
6149 it would be safe to compute it at the start of the
6150 dominated block. Now we have to determine if the
6151 expression would reach the dominated block if it was
6152 placed at the end of BB. */
6153 if (hoist_expr_reaches_here_p (bb, i, dominated, NULL))
6155 struct expr *expr = index_map[i];
6156 struct occr *occr = expr->antic_occr;
6157 rtx insn;
6158 rtx set;
6160 /* Find the right occurrence of this expression. */
6161 while (occr && BLOCK_FOR_INSN (occr->insn) != dominated)
6162 occr = occr->next;
6164 /* Should never happen. */
6165 if (!occr)
6166 abort ();
6168 insn = occr->insn;
6170 set = single_set (insn);
6171 if (! set)
6172 abort ();
6174 /* Create a pseudo-reg to store the result of reaching
6175 expressions into. Get the mode for the new pseudo
6176 from the mode of the original destination pseudo. */
6177 if (expr->reaching_reg == NULL)
6178 expr->reaching_reg
6179 = gen_reg_rtx (GET_MODE (SET_DEST (set)));
6181 gcse_emit_move_after (expr->reaching_reg, SET_DEST (set), insn);
6182 delete_insn (insn);
6183 occr->deleted_p = 1;
6184 if (!insn_inserted_p)
6186 insert_insn_end_bb (index_map[i], bb, 0);
6187 insn_inserted_p = 1;
6193 free (domby);
6196 free (index_map);
6199 /* Top level routine to perform one code hoisting (aka unification) pass.
6201 Return nonzero if a change was made. */
6203 static int
6204 one_code_hoisting_pass ()
6206 int changed = 0;
6208 alloc_hash_table (max_cuid, &expr_hash_table, 0);
6209 compute_hash_table (&expr_hash_table);
6210 if (gcse_file)
6211 dump_hash_table (gcse_file, "Code Hoisting Expressions", &expr_hash_table);
6213 if (expr_hash_table.n_elems > 0)
6215 alloc_code_hoist_mem (last_basic_block, expr_hash_table.n_elems);
6216 compute_code_hoist_data ();
6217 hoist_code ();
6218 free_code_hoist_mem ();
6221 free_hash_table (&expr_hash_table);
6223 return changed;
6226 /* Here we provide the things required to do store motion towards
6227 the exit. In order for this to be effective, gcse also needed to
6228 be taught how to move a load when it is killed only by a store to itself.
6230 int i;
6231 float a[10];
6233 void foo(float scale)
6235 for (i=0; i<10; i++)
6236 a[i] *= scale;
6239 'i' is both loaded and stored to in the loop. Normally, gcse cannot move
6240 the load out since it's live around the loop, and stored at the bottom
6241 of the loop.
6243 The 'Load Motion' referred to and implemented in this file is
6244 an enhancement to gcse which, when using edge-based LCM, recognizes
6245 this situation and allows gcse to move the load out of the loop.
6247 Once gcse has hoisted the load, store motion can then push this
6248 load towards the exit, and we end up with no loads or stores of 'i'
6249 in the loop. */
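/* Source-level sketch of the combined effect (the actual transformation
   is performed on RTL): once load motion hoists the load of 'i' and
   store motion sinks the store, the loop keeps 'i' in a pseudo:

     reg = i;
     for (; reg < 10; reg++)
       a[reg] *= scale;
     i = reg;  */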
6251 /* This will search the ldst list for a matching expression. If it
6252 doesn't find one, we create one and initialize it. */
6254 static struct ls_expr *
6255 ldst_entry (x)
6256 rtx x;
6258 struct ls_expr * ptr;
6260 for (ptr = first_ls_expr(); ptr != NULL; ptr = next_ls_expr (ptr))
6261 if (expr_equiv_p (ptr->pattern, x))
6262 break;
6264 if (!ptr)
6266 ptr = (struct ls_expr *) xmalloc (sizeof (struct ls_expr));
6268 ptr->next = pre_ldst_mems;
6269 ptr->expr = NULL;
6270 ptr->pattern = x;
6271 ptr->loads = NULL_RTX;
6272 ptr->stores = NULL_RTX;
6273 ptr->reaching_reg = NULL_RTX;
6274 ptr->invalid = 0;
6275 ptr->index = 0;
6276 ptr->hash_index = 0;
6277 pre_ldst_mems = ptr;
6280 return ptr;
6283 /* Free up an individual ldst entry. */
6285 static void
6286 free_ldst_entry (ptr)
6287 struct ls_expr * ptr;
6289 free_INSN_LIST_list (& ptr->loads);
6290 free_INSN_LIST_list (& ptr->stores);
6292 free (ptr);
6295 /* Free up all memory associated with the ldst list. */
6297 static void
6298 free_ldst_mems ()
6300 while (pre_ldst_mems)
6302 struct ls_expr * tmp = pre_ldst_mems;
6304 pre_ldst_mems = pre_ldst_mems->next;
6306 free_ldst_entry (tmp);
6309 pre_ldst_mems = NULL;
6312 /* Dump debugging info about the ldst list. */
6314 static void
6315 print_ldst_list (file)
6316 FILE * file;
6318 struct ls_expr * ptr;
6320 fprintf (file, "LDST list: \n");
6322 for (ptr = first_ls_expr(); ptr != NULL; ptr = next_ls_expr (ptr))
6324 fprintf (file, " Pattern (%3d): ", ptr->index);
6326 print_rtl (file, ptr->pattern);
6328 fprintf (file, "\n Loads : ");
6330 if (ptr->loads)
6331 print_rtl (file, ptr->loads);
6332 else
6333 fprintf (file, "(nil)");
6335 fprintf (file, "\n Stores : ");
6337 if (ptr->stores)
6338 print_rtl (file, ptr->stores);
6339 else
6340 fprintf (file, "(nil)");
6342 fprintf (file, "\n\n");
6345 fprintf (file, "\n");
6348 /* Return the ldst list entry whose pattern matches X and has not been invalidated, or NULL if there is none. */
6350 static struct ls_expr *
6351 find_rtx_in_ldst (x)
6352 rtx x;
6354 struct ls_expr * ptr;
6356 for (ptr = pre_ldst_mems; ptr != NULL; ptr = ptr->next)
6357 if (expr_equiv_p (ptr->pattern, x) && ! ptr->invalid)
6358 return ptr;
6360 return NULL;
6363 /* Assign each element of the list of mems a monotonically increasing value. */
6365 static int
6366 enumerate_ldsts ()
6368 struct ls_expr * ptr;
6369 int n = 0;
6371 for (ptr = pre_ldst_mems; ptr != NULL; ptr = ptr->next)
6372 ptr->index = n++;
6374 return n;
6377 /* Return first item in the list. */
6379 static inline struct ls_expr *
6380 first_ls_expr ()
6382 return pre_ldst_mems;
6385 /* Return the next item in the list after the specified one. */
6387 static inline struct ls_expr *
6388 next_ls_expr (ptr)
6389 struct ls_expr * ptr;
6391 return ptr->next;
6394 /* Load Motion for loads which only kill themselves. */
6396 /* Return true if x is a simple MEM operation, with no registers or
6397 side effects. These are the types of loads we consider for the
6398 ld_motion list, otherwise we let the usual aliasing take care of it. */
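/* For instance (illustrative only), (mem (symbol_ref "x")) qualifies
   because its address never varies, whereas a volatile MEM, a BLKmode
   MEM, or something like (mem (reg 100)) does not and is left to the
   normal aliasing machinery.  */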
6400 static int
6401 simple_mem (x)
6402 rtx x;
6404 if (GET_CODE (x) != MEM)
6405 return 0;
6407 if (MEM_VOLATILE_P (x))
6408 return 0;
6410 if (GET_MODE (x) == BLKmode)
6411 return 0;
6413 if (!rtx_varies_p (XEXP (x, 0), 0))
6414 return 1;
6416 return 0;
6419 /* Make sure there isn't a buried reference in this pattern anywhere.
6420 If there is, invalidate the entry for it since we're not capable
6421 of fixing it up just yet. We have to be sure we know about ALL
6422 loads since the aliasing code will allow each entry in the
6423 ld_motion list to not alias itself. If we miss a load, we will get
6424 the wrong value since gcse might common it and we won't know to
6425 fix it up. */
6427 static void
6428 invalidate_any_buried_refs (x)
6429 rtx x;
6431 const char * fmt;
6432 int i, j;
6433 struct ls_expr * ptr;
6435 /* Invalidate it in the list. */
6436 if (GET_CODE (x) == MEM && simple_mem (x))
6438 ptr = ldst_entry (x);
6439 ptr->invalid = 1;
6442 /* Recursively process the insn. */
6443 fmt = GET_RTX_FORMAT (GET_CODE (x));
6445 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
6447 if (fmt[i] == 'e')
6448 invalidate_any_buried_refs (XEXP (x, i));
6449 else if (fmt[i] == 'E')
6450 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
6451 invalidate_any_buried_refs (XVECEXP (x, i, j));
6455 /* Find all the 'simple' MEMs which are used in LOADs and STORES. 'Simple'
6456 means MEM loads and stores to symbols, with no
6457 side effects and no registers in the expression. If there are any
6458 uses/defs which don't match these criteria, the entry is invalidated and
6459 trimmed out later. */
6461 static void
6462 compute_ld_motion_mems ()
6464 struct ls_expr * ptr;
6465 basic_block bb;
6466 rtx insn;
6468 pre_ldst_mems = NULL;
6470 FOR_EACH_BB (bb)
6472 for (insn = bb->head;
6473 insn && insn != NEXT_INSN (bb->end);
6474 insn = NEXT_INSN (insn))
6476 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
6478 if (GET_CODE (PATTERN (insn)) == SET)
6480 rtx src = SET_SRC (PATTERN (insn));
6481 rtx dest = SET_DEST (PATTERN (insn));
6483 /* Check for a simple LOAD... */
6484 if (GET_CODE (src) == MEM && simple_mem (src))
6486 ptr = ldst_entry (src);
6487 if (GET_CODE (dest) == REG)
6488 ptr->loads = alloc_INSN_LIST (insn, ptr->loads);
6489 else
6490 ptr->invalid = 1;
6492 else
6494 /* Make sure there isn't a buried load somewhere. */
6495 invalidate_any_buried_refs (src);
6498 /* Check for stores. Don't worry about aliased ones, they
6499 will block any movement we might do later. We only care
6500 about this exact pattern since those are the only
6501 circumstances in which we will ignore the aliasing info. */
6502 if (GET_CODE (dest) == MEM && simple_mem (dest))
6504 ptr = ldst_entry (dest);
6506 if (GET_CODE (src) != MEM
6507 && GET_CODE (src) != ASM_OPERANDS)
6508 ptr->stores = alloc_INSN_LIST (insn, ptr->stores);
6509 else
6510 ptr->invalid = 1;
6513 else
6514 invalidate_any_buried_refs (PATTERN (insn));
6520 /* Remove any references that have either been invalidated or are not in the
6521 expression list for pre gcse. */
6523 static void
6524 trim_ld_motion_mems ()
6526 struct ls_expr * last = NULL;
6527 struct ls_expr * ptr = first_ls_expr ();
6529 while (ptr != NULL)
6531 int del = ptr->invalid;
6532 struct expr * expr = NULL;
6534 /* Delete if entry has been made invalid. */
6535 if (!del)
6537 unsigned int i;
6539 del = 1;
6540 /* Delete if we cannot find this mem in the expression list. */
6541 for (i = 0; i < expr_hash_table.size && del; i++)
6543 for (expr = expr_hash_table.table[i];
6544 expr != NULL;
6545 expr = expr->next_same_hash)
6546 if (expr_equiv_p (expr->expr, ptr->pattern))
6548 del = 0;
6549 break;
6554 if (del)
6556 if (last != NULL)
6558 last->next = ptr->next;
6559 free_ldst_entry (ptr);
6560 ptr = last->next;
6562 else
6564 pre_ldst_mems = pre_ldst_mems->next;
6565 free_ldst_entry (ptr);
6566 ptr = pre_ldst_mems;
6569 else
6571 /* Set the expression field if we are keeping it. */
6572 last = ptr;
6573 ptr->expr = expr;
6574 ptr = ptr->next;
6578 /* Show the world what we've found. */
6579 if (gcse_file && pre_ldst_mems != NULL)
6580 print_ldst_list (gcse_file);
6583 /* This routine will take an expression which we are replacing with
6584 a reaching register, and update any stores that are needed if
6585 that expression is in the ld_motion list. Stores are updated by
6586 copying their SRC to the reaching register, and then storing
6587 the reaching register into the store location. This keeps the
6588 correct value in the reaching register for the loads. */
6590 static void
6591 update_ld_motion_stores (expr)
6592 struct expr * expr;
6594 struct ls_expr * mem_ptr;
6596 if ((mem_ptr = find_rtx_in_ldst (expr->expr)))
6598 /* We can try to find just the REACHED stores, but it shouldn't
6599 matter to set the reaching reg everywhere... some might be
6600 dead and should be eliminated later. */
6602 /* We replace SET mem = expr with
6603 SET reg = expr
6604 SET mem = reg , where reg is the
6605 reaching reg used in the load. */
6606 rtx list = mem_ptr->stores;
6608 for ( ; list != NULL_RTX; list = XEXP (list, 1))
6610 rtx insn = XEXP (list, 0);
6611 rtx pat = PATTERN (insn);
6612 rtx src = SET_SRC (pat);
6613 rtx reg = expr->reaching_reg;
6614 rtx copy, new;
6616 /* If we've already copied it, continue. */
6617 if (expr->reaching_reg == src)
6618 continue;
6620 if (gcse_file)
6622 fprintf (gcse_file, "PRE: store updated with reaching reg ");
6623 print_rtl (gcse_file, expr->reaching_reg);
6624 fprintf (gcse_file, ":\n ");
6625 print_inline_rtx (gcse_file, insn, 8);
6626 fprintf (gcse_file, "\n");
6629 copy = gen_move_insn ( reg, SET_SRC (pat));
6630 new = emit_insn_before (copy, insn);
6631 record_one_set (REGNO (reg), new);
6632 SET_SRC (pat) = reg;
6634 /* un-recognize this pattern since it's probably different now. */
6635 INSN_CODE (insn) = -1;
6636 gcse_create_count++;
6641 /* Store motion code. */
6643 /* This is used to communicate the target bitvector we want to use in the
6644 reg_set_info routine when called via the note_stores mechanism. */
6645 static sbitmap * regvec;
6647 /* Used in computing the reverse edge graph bit vectors. */
6648 static sbitmap * st_antloc;
6650 /* Global holding the number of store expressions we are dealing with. */
6651 static int num_stores;
6653 /* Check to see if we need to mark a register set. Called from note_stores. */
6655 static void
6656 reg_set_info (dest, setter, data)
6657 rtx dest, setter ATTRIBUTE_UNUSED;
6658 void * data ATTRIBUTE_UNUSED;
6660 if (GET_CODE (dest) == SUBREG)
6661 dest = SUBREG_REG (dest);
6663 if (GET_CODE (dest) == REG)
6664 SET_BIT (*regvec, REGNO (dest));
6667 /* Return nonzero if the register operands of expression X are killed
6668 anywhere in basic block BB. */
6670 static int
6671 store_ops_ok (x, bb)
6672 rtx x;
6673 basic_block bb;
6675 int i;
6676 enum rtx_code code;
6677 const char * fmt;
6679 /* Repeat is used to turn tail-recursion into iteration. */
6680 repeat:
6682 if (x == 0)
6683 return 1;
6685 code = GET_CODE (x);
6686 switch (code)
6688 case REG:
6689 /* If a reg has changed after us in this
6690 block, the operand has been killed. */
6691 return TEST_BIT (reg_set_in_block[bb->index], REGNO (x));
6693 case MEM:
6694 x = XEXP (x, 0);
6695 goto repeat;
6697 case PRE_DEC:
6698 case PRE_INC:
6699 case POST_DEC:
6700 case POST_INC:
6701 return 0;
6703 case PC:
6704 case CC0: /*FIXME*/
6705 case CONST:
6706 case CONST_INT:
6707 case CONST_DOUBLE:
6708 case CONST_VECTOR:
6709 case SYMBOL_REF:
6710 case LABEL_REF:
6711 case ADDR_VEC:
6712 case ADDR_DIFF_VEC:
6713 return 1;
6715 default:
6716 break;
6719 i = GET_RTX_LENGTH (code) - 1;
6720 fmt = GET_RTX_FORMAT (code);
6722 for (; i >= 0; i--)
6724 if (fmt[i] == 'e')
6726 rtx tem = XEXP (x, i);
6728 /* If we are about to do the last recursive call
6729 needed at this level, change it into iteration.
6730 This function is called enough to be worth it. */
6731 if (i == 0)
6733 x = tem;
6734 goto repeat;
6737 if (! store_ops_ok (tem, bb))
6738 return 0;
6740 else if (fmt[i] == 'E')
6742 int j;
6744 for (j = 0; j < XVECLEN (x, i); j++)
6746 if (! store_ops_ok (XVECEXP (x, i, j), bb))
6747 return 0;
6752 return 1;
6755 /* Determine whether INSN is a MEM store pattern that we will consider moving, and if so record it in the ldst list. */
6757 static void
6758 find_moveable_store (insn)
6759 rtx insn;
6761 struct ls_expr * ptr;
6762 rtx dest = PATTERN (insn);
6764 if (GET_CODE (dest) != SET
6765 || GET_CODE (SET_SRC (dest)) == ASM_OPERANDS)
6766 return;
6768 dest = SET_DEST (dest);
6770 if (GET_CODE (dest) != MEM || MEM_VOLATILE_P (dest)
6771 || GET_MODE (dest) == BLKmode)
6772 return;
6774 if (GET_CODE (XEXP (dest, 0)) != SYMBOL_REF)
6775 return;
6777 if (rtx_varies_p (XEXP (dest, 0), 0))
6778 return;
6780 ptr = ldst_entry (dest);
6781 ptr->stores = alloc_INSN_LIST (insn, ptr->stores);
6784 /* Build the store table: record the stores we may consider moving and
6785 which registers are set in each block. Return the number of store expressions found. */
6787 static int
6788 compute_store_table ()
6790 int ret;
6791 basic_block bb;
6792 unsigned regno;
6793 rtx insn, pat;
6795 max_gcse_regno = max_reg_num ();
6797 reg_set_in_block = (sbitmap *) sbitmap_vector_alloc (last_basic_block,
6798 max_gcse_regno);
6799 sbitmap_vector_zero (reg_set_in_block, last_basic_block);
6800 pre_ldst_mems = 0;
6802 /* Find all the stores we care about. */
6803 FOR_EACH_BB (bb)
6805 regvec = & (reg_set_in_block[bb->index]);
6806 for (insn = bb->end;
6807 insn && insn != PREV_INSN (bb->head);
6808 insn = PREV_INSN (insn))
6810 /* Ignore anything that is not a normal insn. */
6811 if (! INSN_P (insn))
6812 continue;
6814 if (GET_CODE (insn) == CALL_INSN)
6816 bool clobbers_all = false;
6817 #ifdef NON_SAVING_SETJMP
6818 if (NON_SAVING_SETJMP
6819 && find_reg_note (insn, REG_SETJMP, NULL_RTX))
6820 clobbers_all = true;
6821 #endif
6823 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
6824 if (clobbers_all
6825 || TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
6826 SET_BIT (reg_set_in_block[bb->index], regno);
6829 pat = PATTERN (insn);
6830 note_stores (pat, reg_set_info, NULL);
6832 /* Now that we've marked regs, look for stores. */
6833 if (GET_CODE (pat) == SET)
6834 find_moveable_store (insn);
6838 ret = enumerate_ldsts ();
6840 if (gcse_file)
6842 fprintf (gcse_file, "Store Motion Expressions.\n");
6843 print_ldst_list (gcse_file);
6846 return ret;
6849 /* Check to see if the load X is aliased with STORE_PATTERN. */
6851 static int
6852 load_kills_store (x, store_pattern)
6853 rtx x, store_pattern;
6855 if (true_dependence (x, GET_MODE (x), store_pattern, rtx_addr_varies_p))
6856 return 1;
6857 return 0;
6860 /* Go through the entire insn X, looking for any loads which might alias
6861 STORE_PATTERN. Return 1 if found. */
6863 static int
6864 find_loads (x, store_pattern)
6865 rtx x, store_pattern;
6867 const char * fmt;
6868 int i, j;
6869 int ret = 0;
6871 if (!x)
6872 return 0;
6874 if (GET_CODE (x) == SET)
6875 x = SET_SRC (x);
6877 if (GET_CODE (x) == MEM)
6879 if (load_kills_store (x, store_pattern))
6880 return 1;
6883 /* Recursively process the insn. */
6884 fmt = GET_RTX_FORMAT (GET_CODE (x));
6886 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0 && !ret; i--)
6888 if (fmt[i] == 'e')
6889 ret |= find_loads (XEXP (x, i), store_pattern);
6890 else if (fmt[i] == 'E')
6891 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
6892 ret |= find_loads (XVECEXP (x, i, j), store_pattern);
6894 return ret;
6897 /* Check if INSN kills the store pattern X (is aliased with it).
6898 Return 1 if it does. */
6900 static int
6901 store_killed_in_insn (x, insn)
6902 rtx x, insn;
6904 if (GET_RTX_CLASS (GET_CODE (insn)) != 'i')
6905 return 0;
6907 if (GET_CODE (insn) == CALL_INSN)
6909 /* A normal or pure call might read from pattern,
6910 but a const call will not. */
6911 return ! CONST_OR_PURE_CALL_P (insn) || pure_call_p (insn);
6914 if (GET_CODE (PATTERN (insn)) == SET)
6916 rtx pat = PATTERN (insn);
6917 /* Check for memory stores to aliased objects. */
6918 if (GET_CODE (SET_DEST (pat)) == MEM && !expr_equiv_p (SET_DEST (pat), x))
6919 /* Pretend it's a load and check for aliasing. */
6920 if (find_loads (SET_DEST (pat), x))
6921 return 1;
6922 return find_loads (SET_SRC (pat), x);
6924 else
6925 return find_loads (PATTERN (insn), x);
6928 /* Returns 1 if the expression X is loaded or clobbered on or after INSN
6929 within basic block BB. */
6931 static int
6932 store_killed_after (x, insn, bb)
6933 rtx x, insn;
6934 basic_block bb;
6936 rtx last = bb->end;
6938 if (insn == last)
6939 return 0;
6941 /* Check if the register operands of the store are OK in this block.
6942 Note that if registers are changed ANYWHERE in the block, we'll
6943 decide we can't move it, regardless of whether it changed above
6944 or below the store. This could be improved by checking the register
6945 operands while looking for aliasing in each insn. */
6946 if (!store_ops_ok (XEXP (x, 0), bb))
6947 return 1;
6949 for ( ; insn && insn != NEXT_INSN (last); insn = NEXT_INSN (insn))
6950 if (store_killed_in_insn (x, insn))
6951 return 1;
6953 return 0;
6956 /* Returns 1 if the expression X is loaded or clobbered on or before INSN
6957 within basic block BB. */
6958 static int
6959 store_killed_before (x, insn, bb)
6960 rtx x, insn;
6961 basic_block bb;
6963 rtx first = bb->head;
6965 if (insn == first)
6966 return store_killed_in_insn (x, insn);
6968 /* Check if the register operands of the store are OK in this block.
6969 Note that if registers are changed ANYWHERE in the block, we'll
6970 decide we can't move it, regardless of whether it changed above
6971 or below the store. This could be improved by checking the register
6972 operands while looking for aliasing in each insn. */
6973 if (!store_ops_ok (XEXP (x, 0), bb))
6974 return 1;
6976 for ( ; insn && insn != PREV_INSN (first); insn = PREV_INSN (insn))
6977 if (store_killed_in_insn (x, insn))
6978 return 1;
6980 return 0;
6983 #define ANTIC_STORE_LIST(x) ((x)->loads)
6984 #define AVAIL_STORE_LIST(x) ((x)->stores)
6986 /* Given the table of available store insns at the end of blocks,
6987 determine which ones are not killed by aliasing, and generate
6988 the appropriate vectors for gen and kill. */
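/* Roughly, per the loops below: a store is recorded as available out of
   its block (ae_gen) if nothing from it to the end of the block kills
   it, and as anticipatable at the block entry (st_antloc) if nothing
   from the block start to the store kills it; ae_kill and transp are
   then derived per block by applying store_killed_after at the block
   head.  */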
6989 static void
6990 build_store_vectors ()
6992 basic_block bb, b;
6993 rtx insn, st;
6994 struct ls_expr * ptr;
6996 /* Build the gen_vector. This is any store in the table which is not killed
6997 by aliasing later in its block. */
6998 ae_gen = (sbitmap *) sbitmap_vector_alloc (last_basic_block, num_stores);
6999 sbitmap_vector_zero (ae_gen, last_basic_block);
7001 st_antloc = (sbitmap *) sbitmap_vector_alloc (last_basic_block, num_stores);
7002 sbitmap_vector_zero (st_antloc, last_basic_block);
7004 for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
7006 /* Put all the stores into either the antic list, or the avail list,
7007 or both. */
7008 rtx store_list = ptr->stores;
7009 ptr->stores = NULL_RTX;
7011 for (st = store_list; st != NULL; st = XEXP (st, 1))
7013 insn = XEXP (st, 0);
7014 bb = BLOCK_FOR_INSN (insn);
7016 if (!store_killed_after (ptr->pattern, insn, bb))
7018 /* If we've already seen an available expression in this block,
7019 we can delete the one we saw already (it occurs earlier in
7020 the block) and replace it with this one. We'll copy the
7021 old SRC expression to an unused register in case there
7022 are any side effects. */
7023 if (TEST_BIT (ae_gen[bb->index], ptr->index))
7025 /* Find previous store. */
7026 rtx st;
7027 for (st = AVAIL_STORE_LIST (ptr); st ; st = XEXP (st, 1))
7028 if (BLOCK_FOR_INSN (XEXP (st, 0)) == bb)
7029 break;
7030 if (st)
7032 rtx r = gen_reg_rtx (GET_MODE (ptr->pattern));
7033 if (gcse_file)
7034 fprintf (gcse_file, "Removing redundant store:\n");
7035 replace_store_insn (r, XEXP (st, 0), bb);
7036 XEXP (st, 0) = insn;
7037 continue;
7040 SET_BIT (ae_gen[bb->index], ptr->index);
7041 AVAIL_STORE_LIST (ptr) = alloc_INSN_LIST (insn,
7042 AVAIL_STORE_LIST (ptr));
7045 if (!store_killed_before (ptr->pattern, insn, bb))
7047 SET_BIT (st_antloc[BLOCK_NUM (insn)], ptr->index);
7048 ANTIC_STORE_LIST (ptr) = alloc_INSN_LIST (insn,
7049 ANTIC_STORE_LIST (ptr));
7053 /* Free the original list of store insns. */
7054 free_INSN_LIST_list (&store_list);
7057 ae_kill = (sbitmap *) sbitmap_vector_alloc (last_basic_block, num_stores);
7058 sbitmap_vector_zero (ae_kill, last_basic_block);
7060 transp = (sbitmap *) sbitmap_vector_alloc (last_basic_block, num_stores);
7061 sbitmap_vector_zero (transp, last_basic_block);
7063 for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
7064 FOR_EACH_BB (b)
7066 if (store_killed_after (ptr->pattern, b->head, b))
7068 /* The anticipatable expression is not killed if it's gen'd. */
7070 We leave this check out for now. If we have a code sequence
7071 in a block which looks like:
7072 ST MEMa = x
7073 L y = MEMa
7074 ST MEMa = z
7075 We should flag this as having an ANTIC expression, NOT
7076 transparent, NOT killed, and AVAIL.
7077 Unfortunately, since we haven't re-written all loads to
7078 use the reaching reg, we'll end up doing an incorrect
7079 Load in the middle here if we push the store down. It happens in
7080 gcc.c-torture/execute/960311-1.c with -O3
7081 If we always kill it in this case, we'll sometimes do
7082 unnecessary work, but it shouldn't actually hurt anything.
7083 if (!TEST_BIT (ae_gen[b], ptr->index)). */
7084 SET_BIT (ae_kill[b->index], ptr->index);
7086 else
7087 SET_BIT (transp[b->index], ptr->index);
7090 /* Any block with no exits calls some non-returning function, so
7091 we better mark the store killed here, or we might not store to
7092 it at all. If we knew it was abort, we wouldn't have to store,
7093 but we don't know that for sure. */
7094 if (gcse_file)
7096 fprintf (gcse_file, "ST_avail and ST_antic (shown under loads..)\n");
7097 print_ldst_list (gcse_file);
7098 dump_sbitmap_vector (gcse_file, "st_antloc", "", st_antloc, last_basic_block);
7099 dump_sbitmap_vector (gcse_file, "st_kill", "", ae_kill, last_basic_block);
7100 dump_sbitmap_vector (gcse_file, "Transpt", "", transp, last_basic_block);
7101 dump_sbitmap_vector (gcse_file, "st_avloc", "", ae_gen, last_basic_block);
7105 /* Insert an instruction at the beginning of a basic block, and update
7106 the BLOCK_HEAD if needed. */
7108 static void
7109 insert_insn_start_bb (insn, bb)
7110 rtx insn;
7111 basic_block bb;
7113 /* Insert at start of successor block. */
7114 rtx prev = PREV_INSN (bb->head);
7115 rtx before = bb->head;
7116 while (before != 0)
7118 if (GET_CODE (before) != CODE_LABEL
7119 && (GET_CODE (before) != NOTE
7120 || NOTE_LINE_NUMBER (before) != NOTE_INSN_BASIC_BLOCK))
7121 break;
7122 prev = before;
7123 if (prev == bb->end)
7124 break;
7125 before = NEXT_INSN (before);
7128 insn = emit_insn_after (insn, prev);
7130 if (gcse_file)
7132 fprintf (gcse_file, "STORE_MOTION insert store at start of BB %d:\n",
7133 bb->index);
7134 print_inline_rtx (gcse_file, insn, 6);
7135 fprintf (gcse_file, "\n");
7139 /* This routine will insert a store on an edge. EXPR is the ldst entry for
7140 the memory reference, and E is the edge to insert it on. Returns nonzero
7141 if an edge insertion was performed. */
7143 static int
7144 insert_store (expr, e)
7145 struct ls_expr * expr;
7146 edge e;
7148 rtx reg, insn;
7149 basic_block bb;
7150 edge tmp;
7152 /* We did all the deletes before this insert, so if we didn't delete a
7153 store, then we haven't set the reaching reg yet either. */
7154 if (expr->reaching_reg == NULL_RTX)
7155 return 0;
7157 reg = expr->reaching_reg;
7158 insn = gen_move_insn (expr->pattern, reg);
7160 /* If we are inserting this expression on ALL predecessor edges of a BB,
7161 insert it at the start of the BB, and reset the insert bits on the other
7162 edges so we don't try to insert it on them as well.
7163 bb = e->dest;
7164 for (tmp = e->dest->pred; tmp ; tmp = tmp->pred_next)
7166 int index = EDGE_INDEX (edge_list, tmp->src, tmp->dest);
7167 if (index == EDGE_INDEX_NO_EDGE)
7168 abort ();
7169 if (! TEST_BIT (pre_insert_map[index], expr->index))
7170 break;
7173 /* If tmp is NULL, we found an insertion on every edge, blank the
7174 insertion vector for these edges, and insert at the start of the BB. */
7175 if (!tmp && bb != EXIT_BLOCK_PTR)
7177 for (tmp = e->dest->pred; tmp ; tmp = tmp->pred_next)
7179 int index = EDGE_INDEX (edge_list, tmp->src, tmp->dest);
7180 RESET_BIT (pre_insert_map[index], expr->index);
7182 insert_insn_start_bb (insn, bb);
7183 return 0;
7186 /* We can't insert on this edge, so we'll insert at the head of the
7187 successor block. See Morgan, sec 10.5. */
7188 if ((e->flags & EDGE_ABNORMAL) == EDGE_ABNORMAL)
7190 insert_insn_start_bb (insn, bb);
7191 return 0;
7194 insert_insn_on_edge (insn, e);
7196 if (gcse_file)
7198 fprintf (gcse_file, "STORE_MOTION insert insn on edge (%d, %d):\n",
7199 e->src->index, e->dest->index);
7200 print_inline_rtx (gcse_file, insn, 6);
7201 fprintf (gcse_file, "\n");
7204 return 1;
7207 /* This routine will replace a store with a SET to a specified register. */
7209 static void
7210 replace_store_insn (reg, del, bb)
7211 rtx reg, del;
7212 basic_block bb;
7214 rtx insn;
7216 insn = gen_move_insn (reg, SET_SRC (PATTERN (del)));
7217 insn = emit_insn_after (insn, del);
7219 if (gcse_file)
7221 fprintf (gcse_file,
7222 "STORE_MOTION delete insn in BB %d:\n ", bb->index);
7223 print_inline_rtx (gcse_file, del, 6);
7224 fprintf (gcse_file, "\nSTORE MOTION replaced with insn:\n ");
7225 print_inline_rtx (gcse_file, insn, 6);
7226 fprintf (gcse_file, "\n");
7229 delete_insn (del);
7233 /* Delete a store, but copy the value that would have been stored into
7234 the reaching_reg for later storing. */
7236 static void
7237 delete_store (expr, bb)
7238 struct ls_expr * expr;
7239 basic_block bb;
7241 rtx reg, i, del;
7243 if (expr->reaching_reg == NULL_RTX)
7244 expr->reaching_reg = gen_reg_rtx (GET_MODE (expr->pattern));
7247 /* If there is more than 1 store, the earlier ones will be dead,
7248 but it doesn't hurt to replace them here. */
7249 reg = expr->reaching_reg;
7251 for (i = AVAIL_STORE_LIST (expr); i; i = XEXP (i, 1))
7253 del = XEXP (i, 0);
7254 if (BLOCK_FOR_INSN (del) == bb)
7256 /* We know there is only one since we deleted redundant
7257 ones during the available computation. */
7258 replace_store_insn (reg, del, bb);
7259 break;
7264 /* Free memory used by store motion. */
7266 static void
7267 free_store_memory ()
7269 free_ldst_mems ();
7271 if (ae_gen)
7272 sbitmap_vector_free (ae_gen);
7273 if (ae_kill)
7274 sbitmap_vector_free (ae_kill);
7275 if (transp)
7276 sbitmap_vector_free (transp);
7277 if (st_antloc)
7278 sbitmap_vector_free (st_antloc);
7279 if (pre_insert_map)
7280 sbitmap_vector_free (pre_insert_map);
7281 if (pre_delete_map)
7282 sbitmap_vector_free (pre_delete_map);
7283 if (reg_set_in_block)
7284 sbitmap_vector_free (reg_set_in_block);
7286 ae_gen = ae_kill = transp = st_antloc = NULL;
7287 pre_insert_map = pre_delete_map = reg_set_in_block = NULL;
7290 /* Perform store motion. Much like gcse, except we move expressions the
7291 other way by looking at the flowgraph in reverse. */
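/* Outline of the steps below: build the table of candidate stores
   (compute_store_table), classify them per block (build_store_vectors),
   solve the reverse edge-based LCM problem (pre_edge_rev_lcm), then
   delete the stores LCM marked redundant and insert the replacement
   stores on the selected edges, committing any edge insertions at the
   end.  */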
7293 static void
7294 store_motion ()
7296 basic_block bb;
7297 int x;
7298 struct ls_expr * ptr;
7299 int update_flow = 0;
7301 if (gcse_file)
7303 fprintf (gcse_file, "before store motion\n");
7304 print_rtl (gcse_file, get_insns ());
7308 init_alias_analysis ();
7310 /* Find all the stores that are live to the end of their block. */
7311 num_stores = compute_store_table ();
7312 if (num_stores == 0)
7314 sbitmap_vector_free (reg_set_in_block);
7315 end_alias_analysis ();
7316 return;
7319 /* Now compute what's actually available to move. */
7320 add_noreturn_fake_exit_edges ();
7321 build_store_vectors ();
7323 edge_list = pre_edge_rev_lcm (gcse_file, num_stores, transp, ae_gen,
7324 st_antloc, ae_kill, &pre_insert_map,
7325 &pre_delete_map);
7327 /* Now delete the stores LCM marked redundant, and insert the new stores where they are needed. */
7328 for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
7330 FOR_EACH_BB (bb)
7331 if (TEST_BIT (pre_delete_map[bb->index], ptr->index))
7332 delete_store (ptr, bb);
7334 for (x = 0; x < NUM_EDGES (edge_list); x++)
7335 if (TEST_BIT (pre_insert_map[x], ptr->index))
7336 update_flow |= insert_store (ptr, INDEX_EDGE (edge_list, x));
7339 if (update_flow)
7340 commit_edge_insertions ();
7342 free_store_memory ();
7343 free_edge_list (edge_list);
7344 remove_fake_edges ();
7345 end_alias_analysis ();
7348 #include "gt-gcse.h"