* cp-demangle.c (d_demangle): If DMGL_PARAMS is not set, don't
[official-gcc.git] / gcc / gcse.c
blobdc18797f51354c5c30b5da247ad3b1329d5d99f0
1 /* Global common subexpression elimination/Partial redundancy elimination
2 and global constant/copy propagation for GNU compiler.
3 Copyright (C) 1997, 1998, 1999, 2000, 2001, 2002, 2003
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 2, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the Free
20 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
21 02111-1307, USA. */
23 /* TODO
24 - reordering of memory allocation and freeing to be more space efficient
25 - do rough calc of how many regs are needed in each block, and a rough
26 calc of how many regs are available in each class and use that to
27 throttle back the code in cases where RTX_COST is minimal.
28 - a store to the same address as a load does not kill the load if the
29 source of the store is also the destination of the load. Handling this
30 allows more load motion, particularly out of loops.
31 - ability to realloc sbitmap vectors would allow one initial computation
32 of reg_set_in_block with only subsequent additions, rather than
33 recomputing it for each pass
37 /* References searched while implementing this.
39 Compilers Principles, Techniques and Tools
40 Aho, Sethi, Ullman
41 Addison-Wesley, 1988
43 Global Optimization by Suppression of Partial Redundancies
44 E. Morel, C. Renvoise
45 communications of the acm, Vol. 22, Num. 2, Feb. 1979
47 A Portable Machine-Independent Global Optimizer - Design and Measurements
48 Frederick Chow
49 Stanford Ph.D. thesis, Dec. 1983
51 A Fast Algorithm for Code Movement Optimization
52 D.M. Dhamdhere
53 SIGPLAN Notices, Vol. 23, Num. 10, Oct. 1988
55 A Solution to a Problem with Morel and Renvoise's
56 Global Optimization by Suppression of Partial Redundancies
57 K-H Drechsler, M.P. Stadel
58 ACM TOPLAS, Vol. 10, Num. 4, Oct. 1988
60 Practical Adaptation of the Global Optimization
61 Algorithm of Morel and Renvoise
62 D.M. Dhamdhere
63 ACM TOPLAS, Vol. 13, Num. 2. Apr. 1991
65 Efficiently Computing Static Single Assignment Form and the Control
66 Dependence Graph
67 R. Cytron, J. Ferrante, B.K. Rosen, M.N. Wegman, and F.K. Zadeck
68 ACM TOPLAS, Vol. 13, Num. 4, Oct. 1991
70 Lazy Code Motion
71 J. Knoop, O. Ruthing, B. Steffen
72 ACM SIGPLAN Notices Vol. 27, Num. 7, Jul. 1992, '92 Conference on PLDI
74 What's In a Region? Or Computing Control Dependence Regions in Near-Linear
75 Time for Reducible Flow Control
76 Thomas Ball
77 ACM Letters on Programming Languages and Systems,
78 Vol. 2, Num. 1-4, Mar-Dec 1993
80 An Efficient Representation for Sparse Sets
81 Preston Briggs, Linda Torczon
82 ACM Letters on Programming Languages and Systems,
83 Vol. 2, Num. 1-4, Mar-Dec 1993
85 A Variation of Knoop, Ruthing, and Steffen's Lazy Code Motion
86 K-H Drechsler, M.P. Stadel
87 ACM SIGPLAN Notices, Vol. 28, Num. 5, May 1993
89 Partial Dead Code Elimination
90 J. Knoop, O. Ruthing, B. Steffen
91 ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994
93 Effective Partial Redundancy Elimination
94 P. Briggs, K.D. Cooper
95 ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994
97 The Program Structure Tree: Computing Control Regions in Linear Time
98 R. Johnson, D. Pearson, K. Pingali
99 ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994
101 Optimal Code Motion: Theory and Practice
102 J. Knoop, O. Ruthing, B. Steffen
103 ACM TOPLAS, Vol. 16, Num. 4, Jul. 1994
105 The power of assignment motion
106 J. Knoop, O. Ruthing, B. Steffen
107 ACM SIGPLAN Notices Vol. 30, Num. 6, Jun. 1995, '95 Conference on PLDI
109 Global code motion / global value numbering
110 C. Click
111 ACM SIGPLAN Notices Vol. 30, Num. 6, Jun. 1995, '95 Conference on PLDI
113 Value Driven Redundancy Elimination
114 L.T. Simpson
115 Rice University Ph.D. thesis, Apr. 1996
117 Value Numbering
118 L.T. Simpson
119 Massively Scalar Compiler Project, Rice University, Sep. 1996
121 High Performance Compilers for Parallel Computing
122 Michael Wolfe
123 Addison-Wesley, 1996
125 Advanced Compiler Design and Implementation
126 Steven Muchnick
127 Morgan Kaufmann, 1997
129 Building an Optimizing Compiler
130 Robert Morgan
131 Digital Press, 1998
133 People wishing to speed up the code here should read:
134 Elimination Algorithms for Data Flow Analysis
135 B.G. Ryder, M.C. Paull
136 ACM Computing Surveys, Vol. 18, Num. 3, Sep. 1986
138 How to Analyze Large Programs Efficiently and Informatively
139 D.M. Dhamdhere, B.K. Rosen, F.K. Zadeck
140 ACM SIGPLAN Notices Vol. 27, Num. 7, Jul. 1992, '92 Conference on PLDI
142 People wishing to do something different can find various possibilities
143 in the above papers and elsewhere.
146 #include "config.h"
147 #include "system.h"
148 #include "coretypes.h"
149 #include "tm.h"
150 #include "toplev.h"
152 #include "rtl.h"
153 #include "tm_p.h"
154 #include "regs.h"
155 #include "hard-reg-set.h"
156 #include "flags.h"
157 #include "real.h"
158 #include "insn-config.h"
159 #include "recog.h"
160 #include "basic-block.h"
161 #include "output.h"
162 #include "function.h"
163 #include "expr.h"
164 #include "except.h"
165 #include "ggc.h"
166 #include "params.h"
167 #include "cselib.h"
168 #include "intl.h"
169 #include "obstack.h"
171 /* Propagate flow information through back edges and thus enable PRE's
172 moving loop invariant calculations out of loops.
174 Originally this tended to create worse overall code, but several
175 improvements during the development of PRE seem to have made following
176 back edges generally a win.
178 Note much of the loop invariant code motion done here would normally
179 be done by loop.c, which has more heuristics for when to move invariants
180 out of loops. At some point we might need to move some of those
181 heuristics into gcse.c. */
183 /* We support GCSE via Partial Redundancy Elimination. PRE optimizations
184 are a superset of those done by GCSE.
186 We perform the following steps:
188 1) Compute basic block information.
190 2) Compute table of places where registers are set.
192 3) Perform copy/constant propagation.
194 4) Perform global cse.
196 5) Perform another pass of copy/constant propagation.
198 Two passes of copy/constant propagation are done because the first one
199 enables more GCSE and the second one helps to clean up the copies that
200 GCSE creates. This is needed more for PRE than for Classic because Classic
201 GCSE will try to use an existing register containing the common
202 subexpression rather than create a new one. This is harder to do for PRE
203 because of the code motion (which Classic GCSE doesn't do).
205 Expressions we are interested in GCSE-ing are of the form
206 (set (pseudo-reg) (expression)).
207 Function want_to_gcse_p says what these are.
209 PRE handles moving invariant expressions out of loops (by treating them as
210 partially redundant).
212 Eventually it would be nice to replace cse.c/gcse.c with SSA (static single
213 assignment) based GVN (global value numbering). L. T. Simpson's paper
214 (Rice University) on value numbering is a useful reference for this.
216 **********************
218 We used to support multiple passes but there are diminishing returns in
219 doing so. The first pass usually makes 90% of the changes that are doable.
220 A second pass can make a few more changes made possible by the first pass.
221 Experiments show any further passes don't make enough changes to justify
222 the expense.
224 A study of spec92 using an unlimited number of passes:
225 [1 pass] = 1208 substitutions, [2] = 577, [3] = 202, [4] = 192, [5] = 83,
226 [6] = 34, [7] = 17, [8] = 9, [9] = 4, [10] = 4, [11] = 2,
227 [12] = 2, [13] = 1, [15] = 1, [16] = 2, [41] = 1
229 It was found doing copy propagation between each pass enables further
230 substitutions.
232 PRE is quite expensive in complicated functions because the DFA can take
233 awhile to converge. Hence we only perform one pass. The parameter max-gcse-passes can
234 be modified if one wants to experiment.
236 **********************
238 The steps for PRE are:
240 1) Build the hash table of expressions we wish to GCSE (expr_hash_table).
242 2) Perform the data flow analysis for PRE.
244 3) Delete the redundant instructions
246 4) Insert the required copies [if any] that make the partially
247 redundant instructions fully redundant.
249 5) For other reaching expressions, insert an instruction to copy the value
250 to a newly created pseudo that will reach the redundant instruction.
252 The deletion is done first so that when we do insertions we
253 know which pseudo reg to use.
255 Various papers have argued that PRE DFA is expensive (O(n^2)) and others
256 argue it is not. The number of iterations for the algorithm to converge
257 is typically 2-4 so I don't view it as that expensive (relatively speaking).
259 PRE GCSE depends heavily on the second CSE pass to clean up the copies
260 we create. To make an expression reach the place where it's redundant,
261 the result of the expression is copied to a new register, and the redundant
262 expression is deleted by replacing it with this new register. Classic GCSE
263 doesn't have this problem as much as it computes the reaching defs of
264 each register in each block and thus can try to use an existing register.
266 **********************
268 A fair bit of simplicity is created by creating small functions for simple
269 tasks, even when the function is only called in one place. This may
270 measurably slow things down [or may not] by creating more function call
271 overhead than is necessary. The source is laid out so that it's trivial
272 to make the affected functions inline so that one can measure what speed
273 up, if any, can be achieved, and maybe later when things settle things can
274 be rearranged.
276 Help stamp out big monolithic functions! */
278 /* GCSE global vars. */
280 /* -dG dump file. */
281 static FILE *gcse_file;
283 /* Note whether or not we should run jump optimization after gcse. We
284 want to do this for two cases.
286 * If we changed any jumps via cprop.
288 * If we added any labels via edge splitting. */
290 static int run_jump_opt_after_gcse;
292 /* Bitmaps are normally not included in debugging dumps.
293 However it's useful to be able to print them from GDB.
294 We could create special functions for this, but it's simpler to
295 just allow passing stderr to the dump_foo fns. Since stderr can
296 be a macro, we store a copy here. */
297 static FILE *debug_stderr;
299 /* An obstack for our working variables. */
300 static struct obstack gcse_obstack;
302 struct reg_use {rtx reg_rtx; };
304 /* Hash table of expressions. */
306 struct expr
308 /* The expression (SET_SRC for expressions, PATTERN for assignments). */
309 rtx expr;
310 /* Index in the available expression bitmaps. */
311 int bitmap_index;
312 /* Next entry with the same hash. */
313 struct expr *next_same_hash;
314 /* List of anticipatable occurrences in basic blocks in the function.
315 An "anticipatable occurrence" is one that is the first occurrence in the
316 basic block, the operands are not modified in the basic block prior
317 to the occurrence and the output is not used between the start of
318 the block and the occurrence. */
319 struct occr *antic_occr;
320 /* List of available occurrence in basic blocks in the function.
321 An "available occurrence" is one that is the last occurrence in the
322 basic block and the operands are not modified by following statements in
323 the basic block [including this insn]. */
324 struct occr *avail_occr;
325 /* Non-null if the computation is PRE redundant.
326 The value is the newly created pseudo-reg to record a copy of the
327 expression in all the places that reach the redundant copy. */
328 rtx reaching_reg;
331 /* Occurrence of an expression.
332 There is one per basic block. If a pattern appears more than once the
333 last appearance is used [or first for anticipatable expressions]. */
335 struct occr
337 /* Next occurrence of this expression. */
338 struct occr *next;
339 /* The insn that computes the expression. */
340 rtx insn;
341 /* Nonzero if this [anticipatable] occurrence has been deleted. */
342 char deleted_p;
343 /* Nonzero if this [available] occurrence has been copied to
344 reaching_reg. */
345 /* ??? This is mutually exclusive with deleted_p, so they could share
346 the same byte. */
347 char copied_p;
350 /* Expression and copy propagation hash tables.
351 Each hash table is an array of buckets.
352 ??? It is known that if it were an array of entries, structure elements
353 `next_same_hash' and `bitmap_index' wouldn't be necessary. However, it is
354 not clear whether in the final analysis a sufficient amount of memory would
355 be saved as the size of the available expression bitmaps would be larger
356 [one could build a mapping table without holes afterwards though].
357 Someday I'll perform the computation and figure it out. */
359 struct hash_table
361 /* The table itself.
362 This is an array of `expr_hash_table_size' elements. */
363 struct expr **table;
365 /* Size of the hash table, in elements. */
366 unsigned int size;
368 /* Number of hash table elements. */
369 unsigned int n_elems;
371 /* Whether the table is expression of copy propagation one. */
372 int set_p;
375 /* Expression hash table. */
376 static struct hash_table expr_hash_table;
378 /* Copy propagation hash table. */
379 static struct hash_table set_hash_table;
381 /* Mapping of uids to cuids.
382 Only real insns get cuids. */
383 static int *uid_cuid;
385 /* Highest UID in UID_CUID. */
386 static int max_uid;
388 /* Get the cuid of an insn. */
389 #ifdef ENABLE_CHECKING
390 #define INSN_CUID(INSN) (INSN_UID (INSN) > max_uid ? (abort (), 0) : uid_cuid[INSN_UID (INSN)])
391 #else
392 #define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])
393 #endif
395 /* Number of cuids. */
396 static int max_cuid;
398 /* Mapping of cuids to insns. */
399 static rtx *cuid_insn;
401 /* Get insn from cuid. */
402 #define CUID_INSN(CUID) (cuid_insn[CUID])
404 /* Maximum register number in function prior to doing gcse + 1.
405 Registers created during this pass have regno >= max_gcse_regno.
406 This is named with "gcse" to not collide with global of same name. */
407 static unsigned int max_gcse_regno;
409 /* Table of registers that are modified.
411 For each register, each element is a list of places where the pseudo-reg
412 is set.
414 For simplicity, GCSE is done on sets of pseudo-regs only. PRE GCSE only
415 requires knowledge of which blocks kill which regs [and thus could use
416 a bitmap instead of the lists `reg_set_table' uses].
418 `reg_set_table' and could be turned into an array of bitmaps (num-bbs x
419 num-regs) [however perhaps it may be useful to keep the data as is]. One
420 advantage of recording things this way is that `reg_set_table' is fairly
421 sparse with respect to pseudo regs but for hard regs could be fairly dense
422 [relatively speaking]. And recording sets of pseudo-regs in lists speeds
423 up functions like compute_transp since in the case of pseudo-regs we only
424 need to iterate over the number of times a pseudo-reg is set, not over the
425 number of basic blocks [clearly there is a bit of a slow down in the cases
426 where a pseudo is set more than once in a block, however it is believed
427 that the net effect is to speed things up]. This isn't done for hard-regs
428 because recording call-clobbered hard-regs in `reg_set_table' at each
429 function call can consume a fair bit of memory, and iterating over
430 hard-regs stored this way in compute_transp will be more expensive. */
432 typedef struct reg_set
434 /* The next setting of this register. */
435 struct reg_set *next;
436 /* The insn where it was set. */
437 rtx insn;
438 } reg_set;
440 static reg_set **reg_set_table;
442 /* Size of `reg_set_table'.
443 The table starts out at max_gcse_regno + slop, and is enlarged as
444 necessary. */
445 static int reg_set_table_size;
447 /* Amount to grow `reg_set_table' by when it's full. */
448 #define REG_SET_TABLE_SLOP 100
450 /* This is a list of expressions which are MEMs and will be used by load
451 or store motion.
452 Load motion tracks MEMs which aren't killed by
453 anything except itself. (ie, loads and stores to a single location).
454 We can then allow movement of these MEM refs with a little special
455 allowance. (all stores copy the same value to the reaching reg used
456 for the loads). This means all values used to store into memory must have
457 no side effects so we can re-issue the setter value.
458 Store Motion uses this structure as an expression table to track stores
459 which look interesting, and might be moveable towards the exit block. */
461 struct ls_expr
463 struct expr * expr; /* Gcse expression reference for LM. */
464 rtx pattern; /* Pattern of this mem. */
465 rtx pattern_regs; /* List of registers mentioned by the mem. */
466 rtx loads; /* INSN list of loads seen. */
467 rtx stores; /* INSN list of stores seen. */
468 struct ls_expr * next; /* Next in the list. */
469 int invalid; /* Invalid for some reason. */
470 int index; /* If it maps to a bitmap index. */
471 unsigned int hash_index; /* Index when in a hash table. */
472 rtx reaching_reg; /* Register to use when re-writing. */
475 /* Array of implicit set patterns indexed by basic block index. */
476 static rtx *implicit_sets;
478 /* Head of the list of load/store memory refs. */
479 static struct ls_expr * pre_ldst_mems = NULL;
481 /* Bitmap containing one bit for each register in the program.
482 Used when performing GCSE to track which registers have been set since
483 the start of the basic block. */
484 static regset reg_set_bitmap;
486 /* For each block, a bitmap of registers set in the block.
487 This is used by expr_killed_p and compute_transp.
488 It is computed during hash table computation and not by compute_sets
489 as it includes registers added since the last pass (or between cprop and
490 gcse) and it's currently not easy to realloc sbitmap vectors. */
491 static sbitmap *reg_set_in_block;
493 /* Array, indexed by basic block number for a list of insns which modify
494 memory within that block. */
495 static rtx * modify_mem_list;
496 bitmap modify_mem_list_set;
498 /* This array parallels modify_mem_list, but is kept canonicalized. */
499 static rtx * canon_modify_mem_list;
500 bitmap canon_modify_mem_list_set;
501 /* Various variables for statistics gathering. */
503 /* Memory used in a pass.
504 This isn't intended to be absolutely precise. Its intent is only
505 to keep an eye on memory usage. */
506 static int bytes_used;
508 /* GCSE substitutions made. */
509 static int gcse_subst_count;
510 /* Number of copy instructions created. */
511 static int gcse_create_count;
512 /* Number of constants propagated. */
513 static int const_prop_count;
514 /* Number of copys propagated. */
515 static int copy_prop_count;
517 /* These variables are used by classic GCSE.
518 Normally they'd be defined a bit later, but `rd_gen' needs to
519 be declared sooner. */
521 /* Each block has a bitmap of each type.
522 The length of each blocks bitmap is:
524 max_cuid - for reaching definitions
525 n_exprs - for available expressions
527 Thus we view the bitmaps as 2 dimensional arrays. i.e.
528 rd_kill[block_num][cuid_num]
529 ae_kill[block_num][expr_num] */
531 /* For reaching defs */
532 static sbitmap *rd_kill, *rd_gen, *reaching_defs, *rd_out;
534 /* for available exprs */
535 static sbitmap *ae_kill, *ae_gen, *ae_in, *ae_out;
537 /* Objects of this type are passed around by the null-pointer check
538 removal routines. */
539 struct null_pointer_info
541 /* The basic block being processed. */
542 basic_block current_block;
543 /* The first register to be handled in this pass. */
544 unsigned int min_reg;
545 /* One greater than the last register to be handled in this pass. */
546 unsigned int max_reg;
547 sbitmap *nonnull_local;
548 sbitmap *nonnull_killed;
551 static void compute_can_copy (void);
552 static void *gmalloc (size_t) ATTRIBUTE_MALLOC;
553 static void *gcalloc (size_t, size_t) ATTRIBUTE_MALLOC;
554 static void *grealloc (void *, size_t);
555 static void *gcse_alloc (unsigned long);
556 static void alloc_gcse_mem (rtx);
557 static void free_gcse_mem (void);
558 static void alloc_reg_set_mem (int);
559 static void free_reg_set_mem (void);
560 static int get_bitmap_width (int, int, int);
561 static void record_one_set (int, rtx);
562 static void replace_one_set (int, rtx, rtx);
563 static void record_set_info (rtx, rtx, void *);
564 static void compute_sets (rtx);
565 static void hash_scan_insn (rtx, struct hash_table *, int);
566 static void hash_scan_set (rtx, rtx, struct hash_table *);
567 static void hash_scan_clobber (rtx, rtx, struct hash_table *);
568 static void hash_scan_call (rtx, rtx, struct hash_table *);
569 static int want_to_gcse_p (rtx);
570 static bool gcse_constant_p (rtx);
571 static int oprs_unchanged_p (rtx, rtx, int);
572 static int oprs_anticipatable_p (rtx, rtx);
573 static int oprs_available_p (rtx, rtx);
574 static void insert_expr_in_table (rtx, enum machine_mode, rtx, int, int,
575 struct hash_table *);
576 static void insert_set_in_table (rtx, rtx, struct hash_table *);
577 static unsigned int hash_expr (rtx, enum machine_mode, int *, int);
578 static unsigned int hash_expr_1 (rtx, enum machine_mode, int *);
579 static unsigned int hash_string_1 (const char *);
580 static unsigned int hash_set (int, int);
581 static int expr_equiv_p (rtx, rtx);
582 static void record_last_reg_set_info (rtx, int);
583 static void record_last_mem_set_info (rtx);
584 static void record_last_set_info (rtx, rtx, void *);
585 static void compute_hash_table (struct hash_table *);
586 static void alloc_hash_table (int, struct hash_table *, int);
587 static void free_hash_table (struct hash_table *);
588 static void compute_hash_table_work (struct hash_table *);
589 static void dump_hash_table (FILE *, const char *, struct hash_table *);
590 static struct expr *lookup_expr (rtx, struct hash_table *);
591 static struct expr *lookup_set (unsigned int, struct hash_table *);
592 static struct expr *next_set (unsigned int, struct expr *);
593 static void reset_opr_set_tables (void);
594 static int oprs_not_set_p (rtx, rtx);
595 static void mark_call (rtx);
596 static void mark_set (rtx, rtx);
597 static void mark_clobber (rtx, rtx);
598 static void mark_oprs_set (rtx);
599 static void alloc_cprop_mem (int, int);
600 static void free_cprop_mem (void);
601 static void compute_transp (rtx, int, sbitmap *, int);
602 static void compute_transpout (void);
603 static void compute_local_properties (sbitmap *, sbitmap *, sbitmap *,
604 struct hash_table *);
605 static void compute_cprop_data (void);
606 static void find_used_regs (rtx *, void *);
607 static int try_replace_reg (rtx, rtx, rtx);
608 static struct expr *find_avail_set (int, rtx);
609 static int cprop_jump (basic_block, rtx, rtx, rtx, rtx);
610 static void mems_conflict_for_gcse_p (rtx, rtx, void *);
611 static int load_killed_in_block_p (basic_block, int, rtx, int);
612 static void canon_list_insert (rtx, rtx, void *);
613 static int cprop_insn (rtx, int);
614 static int cprop (int);
615 static void find_implicit_sets (void);
616 static int one_cprop_pass (int, int, int);
617 static bool constprop_register (rtx, rtx, rtx, int);
618 static struct expr *find_bypass_set (int, int);
619 static bool reg_killed_on_edge (rtx, edge);
620 static int bypass_block (basic_block, rtx, rtx);
621 static int bypass_conditional_jumps (void);
622 static void alloc_pre_mem (int, int);
623 static void free_pre_mem (void);
624 static void compute_pre_data (void);
625 static int pre_expr_reaches_here_p (basic_block, struct expr *,
626 basic_block);
627 static void insert_insn_end_bb (struct expr *, basic_block, int);
628 static void pre_insert_copy_insn (struct expr *, rtx);
629 static void pre_insert_copies (void);
630 static int pre_delete (void);
631 static int pre_gcse (void);
632 static int one_pre_gcse_pass (int);
633 static void add_label_notes (rtx, rtx);
634 static void alloc_code_hoist_mem (int, int);
635 static void free_code_hoist_mem (void);
636 static void compute_code_hoist_vbeinout (void);
637 static void compute_code_hoist_data (void);
638 static int hoist_expr_reaches_here_p (basic_block, int, basic_block, char *);
639 static void hoist_code (void);
640 static int one_code_hoisting_pass (void);
641 static void alloc_rd_mem (int, int);
642 static void free_rd_mem (void);
643 static void handle_rd_kill_set (rtx, int, basic_block);
644 static void compute_kill_rd (void);
645 static void compute_rd (void);
646 static void alloc_avail_expr_mem (int, int);
647 static void free_avail_expr_mem (void);
648 static void compute_ae_gen (struct hash_table *);
649 static int expr_killed_p (rtx, basic_block);
650 static void compute_ae_kill (sbitmap *, sbitmap *, struct hash_table *);
651 static int expr_reaches_here_p (struct occr *, struct expr *, basic_block,
652 int);
653 static rtx computing_insn (struct expr *, rtx);
654 static int def_reaches_here_p (rtx, rtx);
655 static int can_disregard_other_sets (struct reg_set **, rtx, int);
656 static int handle_avail_expr (rtx, struct expr *);
657 static int classic_gcse (void);
658 static int one_classic_gcse_pass (int);
659 static void invalidate_nonnull_info (rtx, rtx, void *);
660 static int delete_null_pointer_checks_1 (unsigned int *, sbitmap *, sbitmap *,
661 struct null_pointer_info *);
662 static rtx process_insert_insn (struct expr *);
663 static int pre_edge_insert (struct edge_list *, struct expr **);
664 static int expr_reaches_here_p_work (struct occr *, struct expr *,
665 basic_block, int, char *);
666 static int pre_expr_reaches_here_p_work (basic_block, struct expr *,
667 basic_block, char *);
668 static struct ls_expr * ldst_entry (rtx);
669 static void free_ldst_entry (struct ls_expr *);
670 static void free_ldst_mems (void);
671 static void print_ldst_list (FILE *);
672 static struct ls_expr * find_rtx_in_ldst (rtx);
673 static int enumerate_ldsts (void);
674 static inline struct ls_expr * first_ls_expr (void);
675 static inline struct ls_expr * next_ls_expr (struct ls_expr *);
676 static int simple_mem (rtx);
677 static void invalidate_any_buried_refs (rtx);
678 static void compute_ld_motion_mems (void);
679 static void trim_ld_motion_mems (void);
680 static void update_ld_motion_stores (struct expr *);
681 static void reg_set_info (rtx, rtx, void *);
682 static void reg_clear_last_set (rtx, rtx, void *);
683 static bool store_ops_ok (rtx, int *);
684 static rtx extract_mentioned_regs (rtx);
685 static rtx extract_mentioned_regs_helper (rtx, rtx);
686 static void find_moveable_store (rtx, int *, int *);
687 static int compute_store_table (void);
688 static bool load_kills_store (rtx, rtx, int);
689 static bool find_loads (rtx, rtx, int);
690 static bool store_killed_in_insn (rtx, rtx, rtx, int);
691 static bool store_killed_after (rtx, rtx, rtx, basic_block, int *, rtx *);
692 static bool store_killed_before (rtx, rtx, rtx, basic_block, int *);
693 static void build_store_vectors (void);
694 static void insert_insn_start_bb (rtx, basic_block);
695 static int insert_store (struct ls_expr *, edge);
696 static void remove_reachable_equiv_notes (basic_block, struct ls_expr *);
697 static void replace_store_insn (rtx, rtx, basic_block, struct ls_expr *);
698 static void delete_store (struct ls_expr *, basic_block);
699 static void free_store_memory (void);
700 static void store_motion (void);
701 static void free_insn_expr_list_list (rtx *);
702 static void clear_modify_mem_tables (void);
703 static void free_modify_mem_tables (void);
704 static rtx gcse_emit_move_after (rtx, rtx, rtx);
705 static void local_cprop_find_used_regs (rtx *, void *);
706 static bool do_local_cprop (rtx, rtx, int, rtx*);
707 static bool adjust_libcall_notes (rtx, rtx, rtx, rtx*);
708 static void local_cprop_pass (int);
709 static bool is_too_expensive (const char *);
712 /* Entry point for global common subexpression elimination.
713 F is the first instruction in the function. */
716 gcse_main (rtx f, FILE *file)
718 int changed, pass;
719 /* Bytes used at start of pass. */
720 int initial_bytes_used;
721 /* Maximum number of bytes used by a pass. */
722 int max_pass_bytes;
723 /* Point to release obstack data from for each pass. */
724 char *gcse_obstack_bottom;
726 /* We do not construct an accurate cfg in functions which call
727 setjmp, so just punt to be safe. */
728 if (current_function_calls_setjmp)
729 return 0;
731 /* Assume that we do not need to run jump optimizations after gcse. */
732 run_jump_opt_after_gcse = 0;
734 /* For calling dump_foo fns from gdb. */
735 debug_stderr = stderr;
736 gcse_file = file;
738 /* Identify the basic block information for this function, including
739 successors and predecessors. */
740 max_gcse_regno = max_reg_num ();
742 if (file)
743 dump_flow_info (file);
745 /* Return if there's nothing to do, or it is too expensive. */
746 if (n_basic_blocks <= 1 || is_too_expensive (_("GCSE disabled")))
747 return 0;
749 gcc_obstack_init (&gcse_obstack);
750 bytes_used = 0;
752 /* We need alias. */
753 init_alias_analysis ();
754 /* Record where pseudo-registers are set. This data is kept accurate
755 during each pass. ??? We could also record hard-reg information here
756 [since it's unchanging], however it is currently done during hash table
757 computation.
759 It may be tempting to compute MEM set information here too, but MEM sets
760 will be subject to code motion one day and thus we need to compute
761 information about memory sets when we build the hash tables. */
763 alloc_reg_set_mem (max_gcse_regno);
764 compute_sets (f);
766 pass = 0;
767 initial_bytes_used = bytes_used;
768 max_pass_bytes = 0;
769 gcse_obstack_bottom = gcse_alloc (1);
770 changed = 1;
771 while (changed && pass < MAX_GCSE_PASSES)
773 changed = 0;
774 if (file)
775 fprintf (file, "GCSE pass %d\n\n", pass + 1);
777 /* Initialize bytes_used to the space for the pred/succ lists,
778 and the reg_set_table data. */
779 bytes_used = initial_bytes_used;
781 /* Each pass may create new registers, so recalculate each time. */
782 max_gcse_regno = max_reg_num ();
784 alloc_gcse_mem (f);
786 /* Don't allow constant propagation to modify jumps
787 during this pass. */
788 changed = one_cprop_pass (pass + 1, 0, 0);
790 if (optimize_size)
791 changed |= one_classic_gcse_pass (pass + 1);
792 else
794 changed |= one_pre_gcse_pass (pass + 1);
795 /* We may have just created new basic blocks. Release and
796 recompute various things which are sized on the number of
797 basic blocks. */
798 if (changed)
800 free_modify_mem_tables ();
801 modify_mem_list = gcalloc (last_basic_block, sizeof (rtx));
802 canon_modify_mem_list = gcalloc (last_basic_block, sizeof (rtx));
804 free_reg_set_mem ();
805 alloc_reg_set_mem (max_reg_num ());
806 compute_sets (f);
807 run_jump_opt_after_gcse = 1;
810 if (max_pass_bytes < bytes_used)
811 max_pass_bytes = bytes_used;
813 /* Free up memory, then reallocate for code hoisting. We can
814 not re-use the existing allocated memory because the tables
815 will not have info for the insns or registers created by
816 partial redundancy elimination. */
817 free_gcse_mem ();
819 /* It does not make sense to run code hoisting unless we optimizing
820 for code size -- it rarely makes programs faster, and can make
821 them bigger if we did partial redundancy elimination (when optimizing
822 for space, we use a classic gcse algorithm instead of partial
823 redundancy algorithms). */
824 if (optimize_size)
826 max_gcse_regno = max_reg_num ();
827 alloc_gcse_mem (f);
828 changed |= one_code_hoisting_pass ();
829 free_gcse_mem ();
831 if (max_pass_bytes < bytes_used)
832 max_pass_bytes = bytes_used;
835 if (file)
837 fprintf (file, "\n");
838 fflush (file);
841 obstack_free (&gcse_obstack, gcse_obstack_bottom);
842 pass++;
845 /* Do one last pass of copy propagation, including cprop into
846 conditional jumps. */
848 max_gcse_regno = max_reg_num ();
849 alloc_gcse_mem (f);
850 /* This time, go ahead and allow cprop to alter jumps. */
851 one_cprop_pass (pass + 1, 1, 0);
852 free_gcse_mem ();
854 if (file)
856 fprintf (file, "GCSE of %s: %d basic blocks, ",
857 current_function_name, n_basic_blocks);
858 fprintf (file, "%d pass%s, %d bytes\n\n",
859 pass, pass > 1 ? "es" : "", max_pass_bytes);
862 obstack_free (&gcse_obstack, NULL);
863 free_reg_set_mem ();
864 /* We are finished with alias. */
865 end_alias_analysis ();
866 allocate_reg_info (max_reg_num (), FALSE, FALSE);
868 if (!optimize_size && flag_gcse_sm)
869 store_motion ();
871 /* Record where pseudo-registers are set. */
872 return run_jump_opt_after_gcse;
875 /* Misc. utilities. */
877 /* Nonzero for each mode that supports (set (reg) (reg)).
878 This is trivially true for integer and floating point values.
879 It may or may not be true for condition codes. */
880 static char can_copy[(int) NUM_MACHINE_MODES];
882 /* Compute which modes support reg/reg copy operations. */
884 static void
885 compute_can_copy (void)
887 int i;
888 #ifndef AVOID_CCMODE_COPIES
889 rtx reg, insn;
890 #endif
891 memset (can_copy, 0, NUM_MACHINE_MODES);
893 start_sequence ();
894 for (i = 0; i < NUM_MACHINE_MODES; i++)
895 if (GET_MODE_CLASS (i) == MODE_CC)
897 #ifdef AVOID_CCMODE_COPIES
898 can_copy[i] = 0;
899 #else
900 reg = gen_rtx_REG ((enum machine_mode) i, LAST_VIRTUAL_REGISTER + 1);
901 insn = emit_insn (gen_rtx_SET (VOIDmode, reg, reg));
902 if (recog (PATTERN (insn), insn, NULL) >= 0)
903 can_copy[i] = 1;
904 #endif
906 else
907 can_copy[i] = 1;
909 end_sequence ();
912 /* Returns whether the mode supports reg/reg copy operations. */
914 bool
915 can_copy_p (enum machine_mode mode)
917 static bool can_copy_init_p = false;
919 if (! can_copy_init_p)
921 compute_can_copy ();
922 can_copy_init_p = true;
925 return can_copy[mode] != 0;
928 /* Cover function to xmalloc to record bytes allocated. */
930 static void *
931 gmalloc (size_t size)
933 bytes_used += size;
934 return xmalloc (size);
937 /* Cover function to xcalloc to record bytes allocated. */
939 static void *
940 gcalloc (size_t nelem, size_t elsize)
942 bytes_used += nelem * elsize;
943 return xcalloc (nelem, elsize);
946 /* Cover function to xrealloc.
947 We don't record the additional size since we don't know it.
948 It won't affect memory usage stats much anyway. */
950 static void *
951 grealloc (void *ptr, size_t size)
953 return xrealloc (ptr, size);
956 /* Cover function to obstack_alloc. */
958 static void *
959 gcse_alloc (unsigned long size)
961 bytes_used += size;
962 return obstack_alloc (&gcse_obstack, size);
965 /* Allocate memory for the cuid mapping array,
966 and reg/memory set tracking tables.
968 This is called at the start of each pass. */
970 static void
971 alloc_gcse_mem (rtx f)
973 int i;
974 rtx insn;
976 /* Find the largest UID and create a mapping from UIDs to CUIDs.
977 CUIDs are like UIDs except they increase monotonically, have no gaps,
978 and only apply to real insns. */
980 max_uid = get_max_uid ();
981 uid_cuid = gcalloc (max_uid + 1, sizeof (int));
982 for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
984 if (INSN_P (insn))
985 uid_cuid[INSN_UID (insn)] = i++;
986 else
987 uid_cuid[INSN_UID (insn)] = i;
990 /* Create a table mapping cuids to insns. */
992 max_cuid = i;
993 cuid_insn = gcalloc (max_cuid + 1, sizeof (rtx));
994 for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
995 if (INSN_P (insn))
996 CUID_INSN (i++) = insn;
998 /* Allocate vars to track sets of regs. */
999 reg_set_bitmap = BITMAP_XMALLOC ();
1001 /* Allocate vars to track sets of regs, memory per block. */
1002 reg_set_in_block = sbitmap_vector_alloc (last_basic_block, max_gcse_regno);
1003 /* Allocate array to keep a list of insns which modify memory in each
1004 basic block. */
1005 modify_mem_list = gcalloc (last_basic_block, sizeof (rtx));
1006 canon_modify_mem_list = gcalloc (last_basic_block, sizeof (rtx));
1007 modify_mem_list_set = BITMAP_XMALLOC ();
1008 canon_modify_mem_list_set = BITMAP_XMALLOC ();
1011 /* Free memory allocated by alloc_gcse_mem. */
1013 static void
1014 free_gcse_mem (void)
1016 free (uid_cuid);
1017 free (cuid_insn);
1019 BITMAP_XFREE (reg_set_bitmap);
1021 sbitmap_vector_free (reg_set_in_block);
1022 free_modify_mem_tables ();
1023 BITMAP_XFREE (modify_mem_list_set);
1024 BITMAP_XFREE (canon_modify_mem_list_set);
1027 /* Many of the global optimization algorithms work by solving dataflow
1028 equations for various expressions. Initially, some local value is
1029 computed for each expression in each block. Then, the values across the
1030 various blocks are combined (by following flow graph edges) to arrive at
1031 global values. Conceptually, each set of equations is independent. We
1032 may therefore solve all the equations in parallel, solve them one at a
1033 time, or pick any intermediate approach.
1035 When you're going to need N two-dimensional bitmaps, each X (say, the
1036 number of blocks) by Y (say, the number of expressions), call this
1037 function. It's not important what X and Y represent; only that Y
1038 correspond to the things that can be done in parallel. This function will
1039 return an appropriate chunking factor C; you should solve C sets of
1040 equations in parallel. By going through this function, we can easily
1041 trade space against time; by solving fewer equations in parallel we use
1042 less space. */
1044 static int
1045 get_bitmap_width (int n, int x, int y)
1047 /* It's not really worth figuring out *exactly* how much memory will
1048 be used by a particular choice. The important thing is to get
1049 something approximately right. */
1050 size_t max_bitmap_memory = 10 * 1024 * 1024;
1052 /* The number of bytes we'd use for a single column of minimum
1053 width. */
1054 size_t column_size = n * x * sizeof (SBITMAP_ELT_TYPE);
1056 /* Often, it's reasonable just to solve all the equations in
1057 parallel. */
1058 if (column_size * SBITMAP_SET_SIZE (y) <= max_bitmap_memory)
1059 return y;
1061 /* Otherwise, pick the largest width we can, without going over the
1062 limit. */
1063 return SBITMAP_ELT_BITS * ((max_bitmap_memory + column_size - 1)
1064 / column_size);
1067 /* Compute the local properties of each recorded expression.
1069 Local properties are those that are defined by the block, irrespective of
1070 other blocks.
1072 An expression is transparent in a block if its operands are not modified
1073 in the block.
1075 An expression is computed (locally available) in a block if it is computed
1076 at least once and expression would contain the same value if the
1077 computation was moved to the end of the block.
1079 An expression is locally anticipatable in a block if it is computed at
1080 least once and expression would contain the same value if the computation
1081 was moved to the beginning of the block.
1083 We call this routine for cprop, pre and code hoisting. They all compute
1084 basically the same information and thus can easily share this code.
1086 TRANSP, COMP, and ANTLOC are destination sbitmaps for recording local
1087 properties. If NULL, then it is not necessary to compute or record that
1088 particular property.
1090 TABLE controls which hash table to look at. If it is set hash table,
1091 additionally, TRANSP is computed as ~TRANSP, since this is really cprop's
1092 ABSALTERED. */
1094 static void
1095 compute_local_properties (sbitmap *transp, sbitmap *comp, sbitmap *antloc, struct hash_table *table)
1097 unsigned int i;
1099 /* Initialize any bitmaps that were passed in. */
1100 if (transp)
1102 if (table->set_p)
1103 sbitmap_vector_zero (transp, last_basic_block);
1104 else
1105 sbitmap_vector_ones (transp, last_basic_block);
1108 if (comp)
1109 sbitmap_vector_zero (comp, last_basic_block);
1110 if (antloc)
1111 sbitmap_vector_zero (antloc, last_basic_block);
1113 for (i = 0; i < table->size; i++)
1115 struct expr *expr;
1117 for (expr = table->table[i]; expr != NULL; expr = expr->next_same_hash)
1119 int indx = expr->bitmap_index;
1120 struct occr *occr;
1122 /* The expression is transparent in this block if it is not killed.
1123 We start by assuming all are transparent [none are killed], and
1124 then reset the bits for those that are. */
1125 if (transp)
1126 compute_transp (expr->expr, indx, transp, table->set_p);
1128 /* The occurrences recorded in antic_occr are exactly those that
1129 we want to set to nonzero in ANTLOC. */
1130 if (antloc)
1131 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
1133 SET_BIT (antloc[BLOCK_NUM (occr->insn)], indx);
1135 /* While we're scanning the table, this is a good place to
1136 initialize this. */
1137 occr->deleted_p = 0;
1140 /* The occurrences recorded in avail_occr are exactly those that
1141 we want to set to nonzero in COMP. */
1142 if (comp)
1143 for (occr = expr->avail_occr; occr != NULL; occr = occr->next)
1145 SET_BIT (comp[BLOCK_NUM (occr->insn)], indx);
1147 /* While we're scanning the table, this is a good place to
1148 initialize this. */
1149 occr->copied_p = 0;
1152 /* While we're scanning the table, this is a good place to
1153 initialize this. */
1154 expr->reaching_reg = 0;
1159 /* Register set information.
1161 `reg_set_table' records where each register is set or otherwise
1162 modified. */
1164 static struct obstack reg_set_obstack;
1166 static void
1167 alloc_reg_set_mem (int n_regs)
1169 reg_set_table_size = n_regs + REG_SET_TABLE_SLOP;
1170 reg_set_table = gcalloc (reg_set_table_size, sizeof (struct reg_set *));
1172 gcc_obstack_init (&reg_set_obstack);
1175 static void
1176 free_reg_set_mem (void)
1178 free (reg_set_table);
1179 obstack_free (&reg_set_obstack, NULL);
1182 /* An OLD_INSN that used to set REGNO was replaced by NEW_INSN.
1183 Update the corresponding `reg_set_table' entry accordingly.
1184 We assume that NEW_INSN is not already recorded in reg_set_table[regno]. */
1186 static void
1187 replace_one_set (int regno, rtx old_insn, rtx new_insn)
1189 struct reg_set *reg_info;
1190 if (regno >= reg_set_table_size)
1191 return;
1192 for (reg_info = reg_set_table[regno]; reg_info; reg_info = reg_info->next)
1193 if (reg_info->insn == old_insn)
1195 reg_info->insn = new_insn;
1196 break;
1200 /* Record REGNO in the reg_set table. */
1202 static void
1203 record_one_set (int regno, rtx insn)
1205 /* Allocate a new reg_set element and link it onto the list. */
1206 struct reg_set *new_reg_info;
1208 /* If the table isn't big enough, enlarge it. */
1209 if (regno >= reg_set_table_size)
1211 int new_size = regno + REG_SET_TABLE_SLOP;
1213 reg_set_table = grealloc (reg_set_table,
1214 new_size * sizeof (struct reg_set *));
1215 memset (reg_set_table + reg_set_table_size, 0,
1216 (new_size - reg_set_table_size) * sizeof (struct reg_set *));
1217 reg_set_table_size = new_size;
1220 new_reg_info = obstack_alloc (&reg_set_obstack, sizeof (struct reg_set));
1221 bytes_used += sizeof (struct reg_set);
1222 new_reg_info->insn = insn;
1223 new_reg_info->next = reg_set_table[regno];
1224 reg_set_table[regno] = new_reg_info;
1227 /* Called from compute_sets via note_stores to handle one SET or CLOBBER in
1228 an insn. The DATA is really the instruction in which the SET is
1229 occurring. */
1231 static void
1232 record_set_info (rtx dest, rtx setter ATTRIBUTE_UNUSED, void *data)
1234 rtx record_set_insn = (rtx) data;
1236 if (GET_CODE (dest) == REG && REGNO (dest) >= FIRST_PSEUDO_REGISTER)
1237 record_one_set (REGNO (dest), record_set_insn);
1240 /* Scan the function and record each set of each pseudo-register.
1242 This is called once, at the start of the gcse pass. See the comments for
1243 `reg_set_table' for further documentation. */
1245 static void
1246 compute_sets (rtx f)
1248 rtx insn;
1250 for (insn = f; insn != 0; insn = NEXT_INSN (insn))
1251 if (INSN_P (insn))
1252 note_stores (PATTERN (insn), record_set_info, insn);
1255 /* Hash table support. */
1257 struct reg_avail_info
1259 basic_block last_bb;
1260 int first_set;
1261 int last_set;
1264 static struct reg_avail_info *reg_avail_info;
1265 static basic_block current_bb;
1268 /* See whether X, the source of a set, is something we want to consider for
1269 GCSE. */
1271 static GTY(()) rtx test_insn;
1272 static int
1273 want_to_gcse_p (rtx x)
1275 int num_clobbers = 0;
1276 int icode;
1278 switch (GET_CODE (x))
1280 case REG:
1281 case SUBREG:
1282 case CONST_INT:
1283 case CONST_DOUBLE:
1284 case CONST_VECTOR:
1285 case CALL:
1286 case CONSTANT_P_RTX:
1287 return 0;
1289 default:
1290 break;
1293 /* If this is a valid operand, we are OK. If it's VOIDmode, we aren't. */
1294 if (general_operand (x, GET_MODE (x)))
1295 return 1;
1296 else if (GET_MODE (x) == VOIDmode)
1297 return 0;
1299 /* Otherwise, check if we can make a valid insn from it. First initialize
1300 our test insn if we haven't already. */
1301 if (test_insn == 0)
1303 test_insn
1304 = make_insn_raw (gen_rtx_SET (VOIDmode,
1305 gen_rtx_REG (word_mode,
1306 FIRST_PSEUDO_REGISTER * 2),
1307 const0_rtx));
1308 NEXT_INSN (test_insn) = PREV_INSN (test_insn) = 0;
1311 /* Now make an insn like the one we would make when GCSE'ing and see if
1312 valid. */
1313 PUT_MODE (SET_DEST (PATTERN (test_insn)), GET_MODE (x));
1314 SET_SRC (PATTERN (test_insn)) = x;
1315 return ((icode = recog (PATTERN (test_insn), test_insn, &num_clobbers)) >= 0
1316 && (num_clobbers == 0 || ! added_clobbers_hard_reg_p (icode)));
1319 /* Return nonzero if the operands of expression X are unchanged from the
1320 start of INSN's basic block up to but not including INSN (if AVAIL_P == 0),
1321 or from INSN to the end of INSN's basic block (if AVAIL_P != 0). */
1323 static int
1324 oprs_unchanged_p (rtx x, rtx insn, int avail_p)
1326 int i, j;
1327 enum rtx_code code;
1328 const char *fmt;
1330 if (x == 0)
1331 return 1;
1333 code = GET_CODE (x);
1334 switch (code)
1336 case REG:
1338 struct reg_avail_info *info = &reg_avail_info[REGNO (x)];
1340 if (info->last_bb != current_bb)
1341 return 1;
1342 if (avail_p)
1343 return info->last_set < INSN_CUID (insn);
1344 else
1345 return info->first_set >= INSN_CUID (insn);
1348 case MEM:
1349 if (load_killed_in_block_p (current_bb, INSN_CUID (insn),
1350 x, avail_p))
1351 return 0;
1352 else
1353 return oprs_unchanged_p (XEXP (x, 0), insn, avail_p);
1355 case PRE_DEC:
1356 case PRE_INC:
1357 case POST_DEC:
1358 case POST_INC:
1359 case PRE_MODIFY:
1360 case POST_MODIFY:
1361 return 0;
1363 case PC:
1364 case CC0: /*FIXME*/
1365 case CONST:
1366 case CONST_INT:
1367 case CONST_DOUBLE:
1368 case CONST_VECTOR:
1369 case SYMBOL_REF:
1370 case LABEL_REF:
1371 case ADDR_VEC:
1372 case ADDR_DIFF_VEC:
1373 return 1;
1375 default:
1376 break;
1379 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
1381 if (fmt[i] == 'e')
1383 /* If we are about to do the last recursive call needed at this
1384 level, change it into iteration. This function is called enough
1385 to be worth it. */
1386 if (i == 0)
1387 return oprs_unchanged_p (XEXP (x, i), insn, avail_p);
1389 else if (! oprs_unchanged_p (XEXP (x, i), insn, avail_p))
1390 return 0;
1392 else if (fmt[i] == 'E')
1393 for (j = 0; j < XVECLEN (x, i); j++)
1394 if (! oprs_unchanged_p (XVECEXP (x, i, j), insn, avail_p))
1395 return 0;
1398 return 1;
1401 /* Used for communication between mems_conflict_for_gcse_p and
1402 load_killed_in_block_p. Nonzero if mems_conflict_for_gcse_p finds a
1403 conflict between two memory references. */
1404 static int gcse_mems_conflict_p;
1406 /* Used for communication between mems_conflict_for_gcse_p and
1407 load_killed_in_block_p. A memory reference for a load instruction,
1408 mems_conflict_for_gcse_p will see if a memory store conflicts with
1409 this memory load. */
1410 static rtx gcse_mem_operand;
1412 /* DEST is the output of an instruction. If it is a memory reference, and
1413 possibly conflicts with the load found in gcse_mem_operand, then set
1414 gcse_mems_conflict_p to a nonzero value. */
1416 static void
1417 mems_conflict_for_gcse_p (rtx dest, rtx setter ATTRIBUTE_UNUSED,
1418 void *data ATTRIBUTE_UNUSED)
1420 while (GET_CODE (dest) == SUBREG
1421 || GET_CODE (dest) == ZERO_EXTRACT
1422 || GET_CODE (dest) == SIGN_EXTRACT
1423 || GET_CODE (dest) == STRICT_LOW_PART)
1424 dest = XEXP (dest, 0);
1426 /* If DEST is not a MEM, then it will not conflict with the load. Note
1427 that function calls are assumed to clobber memory, but are handled
1428 elsewhere. */
1429 if (GET_CODE (dest) != MEM)
1430 return;
1432 /* If we are setting a MEM in our list of specially recognized MEMs,
1433 don't mark as killed this time. */
1435 if (expr_equiv_p (dest, gcse_mem_operand) && pre_ldst_mems != NULL)
1437 if (!find_rtx_in_ldst (dest))
1438 gcse_mems_conflict_p = 1;
1439 return;
1442 if (true_dependence (dest, GET_MODE (dest), gcse_mem_operand,
1443 rtx_addr_varies_p))
1444 gcse_mems_conflict_p = 1;
1447 /* Return nonzero if the expression in X (a memory reference) is killed
1448 in block BB before or after the insn with the CUID in UID_LIMIT.
1449 AVAIL_P is nonzero for kills after UID_LIMIT, and zero for kills
1450 before UID_LIMIT.
1452 To check the entire block, set UID_LIMIT to max_uid + 1 and
1453 AVAIL_P to 0. */
1455 static int
1456 load_killed_in_block_p (basic_block bb, int uid_limit, rtx x, int avail_p)
1458 rtx list_entry = modify_mem_list[bb->index];
1459 while (list_entry)
1461 rtx setter;
1462 /* Ignore entries in the list that do not apply. */
1463 if ((avail_p
1464 && INSN_CUID (XEXP (list_entry, 0)) < uid_limit)
1465 || (! avail_p
1466 && INSN_CUID (XEXP (list_entry, 0)) > uid_limit))
1468 list_entry = XEXP (list_entry, 1);
1469 continue;
1472 setter = XEXP (list_entry, 0);
1474 /* If SETTER is a call everything is clobbered. Note that calls
1475 to pure functions are never put on the list, so we need not
1476 worry about them. */
1477 if (GET_CODE (setter) == CALL_INSN)
1478 return 1;
1480 /* SETTER must be an INSN of some kind that sets memory. Call
1481 note_stores to examine each hunk of memory that is modified.
1483 The note_stores interface is pretty limited, so we have to
1484 communicate via global variables. Yuk. */
1485 gcse_mem_operand = x;
1486 gcse_mems_conflict_p = 0;
1487 note_stores (PATTERN (setter), mems_conflict_for_gcse_p, NULL);
1488 if (gcse_mems_conflict_p)
1489 return 1;
1490 list_entry = XEXP (list_entry, 1);
1492 return 0;
1495 /* Return nonzero if the operands of expression X are unchanged from
1496 the start of INSN's basic block up to but not including INSN. */
1498 static int
1499 oprs_anticipatable_p (rtx x, rtx insn)
1501 return oprs_unchanged_p (x, insn, 0);
1504 /* Return nonzero if the operands of expression X are unchanged from
1505 INSN to the end of INSN's basic block. */
1507 static int
1508 oprs_available_p (rtx x, rtx insn)
1510 return oprs_unchanged_p (x, insn, 1);
1513 /* Hash expression X.
1515 MODE is only used if X is a CONST_INT. DO_NOT_RECORD_P is a boolean
1516 indicating if a volatile operand is found or if the expression contains
1517 something we don't want to insert in the table. HASH_TABLE_SIZE is
1518 the current size of the hash table to be probed.
1520 ??? One might want to merge this with canon_hash. Later. */
1522 static unsigned int
1523 hash_expr (rtx x, enum machine_mode mode, int *do_not_record_p,
1524 int hash_table_size)
1526 unsigned int hash;
1528 *do_not_record_p = 0;
1530 hash = hash_expr_1 (x, mode, do_not_record_p);
1531 return hash % hash_table_size;
1534 /* Hash a string. Just add its bytes up. */
1536 static inline unsigned
1537 hash_string_1 (const char *ps)
1539 unsigned hash = 0;
1540 const unsigned char *p = (const unsigned char *) ps;
1542 if (p)
1543 while (*p)
1544 hash += *p++;
1546 return hash;
1549 /* Subroutine of hash_expr to do the actual work. */
1551 static unsigned int
1552 hash_expr_1 (rtx x, enum machine_mode mode, int *do_not_record_p)
1554 int i, j;
1555 unsigned hash = 0;
1556 enum rtx_code code;
1557 const char *fmt;
1559 /* Used to turn recursion into iteration. We can't rely on GCC's
1560 tail-recursion elimination since we need to keep accumulating values
1561 in HASH. */
1563 if (x == 0)
1564 return hash;
1566 repeat:
1567 code = GET_CODE (x);
1568 switch (code)
1570 case REG:
1571 hash += ((unsigned int) REG << 7) + REGNO (x);
1572 return hash;
1574 case CONST_INT:
1575 hash += (((unsigned int) CONST_INT << 7) + (unsigned int) mode
1576 + (unsigned int) INTVAL (x));
1577 return hash;
1579 case CONST_DOUBLE:
1580 /* This is like the general case, except that it only counts
1581 the integers representing the constant. */
1582 hash += (unsigned int) code + (unsigned int) GET_MODE (x);
1583 if (GET_MODE (x) != VOIDmode)
1584 for (i = 2; i < GET_RTX_LENGTH (CONST_DOUBLE); i++)
1585 hash += (unsigned int) XWINT (x, i);
1586 else
1587 hash += ((unsigned int) CONST_DOUBLE_LOW (x)
1588 + (unsigned int) CONST_DOUBLE_HIGH (x));
1589 return hash;
1591 case CONST_VECTOR:
1593 int units;
1594 rtx elt;
1596 units = CONST_VECTOR_NUNITS (x);
1598 for (i = 0; i < units; ++i)
1600 elt = CONST_VECTOR_ELT (x, i);
1601 hash += hash_expr_1 (elt, GET_MODE (elt), do_not_record_p);
1604 return hash;
1607 /* Assume there is only one rtx object for any given label. */
1608 case LABEL_REF:
1609 /* We don't hash on the address of the CODE_LABEL to avoid bootstrap
1610 differences and differences between each stage's debugging dumps. */
1611 hash += (((unsigned int) LABEL_REF << 7)
1612 + CODE_LABEL_NUMBER (XEXP (x, 0)));
1613 return hash;
1615 case SYMBOL_REF:
1617 /* Don't hash on the symbol's address to avoid bootstrap differences.
1618 Different hash values may cause expressions to be recorded in
1619 different orders and thus different registers to be used in the
1620 final assembler. This also avoids differences in the dump files
1621 between various stages. */
1622 unsigned int h = 0;
1623 const unsigned char *p = (const unsigned char *) XSTR (x, 0);
1625 while (*p)
1626 h += (h << 7) + *p++; /* ??? revisit */
1628 hash += ((unsigned int) SYMBOL_REF << 7) + h;
1629 return hash;
1632 case MEM:
1633 if (MEM_VOLATILE_P (x))
1635 *do_not_record_p = 1;
1636 return 0;
1639 hash += (unsigned int) MEM;
1640 /* We used alias set for hashing, but this is not good, since the alias
1641 set may differ in -fprofile-arcs and -fbranch-probabilities compilation
1642 causing the profiles to fail to match. */
1643 x = XEXP (x, 0);
1644 goto repeat;
1646 case PRE_DEC:
1647 case PRE_INC:
1648 case POST_DEC:
1649 case POST_INC:
1650 case PC:
1651 case CC0:
1652 case CALL:
1653 case UNSPEC_VOLATILE:
1654 *do_not_record_p = 1;
1655 return 0;
1657 case ASM_OPERANDS:
1658 if (MEM_VOLATILE_P (x))
1660 *do_not_record_p = 1;
1661 return 0;
1663 else
1665 /* We don't want to take the filename and line into account. */
1666 hash += (unsigned) code + (unsigned) GET_MODE (x)
1667 + hash_string_1 (ASM_OPERANDS_TEMPLATE (x))
1668 + hash_string_1 (ASM_OPERANDS_OUTPUT_CONSTRAINT (x))
1669 + (unsigned) ASM_OPERANDS_OUTPUT_IDX (x);
1671 if (ASM_OPERANDS_INPUT_LENGTH (x))
1673 for (i = 1; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
1675 hash += (hash_expr_1 (ASM_OPERANDS_INPUT (x, i),
1676 GET_MODE (ASM_OPERANDS_INPUT (x, i)),
1677 do_not_record_p)
1678 + hash_string_1 (ASM_OPERANDS_INPUT_CONSTRAINT
1679 (x, i)));
1682 hash += hash_string_1 (ASM_OPERANDS_INPUT_CONSTRAINT (x, 0));
1683 x = ASM_OPERANDS_INPUT (x, 0);
1684 mode = GET_MODE (x);
1685 goto repeat;
1687 return hash;
1690 default:
1691 break;
1694 hash += (unsigned) code + (unsigned) GET_MODE (x);
1695 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
1697 if (fmt[i] == 'e')
1699 /* If we are about to do the last recursive call
1700 needed at this level, change it into iteration.
1701 This function is called enough to be worth it. */
1702 if (i == 0)
1704 x = XEXP (x, i);
1705 goto repeat;
1708 hash += hash_expr_1 (XEXP (x, i), 0, do_not_record_p);
1709 if (*do_not_record_p)
1710 return 0;
1713 else if (fmt[i] == 'E')
1714 for (j = 0; j < XVECLEN (x, i); j++)
1716 hash += hash_expr_1 (XVECEXP (x, i, j), 0, do_not_record_p);
1717 if (*do_not_record_p)
1718 return 0;
1721 else if (fmt[i] == 's')
1722 hash += hash_string_1 (XSTR (x, i));
1723 else if (fmt[i] == 'i')
1724 hash += (unsigned int) XINT (x, i);
1725 else
1726 abort ();
1729 return hash;
1732 /* Hash a set of register REGNO.
1734 Sets are hashed on the register that is set. This simplifies the PRE copy
1735 propagation code.
1737 ??? May need to make things more elaborate. Later, as necessary. */
1739 static unsigned int
1740 hash_set (int regno, int hash_table_size)
1742 unsigned int hash;
1744 hash = regno;
1745 return hash % hash_table_size;
1748 /* Return nonzero if exp1 is equivalent to exp2.
1749 ??? Borrowed from cse.c. Might want to remerge with cse.c. Later. */
1751 static int
1752 expr_equiv_p (rtx x, rtx y)
1754 int i, j;
1755 enum rtx_code code;
1756 const char *fmt;
1758 if (x == y)
1759 return 1;
1761 if (x == 0 || y == 0)
1762 return 0;
1764 code = GET_CODE (x);
1765 if (code != GET_CODE (y))
1766 return 0;
1768 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
1769 if (GET_MODE (x) != GET_MODE (y))
1770 return 0;
1772 switch (code)
1774 case PC:
1775 case CC0:
1776 case CONST_INT:
1777 return 0;
1779 case LABEL_REF:
1780 return XEXP (x, 0) == XEXP (y, 0);
1782 case SYMBOL_REF:
1783 return XSTR (x, 0) == XSTR (y, 0);
1785 case REG:
1786 return REGNO (x) == REGNO (y);
1788 case MEM:
1789 /* Can't merge two expressions in different alias sets, since we can
1790 decide that the expression is transparent in a block when it isn't,
1791 due to it being set with the different alias set. */
1792 if (MEM_ALIAS_SET (x) != MEM_ALIAS_SET (y))
1793 return 0;
1795 /* A volatile mem should not be considered equivalent to any other. */
1796 if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
1797 return 0;
1798 break;
1800 /* For commutative operations, check both orders. */
1801 case PLUS:
1802 case MULT:
1803 case AND:
1804 case IOR:
1805 case XOR:
1806 case NE:
1807 case EQ:
1808 return ((expr_equiv_p (XEXP (x, 0), XEXP (y, 0))
1809 && expr_equiv_p (XEXP (x, 1), XEXP (y, 1)))
1810 || (expr_equiv_p (XEXP (x, 0), XEXP (y, 1))
1811 && expr_equiv_p (XEXP (x, 1), XEXP (y, 0))));
1813 case ASM_OPERANDS:
1814 /* We don't use the generic code below because we want to
1815 disregard filename and line numbers. */
1817 /* A volatile asm isn't equivalent to any other. */
1818 if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
1819 return 0;
1821 if (GET_MODE (x) != GET_MODE (y)
1822 || strcmp (ASM_OPERANDS_TEMPLATE (x), ASM_OPERANDS_TEMPLATE (y))
1823 || strcmp (ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
1824 ASM_OPERANDS_OUTPUT_CONSTRAINT (y))
1825 || ASM_OPERANDS_OUTPUT_IDX (x) != ASM_OPERANDS_OUTPUT_IDX (y)
1826 || ASM_OPERANDS_INPUT_LENGTH (x) != ASM_OPERANDS_INPUT_LENGTH (y))
1827 return 0;
1829 if (ASM_OPERANDS_INPUT_LENGTH (x))
1831 for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
1832 if (! expr_equiv_p (ASM_OPERANDS_INPUT (x, i),
1833 ASM_OPERANDS_INPUT (y, i))
1834 || strcmp (ASM_OPERANDS_INPUT_CONSTRAINT (x, i),
1835 ASM_OPERANDS_INPUT_CONSTRAINT (y, i)))
1836 return 0;
1839 return 1;
1841 default:
1842 break;
1845 /* Compare the elements. If any pair of corresponding elements
1846 fail to match, return 0 for the whole thing. */
1848 fmt = GET_RTX_FORMAT (code);
1849 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1851 switch (fmt[i])
1853 case 'e':
1854 if (! expr_equiv_p (XEXP (x, i), XEXP (y, i)))
1855 return 0;
1856 break;
1858 case 'E':
1859 if (XVECLEN (x, i) != XVECLEN (y, i))
1860 return 0;
1861 for (j = 0; j < XVECLEN (x, i); j++)
1862 if (! expr_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j)))
1863 return 0;
1864 break;
1866 case 's':
1867 if (strcmp (XSTR (x, i), XSTR (y, i)))
1868 return 0;
1869 break;
1871 case 'i':
1872 if (XINT (x, i) != XINT (y, i))
1873 return 0;
1874 break;
1876 case 'w':
1877 if (XWINT (x, i) != XWINT (y, i))
1878 return 0;
1879 break;
1881 case '0':
1882 break;
1884 default:
1885 abort ();
1889 return 1;
1892 /* Insert expression X in INSN in the hash TABLE.
1893 If it is already present, record it as the last occurrence in INSN's
1894 basic block.
1896 MODE is the mode of the value X is being stored into.
1897 It is only used if X is a CONST_INT.
1899 ANTIC_P is nonzero if X is an anticipatable expression.
1900 AVAIL_P is nonzero if X is an available expression. */
1902 static void
1903 insert_expr_in_table (rtx x, enum machine_mode mode, rtx insn, int antic_p,
1904 int avail_p, struct hash_table *table)
1906 int found, do_not_record_p;
1907 unsigned int hash;
1908 struct expr *cur_expr, *last_expr = NULL;
1909 struct occr *antic_occr, *avail_occr;
1910 struct occr *last_occr = NULL;
1912 hash = hash_expr (x, mode, &do_not_record_p, table->size);
1914 /* Do not insert expression in table if it contains volatile operands,
1915 or if hash_expr determines the expression is something we don't want
1916 to or can't handle. */
1917 if (do_not_record_p)
1918 return;
1920 cur_expr = table->table[hash];
1921 found = 0;
1923 while (cur_expr && 0 == (found = expr_equiv_p (cur_expr->expr, x)))
1925 /* If the expression isn't found, save a pointer to the end of
1926 the list. */
1927 last_expr = cur_expr;
1928 cur_expr = cur_expr->next_same_hash;
1931 if (! found)
1933 cur_expr = gcse_alloc (sizeof (struct expr));
1934 bytes_used += sizeof (struct expr);
1935 if (table->table[hash] == NULL)
1936 /* This is the first pattern that hashed to this index. */
1937 table->table[hash] = cur_expr;
1938 else
1939 /* Add EXPR to end of this hash chain. */
1940 last_expr->next_same_hash = cur_expr;
1942 /* Set the fields of the expr element. */
1943 cur_expr->expr = x;
1944 cur_expr->bitmap_index = table->n_elems++;
1945 cur_expr->next_same_hash = NULL;
1946 cur_expr->antic_occr = NULL;
1947 cur_expr->avail_occr = NULL;
1950 /* Now record the occurrence(s). */
1951 if (antic_p)
1953 antic_occr = cur_expr->antic_occr;
1955 /* Search for another occurrence in the same basic block. */
1956 while (antic_occr && BLOCK_NUM (antic_occr->insn) != BLOCK_NUM (insn))
1958 /* If an occurrence isn't found, save a pointer to the end of
1959 the list. */
1960 last_occr = antic_occr;
1961 antic_occr = antic_occr->next;
1964 if (antic_occr)
1965 /* Found another instance of the expression in the same basic block.
1966 Prefer the currently recorded one. We want the first one in the
1967 block and the block is scanned from start to end. */
1968 ; /* nothing to do */
1969 else
1971 /* First occurrence of this expression in this basic block. */
1972 antic_occr = gcse_alloc (sizeof (struct occr));
1973 bytes_used += sizeof (struct occr);
1974 /* First occurrence of this expression in any block? */
1975 if (cur_expr->antic_occr == NULL)
1976 cur_expr->antic_occr = antic_occr;
1977 else
1978 last_occr->next = antic_occr;
1980 antic_occr->insn = insn;
1981 antic_occr->next = NULL;
1985 if (avail_p)
1987 avail_occr = cur_expr->avail_occr;
1989 /* Search for another occurrence in the same basic block. */
1990 while (avail_occr && BLOCK_NUM (avail_occr->insn) != BLOCK_NUM (insn))
1992 /* If an occurrence isn't found, save a pointer to the end of
1993 the list. */
1994 last_occr = avail_occr;
1995 avail_occr = avail_occr->next;
1998 if (avail_occr)
1999 /* Found another instance of the expression in the same basic block.
2000 Prefer this occurrence to the currently recorded one. We want
2001 the last one in the block and the block is scanned from start
2002 to end. */
2003 avail_occr->insn = insn;
2004 else
2006 /* First occurrence of this expression in this basic block. */
2007 avail_occr = gcse_alloc (sizeof (struct occr));
2008 bytes_used += sizeof (struct occr);
2010 /* First occurrence of this expression in any block? */
2011 if (cur_expr->avail_occr == NULL)
2012 cur_expr->avail_occr = avail_occr;
2013 else
2014 last_occr->next = avail_occr;
2016 avail_occr->insn = insn;
2017 avail_occr->next = NULL;
2022 /* Insert pattern X in INSN in the hash table.
2023 X is a SET of a reg to either another reg or a constant.
2024 If it is already present, record it as the last occurrence in INSN's
2025 basic block. */
2027 static void
2028 insert_set_in_table (rtx x, rtx insn, struct hash_table *table)
2030 int found;
2031 unsigned int hash;
2032 struct expr *cur_expr, *last_expr = NULL;
2033 struct occr *cur_occr, *last_occr = NULL;
2035 if (GET_CODE (x) != SET
2036 || GET_CODE (SET_DEST (x)) != REG)
2037 abort ();
2039 hash = hash_set (REGNO (SET_DEST (x)), table->size);
2041 cur_expr = table->table[hash];
2042 found = 0;
2044 while (cur_expr && 0 == (found = expr_equiv_p (cur_expr->expr, x)))
2046 /* If the expression isn't found, save a pointer to the end of
2047 the list. */
2048 last_expr = cur_expr;
2049 cur_expr = cur_expr->next_same_hash;
2052 if (! found)
2054 cur_expr = gcse_alloc (sizeof (struct expr));
2055 bytes_used += sizeof (struct expr);
2056 if (table->table[hash] == NULL)
2057 /* This is the first pattern that hashed to this index. */
2058 table->table[hash] = cur_expr;
2059 else
2060 /* Add EXPR to end of this hash chain. */
2061 last_expr->next_same_hash = cur_expr;
2063 /* Set the fields of the expr element.
2064 We must copy X because it can be modified when copy propagation is
2065 performed on its operands. */
2066 cur_expr->expr = copy_rtx (x);
2067 cur_expr->bitmap_index = table->n_elems++;
2068 cur_expr->next_same_hash = NULL;
2069 cur_expr->antic_occr = NULL;
2070 cur_expr->avail_occr = NULL;
2073 /* Now record the occurrence. */
2074 cur_occr = cur_expr->avail_occr;
2076 /* Search for another occurrence in the same basic block. */
2077 while (cur_occr && BLOCK_NUM (cur_occr->insn) != BLOCK_NUM (insn))
2079 /* If an occurrence isn't found, save a pointer to the end of
2080 the list. */
2081 last_occr = cur_occr;
2082 cur_occr = cur_occr->next;
2085 if (cur_occr)
2086 /* Found another instance of the expression in the same basic block.
2087 Prefer this occurrence to the currently recorded one. We want the
2088 last one in the block and the block is scanned from start to end. */
2089 cur_occr->insn = insn;
2090 else
2092 /* First occurrence of this expression in this basic block. */
2093 cur_occr = gcse_alloc (sizeof (struct occr));
2094 bytes_used += sizeof (struct occr);
2096 /* First occurrence of this expression in any block? */
2097 if (cur_expr->avail_occr == NULL)
2098 cur_expr->avail_occr = cur_occr;
2099 else
2100 last_occr->next = cur_occr;
2102 cur_occr->insn = insn;
2103 cur_occr->next = NULL;
2107 /* Determine whether the rtx X should be treated as a constant for
2108 the purposes of GCSE's constant propagation. */
2110 static bool
2111 gcse_constant_p (rtx x)
2113 /* Consider a COMPARE of two integers constant. */
2114 if (GET_CODE (x) == COMPARE
2115 && GET_CODE (XEXP (x, 0)) == CONST_INT
2116 && GET_CODE (XEXP (x, 1)) == CONST_INT)
2117 return true;
2120 /* Consider a COMPARE of the same registers is a constant
2121 if they are not floating point registers. */
2122 if (GET_CODE(x) == COMPARE
2123 && GET_CODE (XEXP (x, 0)) == REG
2124 && GET_CODE (XEXP (x, 1)) == REG
2125 && REGNO (XEXP (x, 0)) == REGNO (XEXP (x, 1))
2126 && ! FLOAT_MODE_P (GET_MODE (XEXP (x, 0)))
2127 && ! FLOAT_MODE_P (GET_MODE (XEXP (x, 1))))
2128 return true;
2130 if (GET_CODE (x) == CONSTANT_P_RTX)
2131 return false;
2133 return CONSTANT_P (x);
2136 /* Scan pattern PAT of INSN and add an entry to the hash TABLE (set or
2137 expression one). */
2139 static void
2140 hash_scan_set (rtx pat, rtx insn, struct hash_table *table)
2142 rtx src = SET_SRC (pat);
2143 rtx dest = SET_DEST (pat);
2144 rtx note;
2146 if (GET_CODE (src) == CALL)
2147 hash_scan_call (src, insn, table);
2149 else if (GET_CODE (dest) == REG)
2151 unsigned int regno = REGNO (dest);
2152 rtx tmp;
2154 /* If this is a single set and we are doing constant propagation,
2155 see if a REG_NOTE shows this equivalent to a constant. */
2156 if (table->set_p && (note = find_reg_equal_equiv_note (insn)) != 0
2157 && gcse_constant_p (XEXP (note, 0)))
2158 src = XEXP (note, 0), pat = gen_rtx_SET (VOIDmode, dest, src);
2160 /* Only record sets of pseudo-regs in the hash table. */
2161 if (! table->set_p
2162 && regno >= FIRST_PSEUDO_REGISTER
2163 /* Don't GCSE something if we can't do a reg/reg copy. */
2164 && can_copy_p (GET_MODE (dest))
2165 /* GCSE commonly inserts instruction after the insn. We can't
2166 do that easily for EH_REGION notes so disable GCSE on these
2167 for now. */
2168 && !find_reg_note (insn, REG_EH_REGION, NULL_RTX)
2169 /* Is SET_SRC something we want to gcse? */
2170 && want_to_gcse_p (src)
2171 /* Don't CSE a nop. */
2172 && ! set_noop_p (pat)
2173 /* Don't GCSE if it has attached REG_EQUIV note.
2174 At this point this only function parameters should have
2175 REG_EQUIV notes and if the argument slot is used somewhere
2176 explicitly, it means address of parameter has been taken,
2177 so we should not extend the lifetime of the pseudo. */
2178 && ((note = find_reg_note (insn, REG_EQUIV, NULL_RTX)) == 0
2179 || GET_CODE (XEXP (note, 0)) != MEM))
2181 /* An expression is not anticipatable if its operands are
2182 modified before this insn or if this is not the only SET in
2183 this insn. */
2184 int antic_p = oprs_anticipatable_p (src, insn) && single_set (insn);
2185 /* An expression is not available if its operands are
2186 subsequently modified, including this insn. It's also not
2187 available if this is a branch, because we can't insert
2188 a set after the branch. */
2189 int avail_p = (oprs_available_p (src, insn)
2190 && ! JUMP_P (insn));
2192 insert_expr_in_table (src, GET_MODE (dest), insn, antic_p, avail_p, table);
2195 /* Record sets for constant/copy propagation. */
2196 else if (table->set_p
2197 && regno >= FIRST_PSEUDO_REGISTER
2198 && ((GET_CODE (src) == REG
2199 && REGNO (src) >= FIRST_PSEUDO_REGISTER
2200 && can_copy_p (GET_MODE (dest))
2201 && REGNO (src) != regno)
2202 || gcse_constant_p (src))
2203 /* A copy is not available if its src or dest is subsequently
2204 modified. Here we want to search from INSN+1 on, but
2205 oprs_available_p searches from INSN on. */
2206 && (insn == BB_END (BLOCK_FOR_INSN (insn))
2207 || ((tmp = next_nonnote_insn (insn)) != NULL_RTX
2208 && oprs_available_p (pat, tmp))))
2209 insert_set_in_table (pat, insn, table);
2211 /* In case of store we want to consider the memory value as available in
2212 the REG stored in that memory. This makes it possible to remove
2213 redundant loads from due to stores to the same location. */
2214 else if (flag_gcse_las && GET_CODE (src) == REG && GET_CODE (dest) == MEM)
2216 unsigned int regno = REGNO (src);
2218 /* Do not do this for constant/copy propagation. */
2219 if (! table->set_p
2220 /* Only record sets of pseudo-regs in the hash table. */
2221 && regno >= FIRST_PSEUDO_REGISTER
2222 /* Don't GCSE something if we can't do a reg/reg copy. */
2223 && can_copy_p (GET_MODE (src))
2224 /* GCSE commonly inserts instruction after the insn. We can't
2225 do that easily for EH_REGION notes so disable GCSE on these
2226 for now. */
2227 && ! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
2228 /* Is SET_DEST something we want to gcse? */
2229 && want_to_gcse_p (dest)
2230 /* Don't CSE a nop. */
2231 && ! set_noop_p (pat)
2232 /* Don't GCSE if it has attached REG_EQUIV note.
2233 At this point this only function parameters should have
2234 REG_EQUIV notes and if the argument slot is used somewhere
2235 explicitly, it means address of parameter has been taken,
2236 so we should not extend the lifetime of the pseudo. */
2237 && ((note = find_reg_note (insn, REG_EQUIV, NULL_RTX)) == 0
2238 || GET_CODE (XEXP (note, 0)) != MEM))
2240 /* Stores are never anticipatable. */
2241 int antic_p = 0;
2242 /* An expression is not available if its operands are
2243 subsequently modified, including this insn. It's also not
2244 available if this is a branch, because we can't insert
2245 a set after the branch. */
2246 int avail_p = oprs_available_p (dest, insn)
2247 && ! JUMP_P (insn);
2249 /* Record the memory expression (DEST) in the hash table. */
2250 insert_expr_in_table (dest, GET_MODE (dest), insn,
2251 antic_p, avail_p, table);
2256 static void
2257 hash_scan_clobber (rtx x ATTRIBUTE_UNUSED, rtx insn ATTRIBUTE_UNUSED,
2258 struct hash_table *table ATTRIBUTE_UNUSED)
2260 /* Currently nothing to do. */
2263 static void
2264 hash_scan_call (rtx x ATTRIBUTE_UNUSED, rtx insn ATTRIBUTE_UNUSED,
2265 struct hash_table *table ATTRIBUTE_UNUSED)
2267 /* Currently nothing to do. */
2270 /* Process INSN and add hash table entries as appropriate.
2272 Only available expressions that set a single pseudo-reg are recorded.
2274 Single sets in a PARALLEL could be handled, but it's an extra complication
2275 that isn't dealt with right now. The trick is handling the CLOBBERs that
2276 are also in the PARALLEL. Later.
2278 If SET_P is nonzero, this is for the assignment hash table,
2279 otherwise it is for the expression hash table.
2280 If IN_LIBCALL_BLOCK nonzero, we are in a libcall block, and should
2281 not record any expressions. */
2283 static void
2284 hash_scan_insn (rtx insn, struct hash_table *table, int in_libcall_block)
2286 rtx pat = PATTERN (insn);
2287 int i;
2289 if (in_libcall_block)
2290 return;
2292 /* Pick out the sets of INSN and for other forms of instructions record
2293 what's been modified. */
2295 if (GET_CODE (pat) == SET)
2296 hash_scan_set (pat, insn, table);
2297 else if (GET_CODE (pat) == PARALLEL)
2298 for (i = 0; i < XVECLEN (pat, 0); i++)
2300 rtx x = XVECEXP (pat, 0, i);
2302 if (GET_CODE (x) == SET)
2303 hash_scan_set (x, insn, table);
2304 else if (GET_CODE (x) == CLOBBER)
2305 hash_scan_clobber (x, insn, table);
2306 else if (GET_CODE (x) == CALL)
2307 hash_scan_call (x, insn, table);
2310 else if (GET_CODE (pat) == CLOBBER)
2311 hash_scan_clobber (pat, insn, table);
2312 else if (GET_CODE (pat) == CALL)
2313 hash_scan_call (pat, insn, table);
2316 static void
2317 dump_hash_table (FILE *file, const char *name, struct hash_table *table)
2319 int i;
2320 /* Flattened out table, so it's printed in proper order. */
2321 struct expr **flat_table;
2322 unsigned int *hash_val;
2323 struct expr *expr;
2325 flat_table = xcalloc (table->n_elems, sizeof (struct expr *));
2326 hash_val = xmalloc (table->n_elems * sizeof (unsigned int));
2328 for (i = 0; i < (int) table->size; i++)
2329 for (expr = table->table[i]; expr != NULL; expr = expr->next_same_hash)
2331 flat_table[expr->bitmap_index] = expr;
2332 hash_val[expr->bitmap_index] = i;
2335 fprintf (file, "%s hash table (%d buckets, %d entries)\n",
2336 name, table->size, table->n_elems);
2338 for (i = 0; i < (int) table->n_elems; i++)
2339 if (flat_table[i] != 0)
2341 expr = flat_table[i];
2342 fprintf (file, "Index %d (hash value %d)\n ",
2343 expr->bitmap_index, hash_val[i]);
2344 print_rtl (file, expr->expr);
2345 fprintf (file, "\n");
2348 fprintf (file, "\n");
2350 free (flat_table);
2351 free (hash_val);
2354 /* Record register first/last/block set information for REGNO in INSN.
2356 first_set records the first place in the block where the register
2357 is set and is used to compute "anticipatability".
2359 last_set records the last place in the block where the register
2360 is set and is used to compute "availability".
2362 last_bb records the block for which first_set and last_set are
2363 valid, as a quick test to invalidate them.
2365 reg_set_in_block records whether the register is set in the block
2366 and is used to compute "transparency". */
2368 static void
2369 record_last_reg_set_info (rtx insn, int regno)
2371 struct reg_avail_info *info = &reg_avail_info[regno];
2372 int cuid = INSN_CUID (insn);
2374 info->last_set = cuid;
2375 if (info->last_bb != current_bb)
2377 info->last_bb = current_bb;
2378 info->first_set = cuid;
2379 SET_BIT (reg_set_in_block[current_bb->index], regno);
2384 /* Record all of the canonicalized MEMs of record_last_mem_set_info's insn.
2385 Note we store a pair of elements in the list, so they have to be
2386 taken off pairwise. */
2388 static void
2389 canon_list_insert (rtx dest ATTRIBUTE_UNUSED, rtx unused1 ATTRIBUTE_UNUSED,
2390 void * v_insn)
2392 rtx dest_addr, insn;
2393 int bb;
2395 while (GET_CODE (dest) == SUBREG
2396 || GET_CODE (dest) == ZERO_EXTRACT
2397 || GET_CODE (dest) == SIGN_EXTRACT
2398 || GET_CODE (dest) == STRICT_LOW_PART)
2399 dest = XEXP (dest, 0);
2401 /* If DEST is not a MEM, then it will not conflict with a load. Note
2402 that function calls are assumed to clobber memory, but are handled
2403 elsewhere. */
2405 if (GET_CODE (dest) != MEM)
2406 return;
2408 dest_addr = get_addr (XEXP (dest, 0));
2409 dest_addr = canon_rtx (dest_addr);
2410 insn = (rtx) v_insn;
2411 bb = BLOCK_NUM (insn);
2413 canon_modify_mem_list[bb] =
2414 alloc_EXPR_LIST (VOIDmode, dest_addr, canon_modify_mem_list[bb]);
2415 canon_modify_mem_list[bb] =
2416 alloc_EXPR_LIST (VOIDmode, dest, canon_modify_mem_list[bb]);
2417 bitmap_set_bit (canon_modify_mem_list_set, bb);
2420 /* Record memory modification information for INSN. We do not actually care
2421 about the memory location(s) that are set, or even how they are set (consider
2422 a CALL_INSN). We merely need to record which insns modify memory. */
2424 static void
2425 record_last_mem_set_info (rtx insn)
2427 int bb = BLOCK_NUM (insn);
2429 /* load_killed_in_block_p will handle the case of calls clobbering
2430 everything. */
2431 modify_mem_list[bb] = alloc_INSN_LIST (insn, modify_mem_list[bb]);
2432 bitmap_set_bit (modify_mem_list_set, bb);
2434 if (GET_CODE (insn) == CALL_INSN)
2436 /* Note that traversals of this loop (other than for free-ing)
2437 will break after encountering a CALL_INSN. So, there's no
2438 need to insert a pair of items, as canon_list_insert does. */
2439 canon_modify_mem_list[bb] =
2440 alloc_INSN_LIST (insn, canon_modify_mem_list[bb]);
2441 bitmap_set_bit (canon_modify_mem_list_set, bb);
2443 else
2444 note_stores (PATTERN (insn), canon_list_insert, (void*) insn);
2447 /* Called from compute_hash_table via note_stores to handle one
2448 SET or CLOBBER in an insn. DATA is really the instruction in which
2449 the SET is taking place. */
2451 static void
2452 record_last_set_info (rtx dest, rtx setter ATTRIBUTE_UNUSED, void *data)
2454 rtx last_set_insn = (rtx) data;
2456 if (GET_CODE (dest) == SUBREG)
2457 dest = SUBREG_REG (dest);
2459 if (GET_CODE (dest) == REG)
2460 record_last_reg_set_info (last_set_insn, REGNO (dest));
2461 else if (GET_CODE (dest) == MEM
2462 /* Ignore pushes, they clobber nothing. */
2463 && ! push_operand (dest, GET_MODE (dest)))
2464 record_last_mem_set_info (last_set_insn);
2467 /* Top level function to create an expression or assignment hash table.
2469 Expression entries are placed in the hash table if
2470 - they are of the form (set (pseudo-reg) src),
2471 - src is something we want to perform GCSE on,
2472 - none of the operands are subsequently modified in the block
2474 Assignment entries are placed in the hash table if
2475 - they are of the form (set (pseudo-reg) src),
2476 - src is something we want to perform const/copy propagation on,
2477 - none of the operands or target are subsequently modified in the block
2479 Currently src must be a pseudo-reg or a const_int.
2481 TABLE is the table computed. */
2483 static void
2484 compute_hash_table_work (struct hash_table *table)
2486 unsigned int i;
2488 /* While we compute the hash table we also compute a bit array of which
2489 registers are set in which blocks.
2490 ??? This isn't needed during const/copy propagation, but it's cheap to
2491 compute. Later. */
2492 sbitmap_vector_zero (reg_set_in_block, last_basic_block);
2494 /* re-Cache any INSN_LIST nodes we have allocated. */
2495 clear_modify_mem_tables ();
2496 /* Some working arrays used to track first and last set in each block. */
2497 reg_avail_info = gmalloc (max_gcse_regno * sizeof (struct reg_avail_info));
2499 for (i = 0; i < max_gcse_regno; ++i)
2500 reg_avail_info[i].last_bb = NULL;
2502 FOR_EACH_BB (current_bb)
2504 rtx insn;
2505 unsigned int regno;
2506 int in_libcall_block;
2508 /* First pass over the instructions records information used to
2509 determine when registers and memory are first and last set.
2510 ??? hard-reg reg_set_in_block computation
2511 could be moved to compute_sets since they currently don't change. */
2513 for (insn = BB_HEAD (current_bb);
2514 insn && insn != NEXT_INSN (BB_END (current_bb));
2515 insn = NEXT_INSN (insn))
2517 if (! INSN_P (insn))
2518 continue;
2520 if (GET_CODE (insn) == CALL_INSN)
2522 bool clobbers_all = false;
2523 #ifdef NON_SAVING_SETJMP
2524 if (NON_SAVING_SETJMP
2525 && find_reg_note (insn, REG_SETJMP, NULL_RTX))
2526 clobbers_all = true;
2527 #endif
2529 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2530 if (clobbers_all
2531 || TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
2532 record_last_reg_set_info (insn, regno);
2534 mark_call (insn);
2537 note_stores (PATTERN (insn), record_last_set_info, insn);
2540 /* Insert implicit sets in the hash table. */
2541 if (table->set_p
2542 && implicit_sets[current_bb->index] != NULL_RTX)
2543 hash_scan_set (implicit_sets[current_bb->index],
2544 BB_HEAD (current_bb), table);
2546 /* The next pass builds the hash table. */
2548 for (insn = BB_HEAD (current_bb), in_libcall_block = 0;
2549 insn && insn != NEXT_INSN (BB_END (current_bb));
2550 insn = NEXT_INSN (insn))
2551 if (INSN_P (insn))
2553 if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
2554 in_libcall_block = 1;
2555 else if (table->set_p && find_reg_note (insn, REG_RETVAL, NULL_RTX))
2556 in_libcall_block = 0;
2557 hash_scan_insn (insn, table, in_libcall_block);
2558 if (!table->set_p && find_reg_note (insn, REG_RETVAL, NULL_RTX))
2559 in_libcall_block = 0;
2563 free (reg_avail_info);
2564 reg_avail_info = NULL;
2567 /* Allocate space for the set/expr hash TABLE.
2568 N_INSNS is the number of instructions in the function.
2569 It is used to determine the number of buckets to use.
2570 SET_P determines whether set or expression table will
2571 be created. */
2573 static void
2574 alloc_hash_table (int n_insns, struct hash_table *table, int set_p)
2576 int n;
2578 table->size = n_insns / 4;
2579 if (table->size < 11)
2580 table->size = 11;
2582 /* Attempt to maintain efficient use of hash table.
2583 Making it an odd number is simplest for now.
2584 ??? Later take some measurements. */
2585 table->size |= 1;
2586 n = table->size * sizeof (struct expr *);
2587 table->table = gmalloc (n);
2588 table->set_p = set_p;
2591 /* Free things allocated by alloc_hash_table. */
2593 static void
2594 free_hash_table (struct hash_table *table)
2596 free (table->table);
2599 /* Compute the hash TABLE for doing copy/const propagation or
2600 expression hash table. */
2602 static void
2603 compute_hash_table (struct hash_table *table)
2605 /* Initialize count of number of entries in hash table. */
2606 table->n_elems = 0;
2607 memset (table->table, 0, table->size * sizeof (struct expr *));
2609 compute_hash_table_work (table);
2612 /* Expression tracking support. */
2614 /* Lookup pattern PAT in the expression TABLE.
2615 The result is a pointer to the table entry, or NULL if not found. */
2617 static struct expr *
2618 lookup_expr (rtx pat, struct hash_table *table)
2620 int do_not_record_p;
2621 unsigned int hash = hash_expr (pat, GET_MODE (pat), &do_not_record_p,
2622 table->size);
2623 struct expr *expr;
2625 if (do_not_record_p)
2626 return NULL;
2628 expr = table->table[hash];
2630 while (expr && ! expr_equiv_p (expr->expr, pat))
2631 expr = expr->next_same_hash;
2633 return expr;
2636 /* Lookup REGNO in the set TABLE. The result is a pointer to the
2637 table entry, or NULL if not found. */
2639 static struct expr *
2640 lookup_set (unsigned int regno, struct hash_table *table)
2642 unsigned int hash = hash_set (regno, table->size);
2643 struct expr *expr;
2645 expr = table->table[hash];
2647 while (expr && REGNO (SET_DEST (expr->expr)) != regno)
2648 expr = expr->next_same_hash;
2650 return expr;
2653 /* Return the next entry for REGNO in list EXPR. */
2655 static struct expr *
2656 next_set (unsigned int regno, struct expr *expr)
2659 expr = expr->next_same_hash;
2660 while (expr && REGNO (SET_DEST (expr->expr)) != regno);
2662 return expr;
2665 /* Like free_INSN_LIST_list or free_EXPR_LIST_list, except that the node
2666 types may be mixed. */
2668 static void
2669 free_insn_expr_list_list (rtx *listp)
2671 rtx list, next;
2673 for (list = *listp; list ; list = next)
2675 next = XEXP (list, 1);
2676 if (GET_CODE (list) == EXPR_LIST)
2677 free_EXPR_LIST_node (list);
2678 else
2679 free_INSN_LIST_node (list);
2682 *listp = NULL;
2685 /* Clear canon_modify_mem_list and modify_mem_list tables. */
2686 static void
2687 clear_modify_mem_tables (void)
2689 int i;
2691 EXECUTE_IF_SET_IN_BITMAP
2692 (modify_mem_list_set, 0, i, free_INSN_LIST_list (modify_mem_list + i));
2693 bitmap_clear (modify_mem_list_set);
2695 EXECUTE_IF_SET_IN_BITMAP
2696 (canon_modify_mem_list_set, 0, i,
2697 free_insn_expr_list_list (canon_modify_mem_list + i));
2698 bitmap_clear (canon_modify_mem_list_set);
2701 /* Release memory used by modify_mem_list_set and canon_modify_mem_list_set. */
2703 static void
2704 free_modify_mem_tables (void)
2706 clear_modify_mem_tables ();
2707 free (modify_mem_list);
2708 free (canon_modify_mem_list);
2709 modify_mem_list = 0;
2710 canon_modify_mem_list = 0;
2713 /* Reset tables used to keep track of what's still available [since the
2714 start of the block]. */
2716 static void
2717 reset_opr_set_tables (void)
2719 /* Maintain a bitmap of which regs have been set since beginning of
2720 the block. */
2721 CLEAR_REG_SET (reg_set_bitmap);
2723 /* Also keep a record of the last instruction to modify memory.
2724 For now this is very trivial, we only record whether any memory
2725 location has been modified. */
2726 clear_modify_mem_tables ();
2729 /* Return nonzero if the operands of X are not set before INSN in
2730 INSN's basic block. */
2732 static int
2733 oprs_not_set_p (rtx x, rtx insn)
2735 int i, j;
2736 enum rtx_code code;
2737 const char *fmt;
2739 if (x == 0)
2740 return 1;
2742 code = GET_CODE (x);
2743 switch (code)
2745 case PC:
2746 case CC0:
2747 case CONST:
2748 case CONST_INT:
2749 case CONST_DOUBLE:
2750 case CONST_VECTOR:
2751 case SYMBOL_REF:
2752 case LABEL_REF:
2753 case ADDR_VEC:
2754 case ADDR_DIFF_VEC:
2755 return 1;
2757 case MEM:
2758 if (load_killed_in_block_p (BLOCK_FOR_INSN (insn),
2759 INSN_CUID (insn), x, 0))
2760 return 0;
2761 else
2762 return oprs_not_set_p (XEXP (x, 0), insn);
2764 case REG:
2765 return ! REGNO_REG_SET_P (reg_set_bitmap, REGNO (x));
2767 default:
2768 break;
2771 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
2773 if (fmt[i] == 'e')
2775 /* If we are about to do the last recursive call
2776 needed at this level, change it into iteration.
2777 This function is called enough to be worth it. */
2778 if (i == 0)
2779 return oprs_not_set_p (XEXP (x, i), insn);
2781 if (! oprs_not_set_p (XEXP (x, i), insn))
2782 return 0;
2784 else if (fmt[i] == 'E')
2785 for (j = 0; j < XVECLEN (x, i); j++)
2786 if (! oprs_not_set_p (XVECEXP (x, i, j), insn))
2787 return 0;
2790 return 1;
2793 /* Mark things set by a CALL. */
2795 static void
2796 mark_call (rtx insn)
2798 if (! CONST_OR_PURE_CALL_P (insn))
2799 record_last_mem_set_info (insn);
2802 /* Mark things set by a SET. */
2804 static void
2805 mark_set (rtx pat, rtx insn)
2807 rtx dest = SET_DEST (pat);
2809 while (GET_CODE (dest) == SUBREG
2810 || GET_CODE (dest) == ZERO_EXTRACT
2811 || GET_CODE (dest) == SIGN_EXTRACT
2812 || GET_CODE (dest) == STRICT_LOW_PART)
2813 dest = XEXP (dest, 0);
2815 if (GET_CODE (dest) == REG)
2816 SET_REGNO_REG_SET (reg_set_bitmap, REGNO (dest));
2817 else if (GET_CODE (dest) == MEM)
2818 record_last_mem_set_info (insn);
2820 if (GET_CODE (SET_SRC (pat)) == CALL)
2821 mark_call (insn);
2824 /* Record things set by a CLOBBER. */
2826 static void
2827 mark_clobber (rtx pat, rtx insn)
2829 rtx clob = XEXP (pat, 0);
2831 while (GET_CODE (clob) == SUBREG || GET_CODE (clob) == STRICT_LOW_PART)
2832 clob = XEXP (clob, 0);
2834 if (GET_CODE (clob) == REG)
2835 SET_REGNO_REG_SET (reg_set_bitmap, REGNO (clob));
2836 else
2837 record_last_mem_set_info (insn);
2840 /* Record things set by INSN.
2841 This data is used by oprs_not_set_p. */
2843 static void
2844 mark_oprs_set (rtx insn)
2846 rtx pat = PATTERN (insn);
2847 int i;
2849 if (GET_CODE (pat) == SET)
2850 mark_set (pat, insn);
2851 else if (GET_CODE (pat) == PARALLEL)
2852 for (i = 0; i < XVECLEN (pat, 0); i++)
2854 rtx x = XVECEXP (pat, 0, i);
2856 if (GET_CODE (x) == SET)
2857 mark_set (x, insn);
2858 else if (GET_CODE (x) == CLOBBER)
2859 mark_clobber (x, insn);
2860 else if (GET_CODE (x) == CALL)
2861 mark_call (insn);
2864 else if (GET_CODE (pat) == CLOBBER)
2865 mark_clobber (pat, insn);
2866 else if (GET_CODE (pat) == CALL)
2867 mark_call (insn);
2871 /* Classic GCSE reaching definition support. */
2873 /* Allocate reaching def variables. */
2875 static void
2876 alloc_rd_mem (int n_blocks, int n_insns)
2878 rd_kill = sbitmap_vector_alloc (n_blocks, n_insns);
2879 sbitmap_vector_zero (rd_kill, n_blocks);
2881 rd_gen = sbitmap_vector_alloc (n_blocks, n_insns);
2882 sbitmap_vector_zero (rd_gen, n_blocks);
2884 reaching_defs = sbitmap_vector_alloc (n_blocks, n_insns);
2885 sbitmap_vector_zero (reaching_defs, n_blocks);
2887 rd_out = sbitmap_vector_alloc (n_blocks, n_insns);
2888 sbitmap_vector_zero (rd_out, n_blocks);
2891 /* Free reaching def variables. */
2893 static void
2894 free_rd_mem (void)
2896 sbitmap_vector_free (rd_kill);
2897 sbitmap_vector_free (rd_gen);
2898 sbitmap_vector_free (reaching_defs);
2899 sbitmap_vector_free (rd_out);
2902 /* Add INSN to the kills of BB. REGNO, set in BB, is killed by INSN. */
2904 static void
2905 handle_rd_kill_set (rtx insn, int regno, basic_block bb)
2907 struct reg_set *this_reg;
2909 for (this_reg = reg_set_table[regno]; this_reg; this_reg = this_reg ->next)
2910 if (BLOCK_NUM (this_reg->insn) != BLOCK_NUM (insn))
2911 SET_BIT (rd_kill[bb->index], INSN_CUID (this_reg->insn));
2914 /* Compute the set of kill's for reaching definitions. */
2916 static void
2917 compute_kill_rd (void)
2919 int cuid;
2920 unsigned int regno;
2921 int i;
2922 basic_block bb;
2924 /* For each block
2925 For each set bit in `gen' of the block (i.e each insn which
2926 generates a definition in the block)
2927 Call the reg set by the insn corresponding to that bit regx
2928 Look at the linked list starting at reg_set_table[regx]
2929 For each setting of regx in the linked list, which is not in
2930 this block
2931 Set the bit in `kill' corresponding to that insn. */
2932 FOR_EACH_BB (bb)
2933 for (cuid = 0; cuid < max_cuid; cuid++)
2934 if (TEST_BIT (rd_gen[bb->index], cuid))
2936 rtx insn = CUID_INSN (cuid);
2937 rtx pat = PATTERN (insn);
2939 if (GET_CODE (insn) == CALL_INSN)
2941 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2942 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
2943 handle_rd_kill_set (insn, regno, bb);
2946 if (GET_CODE (pat) == PARALLEL)
2948 for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
2950 enum rtx_code code = GET_CODE (XVECEXP (pat, 0, i));
2952 if ((code == SET || code == CLOBBER)
2953 && GET_CODE (XEXP (XVECEXP (pat, 0, i), 0)) == REG)
2954 handle_rd_kill_set (insn,
2955 REGNO (XEXP (XVECEXP (pat, 0, i), 0)),
2956 bb);
2959 else if (GET_CODE (pat) == SET && GET_CODE (SET_DEST (pat)) == REG)
2960 /* Each setting of this register outside of this block
2961 must be marked in the set of kills in this block. */
2962 handle_rd_kill_set (insn, REGNO (SET_DEST (pat)), bb);
2966 /* Compute the reaching definitions as in
2967 Compilers Principles, Techniques, and Tools. Aho, Sethi, Ullman,
2968 Chapter 10. It is the same algorithm as used for computing available
2969 expressions but applied to the gens and kills of reaching definitions. */
2971 static void
2972 compute_rd (void)
2974 int changed, passes;
2975 basic_block bb;
2977 FOR_EACH_BB (bb)
2978 sbitmap_copy (rd_out[bb->index] /*dst*/, rd_gen[bb->index] /*src*/);
2980 passes = 0;
2981 changed = 1;
2982 while (changed)
2984 changed = 0;
2985 FOR_EACH_BB (bb)
2987 sbitmap_union_of_preds (reaching_defs[bb->index], rd_out, bb->index);
2988 changed |= sbitmap_union_of_diff_cg (rd_out[bb->index], rd_gen[bb->index],
2989 reaching_defs[bb->index], rd_kill[bb->index]);
2991 passes++;
2994 if (gcse_file)
2995 fprintf (gcse_file, "reaching def computation: %d passes\n", passes);
2998 /* Classic GCSE available expression support. */
3000 /* Allocate memory for available expression computation. */
3002 static void
3003 alloc_avail_expr_mem (int n_blocks, int n_exprs)
3005 ae_kill = sbitmap_vector_alloc (n_blocks, n_exprs);
3006 sbitmap_vector_zero (ae_kill, n_blocks);
3008 ae_gen = sbitmap_vector_alloc (n_blocks, n_exprs);
3009 sbitmap_vector_zero (ae_gen, n_blocks);
3011 ae_in = sbitmap_vector_alloc (n_blocks, n_exprs);
3012 sbitmap_vector_zero (ae_in, n_blocks);
3014 ae_out = sbitmap_vector_alloc (n_blocks, n_exprs);
3015 sbitmap_vector_zero (ae_out, n_blocks);
3018 static void
3019 free_avail_expr_mem (void)
3021 sbitmap_vector_free (ae_kill);
3022 sbitmap_vector_free (ae_gen);
3023 sbitmap_vector_free (ae_in);
3024 sbitmap_vector_free (ae_out);
3027 /* Compute the set of available expressions generated in each basic block. */
3029 static void
3030 compute_ae_gen (struct hash_table *expr_hash_table)
3032 unsigned int i;
3033 struct expr *expr;
3034 struct occr *occr;
3036 /* For each recorded occurrence of each expression, set ae_gen[bb][expr].
3037 This is all we have to do because an expression is not recorded if it
3038 is not available, and the only expressions we want to work with are the
3039 ones that are recorded. */
3040 for (i = 0; i < expr_hash_table->size; i++)
3041 for (expr = expr_hash_table->table[i]; expr != 0; expr = expr->next_same_hash)
3042 for (occr = expr->avail_occr; occr != 0; occr = occr->next)
3043 SET_BIT (ae_gen[BLOCK_NUM (occr->insn)], expr->bitmap_index);
3046 /* Return nonzero if expression X is killed in BB. */
3048 static int
3049 expr_killed_p (rtx x, basic_block bb)
3051 int i, j;
3052 enum rtx_code code;
3053 const char *fmt;
3055 if (x == 0)
3056 return 1;
3058 code = GET_CODE (x);
3059 switch (code)
3061 case REG:
3062 return TEST_BIT (reg_set_in_block[bb->index], REGNO (x));
3064 case MEM:
3065 if (load_killed_in_block_p (bb, get_max_uid () + 1, x, 0))
3066 return 1;
3067 else
3068 return expr_killed_p (XEXP (x, 0), bb);
3070 case PC:
3071 case CC0: /*FIXME*/
3072 case CONST:
3073 case CONST_INT:
3074 case CONST_DOUBLE:
3075 case CONST_VECTOR:
3076 case SYMBOL_REF:
3077 case LABEL_REF:
3078 case ADDR_VEC:
3079 case ADDR_DIFF_VEC:
3080 return 0;
3082 default:
3083 break;
3086 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
3088 if (fmt[i] == 'e')
3090 /* If we are about to do the last recursive call
3091 needed at this level, change it into iteration.
3092 This function is called enough to be worth it. */
3093 if (i == 0)
3094 return expr_killed_p (XEXP (x, i), bb);
3095 else if (expr_killed_p (XEXP (x, i), bb))
3096 return 1;
3098 else if (fmt[i] == 'E')
3099 for (j = 0; j < XVECLEN (x, i); j++)
3100 if (expr_killed_p (XVECEXP (x, i, j), bb))
3101 return 1;
3104 return 0;
3107 /* Compute the set of available expressions killed in each basic block. */
3109 static void
3110 compute_ae_kill (sbitmap *ae_gen, sbitmap *ae_kill,
3111 struct hash_table *expr_hash_table)
3113 basic_block bb;
3114 unsigned int i;
3115 struct expr *expr;
3117 FOR_EACH_BB (bb)
3118 for (i = 0; i < expr_hash_table->size; i++)
3119 for (expr = expr_hash_table->table[i]; expr; expr = expr->next_same_hash)
3121 /* Skip EXPR if generated in this block. */
3122 if (TEST_BIT (ae_gen[bb->index], expr->bitmap_index))
3123 continue;
3125 if (expr_killed_p (expr->expr, bb))
3126 SET_BIT (ae_kill[bb->index], expr->bitmap_index);
3130 /* Actually perform the Classic GCSE optimizations. */
3132 /* Return nonzero if occurrence OCCR of expression EXPR reaches block BB.
3134 CHECK_SELF_LOOP is nonzero if we should consider a block reaching itself
3135 as a positive reach. We want to do this when there are two computations
3136 of the expression in the block.
3138 VISITED is a pointer to a working buffer for tracking which BB's have
3139 been visited. It is NULL for the top-level call.
3141 We treat reaching expressions that go through blocks containing the same
3142 reaching expression as "not reaching". E.g. if EXPR is generated in blocks
3143 2 and 3, INSN is in block 4, and 2->3->4, we treat the expression in block
3144 2 as not reaching. The intent is to improve the probability of finding
3145 only one reaching expression and to reduce register lifetimes by picking
3146 the closest such expression. */
3148 static int
3149 expr_reaches_here_p_work (struct occr *occr, struct expr *expr,
3150 basic_block bb, int check_self_loop, char *visited)
3152 edge pred;
3154 for (pred = bb->pred; pred != NULL; pred = pred->pred_next)
3156 basic_block pred_bb = pred->src;
3158 if (visited[pred_bb->index])
3159 /* This predecessor has already been visited. Nothing to do. */
3161 else if (pred_bb == bb)
3163 /* BB loops on itself. */
3164 if (check_self_loop
3165 && TEST_BIT (ae_gen[pred_bb->index], expr->bitmap_index)
3166 && BLOCK_NUM (occr->insn) == pred_bb->index)
3167 return 1;
3169 visited[pred_bb->index] = 1;
3172 /* Ignore this predecessor if it kills the expression. */
3173 else if (TEST_BIT (ae_kill[pred_bb->index], expr->bitmap_index))
3174 visited[pred_bb->index] = 1;
3176 /* Does this predecessor generate this expression? */
3177 else if (TEST_BIT (ae_gen[pred_bb->index], expr->bitmap_index))
3179 /* Is this the occurrence we're looking for?
3180 Note that there's only one generating occurrence per block
3181 so we just need to check the block number. */
3182 if (BLOCK_NUM (occr->insn) == pred_bb->index)
3183 return 1;
3185 visited[pred_bb->index] = 1;
3188 /* Neither gen nor kill. */
3189 else
3191 visited[pred_bb->index] = 1;
3192 if (expr_reaches_here_p_work (occr, expr, pred_bb, check_self_loop,
3193 visited))
3195 return 1;
3199 /* All paths have been checked. */
3200 return 0;
3203 /* This wrapper for expr_reaches_here_p_work() is to ensure that any
3204 memory allocated for that function is returned. */
3206 static int
3207 expr_reaches_here_p (struct occr *occr, struct expr *expr, basic_block bb,
3208 int check_self_loop)
3210 int rval;
3211 char *visited = xcalloc (last_basic_block, 1);
3213 rval = expr_reaches_here_p_work (occr, expr, bb, check_self_loop, visited);
3215 free (visited);
3216 return rval;
3219 /* Return the instruction that computes EXPR that reaches INSN's basic block.
3220 If there is more than one such instruction, return NULL.
3222 Called only by handle_avail_expr. */
3224 static rtx
3225 computing_insn (struct expr *expr, rtx insn)
3227 basic_block bb = BLOCK_FOR_INSN (insn);
3229 if (expr->avail_occr->next == NULL)
3231 if (BLOCK_FOR_INSN (expr->avail_occr->insn) == bb)
3232 /* The available expression is actually itself
3233 (i.e. a loop in the flow graph) so do nothing. */
3234 return NULL;
3236 /* (FIXME) Case that we found a pattern that was created by
3237 a substitution that took place. */
3238 return expr->avail_occr->insn;
3240 else
3242 /* Pattern is computed more than once.
3243 Search backwards from this insn to see how many of these
3244 computations actually reach this insn. */
3245 struct occr *occr;
3246 rtx insn_computes_expr = NULL;
3247 int can_reach = 0;
3249 for (occr = expr->avail_occr; occr != NULL; occr = occr->next)
3251 if (BLOCK_FOR_INSN (occr->insn) == bb)
3253 /* The expression is generated in this block.
3254 The only time we care about this is when the expression
3255 is generated later in the block [and thus there's a loop].
3256 We let the normal cse pass handle the other cases. */
3257 if (INSN_CUID (insn) < INSN_CUID (occr->insn)
3258 && expr_reaches_here_p (occr, expr, bb, 1))
3260 can_reach++;
3261 if (can_reach > 1)
3262 return NULL;
3264 insn_computes_expr = occr->insn;
3267 else if (expr_reaches_here_p (occr, expr, bb, 0))
3269 can_reach++;
3270 if (can_reach > 1)
3271 return NULL;
3273 insn_computes_expr = occr->insn;
3277 if (insn_computes_expr == NULL)
3278 abort ();
3280 return insn_computes_expr;
3284 /* Return nonzero if the definition in DEF_INSN can reach INSN.
3285 Only called by can_disregard_other_sets. */
3287 static int
3288 def_reaches_here_p (rtx insn, rtx def_insn)
3290 rtx reg;
3292 if (TEST_BIT (reaching_defs[BLOCK_NUM (insn)], INSN_CUID (def_insn)))
3293 return 1;
3295 if (BLOCK_NUM (insn) == BLOCK_NUM (def_insn))
3297 if (INSN_CUID (def_insn) < INSN_CUID (insn))
3299 if (GET_CODE (PATTERN (def_insn)) == PARALLEL)
3300 return 1;
3301 else if (GET_CODE (PATTERN (def_insn)) == CLOBBER)
3302 reg = XEXP (PATTERN (def_insn), 0);
3303 else if (GET_CODE (PATTERN (def_insn)) == SET)
3304 reg = SET_DEST (PATTERN (def_insn));
3305 else
3306 abort ();
3308 return ! reg_set_between_p (reg, NEXT_INSN (def_insn), insn);
3310 else
3311 return 0;
3314 return 0;
3317 /* Return nonzero if *ADDR_THIS_REG can only have one value at INSN. The
3318 value returned is the number of definitions that reach INSN. Returning a
3319 value of zero means that [maybe] more than one definition reaches INSN and
3320 the caller can't perform whatever optimization it is trying. i.e. it is
3321 always safe to return zero. */
3323 static int
3324 can_disregard_other_sets (struct reg_set **addr_this_reg, rtx insn, int for_combine)
3326 int number_of_reaching_defs = 0;
3327 struct reg_set *this_reg;
3329 for (this_reg = *addr_this_reg; this_reg != 0; this_reg = this_reg->next)
3330 if (def_reaches_here_p (insn, this_reg->insn))
3332 number_of_reaching_defs++;
3333 /* Ignore parallels for now. */
3334 if (GET_CODE (PATTERN (this_reg->insn)) == PARALLEL)
3335 return 0;
3337 if (!for_combine
3338 && (GET_CODE (PATTERN (this_reg->insn)) == CLOBBER
3339 || ! rtx_equal_p (SET_SRC (PATTERN (this_reg->insn)),
3340 SET_SRC (PATTERN (insn)))))
3341 /* A setting of the reg to a different value reaches INSN. */
3342 return 0;
3344 if (number_of_reaching_defs > 1)
3346 /* If in this setting the value the register is being set to is
3347 equal to the previous value the register was set to and this
3348 setting reaches the insn we are trying to do the substitution
3349 on then we are ok. */
3350 if (GET_CODE (PATTERN (this_reg->insn)) == CLOBBER)
3351 return 0;
3352 else if (! rtx_equal_p (SET_SRC (PATTERN (this_reg->insn)),
3353 SET_SRC (PATTERN (insn))))
3354 return 0;
3357 *addr_this_reg = this_reg;
3360 return number_of_reaching_defs;
3363 /* Expression computed by insn is available and the substitution is legal,
3364 so try to perform the substitution.
3366 The result is nonzero if any changes were made. */
3368 static int
3369 handle_avail_expr (rtx insn, struct expr *expr)
3371 rtx pat, insn_computes_expr, expr_set;
3372 rtx to;
3373 struct reg_set *this_reg;
3374 int found_setting, use_src;
3375 int changed = 0;
3377 /* We only handle the case where one computation of the expression
3378 reaches this instruction. */
3379 insn_computes_expr = computing_insn (expr, insn);
3380 if (insn_computes_expr == NULL)
3381 return 0;
3382 expr_set = single_set (insn_computes_expr);
3383 if (!expr_set)
3384 abort ();
3386 found_setting = 0;
3387 use_src = 0;
3389 /* At this point we know only one computation of EXPR outside of this
3390 block reaches this insn. Now try to find a register that the
3391 expression is computed into. */
3392 if (GET_CODE (SET_SRC (expr_set)) == REG)
3394 /* This is the case when the available expression that reaches
3395 here has already been handled as an available expression. */
3396 unsigned int regnum_for_replacing
3397 = REGNO (SET_SRC (expr_set));
3399 /* If the register was created by GCSE we can't use `reg_set_table',
3400 however we know it's set only once. */
3401 if (regnum_for_replacing >= max_gcse_regno
3402 /* If the register the expression is computed into is set only once,
3403 or only one set reaches this insn, we can use it. */
3404 || (((this_reg = reg_set_table[regnum_for_replacing]),
3405 this_reg->next == NULL)
3406 || can_disregard_other_sets (&this_reg, insn, 0)))
3408 use_src = 1;
3409 found_setting = 1;
3413 if (!found_setting)
3415 unsigned int regnum_for_replacing
3416 = REGNO (SET_DEST (expr_set));
3418 /* This shouldn't happen. */
3419 if (regnum_for_replacing >= max_gcse_regno)
3420 abort ();
3422 this_reg = reg_set_table[regnum_for_replacing];
3424 /* If the register the expression is computed into is set only once,
3425 or only one set reaches this insn, use it. */
3426 if (this_reg->next == NULL
3427 || can_disregard_other_sets (&this_reg, insn, 0))
3428 found_setting = 1;
3431 if (found_setting)
3433 pat = PATTERN (insn);
3434 if (use_src)
3435 to = SET_SRC (expr_set);
3436 else
3437 to = SET_DEST (expr_set);
3438 changed = validate_change (insn, &SET_SRC (pat), to, 0);
3440 /* We should be able to ignore the return code from validate_change but
3441 to play it safe we check. */
3442 if (changed)
3444 gcse_subst_count++;
3445 if (gcse_file != NULL)
3447 fprintf (gcse_file, "GCSE: Replacing the source in insn %d with",
3448 INSN_UID (insn));
3449 fprintf (gcse_file, " reg %d %s insn %d\n",
3450 REGNO (to), use_src ? "from" : "set in",
3451 INSN_UID (insn_computes_expr));
3456 /* The register that the expr is computed into is set more than once. */
3457 else if (1 /*expensive_op(this_pattrn->op) && do_expensive_gcse)*/)
3459 /* Insert an insn after insnx that copies the reg set in insnx
3460 into a new pseudo register call this new register REGN.
3461 From insnb until end of basic block or until REGB is set
3462 replace all uses of REGB with REGN. */
3463 rtx new_insn;
3465 to = gen_reg_rtx (GET_MODE (SET_DEST (expr_set)));
3467 /* Generate the new insn. */
3468 /* ??? If the change fails, we return 0, even though we created
3469 an insn. I think this is ok. */
3470 new_insn
3471 = emit_insn_after (gen_rtx_SET (VOIDmode, to,
3472 SET_DEST (expr_set)),
3473 insn_computes_expr);
3475 /* Keep register set table up to date. */
3476 record_one_set (REGNO (to), new_insn);
3478 gcse_create_count++;
3479 if (gcse_file != NULL)
3481 fprintf (gcse_file, "GCSE: Creating insn %d to copy value of reg %d",
3482 INSN_UID (NEXT_INSN (insn_computes_expr)),
3483 REGNO (SET_SRC (PATTERN (NEXT_INSN (insn_computes_expr)))));
3484 fprintf (gcse_file, ", computed in insn %d,\n",
3485 INSN_UID (insn_computes_expr));
3486 fprintf (gcse_file, " into newly allocated reg %d\n",
3487 REGNO (to));
3490 pat = PATTERN (insn);
3492 /* Do register replacement for INSN. */
3493 changed = validate_change (insn, &SET_SRC (pat),
3494 SET_DEST (PATTERN
3495 (NEXT_INSN (insn_computes_expr))),
3498 /* We should be able to ignore the return code from validate_change but
3499 to play it safe we check. */
3500 if (changed)
3502 gcse_subst_count++;
3503 if (gcse_file != NULL)
3505 fprintf (gcse_file,
3506 "GCSE: Replacing the source in insn %d with reg %d ",
3507 INSN_UID (insn),
3508 REGNO (SET_DEST (PATTERN (NEXT_INSN
3509 (insn_computes_expr)))));
3510 fprintf (gcse_file, "set in insn %d\n",
3511 INSN_UID (insn_computes_expr));
3516 return changed;
3519 /* Perform classic GCSE. This is called by one_classic_gcse_pass after all
3520 the dataflow analysis has been done.
3522 The result is nonzero if a change was made. */
3524 static int
3525 classic_gcse (void)
3527 int changed;
3528 rtx insn;
3529 basic_block bb;
3531 /* Note we start at block 1. */
3533 if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
3534 return 0;
3536 changed = 0;
3537 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb->next_bb, EXIT_BLOCK_PTR, next_bb)
3539 /* Reset tables used to keep track of what's still valid [since the
3540 start of the block]. */
3541 reset_opr_set_tables ();
3543 for (insn = BB_HEAD (bb);
3544 insn != NULL && insn != NEXT_INSN (BB_END (bb));
3545 insn = NEXT_INSN (insn))
3547 /* Is insn of form (set (pseudo-reg) ...)? */
3548 if (GET_CODE (insn) == INSN
3549 && GET_CODE (PATTERN (insn)) == SET
3550 && GET_CODE (SET_DEST (PATTERN (insn))) == REG
3551 && REGNO (SET_DEST (PATTERN (insn))) >= FIRST_PSEUDO_REGISTER)
3553 rtx pat = PATTERN (insn);
3554 rtx src = SET_SRC (pat);
3555 struct expr *expr;
3557 if (want_to_gcse_p (src)
3558 /* Is the expression recorded? */
3559 && ((expr = lookup_expr (src, &expr_hash_table)) != NULL)
3560 /* Is the expression available [at the start of the
3561 block]? */
3562 && TEST_BIT (ae_in[bb->index], expr->bitmap_index)
3563 /* Are the operands unchanged since the start of the
3564 block? */
3565 && oprs_not_set_p (src, insn))
3566 changed |= handle_avail_expr (insn, expr);
3569 /* Keep track of everything modified by this insn. */
3570 /* ??? Need to be careful w.r.t. mods done to INSN. */
3571 if (INSN_P (insn))
3572 mark_oprs_set (insn);
3576 return changed;
3579 /* Top level routine to perform one classic GCSE pass.
3581 Return nonzero if a change was made. */
3583 static int
3584 one_classic_gcse_pass (int pass)
3586 int changed = 0;
3588 gcse_subst_count = 0;
3589 gcse_create_count = 0;
3591 alloc_hash_table (max_cuid, &expr_hash_table, 0);
3592 alloc_rd_mem (last_basic_block, max_cuid);
3593 compute_hash_table (&expr_hash_table);
3594 if (gcse_file)
3595 dump_hash_table (gcse_file, "Expression", &expr_hash_table);
3597 if (expr_hash_table.n_elems > 0)
3599 compute_kill_rd ();
3600 compute_rd ();
3601 alloc_avail_expr_mem (last_basic_block, expr_hash_table.n_elems);
3602 compute_ae_gen (&expr_hash_table);
3603 compute_ae_kill (ae_gen, ae_kill, &expr_hash_table);
3604 compute_available (ae_gen, ae_kill, ae_out, ae_in);
3605 changed = classic_gcse ();
3606 free_avail_expr_mem ();
3609 free_rd_mem ();
3610 free_hash_table (&expr_hash_table);
3612 if (gcse_file)
3614 fprintf (gcse_file, "\n");
3615 fprintf (gcse_file, "GCSE of %s, pass %d: %d bytes needed, %d substs,",
3616 current_function_name, pass, bytes_used, gcse_subst_count);
3617 fprintf (gcse_file, "%d insns created\n", gcse_create_count);
3620 return changed;
3623 /* Compute copy/constant propagation working variables. */
3625 /* Local properties of assignments. */
3626 static sbitmap *cprop_pavloc;
3627 static sbitmap *cprop_absaltered;
3629 /* Global properties of assignments (computed from the local properties). */
3630 static sbitmap *cprop_avin;
3631 static sbitmap *cprop_avout;
3633 /* Allocate vars used for copy/const propagation. N_BLOCKS is the number of
3634 basic blocks. N_SETS is the number of sets. */
3636 static void
3637 alloc_cprop_mem (int n_blocks, int n_sets)
3639 cprop_pavloc = sbitmap_vector_alloc (n_blocks, n_sets);
3640 cprop_absaltered = sbitmap_vector_alloc (n_blocks, n_sets);
3642 cprop_avin = sbitmap_vector_alloc (n_blocks, n_sets);
3643 cprop_avout = sbitmap_vector_alloc (n_blocks, n_sets);
3646 /* Free vars used by copy/const propagation. */
3648 static void
3649 free_cprop_mem (void)
3651 sbitmap_vector_free (cprop_pavloc);
3652 sbitmap_vector_free (cprop_absaltered);
3653 sbitmap_vector_free (cprop_avin);
3654 sbitmap_vector_free (cprop_avout);
3657 /* For each block, compute whether X is transparent. X is either an
3658 expression or an assignment [though we don't care which, for this context
3659 an assignment is treated as an expression]. For each block where an
3660 element of X is modified, set (SET_P == 1) or reset (SET_P == 0) the INDX
3661 bit in BMAP. */
3663 static void
3664 compute_transp (rtx x, int indx, sbitmap *bmap, int set_p)
3666 int i, j;
3667 basic_block bb;
3668 enum rtx_code code;
3669 reg_set *r;
3670 const char *fmt;
3672 /* repeat is used to turn tail-recursion into iteration since GCC
3673 can't do it when there's no return value. */
3674 repeat:
3676 if (x == 0)
3677 return;
3679 code = GET_CODE (x);
3680 switch (code)
3682 case REG:
3683 if (set_p)
3685 if (REGNO (x) < FIRST_PSEUDO_REGISTER)
3687 FOR_EACH_BB (bb)
3688 if (TEST_BIT (reg_set_in_block[bb->index], REGNO (x)))
3689 SET_BIT (bmap[bb->index], indx);
3691 else
3693 for (r = reg_set_table[REGNO (x)]; r != NULL; r = r->next)
3694 SET_BIT (bmap[BLOCK_NUM (r->insn)], indx);
3697 else
3699 if (REGNO (x) < FIRST_PSEUDO_REGISTER)
3701 FOR_EACH_BB (bb)
3702 if (TEST_BIT (reg_set_in_block[bb->index], REGNO (x)))
3703 RESET_BIT (bmap[bb->index], indx);
3705 else
3707 for (r = reg_set_table[REGNO (x)]; r != NULL; r = r->next)
3708 RESET_BIT (bmap[BLOCK_NUM (r->insn)], indx);
3712 return;
3714 case MEM:
3715 FOR_EACH_BB (bb)
3717 rtx list_entry = canon_modify_mem_list[bb->index];
3719 while (list_entry)
3721 rtx dest, dest_addr;
3723 if (GET_CODE (XEXP (list_entry, 0)) == CALL_INSN)
3725 if (set_p)
3726 SET_BIT (bmap[bb->index], indx);
3727 else
3728 RESET_BIT (bmap[bb->index], indx);
3729 break;
3731 /* LIST_ENTRY must be an INSN of some kind that sets memory.
3732 Examine each hunk of memory that is modified. */
3734 dest = XEXP (list_entry, 0);
3735 list_entry = XEXP (list_entry, 1);
3736 dest_addr = XEXP (list_entry, 0);
3738 if (canon_true_dependence (dest, GET_MODE (dest), dest_addr,
3739 x, rtx_addr_varies_p))
3741 if (set_p)
3742 SET_BIT (bmap[bb->index], indx);
3743 else
3744 RESET_BIT (bmap[bb->index], indx);
3745 break;
3747 list_entry = XEXP (list_entry, 1);
3751 x = XEXP (x, 0);
3752 goto repeat;
3754 case PC:
3755 case CC0: /*FIXME*/
3756 case CONST:
3757 case CONST_INT:
3758 case CONST_DOUBLE:
3759 case CONST_VECTOR:
3760 case SYMBOL_REF:
3761 case LABEL_REF:
3762 case ADDR_VEC:
3763 case ADDR_DIFF_VEC:
3764 return;
3766 default:
3767 break;
3770 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
3772 if (fmt[i] == 'e')
3774 /* If we are about to do the last recursive call
3775 needed at this level, change it into iteration.
3776 This function is called enough to be worth it. */
3777 if (i == 0)
3779 x = XEXP (x, i);
3780 goto repeat;
3783 compute_transp (XEXP (x, i), indx, bmap, set_p);
3785 else if (fmt[i] == 'E')
3786 for (j = 0; j < XVECLEN (x, i); j++)
3787 compute_transp (XVECEXP (x, i, j), indx, bmap, set_p);
3791 /* Top level routine to do the dataflow analysis needed by copy/const
3792 propagation. */
3794 static void
3795 compute_cprop_data (void)
3797 compute_local_properties (cprop_absaltered, cprop_pavloc, NULL, &set_hash_table);
3798 compute_available (cprop_pavloc, cprop_absaltered,
3799 cprop_avout, cprop_avin);
3802 /* Copy/constant propagation. */
3804 /* Maximum number of register uses in an insn that we handle. */
3805 #define MAX_USES 8
3807 /* Table of uses found in an insn.
3808 Allocated statically to avoid alloc/free complexity and overhead. */
3809 static struct reg_use reg_use_table[MAX_USES];
3811 /* Index into `reg_use_table' while building it. */
3812 static int reg_use_count;
3814 /* Set up a list of register numbers used in INSN. The found uses are stored
3815 in `reg_use_table'. `reg_use_count' is initialized to zero before entry,
3816 and contains the number of uses in the table upon exit.
3818 ??? If a register appears multiple times we will record it multiple times.
3819 This doesn't hurt anything but it will slow things down. */
3821 static void
3822 find_used_regs (rtx *xptr, void *data ATTRIBUTE_UNUSED)
3824 int i, j;
3825 enum rtx_code code;
3826 const char *fmt;
3827 rtx x = *xptr;
3829 /* repeat is used to turn tail-recursion into iteration since GCC
3830 can't do it when there's no return value. */
3831 repeat:
3832 if (x == 0)
3833 return;
3835 code = GET_CODE (x);
3836 if (REG_P (x))
3838 if (reg_use_count == MAX_USES)
3839 return;
3841 reg_use_table[reg_use_count].reg_rtx = x;
3842 reg_use_count++;
3845 /* Recursively scan the operands of this expression. */
3847 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
3849 if (fmt[i] == 'e')
3851 /* If we are about to do the last recursive call
3852 needed at this level, change it into iteration.
3853 This function is called enough to be worth it. */
3854 if (i == 0)
3856 x = XEXP (x, 0);
3857 goto repeat;
3860 find_used_regs (&XEXP (x, i), data);
3862 else if (fmt[i] == 'E')
3863 for (j = 0; j < XVECLEN (x, i); j++)
3864 find_used_regs (&XVECEXP (x, i, j), data);
3868 /* Try to replace all non-SET_DEST occurrences of FROM in INSN with TO.
3869 Returns nonzero is successful. */
3871 static int
3872 try_replace_reg (rtx from, rtx to, rtx insn)
3874 rtx note = find_reg_equal_equiv_note (insn);
3875 rtx src = 0;
3876 int success = 0;
3877 rtx set = single_set (insn);
3879 validate_replace_src_group (from, to, insn);
3880 if (num_changes_pending () && apply_change_group ())
3881 success = 1;
3883 /* Try to simplify SET_SRC if we have substituted a constant. */
3884 if (success && set && CONSTANT_P (to))
3886 src = simplify_rtx (SET_SRC (set));
3888 if (src)
3889 validate_change (insn, &SET_SRC (set), src, 0);
3892 /* If there is already a NOTE, update the expression in it with our
3893 replacement. */
3894 if (note != 0)
3895 XEXP (note, 0) = simplify_replace_rtx (XEXP (note, 0), from, to);
3897 if (!success && set && reg_mentioned_p (from, SET_SRC (set)))
3899 /* If above failed and this is a single set, try to simplify the source of
3900 the set given our substitution. We could perhaps try this for multiple
3901 SETs, but it probably won't buy us anything. */
3902 src = simplify_replace_rtx (SET_SRC (set), from, to);
3904 if (!rtx_equal_p (src, SET_SRC (set))
3905 && validate_change (insn, &SET_SRC (set), src, 0))
3906 success = 1;
3908 /* If we've failed to do replacement, have a single SET, don't already
3909 have a note, and have no special SET, add a REG_EQUAL note to not
3910 lose information. */
3911 if (!success && note == 0 && set != 0
3912 && GET_CODE (XEXP (set, 0)) != ZERO_EXTRACT
3913 && GET_CODE (XEXP (set, 0)) != SIGN_EXTRACT)
3914 note = set_unique_reg_note (insn, REG_EQUAL, copy_rtx (src));
3917 /* REG_EQUAL may get simplified into register.
3918 We don't allow that. Remove that note. This code ought
3919 not to happen, because previous code ought to synthesize
3920 reg-reg move, but be on the safe side. */
3921 if (note && REG_P (XEXP (note, 0)))
3922 remove_note (insn, note);
3924 return success;
3927 /* Find a set of REGNOs that are available on entry to INSN's block. Returns
3928 NULL no such set is found. */
3930 static struct expr *
3931 find_avail_set (int regno, rtx insn)
3933 /* SET1 contains the last set found that can be returned to the caller for
3934 use in a substitution. */
3935 struct expr *set1 = 0;
3937 /* Loops are not possible here. To get a loop we would need two sets
3938 available at the start of the block containing INSN. ie we would
3939 need two sets like this available at the start of the block:
3941 (set (reg X) (reg Y))
3942 (set (reg Y) (reg X))
3944 This can not happen since the set of (reg Y) would have killed the
3945 set of (reg X) making it unavailable at the start of this block. */
3946 while (1)
3948 rtx src;
3949 struct expr *set = lookup_set (regno, &set_hash_table);
3951 /* Find a set that is available at the start of the block
3952 which contains INSN. */
3953 while (set)
3955 if (TEST_BIT (cprop_avin[BLOCK_NUM (insn)], set->bitmap_index))
3956 break;
3957 set = next_set (regno, set);
3960 /* If no available set was found we've reached the end of the
3961 (possibly empty) copy chain. */
3962 if (set == 0)
3963 break;
3965 if (GET_CODE (set->expr) != SET)
3966 abort ();
3968 src = SET_SRC (set->expr);
3970 /* We know the set is available.
3971 Now check that SRC is ANTLOC (i.e. none of the source operands
3972 have changed since the start of the block).
3974 If the source operand changed, we may still use it for the next
3975 iteration of this loop, but we may not use it for substitutions. */
3977 if (gcse_constant_p (src) || oprs_not_set_p (src, insn))
3978 set1 = set;
3980 /* If the source of the set is anything except a register, then
3981 we have reached the end of the copy chain. */
3982 if (GET_CODE (src) != REG)
3983 break;
3985 /* Follow the copy chain, ie start another iteration of the loop
3986 and see if we have an available copy into SRC. */
3987 regno = REGNO (src);
3990 /* SET1 holds the last set that was available and anticipatable at
3991 INSN. */
3992 return set1;
3995 /* Subroutine of cprop_insn that tries to propagate constants into
3996 JUMP_INSNS. JUMP must be a conditional jump. If SETCC is non-NULL
3997 it is the instruction that immediately precedes JUMP, and must be a
3998 single SET of a register. FROM is what we will try to replace,
3999 SRC is the constant we will try to substitute for it. Returns nonzero
4000 if a change was made. */
4002 static int
4003 cprop_jump (basic_block bb, rtx setcc, rtx jump, rtx from, rtx src)
4005 rtx new, set_src, note_src;
4006 rtx set = pc_set (jump);
4007 rtx note = find_reg_equal_equiv_note (jump);
4009 if (note)
4011 note_src = XEXP (note, 0);
4012 if (GET_CODE (note_src) == EXPR_LIST)
4013 note_src = NULL_RTX;
4015 else note_src = NULL_RTX;
4017 /* Prefer REG_EQUAL notes except those containing EXPR_LISTs. */
4018 set_src = note_src ? note_src : SET_SRC (set);
4020 /* First substitute the SETCC condition into the JUMP instruction,
4021 then substitute that given values into this expanded JUMP. */
4022 if (setcc != NULL_RTX
4023 && !modified_between_p (from, setcc, jump)
4024 && !modified_between_p (src, setcc, jump))
4026 rtx setcc_src;
4027 rtx setcc_set = single_set (setcc);
4028 rtx setcc_note = find_reg_equal_equiv_note (setcc);
4029 setcc_src = (setcc_note && GET_CODE (XEXP (setcc_note, 0)) != EXPR_LIST)
4030 ? XEXP (setcc_note, 0) : SET_SRC (setcc_set);
4031 set_src = simplify_replace_rtx (set_src, SET_DEST (setcc_set),
4032 setcc_src);
4034 else
4035 setcc = NULL_RTX;
4037 new = simplify_replace_rtx (set_src, from, src);
4039 /* If no simplification can be made, then try the next register. */
4040 if (rtx_equal_p (new, SET_SRC (set)))
4041 return 0;
4043 /* If this is now a no-op delete it, otherwise this must be a valid insn. */
4044 if (new == pc_rtx)
4045 delete_insn (jump);
4046 else
4048 /* Ensure the value computed inside the jump insn to be equivalent
4049 to one computed by setcc. */
4050 if (setcc && modified_in_p (new, setcc))
4051 return 0;
4052 if (! validate_change (jump, &SET_SRC (set), new, 0))
4054 /* When (some) constants are not valid in a comparison, and there
4055 are two registers to be replaced by constants before the entire
4056 comparison can be folded into a constant, we need to keep
4057 intermediate information in REG_EQUAL notes. For targets with
4058 separate compare insns, such notes are added by try_replace_reg.
4059 When we have a combined compare-and-branch instruction, however,
4060 we need to attach a note to the branch itself to make this
4061 optimization work. */
4063 if (!rtx_equal_p (new, note_src))
4064 set_unique_reg_note (jump, REG_EQUAL, copy_rtx (new));
4065 return 0;
4068 /* Remove REG_EQUAL note after simplification. */
4069 if (note_src)
4070 remove_note (jump, note);
4072 /* If this has turned into an unconditional jump,
4073 then put a barrier after it so that the unreachable
4074 code will be deleted. */
4075 if (GET_CODE (SET_SRC (set)) == LABEL_REF)
4076 emit_barrier_after (jump);
4079 #ifdef HAVE_cc0
4080 /* Delete the cc0 setter. */
4081 if (setcc != NULL && CC0_P (SET_DEST (single_set (setcc))))
4082 delete_insn (setcc);
4083 #endif
4085 run_jump_opt_after_gcse = 1;
4087 const_prop_count++;
4088 if (gcse_file != NULL)
4090 fprintf (gcse_file,
4091 "CONST-PROP: Replacing reg %d in jump_insn %d with constant ",
4092 REGNO (from), INSN_UID (jump));
4093 print_rtl (gcse_file, src);
4094 fprintf (gcse_file, "\n");
4096 purge_dead_edges (bb);
4098 return 1;
4101 static bool
4102 constprop_register (rtx insn, rtx from, rtx to, int alter_jumps)
4104 rtx sset;
4106 /* Check for reg or cc0 setting instructions followed by
4107 conditional branch instructions first. */
4108 if (alter_jumps
4109 && (sset = single_set (insn)) != NULL
4110 && NEXT_INSN (insn)
4111 && any_condjump_p (NEXT_INSN (insn)) && onlyjump_p (NEXT_INSN (insn)))
4113 rtx dest = SET_DEST (sset);
4114 if ((REG_P (dest) || CC0_P (dest))
4115 && cprop_jump (BLOCK_FOR_INSN (insn), insn, NEXT_INSN (insn), from, to))
4116 return 1;
4119 /* Handle normal insns next. */
4120 if (GET_CODE (insn) == INSN
4121 && try_replace_reg (from, to, insn))
4122 return 1;
4124 /* Try to propagate a CONST_INT into a conditional jump.
4125 We're pretty specific about what we will handle in this
4126 code, we can extend this as necessary over time.
4128 Right now the insn in question must look like
4129 (set (pc) (if_then_else ...)) */
4130 else if (alter_jumps && any_condjump_p (insn) && onlyjump_p (insn))
4131 return cprop_jump (BLOCK_FOR_INSN (insn), NULL, insn, from, to);
4132 return 0;
4135 /* Perform constant and copy propagation on INSN.
4136 The result is nonzero if a change was made. */
4138 static int
4139 cprop_insn (rtx insn, int alter_jumps)
4141 struct reg_use *reg_used;
4142 int changed = 0;
4143 rtx note;
4145 if (!INSN_P (insn))
4146 return 0;
4148 reg_use_count = 0;
4149 note_uses (&PATTERN (insn), find_used_regs, NULL);
4151 note = find_reg_equal_equiv_note (insn);
4153 /* We may win even when propagating constants into notes. */
4154 if (note)
4155 find_used_regs (&XEXP (note, 0), NULL);
4157 for (reg_used = &reg_use_table[0]; reg_use_count > 0;
4158 reg_used++, reg_use_count--)
4160 unsigned int regno = REGNO (reg_used->reg_rtx);
4161 rtx pat, src;
4162 struct expr *set;
4164 /* Ignore registers created by GCSE.
4165 We do this because ... */
4166 if (regno >= max_gcse_regno)
4167 continue;
4169 /* If the register has already been set in this block, there's
4170 nothing we can do. */
4171 if (! oprs_not_set_p (reg_used->reg_rtx, insn))
4172 continue;
4174 /* Find an assignment that sets reg_used and is available
4175 at the start of the block. */
4176 set = find_avail_set (regno, insn);
4177 if (! set)
4178 continue;
4180 pat = set->expr;
4181 /* ??? We might be able to handle PARALLELs. Later. */
4182 if (GET_CODE (pat) != SET)
4183 abort ();
4185 src = SET_SRC (pat);
4187 /* Constant propagation. */
4188 if (gcse_constant_p (src))
4190 if (constprop_register (insn, reg_used->reg_rtx, src, alter_jumps))
4192 changed = 1;
4193 const_prop_count++;
4194 if (gcse_file != NULL)
4196 fprintf (gcse_file, "GLOBAL CONST-PROP: Replacing reg %d in ", regno);
4197 fprintf (gcse_file, "insn %d with constant ", INSN_UID (insn));
4198 print_rtl (gcse_file, src);
4199 fprintf (gcse_file, "\n");
4201 if (INSN_DELETED_P (insn))
4202 return 1;
4205 else if (GET_CODE (src) == REG
4206 && REGNO (src) >= FIRST_PSEUDO_REGISTER
4207 && REGNO (src) != regno)
4209 if (try_replace_reg (reg_used->reg_rtx, src, insn))
4211 changed = 1;
4212 copy_prop_count++;
4213 if (gcse_file != NULL)
4215 fprintf (gcse_file, "GLOBAL COPY-PROP: Replacing reg %d in insn %d",
4216 regno, INSN_UID (insn));
4217 fprintf (gcse_file, " with reg %d\n", REGNO (src));
4220 /* The original insn setting reg_used may or may not now be
4221 deletable. We leave the deletion to flow. */
4222 /* FIXME: If it turns out that the insn isn't deletable,
4223 then we may have unnecessarily extended register lifetimes
4224 and made things worse. */
4229 return changed;
4232 /* Like find_used_regs, but avoid recording uses that appear in
4233 input-output contexts such as zero_extract or pre_dec. This
4234 restricts the cases we consider to those for which local cprop
4235 can legitimately make replacements. */
4237 static void
4238 local_cprop_find_used_regs (rtx *xptr, void *data)
4240 rtx x = *xptr;
4242 if (x == 0)
4243 return;
4245 switch (GET_CODE (x))
4247 case ZERO_EXTRACT:
4248 case SIGN_EXTRACT:
4249 case STRICT_LOW_PART:
4250 return;
4252 case PRE_DEC:
4253 case PRE_INC:
4254 case POST_DEC:
4255 case POST_INC:
4256 case PRE_MODIFY:
4257 case POST_MODIFY:
4258 /* Can only legitimately appear this early in the context of
4259 stack pushes for function arguments, but handle all of the
4260 codes nonetheless. */
4261 return;
4263 case SUBREG:
4264 /* Setting a subreg of a register larger than word_mode leaves
4265 the non-written words unchanged. */
4266 if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) > BITS_PER_WORD)
4267 return;
4268 break;
4270 default:
4271 break;
4274 find_used_regs (xptr, data);
4277 /* LIBCALL_SP is a zero-terminated array of insns at the end of a libcall;
4278 their REG_EQUAL notes need updating. */
4280 static bool
4281 do_local_cprop (rtx x, rtx insn, int alter_jumps, rtx *libcall_sp)
4283 rtx newreg = NULL, newcnst = NULL;
4285 /* Rule out USE instructions and ASM statements as we don't want to
4286 change the hard registers mentioned. */
4287 if (GET_CODE (x) == REG
4288 && (REGNO (x) >= FIRST_PSEUDO_REGISTER
4289 || (GET_CODE (PATTERN (insn)) != USE
4290 && asm_noperands (PATTERN (insn)) < 0)))
4292 cselib_val *val = cselib_lookup (x, GET_MODE (x), 0);
4293 struct elt_loc_list *l;
4295 if (!val)
4296 return false;
4297 for (l = val->locs; l; l = l->next)
4299 rtx this_rtx = l->loc;
4300 rtx note;
4302 if (l->in_libcall)
4303 continue;
4305 if (gcse_constant_p (this_rtx))
4306 newcnst = this_rtx;
4307 if (REG_P (this_rtx) && REGNO (this_rtx) >= FIRST_PSEUDO_REGISTER
4308 /* Don't copy propagate if it has attached REG_EQUIV note.
4309 At this point this only function parameters should have
4310 REG_EQUIV notes and if the argument slot is used somewhere
4311 explicitly, it means address of parameter has been taken,
4312 so we should not extend the lifetime of the pseudo. */
4313 && (!(note = find_reg_note (l->setting_insn, REG_EQUIV, NULL_RTX))
4314 || GET_CODE (XEXP (note, 0)) != MEM))
4315 newreg = this_rtx;
4317 if (newcnst && constprop_register (insn, x, newcnst, alter_jumps))
4319 /* If we find a case where we can't fix the retval REG_EQUAL notes
4320 match the new register, we either have to abandon this replacement
4321 or fix delete_trivially_dead_insns to preserve the setting insn,
4322 or make it delete the REG_EUAQL note, and fix up all passes that
4323 require the REG_EQUAL note there. */
4324 if (!adjust_libcall_notes (x, newcnst, insn, libcall_sp))
4325 abort ();
4326 if (gcse_file != NULL)
4328 fprintf (gcse_file, "LOCAL CONST-PROP: Replacing reg %d in ",
4329 REGNO (x));
4330 fprintf (gcse_file, "insn %d with constant ",
4331 INSN_UID (insn));
4332 print_rtl (gcse_file, newcnst);
4333 fprintf (gcse_file, "\n");
4335 const_prop_count++;
4336 return true;
4338 else if (newreg && newreg != x && try_replace_reg (x, newreg, insn))
4340 adjust_libcall_notes (x, newreg, insn, libcall_sp);
4341 if (gcse_file != NULL)
4343 fprintf (gcse_file,
4344 "LOCAL COPY-PROP: Replacing reg %d in insn %d",
4345 REGNO (x), INSN_UID (insn));
4346 fprintf (gcse_file, " with reg %d\n", REGNO (newreg));
4348 copy_prop_count++;
4349 return true;
4352 return false;
4355 /* LIBCALL_SP is a zero-terminated array of insns at the end of a libcall;
4356 their REG_EQUAL notes need updating to reflect that OLDREG has been
4357 replaced with NEWVAL in INSN. Return true if all substitutions could
4358 be made. */
4359 static bool
4360 adjust_libcall_notes (rtx oldreg, rtx newval, rtx insn, rtx *libcall_sp)
4362 rtx end;
4364 while ((end = *libcall_sp++))
4366 rtx note = find_reg_equal_equiv_note (end);
4368 if (! note)
4369 continue;
4371 if (REG_P (newval))
4373 if (reg_set_between_p (newval, PREV_INSN (insn), end))
4377 note = find_reg_equal_equiv_note (end);
4378 if (! note)
4379 continue;
4380 if (reg_mentioned_p (newval, XEXP (note, 0)))
4381 return false;
4383 while ((end = *libcall_sp++));
4384 return true;
4387 XEXP (note, 0) = replace_rtx (XEXP (note, 0), oldreg, newval);
4388 insn = end;
4390 return true;
4393 #define MAX_NESTED_LIBCALLS 9
4395 static void
4396 local_cprop_pass (int alter_jumps)
4398 rtx insn;
4399 struct reg_use *reg_used;
4400 rtx libcall_stack[MAX_NESTED_LIBCALLS + 1], *libcall_sp;
4401 bool changed = false;
4403 cselib_init ();
4404 libcall_sp = &libcall_stack[MAX_NESTED_LIBCALLS];
4405 *libcall_sp = 0;
4406 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4408 if (INSN_P (insn))
4410 rtx note = find_reg_note (insn, REG_LIBCALL, NULL_RTX);
4412 if (note)
4414 if (libcall_sp == libcall_stack)
4415 abort ();
4416 *--libcall_sp = XEXP (note, 0);
4418 note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
4419 if (note)
4420 libcall_sp++;
4421 note = find_reg_equal_equiv_note (insn);
4424 reg_use_count = 0;
4425 note_uses (&PATTERN (insn), local_cprop_find_used_regs, NULL);
4426 if (note)
4427 local_cprop_find_used_regs (&XEXP (note, 0), NULL);
4429 for (reg_used = &reg_use_table[0]; reg_use_count > 0;
4430 reg_used++, reg_use_count--)
4431 if (do_local_cprop (reg_used->reg_rtx, insn, alter_jumps,
4432 libcall_sp))
4434 changed = true;
4435 break;
4437 if (INSN_DELETED_P (insn))
4438 break;
4440 while (reg_use_count);
4442 cselib_process_insn (insn);
4444 cselib_finish ();
4445 /* Global analysis may get into infinite loops for unreachable blocks. */
4446 if (changed && alter_jumps)
4448 delete_unreachable_blocks ();
4449 free_reg_set_mem ();
4450 alloc_reg_set_mem (max_reg_num ());
4451 compute_sets (get_insns ());
4455 /* Forward propagate copies. This includes copies and constants. Return
4456 nonzero if a change was made. */
4458 static int
4459 cprop (int alter_jumps)
4461 int changed;
4462 basic_block bb;
4463 rtx insn;
4465 /* Note we start at block 1. */
4466 if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
4468 if (gcse_file != NULL)
4469 fprintf (gcse_file, "\n");
4470 return 0;
4473 changed = 0;
4474 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb->next_bb, EXIT_BLOCK_PTR, next_bb)
4476 /* Reset tables used to keep track of what's still valid [since the
4477 start of the block]. */
4478 reset_opr_set_tables ();
4480 for (insn = BB_HEAD (bb);
4481 insn != NULL && insn != NEXT_INSN (BB_END (bb));
4482 insn = NEXT_INSN (insn))
4483 if (INSN_P (insn))
4485 changed |= cprop_insn (insn, alter_jumps);
4487 /* Keep track of everything modified by this insn. */
4488 /* ??? Need to be careful w.r.t. mods done to INSN. Don't
4489 call mark_oprs_set if we turned the insn into a NOTE. */
4490 if (GET_CODE (insn) != NOTE)
4491 mark_oprs_set (insn);
4495 if (gcse_file != NULL)
4496 fprintf (gcse_file, "\n");
4498 return changed;
4501 /* Similar to get_condition, only the resulting condition must be
4502 valid at JUMP, instead of at EARLIEST.
4504 This differs from noce_get_condition in ifcvt.c in that we prefer not to
4505 settle for the condition variable in the jump instruction being integral.
4506 We prefer to be able to record the value of a user variable, rather than
4507 the value of a temporary used in a condition. This could be solved by
4508 recording the value of *every* register scaned by canonicalize_condition,
4509 but this would require some code reorganization. */
4512 fis_get_condition (rtx jump)
4514 rtx cond, set, tmp, insn, earliest;
4515 bool reverse;
4517 if (! any_condjump_p (jump))
4518 return NULL_RTX;
4520 set = pc_set (jump);
4521 cond = XEXP (SET_SRC (set), 0);
4523 /* If this branches to JUMP_LABEL when the condition is false,
4524 reverse the condition. */
4525 reverse = (GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
4526 && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (jump));
4528 /* Use canonicalize_condition to do the dirty work of manipulating
4529 MODE_CC values and COMPARE rtx codes. */
4530 tmp = canonicalize_condition (jump, cond, reverse, &earliest, NULL_RTX,
4531 false);
4532 if (!tmp)
4533 return NULL_RTX;
4535 /* Verify that the given condition is valid at JUMP by virtue of not
4536 having been modified since EARLIEST. */
4537 for (insn = earliest; insn != jump; insn = NEXT_INSN (insn))
4538 if (INSN_P (insn) && modified_in_p (tmp, insn))
4539 break;
4540 if (insn == jump)
4541 return tmp;
4543 /* The condition was modified. See if we can get a partial result
4544 that doesn't follow all the reversals. Perhaps combine can fold
4545 them together later. */
4546 tmp = XEXP (tmp, 0);
4547 if (!REG_P (tmp) || GET_MODE_CLASS (GET_MODE (tmp)) != MODE_INT)
4548 return NULL_RTX;
4549 tmp = canonicalize_condition (jump, cond, reverse, &earliest, tmp,
4550 false);
4551 if (!tmp)
4552 return NULL_RTX;
4554 /* For sanity's sake, re-validate the new result. */
4555 for (insn = earliest; insn != jump; insn = NEXT_INSN (insn))
4556 if (INSN_P (insn) && modified_in_p (tmp, insn))
4557 return NULL_RTX;
4559 return tmp;
4562 /* Find the implicit sets of a function. An "implicit set" is a constraint
4563 on the value of a variable, implied by a conditional jump. For example,
4564 following "if (x == 2)", the then branch may be optimized as though the
4565 conditional performed an "explicit set", in this example, "x = 2". This
4566 function records the set patterns that are implicit at the start of each
4567 basic block. */
4569 static void
4570 find_implicit_sets (void)
4572 basic_block bb, dest;
4573 unsigned int count;
4574 rtx cond, new;
4576 count = 0;
4577 FOR_EACH_BB (bb)
4578 /* Check for more than one successor. */
4579 if (bb->succ && bb->succ->succ_next)
4581 cond = fis_get_condition (BB_END (bb));
4583 if (cond
4584 && (GET_CODE (cond) == EQ || GET_CODE (cond) == NE)
4585 && GET_CODE (XEXP (cond, 0)) == REG
4586 && REGNO (XEXP (cond, 0)) >= FIRST_PSEUDO_REGISTER
4587 && gcse_constant_p (XEXP (cond, 1)))
4589 dest = GET_CODE (cond) == EQ ? BRANCH_EDGE (bb)->dest
4590 : FALLTHRU_EDGE (bb)->dest;
4592 if (dest && ! dest->pred->pred_next
4593 && dest != EXIT_BLOCK_PTR)
4595 new = gen_rtx_SET (VOIDmode, XEXP (cond, 0),
4596 XEXP (cond, 1));
4597 implicit_sets[dest->index] = new;
4598 if (gcse_file)
4600 fprintf(gcse_file, "Implicit set of reg %d in ",
4601 REGNO (XEXP (cond, 0)));
4602 fprintf(gcse_file, "basic block %d\n", dest->index);
4604 count++;
4609 if (gcse_file)
4610 fprintf (gcse_file, "Found %d implicit sets\n", count);
4613 /* Perform one copy/constant propagation pass.
4614 PASS is the pass count. If CPROP_JUMPS is true, perform constant
4615 propagation into conditional jumps. If BYPASS_JUMPS is true,
4616 perform conditional jump bypassing optimizations. */
4618 static int
4619 one_cprop_pass (int pass, int cprop_jumps, int bypass_jumps)
4621 int changed = 0;
4623 const_prop_count = 0;
4624 copy_prop_count = 0;
4626 local_cprop_pass (cprop_jumps);
4628 /* Determine implicit sets. */
4629 implicit_sets = xcalloc (last_basic_block, sizeof (rtx));
4630 find_implicit_sets ();
4632 alloc_hash_table (max_cuid, &set_hash_table, 1);
4633 compute_hash_table (&set_hash_table);
4635 /* Free implicit_sets before peak usage. */
4636 free (implicit_sets);
4637 implicit_sets = NULL;
4639 if (gcse_file)
4640 dump_hash_table (gcse_file, "SET", &set_hash_table);
4641 if (set_hash_table.n_elems > 0)
4643 alloc_cprop_mem (last_basic_block, set_hash_table.n_elems);
4644 compute_cprop_data ();
4645 changed = cprop (cprop_jumps);
4646 if (bypass_jumps)
4647 changed |= bypass_conditional_jumps ();
4648 free_cprop_mem ();
4651 free_hash_table (&set_hash_table);
4653 if (gcse_file)
4655 fprintf (gcse_file, "CPROP of %s, pass %d: %d bytes needed, ",
4656 current_function_name, pass, bytes_used);
4657 fprintf (gcse_file, "%d const props, %d copy props\n\n",
4658 const_prop_count, copy_prop_count);
4660 /* Global analysis may get into infinite loops for unreachable blocks. */
4661 if (changed && cprop_jumps)
4662 delete_unreachable_blocks ();
4664 return changed;
4667 /* Bypass conditional jumps. */
4669 /* The value of last_basic_block at the beginning of the jump_bypass
4670 pass. The use of redirect_edge_and_branch_force may introduce new
4671 basic blocks, but the data flow analysis is only valid for basic
4672 block indices less than bypass_last_basic_block. */
4674 static int bypass_last_basic_block;
4676 /* Find a set of REGNO to a constant that is available at the end of basic
4677 block BB. Returns NULL if no such set is found. Based heavily upon
4678 find_avail_set. */
4680 static struct expr *
4681 find_bypass_set (int regno, int bb)
4683 struct expr *result = 0;
4685 for (;;)
4687 rtx src;
4688 struct expr *set = lookup_set (regno, &set_hash_table);
4690 while (set)
4692 if (TEST_BIT (cprop_avout[bb], set->bitmap_index))
4693 break;
4694 set = next_set (regno, set);
4697 if (set == 0)
4698 break;
4700 if (GET_CODE (set->expr) != SET)
4701 abort ();
4703 src = SET_SRC (set->expr);
4704 if (gcse_constant_p (src))
4705 result = set;
4707 if (GET_CODE (src) != REG)
4708 break;
4710 regno = REGNO (src);
4712 return result;
4716 /* Subroutine of bypass_block that checks whether a pseudo is killed by
4717 any of the instructions inserted on an edge. Jump bypassing places
4718 condition code setters on CFG edges using insert_insn_on_edge. This
4719 function is required to check that our data flow analysis is still
4720 valid prior to commit_edge_insertions. */
4722 static bool
4723 reg_killed_on_edge (rtx reg, edge e)
4725 rtx insn;
4727 for (insn = e->insns; insn; insn = NEXT_INSN (insn))
4728 if (INSN_P (insn) && reg_set_p (reg, insn))
4729 return true;
4731 return false;
4734 /* Subroutine of bypass_conditional_jumps that attempts to bypass the given
4735 basic block BB which has more than one predecessor. If not NULL, SETCC
4736 is the first instruction of BB, which is immediately followed by JUMP_INSN
4737 JUMP. Otherwise, SETCC is NULL, and JUMP is the first insn of BB.
4738 Returns nonzero if a change was made.
4740 During the jump bypassing pass, we may place copies of SETCC instructions
4741 on CFG edges. The following routine must be careful to pay attention to
4742 these inserted insns when performing its transformations. */
4744 static int
4745 bypass_block (basic_block bb, rtx setcc, rtx jump)
4747 rtx insn, note;
4748 edge e, enext, edest;
4749 int i, change;
4750 int may_be_loop_header;
4752 insn = (setcc != NULL) ? setcc : jump;
4754 /* Determine set of register uses in INSN. */
4755 reg_use_count = 0;
4756 note_uses (&PATTERN (insn), find_used_regs, NULL);
4757 note = find_reg_equal_equiv_note (insn);
4758 if (note)
4759 find_used_regs (&XEXP (note, 0), NULL);
4761 may_be_loop_header = false;
4762 for (e = bb->pred; e; e = e->pred_next)
4763 if (e->flags & EDGE_DFS_BACK)
4765 may_be_loop_header = true;
4766 break;
4769 change = 0;
4770 for (e = bb->pred; e; e = enext)
4772 enext = e->pred_next;
4773 if (e->flags & EDGE_COMPLEX)
4774 continue;
4776 /* We can't redirect edges from new basic blocks. */
4777 if (e->src->index >= bypass_last_basic_block)
4778 continue;
4780 /* The irreducible loops created by redirecting of edges entering the
4781 loop from outside would decrease effectiveness of some of the following
4782 optimizations, so prevent this. */
4783 if (may_be_loop_header
4784 && !(e->flags & EDGE_DFS_BACK))
4785 continue;
4787 for (i = 0; i < reg_use_count; i++)
4789 struct reg_use *reg_used = &reg_use_table[i];
4790 unsigned int regno = REGNO (reg_used->reg_rtx);
4791 basic_block dest, old_dest;
4792 struct expr *set;
4793 rtx src, new;
4795 if (regno >= max_gcse_regno)
4796 continue;
4798 set = find_bypass_set (regno, e->src->index);
4800 if (! set)
4801 continue;
4803 /* Check the data flow is valid after edge insertions. */
4804 if (e->insns && reg_killed_on_edge (reg_used->reg_rtx, e))
4805 continue;
4807 src = SET_SRC (pc_set (jump));
4809 if (setcc != NULL)
4810 src = simplify_replace_rtx (src,
4811 SET_DEST (PATTERN (setcc)),
4812 SET_SRC (PATTERN (setcc)));
4814 new = simplify_replace_rtx (src, reg_used->reg_rtx,
4815 SET_SRC (set->expr));
4817 /* Jump bypassing may have already placed instructions on
4818 edges of the CFG. We can't bypass an outgoing edge that
4819 has instructions associated with it, as these insns won't
4820 get executed if the incoming edge is redirected. */
4822 if (new == pc_rtx)
4824 edest = FALLTHRU_EDGE (bb);
4825 dest = edest->insns ? NULL : edest->dest;
4827 else if (GET_CODE (new) == LABEL_REF)
4829 dest = BLOCK_FOR_INSN (XEXP (new, 0));
4830 /* Don't bypass edges containing instructions. */
4831 for (edest = bb->succ; edest; edest = edest->succ_next)
4832 if (edest->dest == dest && edest->insns)
4834 dest = NULL;
4835 break;
4838 else
4839 dest = NULL;
4841 old_dest = e->dest;
4842 if (dest != NULL
4843 && dest != old_dest
4844 && dest != EXIT_BLOCK_PTR)
4846 redirect_edge_and_branch_force (e, dest);
4848 /* Copy the register setter to the redirected edge.
4849 Don't copy CC0 setters, as CC0 is dead after jump. */
4850 if (setcc)
4852 rtx pat = PATTERN (setcc);
4853 if (!CC0_P (SET_DEST (pat)))
4854 insert_insn_on_edge (copy_insn (pat), e);
4857 if (gcse_file != NULL)
4859 fprintf (gcse_file, "JUMP-BYPASS: Proved reg %d in jump_insn %d equals constant ",
4860 regno, INSN_UID (jump));
4861 print_rtl (gcse_file, SET_SRC (set->expr));
4862 fprintf (gcse_file, "\nBypass edge from %d->%d to %d\n",
4863 e->src->index, old_dest->index, dest->index);
4865 change = 1;
4866 break;
4870 return change;
4873 /* Find basic blocks with more than one predecessor that only contain a
4874 single conditional jump. If the result of the comparison is known at
4875 compile-time from any incoming edge, redirect that edge to the
4876 appropriate target. Returns nonzero if a change was made.
4878 This function is now mis-named, because we also handle indirect jumps. */
4880 static int
4881 bypass_conditional_jumps (void)
4883 basic_block bb;
4884 int changed;
4885 rtx setcc;
4886 rtx insn;
4887 rtx dest;
4889 /* Note we start at block 1. */
4890 if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
4891 return 0;
4893 bypass_last_basic_block = last_basic_block;
4894 mark_dfs_back_edges ();
4896 changed = 0;
4897 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb->next_bb,
4898 EXIT_BLOCK_PTR, next_bb)
4900 /* Check for more than one predecessor. */
4901 if (bb->pred && bb->pred->pred_next)
4903 setcc = NULL_RTX;
4904 for (insn = BB_HEAD (bb);
4905 insn != NULL && insn != NEXT_INSN (BB_END (bb));
4906 insn = NEXT_INSN (insn))
4907 if (GET_CODE (insn) == INSN)
4909 if (setcc)
4910 break;
4911 if (GET_CODE (PATTERN (insn)) != SET)
4912 break;
4914 dest = SET_DEST (PATTERN (insn));
4915 if (REG_P (dest) || CC0_P (dest))
4916 setcc = insn;
4917 else
4918 break;
4920 else if (GET_CODE (insn) == JUMP_INSN)
4922 if ((any_condjump_p (insn) || computed_jump_p (insn))
4923 && onlyjump_p (insn))
4924 changed |= bypass_block (bb, setcc, insn);
4925 break;
4927 else if (INSN_P (insn))
4928 break;
4932 /* If we bypassed any register setting insns, we inserted a
4933 copy on the redirected edge. These need to be committed. */
4934 if (changed)
4935 commit_edge_insertions();
4937 return changed;
4940 /* Compute PRE+LCM working variables. */
4942 /* Local properties of expressions. */
4943 /* Nonzero for expressions that are transparent in the block. */
4944 static sbitmap *transp;
4946 /* Nonzero for expressions that are transparent at the end of the block.
4947 This is only zero for expressions killed by abnormal critical edge
4948 created by a calls. */
4949 static sbitmap *transpout;
4951 /* Nonzero for expressions that are computed (available) in the block. */
4952 static sbitmap *comp;
4954 /* Nonzero for expressions that are locally anticipatable in the block. */
4955 static sbitmap *antloc;
4957 /* Nonzero for expressions where this block is an optimal computation
4958 point. */
4959 static sbitmap *pre_optimal;
4961 /* Nonzero for expressions which are redundant in a particular block. */
4962 static sbitmap *pre_redundant;
4964 /* Nonzero for expressions which should be inserted on a specific edge. */
4965 static sbitmap *pre_insert_map;
4967 /* Nonzero for expressions which should be deleted in a specific block. */
4968 static sbitmap *pre_delete_map;
4970 /* Contains the edge_list returned by pre_edge_lcm. */
4971 static struct edge_list *edge_list;
4973 /* Redundant insns. */
4974 static sbitmap pre_redundant_insns;
4976 /* Allocate vars used for PRE analysis. */
4978 static void
4979 alloc_pre_mem (int n_blocks, int n_exprs)
4981 transp = sbitmap_vector_alloc (n_blocks, n_exprs);
4982 comp = sbitmap_vector_alloc (n_blocks, n_exprs);
4983 antloc = sbitmap_vector_alloc (n_blocks, n_exprs);
4985 pre_optimal = NULL;
4986 pre_redundant = NULL;
4987 pre_insert_map = NULL;
4988 pre_delete_map = NULL;
4989 ae_in = NULL;
4990 ae_out = NULL;
4991 ae_kill = sbitmap_vector_alloc (n_blocks, n_exprs);
4993 /* pre_insert and pre_delete are allocated later. */
4996 /* Free vars used for PRE analysis. */
4998 static void
4999 free_pre_mem (void)
5001 sbitmap_vector_free (transp);
5002 sbitmap_vector_free (comp);
5004 /* ANTLOC and AE_KILL are freed just after pre_lcm finishes. */
5006 if (pre_optimal)
5007 sbitmap_vector_free (pre_optimal);
5008 if (pre_redundant)
5009 sbitmap_vector_free (pre_redundant);
5010 if (pre_insert_map)
5011 sbitmap_vector_free (pre_insert_map);
5012 if (pre_delete_map)
5013 sbitmap_vector_free (pre_delete_map);
5014 if (ae_in)
5015 sbitmap_vector_free (ae_in);
5016 if (ae_out)
5017 sbitmap_vector_free (ae_out);
5019 transp = comp = NULL;
5020 pre_optimal = pre_redundant = pre_insert_map = pre_delete_map = NULL;
5021 ae_in = ae_out = NULL;
5024 /* Top level routine to do the dataflow analysis needed by PRE. */
5026 static void
5027 compute_pre_data (void)
5029 sbitmap trapping_expr;
5030 basic_block bb;
5031 unsigned int ui;
5033 compute_local_properties (transp, comp, antloc, &expr_hash_table);
5034 sbitmap_vector_zero (ae_kill, last_basic_block);
5036 /* Collect expressions which might trap. */
5037 trapping_expr = sbitmap_alloc (expr_hash_table.n_elems);
5038 sbitmap_zero (trapping_expr);
5039 for (ui = 0; ui < expr_hash_table.size; ui++)
5041 struct expr *e;
5042 for (e = expr_hash_table.table[ui]; e != NULL; e = e->next_same_hash)
5043 if (may_trap_p (e->expr))
5044 SET_BIT (trapping_expr, e->bitmap_index);
5047 /* Compute ae_kill for each basic block using:
5049 ~(TRANSP | COMP)
5051 This is significantly faster than compute_ae_kill. */
5053 FOR_EACH_BB (bb)
5055 edge e;
5057 /* If the current block is the destination of an abnormal edge, we
5058 kill all trapping expressions because we won't be able to properly
5059 place the instruction on the edge. So make them neither
5060 anticipatable nor transparent. This is fairly conservative. */
5061 for (e = bb->pred; e ; e = e->pred_next)
5062 if (e->flags & EDGE_ABNORMAL)
5064 sbitmap_difference (antloc[bb->index], antloc[bb->index], trapping_expr);
5065 sbitmap_difference (transp[bb->index], transp[bb->index], trapping_expr);
5066 break;
5069 sbitmap_a_or_b (ae_kill[bb->index], transp[bb->index], comp[bb->index]);
5070 sbitmap_not (ae_kill[bb->index], ae_kill[bb->index]);
5073 edge_list = pre_edge_lcm (gcse_file, expr_hash_table.n_elems, transp, comp, antloc,
5074 ae_kill, &pre_insert_map, &pre_delete_map);
5075 sbitmap_vector_free (antloc);
5076 antloc = NULL;
5077 sbitmap_vector_free (ae_kill);
5078 ae_kill = NULL;
5079 sbitmap_free (trapping_expr);
5082 /* PRE utilities */
5084 /* Return nonzero if an occurrence of expression EXPR in OCCR_BB would reach
5085 block BB.
5087 VISITED is a pointer to a working buffer for tracking which BB's have
5088 been visited. It is NULL for the top-level call.
5090 We treat reaching expressions that go through blocks containing the same
5091 reaching expression as "not reaching". E.g. if EXPR is generated in blocks
5092 2 and 3, INSN is in block 4, and 2->3->4, we treat the expression in block
5093 2 as not reaching. The intent is to improve the probability of finding
5094 only one reaching expression and to reduce register lifetimes by picking
5095 the closest such expression. */
5097 static int
5098 pre_expr_reaches_here_p_work (basic_block occr_bb, struct expr *expr, basic_block bb, char *visited)
5100 edge pred;
5102 for (pred = bb->pred; pred != NULL; pred = pred->pred_next)
5104 basic_block pred_bb = pred->src;
5106 if (pred->src == ENTRY_BLOCK_PTR
5107 /* Has predecessor has already been visited? */
5108 || visited[pred_bb->index])
5109 ;/* Nothing to do. */
5111 /* Does this predecessor generate this expression? */
5112 else if (TEST_BIT (comp[pred_bb->index], expr->bitmap_index))
5114 /* Is this the occurrence we're looking for?
5115 Note that there's only one generating occurrence per block
5116 so we just need to check the block number. */
5117 if (occr_bb == pred_bb)
5118 return 1;
5120 visited[pred_bb->index] = 1;
5122 /* Ignore this predecessor if it kills the expression. */
5123 else if (! TEST_BIT (transp[pred_bb->index], expr->bitmap_index))
5124 visited[pred_bb->index] = 1;
5126 /* Neither gen nor kill. */
5127 else
5129 visited[pred_bb->index] = 1;
5130 if (pre_expr_reaches_here_p_work (occr_bb, expr, pred_bb, visited))
5131 return 1;
5135 /* All paths have been checked. */
5136 return 0;
5139 /* The wrapper for pre_expr_reaches_here_work that ensures that any
5140 memory allocated for that function is returned. */
5142 static int
5143 pre_expr_reaches_here_p (basic_block occr_bb, struct expr *expr, basic_block bb)
5145 int rval;
5146 char *visited = xcalloc (last_basic_block, 1);
5148 rval = pre_expr_reaches_here_p_work (occr_bb, expr, bb, visited);
5150 free (visited);
5151 return rval;
5155 /* Given an expr, generate RTL which we can insert at the end of a BB,
5156 or on an edge. Set the block number of any insns generated to
5157 the value of BB. */
5159 static rtx
5160 process_insert_insn (struct expr *expr)
5162 rtx reg = expr->reaching_reg;
5163 rtx exp = copy_rtx (expr->expr);
5164 rtx pat;
5166 start_sequence ();
5168 /* If the expression is something that's an operand, like a constant,
5169 just copy it to a register. */
5170 if (general_operand (exp, GET_MODE (reg)))
5171 emit_move_insn (reg, exp);
5173 /* Otherwise, make a new insn to compute this expression and make sure the
5174 insn will be recognized (this also adds any needed CLOBBERs). Copy the
5175 expression to make sure we don't have any sharing issues. */
5176 else if (insn_invalid_p (emit_insn (gen_rtx_SET (VOIDmode, reg, exp))))
5177 abort ();
5179 pat = get_insns ();
5180 end_sequence ();
5182 return pat;
5185 /* Add EXPR to the end of basic block BB.
5187 This is used by both the PRE and code hoisting.
5189 For PRE, we want to verify that the expr is either transparent
5190 or locally anticipatable in the target block. This check makes
5191 no sense for code hoisting. */
5193 static void
5194 insert_insn_end_bb (struct expr *expr, basic_block bb, int pre)
5196 rtx insn = BB_END (bb);
5197 rtx new_insn;
5198 rtx reg = expr->reaching_reg;
5199 int regno = REGNO (reg);
5200 rtx pat, pat_end;
5202 pat = process_insert_insn (expr);
5203 if (pat == NULL_RTX || ! INSN_P (pat))
5204 abort ();
5206 pat_end = pat;
5207 while (NEXT_INSN (pat_end) != NULL_RTX)
5208 pat_end = NEXT_INSN (pat_end);
5210 /* If the last insn is a jump, insert EXPR in front [taking care to
5211 handle cc0, etc. properly]. Similarly we need to care trapping
5212 instructions in presence of non-call exceptions. */
5214 if (GET_CODE (insn) == JUMP_INSN
5215 || (GET_CODE (insn) == INSN
5216 && (bb->succ->succ_next || (bb->succ->flags & EDGE_ABNORMAL))))
5218 #ifdef HAVE_cc0
5219 rtx note;
5220 #endif
5221 /* It should always be the case that we can put these instructions
5222 anywhere in the basic block with performing PRE optimizations.
5223 Check this. */
5224 if (GET_CODE (insn) == INSN && pre
5225 && !TEST_BIT (antloc[bb->index], expr->bitmap_index)
5226 && !TEST_BIT (transp[bb->index], expr->bitmap_index))
5227 abort ();
5229 /* If this is a jump table, then we can't insert stuff here. Since
5230 we know the previous real insn must be the tablejump, we insert
5231 the new instruction just before the tablejump. */
5232 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
5233 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
5234 insn = prev_real_insn (insn);
5236 #ifdef HAVE_cc0
5237 /* FIXME: 'twould be nice to call prev_cc0_setter here but it aborts
5238 if cc0 isn't set. */
5239 note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
5240 if (note)
5241 insn = XEXP (note, 0);
5242 else
5244 rtx maybe_cc0_setter = prev_nonnote_insn (insn);
5245 if (maybe_cc0_setter
5246 && INSN_P (maybe_cc0_setter)
5247 && sets_cc0_p (PATTERN (maybe_cc0_setter)))
5248 insn = maybe_cc0_setter;
5250 #endif
5251 /* FIXME: What if something in cc0/jump uses value set in new insn? */
5252 new_insn = emit_insn_before (pat, insn);
5255 /* Likewise if the last insn is a call, as will happen in the presence
5256 of exception handling. */
5257 else if (GET_CODE (insn) == CALL_INSN
5258 && (bb->succ->succ_next || (bb->succ->flags & EDGE_ABNORMAL)))
5260 /* Keeping in mind SMALL_REGISTER_CLASSES and parameters in registers,
5261 we search backward and place the instructions before the first
5262 parameter is loaded. Do this for everyone for consistency and a
5263 presumption that we'll get better code elsewhere as well.
5265 It should always be the case that we can put these instructions
5266 anywhere in the basic block with performing PRE optimizations.
5267 Check this. */
5269 if (pre
5270 && !TEST_BIT (antloc[bb->index], expr->bitmap_index)
5271 && !TEST_BIT (transp[bb->index], expr->bitmap_index))
5272 abort ();
5274 /* Since different machines initialize their parameter registers
5275 in different orders, assume nothing. Collect the set of all
5276 parameter registers. */
5277 insn = find_first_parameter_load (insn, BB_HEAD (bb));
5279 /* If we found all the parameter loads, then we want to insert
5280 before the first parameter load.
5282 If we did not find all the parameter loads, then we might have
5283 stopped on the head of the block, which could be a CODE_LABEL.
5284 If we inserted before the CODE_LABEL, then we would be putting
5285 the insn in the wrong basic block. In that case, put the insn
5286 after the CODE_LABEL. Also, respect NOTE_INSN_BASIC_BLOCK. */
5287 while (GET_CODE (insn) == CODE_LABEL
5288 || NOTE_INSN_BASIC_BLOCK_P (insn))
5289 insn = NEXT_INSN (insn);
5291 new_insn = emit_insn_before (pat, insn);
5293 else
5294 new_insn = emit_insn_after (pat, insn);
5296 while (1)
5298 if (INSN_P (pat))
5300 add_label_notes (PATTERN (pat), new_insn);
5301 note_stores (PATTERN (pat), record_set_info, pat);
5303 if (pat == pat_end)
5304 break;
5305 pat = NEXT_INSN (pat);
5308 gcse_create_count++;
5310 if (gcse_file)
5312 fprintf (gcse_file, "PRE/HOIST: end of bb %d, insn %d, ",
5313 bb->index, INSN_UID (new_insn));
5314 fprintf (gcse_file, "copying expression %d to reg %d\n",
5315 expr->bitmap_index, regno);
5319 /* Insert partially redundant expressions on edges in the CFG to make
5320 the expressions fully redundant. */
5322 static int
5323 pre_edge_insert (struct edge_list *edge_list, struct expr **index_map)
5325 int e, i, j, num_edges, set_size, did_insert = 0;
5326 sbitmap *inserted;
5328 /* Where PRE_INSERT_MAP is nonzero, we add the expression on that edge
5329 if it reaches any of the deleted expressions. */
5331 set_size = pre_insert_map[0]->size;
5332 num_edges = NUM_EDGES (edge_list);
5333 inserted = sbitmap_vector_alloc (num_edges, expr_hash_table.n_elems);
5334 sbitmap_vector_zero (inserted, num_edges);
5336 for (e = 0; e < num_edges; e++)
5338 int indx;
5339 basic_block bb = INDEX_EDGE_PRED_BB (edge_list, e);
5341 for (i = indx = 0; i < set_size; i++, indx += SBITMAP_ELT_BITS)
5343 SBITMAP_ELT_TYPE insert = pre_insert_map[e]->elms[i];
5345 for (j = indx; insert && j < (int) expr_hash_table.n_elems; j++, insert >>= 1)
5346 if ((insert & 1) != 0 && index_map[j]->reaching_reg != NULL_RTX)
5348 struct expr *expr = index_map[j];
5349 struct occr *occr;
5351 /* Now look at each deleted occurrence of this expression. */
5352 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
5354 if (! occr->deleted_p)
5355 continue;
5357 /* Insert this expression on this edge if if it would
5358 reach the deleted occurrence in BB. */
5359 if (!TEST_BIT (inserted[e], j))
5361 rtx insn;
5362 edge eg = INDEX_EDGE (edge_list, e);
5364 /* We can't insert anything on an abnormal and
5365 critical edge, so we insert the insn at the end of
5366 the previous block. There are several alternatives
5367 detailed in Morgans book P277 (sec 10.5) for
5368 handling this situation. This one is easiest for
5369 now. */
5371 if ((eg->flags & EDGE_ABNORMAL) == EDGE_ABNORMAL)
5372 insert_insn_end_bb (index_map[j], bb, 0);
5373 else
5375 insn = process_insert_insn (index_map[j]);
5376 insert_insn_on_edge (insn, eg);
5379 if (gcse_file)
5381 fprintf (gcse_file, "PRE/HOIST: edge (%d,%d), ",
5382 bb->index,
5383 INDEX_EDGE_SUCC_BB (edge_list, e)->index);
5384 fprintf (gcse_file, "copy expression %d\n",
5385 expr->bitmap_index);
5388 update_ld_motion_stores (expr);
5389 SET_BIT (inserted[e], j);
5390 did_insert = 1;
5391 gcse_create_count++;
5398 sbitmap_vector_free (inserted);
5399 return did_insert;
5402 /* Copy the result of EXPR->EXPR generated by INSN to EXPR->REACHING_REG.
5403 Given "old_reg <- expr" (INSN), instead of adding after it
5404 reaching_reg <- old_reg
5405 it's better to do the following:
5406 reaching_reg <- expr
5407 old_reg <- reaching_reg
5408 because this way copy propagation can discover additional PRE
5409 opportunities. But if this fails, we try the old way.
5410 When "expr" is a store, i.e.
5411 given "MEM <- old_reg", instead of adding after it
5412 reaching_reg <- old_reg
5413 it's better to add it before as follows:
5414 reaching_reg <- old_reg
5415 MEM <- reaching_reg. */
5417 static void
5418 pre_insert_copy_insn (struct expr *expr, rtx insn)
5420 rtx reg = expr->reaching_reg;
5421 int regno = REGNO (reg);
5422 int indx = expr->bitmap_index;
5423 rtx pat = PATTERN (insn);
5424 rtx set, new_insn;
5425 rtx old_reg;
5426 int i;
5428 /* This block matches the logic in hash_scan_insn. */
5429 if (GET_CODE (pat) == SET)
5430 set = pat;
5431 else if (GET_CODE (pat) == PARALLEL)
5433 /* Search through the parallel looking for the set whose
5434 source was the expression that we're interested in. */
5435 set = NULL_RTX;
5436 for (i = 0; i < XVECLEN (pat, 0); i++)
5438 rtx x = XVECEXP (pat, 0, i);
5439 if (GET_CODE (x) == SET
5440 && expr_equiv_p (SET_SRC (x), expr->expr))
5442 set = x;
5443 break;
5447 else
5448 abort ();
5450 if (GET_CODE (SET_DEST (set)) == REG)
5452 old_reg = SET_DEST (set);
5453 /* Check if we can modify the set destination in the original insn. */
5454 if (validate_change (insn, &SET_DEST (set), reg, 0))
5456 new_insn = gen_move_insn (old_reg, reg);
5457 new_insn = emit_insn_after (new_insn, insn);
5459 /* Keep register set table up to date. */
5460 replace_one_set (REGNO (old_reg), insn, new_insn);
5461 record_one_set (regno, insn);
5463 else
5465 new_insn = gen_move_insn (reg, old_reg);
5466 new_insn = emit_insn_after (new_insn, insn);
5468 /* Keep register set table up to date. */
5469 record_one_set (regno, new_insn);
5472 else /* This is possible only in case of a store to memory. */
5474 old_reg = SET_SRC (set);
5475 new_insn = gen_move_insn (reg, old_reg);
5477 /* Check if we can modify the set source in the original insn. */
5478 if (validate_change (insn, &SET_SRC (set), reg, 0))
5479 new_insn = emit_insn_before (new_insn, insn);
5480 else
5481 new_insn = emit_insn_after (new_insn, insn);
5483 /* Keep register set table up to date. */
5484 record_one_set (regno, new_insn);
5487 gcse_create_count++;
5489 if (gcse_file)
5490 fprintf (gcse_file,
5491 "PRE: bb %d, insn %d, copy expression %d in insn %d to reg %d\n",
5492 BLOCK_NUM (insn), INSN_UID (new_insn), indx,
5493 INSN_UID (insn), regno);
5496 /* Copy available expressions that reach the redundant expression
5497 to `reaching_reg'. */
5499 static void
5500 pre_insert_copies (void)
5502 unsigned int i, added_copy;
5503 struct expr *expr;
5504 struct occr *occr;
5505 struct occr *avail;
5507 /* For each available expression in the table, copy the result to
5508 `reaching_reg' if the expression reaches a deleted one.
5510 ??? The current algorithm is rather brute force.
5511 Need to do some profiling. */
5513 for (i = 0; i < expr_hash_table.size; i++)
5514 for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash)
5516 /* If the basic block isn't reachable, PPOUT will be TRUE. However,
5517 we don't want to insert a copy here because the expression may not
5518 really be redundant. So only insert an insn if the expression was
5519 deleted. This test also avoids further processing if the
5520 expression wasn't deleted anywhere. */
5521 if (expr->reaching_reg == NULL)
5522 continue;
5524 /* Set when we add a copy for that expression. */
5525 added_copy = 0;
5527 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
5529 if (! occr->deleted_p)
5530 continue;
5532 for (avail = expr->avail_occr; avail != NULL; avail = avail->next)
5534 rtx insn = avail->insn;
5536 /* No need to handle this one if handled already. */
5537 if (avail->copied_p)
5538 continue;
5540 /* Don't handle this one if it's a redundant one. */
5541 if (TEST_BIT (pre_redundant_insns, INSN_CUID (insn)))
5542 continue;
5544 /* Or if the expression doesn't reach the deleted one. */
5545 if (! pre_expr_reaches_here_p (BLOCK_FOR_INSN (avail->insn),
5546 expr,
5547 BLOCK_FOR_INSN (occr->insn)))
5548 continue;
5550 added_copy = 1;
5552 /* Copy the result of avail to reaching_reg. */
5553 pre_insert_copy_insn (expr, insn);
5554 avail->copied_p = 1;
5558 if (added_copy)
5559 update_ld_motion_stores (expr);
5563 /* Emit move from SRC to DEST noting the equivalence with expression computed
5564 in INSN. */
5565 static rtx
5566 gcse_emit_move_after (rtx src, rtx dest, rtx insn)
5568 rtx new;
5569 rtx set = single_set (insn), set2;
5570 rtx note;
5571 rtx eqv;
5573 /* This should never fail since we're creating a reg->reg copy
5574 we've verified to be valid. */
5576 new = emit_insn_after (gen_move_insn (dest, src), insn);
5578 /* Note the equivalence for local CSE pass. */
5579 set2 = single_set (new);
5580 if (!set2 || !rtx_equal_p (SET_DEST (set2), dest))
5581 return new;
5582 if ((note = find_reg_equal_equiv_note (insn)))
5583 eqv = XEXP (note, 0);
5584 else
5585 eqv = SET_SRC (set);
5587 set_unique_reg_note (new, REG_EQUAL, copy_insn_1 (eqv));
5589 return new;
5592 /* Delete redundant computations.
5593 Deletion is done by changing the insn to copy the `reaching_reg' of
5594 the expression into the result of the SET. It is left to later passes
5595 (cprop, cse2, flow, combine, regmove) to propagate the copy or eliminate it.
5597 Returns nonzero if a change is made. */
5599 static int
5600 pre_delete (void)
5602 unsigned int i;
5603 int changed;
5604 struct expr *expr;
5605 struct occr *occr;
5607 changed = 0;
5608 for (i = 0; i < expr_hash_table.size; i++)
5609 for (expr = expr_hash_table.table[i];
5610 expr != NULL;
5611 expr = expr->next_same_hash)
5613 int indx = expr->bitmap_index;
5615 /* We only need to search antic_occr since we require
5616 ANTLOC != 0. */
5618 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
5620 rtx insn = occr->insn;
5621 rtx set;
5622 basic_block bb = BLOCK_FOR_INSN (insn);
5624 /* We only delete insns that have a single_set. */
5625 if (TEST_BIT (pre_delete_map[bb->index], indx)
5626 && (set = single_set (insn)) != 0)
5628 /* Create a pseudo-reg to store the result of reaching
5629 expressions into. Get the mode for the new pseudo from
5630 the mode of the original destination pseudo. */
5631 if (expr->reaching_reg == NULL)
5632 expr->reaching_reg
5633 = gen_reg_rtx (GET_MODE (SET_DEST (set)));
5635 gcse_emit_move_after (expr->reaching_reg, SET_DEST (set), insn);
5636 delete_insn (insn);
5637 occr->deleted_p = 1;
5638 SET_BIT (pre_redundant_insns, INSN_CUID (insn));
5639 changed = 1;
5640 gcse_subst_count++;
5642 if (gcse_file)
5644 fprintf (gcse_file,
5645 "PRE: redundant insn %d (expression %d) in ",
5646 INSN_UID (insn), indx);
5647 fprintf (gcse_file, "bb %d, reaching reg is %d\n",
5648 bb->index, REGNO (expr->reaching_reg));
5654 return changed;
5657 /* Perform GCSE optimizations using PRE.
5658 This is called by one_pre_gcse_pass after all the dataflow analysis
5659 has been done.
5661 This is based on the original Morel-Renvoise paper Fred Chow's thesis, and
5662 lazy code motion from Knoop, Ruthing and Steffen as described in Advanced
5663 Compiler Design and Implementation.
5665 ??? A new pseudo reg is created to hold the reaching expression. The nice
5666 thing about the classical approach is that it would try to use an existing
5667 reg. If the register can't be adequately optimized [i.e. we introduce
5668 reload problems], one could add a pass here to propagate the new register
5669 through the block.
5671 ??? We don't handle single sets in PARALLELs because we're [currently] not
5672 able to copy the rest of the parallel when we insert copies to create full
5673 redundancies from partial redundancies. However, there's no reason why we
5674 can't handle PARALLELs in the cases where there are no partial
5675 redundancies. */
5677 static int
5678 pre_gcse (void)
5680 unsigned int i;
5681 int did_insert, changed;
5682 struct expr **index_map;
5683 struct expr *expr;
5685 /* Compute a mapping from expression number (`bitmap_index') to
5686 hash table entry. */
5688 index_map = xcalloc (expr_hash_table.n_elems, sizeof (struct expr *));
5689 for (i = 0; i < expr_hash_table.size; i++)
5690 for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash)
5691 index_map[expr->bitmap_index] = expr;
5693 /* Reset bitmap used to track which insns are redundant. */
5694 pre_redundant_insns = sbitmap_alloc (max_cuid);
5695 sbitmap_zero (pre_redundant_insns);
5697 /* Delete the redundant insns first so that
5698 - we know what register to use for the new insns and for the other
5699 ones with reaching expressions
5700 - we know which insns are redundant when we go to create copies */
5702 changed = pre_delete ();
5704 did_insert = pre_edge_insert (edge_list, index_map);
5706 /* In other places with reaching expressions, copy the expression to the
5707 specially allocated pseudo-reg that reaches the redundant expr. */
5708 pre_insert_copies ();
5709 if (did_insert)
5711 commit_edge_insertions ();
5712 changed = 1;
5715 free (index_map);
5716 sbitmap_free (pre_redundant_insns);
5717 return changed;
5720 /* Top level routine to perform one PRE GCSE pass.
5722 Return nonzero if a change was made. */
5724 static int
5725 one_pre_gcse_pass (int pass)
5727 int changed = 0;
5729 gcse_subst_count = 0;
5730 gcse_create_count = 0;
5732 alloc_hash_table (max_cuid, &expr_hash_table, 0);
5733 add_noreturn_fake_exit_edges ();
5734 if (flag_gcse_lm)
5735 compute_ld_motion_mems ();
5737 compute_hash_table (&expr_hash_table);
5738 trim_ld_motion_mems ();
5739 if (gcse_file)
5740 dump_hash_table (gcse_file, "Expression", &expr_hash_table);
5742 if (expr_hash_table.n_elems > 0)
5744 alloc_pre_mem (last_basic_block, expr_hash_table.n_elems);
5745 compute_pre_data ();
5746 changed |= pre_gcse ();
5747 free_edge_list (edge_list);
5748 free_pre_mem ();
5751 free_ldst_mems ();
5752 remove_fake_edges ();
5753 free_hash_table (&expr_hash_table);
5755 if (gcse_file)
5757 fprintf (gcse_file, "\nPRE GCSE of %s, pass %d: %d bytes needed, ",
5758 current_function_name, pass, bytes_used);
5759 fprintf (gcse_file, "%d substs, %d insns created\n",
5760 gcse_subst_count, gcse_create_count);
5763 return changed;
5766 /* If X contains any LABEL_REF's, add REG_LABEL notes for them to INSN.
5767 If notes are added to an insn which references a CODE_LABEL, the
5768 LABEL_NUSES count is incremented. We have to add REG_LABEL notes,
5769 because the following loop optimization pass requires them. */
5771 /* ??? This is very similar to the loop.c add_label_notes function. We
5772 could probably share code here. */
5774 /* ??? If there was a jump optimization pass after gcse and before loop,
5775 then we would not need to do this here, because jump would add the
5776 necessary REG_LABEL notes. */
5778 static void
5779 add_label_notes (rtx x, rtx insn)
5781 enum rtx_code code = GET_CODE (x);
5782 int i, j;
5783 const char *fmt;
5785 if (code == LABEL_REF && !LABEL_REF_NONLOCAL_P (x))
5787 /* This code used to ignore labels that referred to dispatch tables to
5788 avoid flow generating (slightly) worse code.
5790 We no longer ignore such label references (see LABEL_REF handling in
5791 mark_jump_label for additional information). */
5793 REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_LABEL, XEXP (x, 0),
5794 REG_NOTES (insn));
5795 if (LABEL_P (XEXP (x, 0)))
5796 LABEL_NUSES (XEXP (x, 0))++;
5797 return;
5800 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
5802 if (fmt[i] == 'e')
5803 add_label_notes (XEXP (x, i), insn);
5804 else if (fmt[i] == 'E')
5805 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
5806 add_label_notes (XVECEXP (x, i, j), insn);
5810 /* Compute transparent outgoing information for each block.
5812 An expression is transparent to an edge unless it is killed by
5813 the edge itself. This can only happen with abnormal control flow,
5814 when the edge is traversed through a call. This happens with
5815 non-local labels and exceptions.
5817 This would not be necessary if we split the edge. While this is
5818 normally impossible for abnormal critical edges, with some effort
5819 it should be possible with exception handling, since we still have
5820 control over which handler should be invoked. But due to increased
5821 EH table sizes, this may not be worthwhile. */
5823 static void
5824 compute_transpout (void)
5826 basic_block bb;
5827 unsigned int i;
5828 struct expr *expr;
5830 sbitmap_vector_ones (transpout, last_basic_block);
5832 FOR_EACH_BB (bb)
5834 /* Note that flow inserted a nop a the end of basic blocks that
5835 end in call instructions for reasons other than abnormal
5836 control flow. */
5837 if (GET_CODE (BB_END (bb)) != CALL_INSN)
5838 continue;
5840 for (i = 0; i < expr_hash_table.size; i++)
5841 for (expr = expr_hash_table.table[i]; expr ; expr = expr->next_same_hash)
5842 if (GET_CODE (expr->expr) == MEM)
5844 if (GET_CODE (XEXP (expr->expr, 0)) == SYMBOL_REF
5845 && CONSTANT_POOL_ADDRESS_P (XEXP (expr->expr, 0)))
5846 continue;
5848 /* ??? Optimally, we would use interprocedural alias
5849 analysis to determine if this mem is actually killed
5850 by this call. */
5851 RESET_BIT (transpout[bb->index], expr->bitmap_index);
5856 /* Removal of useless null pointer checks */
5858 /* Called via note_stores. X is set by SETTER. If X is a register we must
5859 invalidate nonnull_local and set nonnull_killed. DATA is really a
5860 `null_pointer_info *'.
5862 We ignore hard registers. */
5864 static void
5865 invalidate_nonnull_info (rtx x, rtx setter ATTRIBUTE_UNUSED, void *data)
5867 unsigned int regno;
5868 struct null_pointer_info *npi = (struct null_pointer_info *) data;
5870 while (GET_CODE (x) == SUBREG)
5871 x = SUBREG_REG (x);
5873 /* Ignore anything that is not a register or is a hard register. */
5874 if (GET_CODE (x) != REG
5875 || REGNO (x) < npi->min_reg
5876 || REGNO (x) >= npi->max_reg)
5877 return;
5879 regno = REGNO (x) - npi->min_reg;
5881 RESET_BIT (npi->nonnull_local[npi->current_block->index], regno);
5882 SET_BIT (npi->nonnull_killed[npi->current_block->index], regno);
5885 /* Do null-pointer check elimination for the registers indicated in
5886 NPI. NONNULL_AVIN and NONNULL_AVOUT are pre-allocated sbitmaps;
5887 they are not our responsibility to free. */
5889 static int
5890 delete_null_pointer_checks_1 (unsigned int *block_reg, sbitmap *nonnull_avin,
5891 sbitmap *nonnull_avout,
5892 struct null_pointer_info *npi)
5894 basic_block bb, current_block;
5895 sbitmap *nonnull_local = npi->nonnull_local;
5896 sbitmap *nonnull_killed = npi->nonnull_killed;
5897 int something_changed = 0;
5899 /* Compute local properties, nonnull and killed. A register will have
5900 the nonnull property if at the end of the current block its value is
5901 known to be nonnull. The killed property indicates that somewhere in
5902 the block any information we had about the register is killed.
5904 Note that a register can have both properties in a single block. That
5905 indicates that it's killed, then later in the block a new value is
5906 computed. */
5907 sbitmap_vector_zero (nonnull_local, last_basic_block);
5908 sbitmap_vector_zero (nonnull_killed, last_basic_block);
5910 FOR_EACH_BB (current_block)
5912 rtx insn, stop_insn;
5914 /* Set the current block for invalidate_nonnull_info. */
5915 npi->current_block = current_block;
5917 /* Scan each insn in the basic block looking for memory references and
5918 register sets. */
5919 stop_insn = NEXT_INSN (BB_HEAD (current_block));
5920 for (insn = BB_HEAD (current_block);
5921 insn != stop_insn;
5922 insn = NEXT_INSN (insn))
5924 rtx set;
5925 rtx reg;
5927 /* Ignore anything that is not a normal insn. */
5928 if (! INSN_P (insn))
5929 continue;
5931 /* Basically ignore anything that is not a simple SET. We do have
5932 to make sure to invalidate nonnull_local and set nonnull_killed
5933 for such insns though. */
5934 set = single_set (insn);
5935 if (!set)
5937 note_stores (PATTERN (insn), invalidate_nonnull_info, npi);
5938 continue;
5941 /* See if we've got a usable memory load. We handle it first
5942 in case it uses its address register as a dest (which kills
5943 the nonnull property). */
5944 if (GET_CODE (SET_SRC (set)) == MEM
5945 && GET_CODE ((reg = XEXP (SET_SRC (set), 0))) == REG
5946 && REGNO (reg) >= npi->min_reg
5947 && REGNO (reg) < npi->max_reg)
5948 SET_BIT (nonnull_local[current_block->index],
5949 REGNO (reg) - npi->min_reg);
5951 /* Now invalidate stuff clobbered by this insn. */
5952 note_stores (PATTERN (insn), invalidate_nonnull_info, npi);
5954 /* And handle stores, we do these last since any sets in INSN can
5955 not kill the nonnull property if it is derived from a MEM
5956 appearing in a SET_DEST. */
5957 if (GET_CODE (SET_DEST (set)) == MEM
5958 && GET_CODE ((reg = XEXP (SET_DEST (set), 0))) == REG
5959 && REGNO (reg) >= npi->min_reg
5960 && REGNO (reg) < npi->max_reg)
5961 SET_BIT (nonnull_local[current_block->index],
5962 REGNO (reg) - npi->min_reg);
5966 /* Now compute global properties based on the local properties. This
5967 is a classic global availability algorithm. */
5968 compute_available (nonnull_local, nonnull_killed,
5969 nonnull_avout, nonnull_avin);
5971 /* Now look at each bb and see if it ends with a compare of a value
5972 against zero. */
5973 FOR_EACH_BB (bb)
5975 rtx last_insn = BB_END (bb);
5976 rtx condition, earliest;
5977 int compare_and_branch;
5979 /* Since MIN_REG is always at least FIRST_PSEUDO_REGISTER, and
5980 since BLOCK_REG[BB] is zero if this block did not end with a
5981 comparison against zero, this condition works. */
5982 if (block_reg[bb->index] < npi->min_reg
5983 || block_reg[bb->index] >= npi->max_reg)
5984 continue;
5986 /* LAST_INSN is a conditional jump. Get its condition. */
5987 condition = get_condition (last_insn, &earliest, false);
5989 /* If we can't determine the condition then skip. */
5990 if (! condition)
5991 continue;
5993 /* Is the register known to have a nonzero value? */
5994 if (!TEST_BIT (nonnull_avout[bb->index], block_reg[bb->index] - npi->min_reg))
5995 continue;
5997 /* Try to compute whether the compare/branch at the loop end is one or
5998 two instructions. */
5999 if (earliest == last_insn)
6000 compare_and_branch = 1;
6001 else if (earliest == prev_nonnote_insn (last_insn))
6002 compare_and_branch = 2;
6003 else
6004 continue;
6006 /* We know the register in this comparison is nonnull at exit from
6007 this block. We can optimize this comparison. */
6008 if (GET_CODE (condition) == NE)
6010 rtx new_jump;
6012 new_jump = emit_jump_insn_after (gen_jump (JUMP_LABEL (last_insn)),
6013 last_insn);
6014 JUMP_LABEL (new_jump) = JUMP_LABEL (last_insn);
6015 LABEL_NUSES (JUMP_LABEL (new_jump))++;
6016 emit_barrier_after (new_jump);
6019 something_changed = 1;
6020 delete_insn (last_insn);
6021 if (compare_and_branch == 2)
6022 delete_insn (earliest);
6023 purge_dead_edges (bb);
6025 /* Don't check this block again. (Note that BB_END is
6026 invalid here; we deleted the last instruction in the
6027 block.) */
6028 block_reg[bb->index] = 0;
6031 return something_changed;
6034 /* Find EQ/NE comparisons against zero which can be (indirectly) evaluated
6035 at compile time.
6037 This is conceptually similar to global constant/copy propagation and
6038 classic global CSE (it even uses the same dataflow equations as cprop).
6040 If a register is used as memory address with the form (mem (reg)), then we
6041 know that REG can not be zero at that point in the program. Any instruction
6042 which sets REG "kills" this property.
6044 So, if every path leading to a conditional branch has an available memory
6045 reference of that form, then we know the register can not have the value
6046 zero at the conditional branch.
6048 So we merely need to compute the local properties and propagate that data
6049 around the cfg, then optimize where possible.
6051 We run this pass two times. Once before CSE, then again after CSE. This
6052 has proven to be the most profitable approach. It is rare for new
6053 optimization opportunities of this nature to appear after the first CSE
6054 pass.
6056 This could probably be integrated with global cprop with a little work. */
6059 delete_null_pointer_checks (rtx f ATTRIBUTE_UNUSED)
6061 sbitmap *nonnull_avin, *nonnull_avout;
6062 unsigned int *block_reg;
6063 basic_block bb;
6064 int reg;
6065 int regs_per_pass;
6066 int max_reg = max_reg_num ();
6067 struct null_pointer_info npi;
6068 int something_changed = 0;
6070 /* If we have only a single block, or it is too expensive, give up. */
6071 if (n_basic_blocks <= 1
6072 || is_too_expensive (_ ("NULL pointer checks disabled")))
6073 return 0;
6075 /* We need four bitmaps, each with a bit for each register in each
6076 basic block. */
6077 regs_per_pass = get_bitmap_width (4, last_basic_block, max_reg);
6079 /* Allocate bitmaps to hold local and global properties. */
6080 npi.nonnull_local = sbitmap_vector_alloc (last_basic_block, regs_per_pass);
6081 npi.nonnull_killed = sbitmap_vector_alloc (last_basic_block, regs_per_pass);
6082 nonnull_avin = sbitmap_vector_alloc (last_basic_block, regs_per_pass);
6083 nonnull_avout = sbitmap_vector_alloc (last_basic_block, regs_per_pass);
6085 /* Go through the basic blocks, seeing whether or not each block
6086 ends with a conditional branch whose condition is a comparison
6087 against zero. Record the register compared in BLOCK_REG. */
6088 block_reg = xcalloc (last_basic_block, sizeof (int));
6089 FOR_EACH_BB (bb)
6091 rtx last_insn = BB_END (bb);
6092 rtx condition, earliest, reg;
6094 /* We only want conditional branches. */
6095 if (GET_CODE (last_insn) != JUMP_INSN
6096 || !any_condjump_p (last_insn)
6097 || !onlyjump_p (last_insn))
6098 continue;
6100 /* LAST_INSN is a conditional jump. Get its condition. */
6101 condition = get_condition (last_insn, &earliest, false);
6103 /* If we were unable to get the condition, or it is not an equality
6104 comparison against zero then there's nothing we can do. */
6105 if (!condition
6106 || (GET_CODE (condition) != NE && GET_CODE (condition) != EQ)
6107 || GET_CODE (XEXP (condition, 1)) != CONST_INT
6108 || (XEXP (condition, 1)
6109 != CONST0_RTX (GET_MODE (XEXP (condition, 0)))))
6110 continue;
6112 /* We must be checking a register against zero. */
6113 reg = XEXP (condition, 0);
6114 if (GET_CODE (reg) != REG)
6115 continue;
6117 block_reg[bb->index] = REGNO (reg);
6120 /* Go through the algorithm for each block of registers. */
6121 for (reg = FIRST_PSEUDO_REGISTER; reg < max_reg; reg += regs_per_pass)
6123 npi.min_reg = reg;
6124 npi.max_reg = MIN (reg + regs_per_pass, max_reg);
6125 something_changed |= delete_null_pointer_checks_1 (block_reg,
6126 nonnull_avin,
6127 nonnull_avout,
6128 &npi);
6131 /* Free the table of registers compared at the end of every block. */
6132 free (block_reg);
6134 /* Free bitmaps. */
6135 sbitmap_vector_free (npi.nonnull_local);
6136 sbitmap_vector_free (npi.nonnull_killed);
6137 sbitmap_vector_free (nonnull_avin);
6138 sbitmap_vector_free (nonnull_avout);
6140 return something_changed;
6143 /* Code Hoisting variables and subroutines. */
6145 /* Very busy expressions. */
6146 static sbitmap *hoist_vbein;
6147 static sbitmap *hoist_vbeout;
6149 /* Hoistable expressions. */
6150 static sbitmap *hoist_exprs;
6152 /* Dominator bitmaps. */
6153 dominance_info dominators;
6155 /* ??? We could compute post dominators and run this algorithm in
6156 reverse to perform tail merging, doing so would probably be
6157 more effective than the tail merging code in jump.c.
6159 It's unclear if tail merging could be run in parallel with
6160 code hoisting. It would be nice. */
6162 /* Allocate vars used for code hoisting analysis. */
6164 static void
6165 alloc_code_hoist_mem (int n_blocks, int n_exprs)
6167 antloc = sbitmap_vector_alloc (n_blocks, n_exprs);
6168 transp = sbitmap_vector_alloc (n_blocks, n_exprs);
6169 comp = sbitmap_vector_alloc (n_blocks, n_exprs);
6171 hoist_vbein = sbitmap_vector_alloc (n_blocks, n_exprs);
6172 hoist_vbeout = sbitmap_vector_alloc (n_blocks, n_exprs);
6173 hoist_exprs = sbitmap_vector_alloc (n_blocks, n_exprs);
6174 transpout = sbitmap_vector_alloc (n_blocks, n_exprs);
6177 /* Free vars used for code hoisting analysis. */
6179 static void
6180 free_code_hoist_mem (void)
6182 sbitmap_vector_free (antloc);
6183 sbitmap_vector_free (transp);
6184 sbitmap_vector_free (comp);
6186 sbitmap_vector_free (hoist_vbein);
6187 sbitmap_vector_free (hoist_vbeout);
6188 sbitmap_vector_free (hoist_exprs);
6189 sbitmap_vector_free (transpout);
6191 free_dominance_info (dominators);
6194 /* Compute the very busy expressions at entry/exit from each block.
6196 An expression is very busy if all paths from a given point
6197 compute the expression. */
6199 static void
6200 compute_code_hoist_vbeinout (void)
6202 int changed, passes;
6203 basic_block bb;
6205 sbitmap_vector_zero (hoist_vbeout, last_basic_block);
6206 sbitmap_vector_zero (hoist_vbein, last_basic_block);
6208 passes = 0;
6209 changed = 1;
6211 while (changed)
6213 changed = 0;
6215 /* We scan the blocks in the reverse order to speed up
6216 the convergence. */
6217 FOR_EACH_BB_REVERSE (bb)
6219 changed |= sbitmap_a_or_b_and_c_cg (hoist_vbein[bb->index], antloc[bb->index],
6220 hoist_vbeout[bb->index], transp[bb->index]);
6221 if (bb->next_bb != EXIT_BLOCK_PTR)
6222 sbitmap_intersection_of_succs (hoist_vbeout[bb->index], hoist_vbein, bb->index);
6225 passes++;
6228 if (gcse_file)
6229 fprintf (gcse_file, "hoisting vbeinout computation: %d passes\n", passes);
6232 /* Top level routine to do the dataflow analysis needed by code hoisting. */
6234 static void
6235 compute_code_hoist_data (void)
6237 compute_local_properties (transp, comp, antloc, &expr_hash_table);
6238 compute_transpout ();
6239 compute_code_hoist_vbeinout ();
6240 dominators = calculate_dominance_info (CDI_DOMINATORS);
6241 if (gcse_file)
6242 fprintf (gcse_file, "\n");
6245 /* Determine if the expression identified by EXPR_INDEX would
6246 reach BB unimpared if it was placed at the end of EXPR_BB.
6248 It's unclear exactly what Muchnick meant by "unimpared". It seems
6249 to me that the expression must either be computed or transparent in
6250 *every* block in the path(s) from EXPR_BB to BB. Any other definition
6251 would allow the expression to be hoisted out of loops, even if
6252 the expression wasn't a loop invariant.
6254 Contrast this to reachability for PRE where an expression is
6255 considered reachable if *any* path reaches instead of *all*
6256 paths. */
6258 static int
6259 hoist_expr_reaches_here_p (basic_block expr_bb, int expr_index, basic_block bb, char *visited)
6261 edge pred;
6262 int visited_allocated_locally = 0;
6265 if (visited == NULL)
6267 visited_allocated_locally = 1;
6268 visited = xcalloc (last_basic_block, 1);
6271 for (pred = bb->pred; pred != NULL; pred = pred->pred_next)
6273 basic_block pred_bb = pred->src;
6275 if (pred->src == ENTRY_BLOCK_PTR)
6276 break;
6277 else if (pred_bb == expr_bb)
6278 continue;
6279 else if (visited[pred_bb->index])
6280 continue;
6282 /* Does this predecessor generate this expression? */
6283 else if (TEST_BIT (comp[pred_bb->index], expr_index))
6284 break;
6285 else if (! TEST_BIT (transp[pred_bb->index], expr_index))
6286 break;
6288 /* Not killed. */
6289 else
6291 visited[pred_bb->index] = 1;
6292 if (! hoist_expr_reaches_here_p (expr_bb, expr_index,
6293 pred_bb, visited))
6294 break;
6297 if (visited_allocated_locally)
6298 free (visited);
6300 return (pred == NULL);
6303 /* Actually perform code hoisting. */
6305 static void
6306 hoist_code (void)
6308 basic_block bb, dominated;
6309 basic_block *domby;
6310 unsigned int domby_len;
6311 unsigned int i,j;
6312 struct expr **index_map;
6313 struct expr *expr;
6315 sbitmap_vector_zero (hoist_exprs, last_basic_block);
6317 /* Compute a mapping from expression number (`bitmap_index') to
6318 hash table entry. */
6320 index_map = xcalloc (expr_hash_table.n_elems, sizeof (struct expr *));
6321 for (i = 0; i < expr_hash_table.size; i++)
6322 for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash)
6323 index_map[expr->bitmap_index] = expr;
6325 /* Walk over each basic block looking for potentially hoistable
6326 expressions, nothing gets hoisted from the entry block. */
6327 FOR_EACH_BB (bb)
6329 int found = 0;
6330 int insn_inserted_p;
6332 domby_len = get_dominated_by (dominators, bb, &domby);
6333 /* Examine each expression that is very busy at the exit of this
6334 block. These are the potentially hoistable expressions. */
6335 for (i = 0; i < hoist_vbeout[bb->index]->n_bits; i++)
6337 int hoistable = 0;
6339 if (TEST_BIT (hoist_vbeout[bb->index], i)
6340 && TEST_BIT (transpout[bb->index], i))
6342 /* We've found a potentially hoistable expression, now
6343 we look at every block BB dominates to see if it
6344 computes the expression. */
6345 for (j = 0; j < domby_len; j++)
6347 dominated = domby[j];
6348 /* Ignore self dominance. */
6349 if (bb == dominated)
6350 continue;
6351 /* We've found a dominated block, now see if it computes
6352 the busy expression and whether or not moving that
6353 expression to the "beginning" of that block is safe. */
6354 if (!TEST_BIT (antloc[dominated->index], i))
6355 continue;
6357 /* Note if the expression would reach the dominated block
6358 unimpared if it was placed at the end of BB.
6360 Keep track of how many times this expression is hoistable
6361 from a dominated block into BB. */
6362 if (hoist_expr_reaches_here_p (bb, i, dominated, NULL))
6363 hoistable++;
6366 /* If we found more than one hoistable occurrence of this
6367 expression, then note it in the bitmap of expressions to
6368 hoist. It makes no sense to hoist things which are computed
6369 in only one BB, and doing so tends to pessimize register
6370 allocation. One could increase this value to try harder
6371 to avoid any possible code expansion due to register
6372 allocation issues; however experiments have shown that
6373 the vast majority of hoistable expressions are only movable
6374 from two successors, so raising this threshold is likely
6375 to nullify any benefit we get from code hoisting. */
6376 if (hoistable > 1)
6378 SET_BIT (hoist_exprs[bb->index], i);
6379 found = 1;
6383 /* If we found nothing to hoist, then quit now. */
6384 if (! found)
6386 free (domby);
6387 continue;
6390 /* Loop over all the hoistable expressions. */
6391 for (i = 0; i < hoist_exprs[bb->index]->n_bits; i++)
6393 /* We want to insert the expression into BB only once, so
6394 note when we've inserted it. */
6395 insn_inserted_p = 0;
6397 /* These tests should be the same as the tests above. */
6398 if (TEST_BIT (hoist_vbeout[bb->index], i))
6400 /* We've found a potentially hoistable expression, now
6401 we look at every block BB dominates to see if it
6402 computes the expression. */
6403 for (j = 0; j < domby_len; j++)
6405 dominated = domby[j];
6406 /* Ignore self dominance. */
6407 if (bb == dominated)
6408 continue;
6410 /* We've found a dominated block, now see if it computes
6411 the busy expression and whether or not moving that
6412 expression to the "beginning" of that block is safe. */
6413 if (!TEST_BIT (antloc[dominated->index], i))
6414 continue;
6416 /* The expression is computed in the dominated block and
6417 it would be safe to compute it at the start of the
6418 dominated block. Now we have to determine if the
6419 expression would reach the dominated block if it was
6420 placed at the end of BB. */
6421 if (hoist_expr_reaches_here_p (bb, i, dominated, NULL))
6423 struct expr *expr = index_map[i];
6424 struct occr *occr = expr->antic_occr;
6425 rtx insn;
6426 rtx set;
6428 /* Find the right occurrence of this expression. */
6429 while (BLOCK_FOR_INSN (occr->insn) != dominated && occr)
6430 occr = occr->next;
6432 /* Should never happen. */
6433 if (!occr)
6434 abort ();
6436 insn = occr->insn;
6438 set = single_set (insn);
6439 if (! set)
6440 abort ();
6442 /* Create a pseudo-reg to store the result of reaching
6443 expressions into. Get the mode for the new pseudo
6444 from the mode of the original destination pseudo. */
6445 if (expr->reaching_reg == NULL)
6446 expr->reaching_reg
6447 = gen_reg_rtx (GET_MODE (SET_DEST (set)));
6449 gcse_emit_move_after (expr->reaching_reg, SET_DEST (set), insn);
6450 delete_insn (insn);
6451 occr->deleted_p = 1;
6452 if (!insn_inserted_p)
6454 insert_insn_end_bb (index_map[i], bb, 0);
6455 insn_inserted_p = 1;
6461 free (domby);
6464 free (index_map);
6467 /* Top level routine to perform one code hoisting (aka unification) pass
6469 Return nonzero if a change was made. */
6471 static int
6472 one_code_hoisting_pass (void)
6474 int changed = 0;
6476 alloc_hash_table (max_cuid, &expr_hash_table, 0);
6477 compute_hash_table (&expr_hash_table);
6478 if (gcse_file)
6479 dump_hash_table (gcse_file, "Code Hosting Expressions", &expr_hash_table);
6481 if (expr_hash_table.n_elems > 0)
6483 alloc_code_hoist_mem (last_basic_block, expr_hash_table.n_elems);
6484 compute_code_hoist_data ();
6485 hoist_code ();
6486 free_code_hoist_mem ();
6489 free_hash_table (&expr_hash_table);
6491 return changed;
6494 /* Here we provide the things required to do store motion towards
6495 the exit. In order for this to be effective, gcse also needed to
6496 be taught how to move a load when it is kill only by a store to itself.
6498 int i;
6499 float a[10];
6501 void foo(float scale)
6503 for (i=0; i<10; i++)
6504 a[i] *= scale;
6507 'i' is both loaded and stored to in the loop. Normally, gcse cannot move
6508 the load out since its live around the loop, and stored at the bottom
6509 of the loop.
6511 The 'Load Motion' referred to and implemented in this file is
6512 an enhancement to gcse which when using edge based lcm, recognizes
6513 this situation and allows gcse to move the load out of the loop.
6515 Once gcse has hoisted the load, store motion can then push this
6516 load towards the exit, and we end up with no loads or stores of 'i'
6517 in the loop. */
6519 /* This will search the ldst list for a matching expression. If it
6520 doesn't find one, we create one and initialize it. */
6522 static struct ls_expr *
6523 ldst_entry (rtx x)
6525 int do_not_record_p = 0;
6526 struct ls_expr * ptr;
6527 unsigned int hash;
6529 hash = hash_expr_1 (x, GET_MODE (x), & do_not_record_p);
6531 for (ptr = pre_ldst_mems; ptr != NULL; ptr = ptr->next)
6532 if (ptr->hash_index == hash && expr_equiv_p (ptr->pattern, x))
6533 return ptr;
6535 ptr = xmalloc (sizeof (struct ls_expr));
6537 ptr->next = pre_ldst_mems;
6538 ptr->expr = NULL;
6539 ptr->pattern = x;
6540 ptr->pattern_regs = NULL_RTX;
6541 ptr->loads = NULL_RTX;
6542 ptr->stores = NULL_RTX;
6543 ptr->reaching_reg = NULL_RTX;
6544 ptr->invalid = 0;
6545 ptr->index = 0;
6546 ptr->hash_index = hash;
6547 pre_ldst_mems = ptr;
6549 return ptr;
6552 /* Free up an individual ldst entry. */
6554 static void
6555 free_ldst_entry (struct ls_expr * ptr)
6557 free_INSN_LIST_list (& ptr->loads);
6558 free_INSN_LIST_list (& ptr->stores);
6560 free (ptr);
6563 /* Free up all memory associated with the ldst list. */
6565 static void
6566 free_ldst_mems (void)
6568 while (pre_ldst_mems)
6570 struct ls_expr * tmp = pre_ldst_mems;
6572 pre_ldst_mems = pre_ldst_mems->next;
6574 free_ldst_entry (tmp);
6577 pre_ldst_mems = NULL;
6580 /* Dump debugging info about the ldst list. */
6582 static void
6583 print_ldst_list (FILE * file)
6585 struct ls_expr * ptr;
6587 fprintf (file, "LDST list: \n");
6589 for (ptr = first_ls_expr(); ptr != NULL; ptr = next_ls_expr (ptr))
6591 fprintf (file, " Pattern (%3d): ", ptr->index);
6593 print_rtl (file, ptr->pattern);
6595 fprintf (file, "\n Loads : ");
6597 if (ptr->loads)
6598 print_rtl (file, ptr->loads);
6599 else
6600 fprintf (file, "(nil)");
6602 fprintf (file, "\n Stores : ");
6604 if (ptr->stores)
6605 print_rtl (file, ptr->stores);
6606 else
6607 fprintf (file, "(nil)");
6609 fprintf (file, "\n\n");
6612 fprintf (file, "\n");
6615 /* Returns 1 if X is in the list of ldst only expressions. */
6617 static struct ls_expr *
6618 find_rtx_in_ldst (rtx x)
6620 struct ls_expr * ptr;
6622 for (ptr = pre_ldst_mems; ptr != NULL; ptr = ptr->next)
6623 if (expr_equiv_p (ptr->pattern, x) && ! ptr->invalid)
6624 return ptr;
6626 return NULL;
6629 /* Assign each element of the list of mems a monotonically increasing value. */
6631 static int
6632 enumerate_ldsts (void)
6634 struct ls_expr * ptr;
6635 int n = 0;
6637 for (ptr = pre_ldst_mems; ptr != NULL; ptr = ptr->next)
6638 ptr->index = n++;
6640 return n;
6643 /* Return first item in the list. */
6645 static inline struct ls_expr *
6646 first_ls_expr (void)
6648 return pre_ldst_mems;
6651 /* Return the next item in the list after the specified one. */
6653 static inline struct ls_expr *
6654 next_ls_expr (struct ls_expr * ptr)
6656 return ptr->next;
6659 /* Load Motion for loads which only kill themselves. */
6661 /* Return true if x is a simple MEM operation, with no registers or
6662 side effects. These are the types of loads we consider for the
6663 ld_motion list, otherwise we let the usual aliasing take care of it. */
6665 static int
6666 simple_mem (rtx x)
6668 if (GET_CODE (x) != MEM)
6669 return 0;
6671 if (MEM_VOLATILE_P (x))
6672 return 0;
6674 if (GET_MODE (x) == BLKmode)
6675 return 0;
6677 /* If we are handling exceptions, we must be careful with memory references
6678 that may trap. If we are not, the behavior is undefined, so we may just
6679 continue. */
6680 if (flag_non_call_exceptions && may_trap_p (x))
6681 return 0;
6683 if (side_effects_p (x))
6684 return 0;
6686 /* Do not consider function arguments passed on stack. */
6687 if (reg_mentioned_p (stack_pointer_rtx, x))
6688 return 0;
6690 if (flag_float_store && FLOAT_MODE_P (GET_MODE (x)))
6691 return 0;
6693 return 1;
6696 /* Make sure there isn't a buried reference in this pattern anywhere.
6697 If there is, invalidate the entry for it since we're not capable
6698 of fixing it up just yet.. We have to be sure we know about ALL
6699 loads since the aliasing code will allow all entries in the
6700 ld_motion list to not-alias itself. If we miss a load, we will get
6701 the wrong value since gcse might common it and we won't know to
6702 fix it up. */
6704 static void
6705 invalidate_any_buried_refs (rtx x)
6707 const char * fmt;
6708 int i, j;
6709 struct ls_expr * ptr;
6711 /* Invalidate it in the list. */
6712 if (GET_CODE (x) == MEM && simple_mem (x))
6714 ptr = ldst_entry (x);
6715 ptr->invalid = 1;
6718 /* Recursively process the insn. */
6719 fmt = GET_RTX_FORMAT (GET_CODE (x));
6721 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
6723 if (fmt[i] == 'e')
6724 invalidate_any_buried_refs (XEXP (x, i));
6725 else if (fmt[i] == 'E')
6726 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
6727 invalidate_any_buried_refs (XVECEXP (x, i, j));
6731 /* Find all the 'simple' MEMs which are used in LOADs and STORES. Simple
6732 being defined as MEM loads and stores to symbols, with no side effects
6733 and no registers in the expression. For a MEM destination, we also
6734 check that the insn is still valid if we replace the destination with a
6735 REG, as is done in update_ld_motion_stores. If there are any uses/defs
6736 which don't match this criteria, they are invalidated and trimmed out
6737 later. */
6739 static void
6740 compute_ld_motion_mems (void)
6742 struct ls_expr * ptr;
6743 basic_block bb;
6744 rtx insn;
6746 pre_ldst_mems = NULL;
6748 FOR_EACH_BB (bb)
6750 for (insn = BB_HEAD (bb);
6751 insn && insn != NEXT_INSN (BB_END (bb));
6752 insn = NEXT_INSN (insn))
6754 if (INSN_P (insn))
6756 if (GET_CODE (PATTERN (insn)) == SET)
6758 rtx src = SET_SRC (PATTERN (insn));
6759 rtx dest = SET_DEST (PATTERN (insn));
6761 /* Check for a simple LOAD... */
6762 if (GET_CODE (src) == MEM && simple_mem (src))
6764 ptr = ldst_entry (src);
6765 if (GET_CODE (dest) == REG)
6766 ptr->loads = alloc_INSN_LIST (insn, ptr->loads);
6767 else
6768 ptr->invalid = 1;
6770 else
6772 /* Make sure there isn't a buried load somewhere. */
6773 invalidate_any_buried_refs (src);
6776 /* Check for stores. Don't worry about aliased ones, they
6777 will block any movement we might do later. We only care
6778 about this exact pattern since those are the only
6779 circumstance that we will ignore the aliasing info. */
6780 if (GET_CODE (dest) == MEM && simple_mem (dest))
6782 ptr = ldst_entry (dest);
6784 if (GET_CODE (src) != MEM
6785 && GET_CODE (src) != ASM_OPERANDS
6786 /* Check for REG manually since want_to_gcse_p
6787 returns 0 for all REGs. */
6788 && (REG_P (src) || want_to_gcse_p (src)))
6789 ptr->stores = alloc_INSN_LIST (insn, ptr->stores);
6790 else
6791 ptr->invalid = 1;
6794 else
6795 invalidate_any_buried_refs (PATTERN (insn));
6801 /* Remove any references that have been either invalidated or are not in the
6802 expression list for pre gcse. */
6804 static void
6805 trim_ld_motion_mems (void)
6807 struct ls_expr * * last = & pre_ldst_mems;
6808 struct ls_expr * ptr = pre_ldst_mems;
6810 while (ptr != NULL)
6812 struct expr * expr;
6814 /* Delete if entry has been made invalid. */
6815 if (! ptr->invalid)
6817 /* Delete if we cannot find this mem in the expression list. */
6818 unsigned int hash = ptr->hash_index % expr_hash_table.size;
6820 for (expr = expr_hash_table.table[hash];
6821 expr != NULL;
6822 expr = expr->next_same_hash)
6823 if (expr_equiv_p (expr->expr, ptr->pattern))
6824 break;
6826 else
6827 expr = (struct expr *) 0;
6829 if (expr)
6831 /* Set the expression field if we are keeping it. */
6832 ptr->expr = expr;
6833 last = & ptr->next;
6834 ptr = ptr->next;
6836 else
6838 *last = ptr->next;
6839 free_ldst_entry (ptr);
6840 ptr = * last;
6844 /* Show the world what we've found. */
6845 if (gcse_file && pre_ldst_mems != NULL)
6846 print_ldst_list (gcse_file);
6849 /* This routine will take an expression which we are replacing with
6850 a reaching register, and update any stores that are needed if
6851 that expression is in the ld_motion list. Stores are updated by
6852 copying their SRC to the reaching register, and then storing
6853 the reaching register into the store location. These keeps the
6854 correct value in the reaching register for the loads. */
6856 static void
6857 update_ld_motion_stores (struct expr * expr)
6859 struct ls_expr * mem_ptr;
6861 if ((mem_ptr = find_rtx_in_ldst (expr->expr)))
6863 /* We can try to find just the REACHED stores, but is shouldn't
6864 matter to set the reaching reg everywhere... some might be
6865 dead and should be eliminated later. */
6867 /* We replace (set mem expr) with (set reg expr) (set mem reg)
6868 where reg is the reaching reg used in the load. We checked in
6869 compute_ld_motion_mems that we can replace (set mem expr) with
6870 (set reg expr) in that insn. */
6871 rtx list = mem_ptr->stores;
6873 for ( ; list != NULL_RTX; list = XEXP (list, 1))
6875 rtx insn = XEXP (list, 0);
6876 rtx pat = PATTERN (insn);
6877 rtx src = SET_SRC (pat);
6878 rtx reg = expr->reaching_reg;
6879 rtx copy, new;
6881 /* If we've already copied it, continue. */
6882 if (expr->reaching_reg == src)
6883 continue;
6885 if (gcse_file)
6887 fprintf (gcse_file, "PRE: store updated with reaching reg ");
6888 print_rtl (gcse_file, expr->reaching_reg);
6889 fprintf (gcse_file, ":\n ");
6890 print_inline_rtx (gcse_file, insn, 8);
6891 fprintf (gcse_file, "\n");
6894 copy = gen_move_insn ( reg, copy_rtx (SET_SRC (pat)));
6895 new = emit_insn_before (copy, insn);
6896 record_one_set (REGNO (reg), new);
6897 SET_SRC (pat) = reg;
6899 /* un-recognize this pattern since it's probably different now. */
6900 INSN_CODE (insn) = -1;
6901 gcse_create_count++;
6906 /* Store motion code. */
6908 #define ANTIC_STORE_LIST(x) ((x)->loads)
6909 #define AVAIL_STORE_LIST(x) ((x)->stores)
6910 #define LAST_AVAIL_CHECK_FAILURE(x) ((x)->reaching_reg)
6912 /* This is used to communicate the target bitvector we want to use in the
6913 reg_set_info routine when called via the note_stores mechanism. */
6914 static int * regvec;
6916 /* And current insn, for the same routine. */
6917 static rtx compute_store_table_current_insn;
6919 /* Used in computing the reverse edge graph bit vectors. */
6920 static sbitmap * st_antloc;
6922 /* Global holding the number of store expressions we are dealing with. */
6923 static int num_stores;
6925 /* Checks to set if we need to mark a register set. Called from
6926 note_stores. */
6928 static void
6929 reg_set_info (rtx dest, rtx setter ATTRIBUTE_UNUSED,
6930 void *data)
6932 sbitmap bb_reg = data;
6934 if (GET_CODE (dest) == SUBREG)
6935 dest = SUBREG_REG (dest);
6937 if (GET_CODE (dest) == REG)
6939 regvec[REGNO (dest)] = INSN_UID (compute_store_table_current_insn);
6940 if (bb_reg)
6941 SET_BIT (bb_reg, REGNO (dest));
6945 /* Clear any mark that says that this insn sets dest. Called from
6946 note_stores. */
6948 static void
6949 reg_clear_last_set (rtx dest, rtx setter ATTRIBUTE_UNUSED,
6950 void *data)
6952 int *dead_vec = data;
6954 if (GET_CODE (dest) == SUBREG)
6955 dest = SUBREG_REG (dest);
6957 if (GET_CODE (dest) == REG &&
6958 dead_vec[REGNO (dest)] == INSN_UID (compute_store_table_current_insn))
6959 dead_vec[REGNO (dest)] = 0;
6962 /* Return zero if some of the registers in list X are killed
6963 due to set of registers in bitmap REGS_SET. */
6965 static bool
6966 store_ops_ok (rtx x, int *regs_set)
6968 rtx reg;
6970 for (; x; x = XEXP (x, 1))
6972 reg = XEXP (x, 0);
6973 if (regs_set[REGNO(reg)])
6974 return false;
6977 return true;
6980 /* Returns a list of registers mentioned in X. */
6981 static rtx
6982 extract_mentioned_regs (rtx x)
6984 return extract_mentioned_regs_helper (x, NULL_RTX);
6987 /* Helper for extract_mentioned_regs; ACCUM is used to accumulate used
6988 registers. */
6989 static rtx
6990 extract_mentioned_regs_helper (rtx x, rtx accum)
6992 int i;
6993 enum rtx_code code;
6994 const char * fmt;
6996 /* Repeat is used to turn tail-recursion into iteration. */
6997 repeat:
6999 if (x == 0)
7000 return accum;
7002 code = GET_CODE (x);
7003 switch (code)
7005 case REG:
7006 return alloc_EXPR_LIST (0, x, accum);
7008 case MEM:
7009 x = XEXP (x, 0);
7010 goto repeat;
7012 case PRE_DEC:
7013 case PRE_INC:
7014 case POST_DEC:
7015 case POST_INC:
7016 /* We do not run this function with arguments having side effects. */
7017 abort ();
7019 case PC:
7020 case CC0: /*FIXME*/
7021 case CONST:
7022 case CONST_INT:
7023 case CONST_DOUBLE:
7024 case CONST_VECTOR:
7025 case SYMBOL_REF:
7026 case LABEL_REF:
7027 case ADDR_VEC:
7028 case ADDR_DIFF_VEC:
7029 return accum;
7031 default:
7032 break;
7035 i = GET_RTX_LENGTH (code) - 1;
7036 fmt = GET_RTX_FORMAT (code);
7038 for (; i >= 0; i--)
7040 if (fmt[i] == 'e')
7042 rtx tem = XEXP (x, i);
7044 /* If we are about to do the last recursive call
7045 needed at this level, change it into iteration. */
7046 if (i == 0)
7048 x = tem;
7049 goto repeat;
7052 accum = extract_mentioned_regs_helper (tem, accum);
7054 else if (fmt[i] == 'E')
7056 int j;
7058 for (j = 0; j < XVECLEN (x, i); j++)
7059 accum = extract_mentioned_regs_helper (XVECEXP (x, i, j), accum);
7063 return accum;
7066 /* Determine whether INSN is MEM store pattern that we will consider moving.
7067 REGS_SET_BEFORE is bitmap of registers set before (and including) the
7068 current insn, REGS_SET_AFTER is bitmap of registers set after (and
7069 including) the insn in this basic block. We must be passing through BB from
7070 head to end, as we are using this fact to speed things up.
7072 The results are stored this way:
7074 -- the first anticipatable expression is added into ANTIC_STORE_LIST
7075 -- if the processed expression is not anticipatable, NULL_RTX is added
7076 there instead, so that we can use it as indicator that no further
7077 expression of this type may be anticipatable
7078 -- if the expression is available, it is added as head of AVAIL_STORE_LIST;
7079 consequently, all of them but this head are dead and may be deleted.
7080 -- if the expression is not available, the insn due to that it fails to be
7081 available is stored in reaching_reg.
7083 The things are complicated a bit by fact that there already may be stores
7084 to the same MEM from other blocks; also caller must take care of the
7085 necessary cleanup of the temporary markers after end of the basic block.
7088 static void
7089 find_moveable_store (rtx insn, int *regs_set_before, int *regs_set_after)
7091 struct ls_expr * ptr;
7092 rtx dest, set, tmp;
7093 int check_anticipatable, check_available;
7094 basic_block bb = BLOCK_FOR_INSN (insn);
7096 set = single_set (insn);
7097 if (!set)
7098 return;
7100 dest = SET_DEST (set);
7102 if (GET_CODE (dest) != MEM || MEM_VOLATILE_P (dest)
7103 || GET_MODE (dest) == BLKmode)
7104 return;
7106 if (side_effects_p (dest))
7107 return;
7109 /* If we are handling exceptions, we must be careful with memory references
7110 that may trap. If we are not, the behavior is undefined, so we may just
7111 continue. */
7112 if (flag_non_call_exceptions && may_trap_p (dest))
7113 return;
7115 ptr = ldst_entry (dest);
7116 if (!ptr->pattern_regs)
7117 ptr->pattern_regs = extract_mentioned_regs (dest);
7119 /* Do not check for anticipatability if we either found one anticipatable
7120 store already, or tested for one and found out that it was killed. */
7121 check_anticipatable = 0;
7122 if (!ANTIC_STORE_LIST (ptr))
7123 check_anticipatable = 1;
7124 else
7126 tmp = XEXP (ANTIC_STORE_LIST (ptr), 0);
7127 if (tmp != NULL_RTX
7128 && BLOCK_FOR_INSN (tmp) != bb)
7129 check_anticipatable = 1;
7131 if (check_anticipatable)
7133 if (store_killed_before (dest, ptr->pattern_regs, insn, bb, regs_set_before))
7134 tmp = NULL_RTX;
7135 else
7136 tmp = insn;
7137 ANTIC_STORE_LIST (ptr) = alloc_INSN_LIST (tmp,
7138 ANTIC_STORE_LIST (ptr));
7141 /* It is not necessary to check whether store is available if we did
7142 it successfully before; if we failed before, do not bother to check
7143 until we reach the insn that caused us to fail. */
7144 check_available = 0;
7145 if (!AVAIL_STORE_LIST (ptr))
7146 check_available = 1;
7147 else
7149 tmp = XEXP (AVAIL_STORE_LIST (ptr), 0);
7150 if (BLOCK_FOR_INSN (tmp) != bb)
7151 check_available = 1;
7153 if (check_available)
7155 /* Check that we have already reached the insn at that the check
7156 failed last time. */
7157 if (LAST_AVAIL_CHECK_FAILURE (ptr))
7159 for (tmp = BB_END (bb);
7160 tmp != insn && tmp != LAST_AVAIL_CHECK_FAILURE (ptr);
7161 tmp = PREV_INSN (tmp))
7162 continue;
7163 if (tmp == insn)
7164 check_available = 0;
7166 else
7167 check_available = store_killed_after (dest, ptr->pattern_regs, insn,
7168 bb, regs_set_after,
7169 &LAST_AVAIL_CHECK_FAILURE (ptr));
7171 if (!check_available)
7172 AVAIL_STORE_LIST (ptr) = alloc_INSN_LIST (insn, AVAIL_STORE_LIST (ptr));
7175 /* Find available and anticipatable stores. */
7177 static int
7178 compute_store_table (void)
7180 int ret;
7181 basic_block bb;
7182 unsigned regno;
7183 rtx insn, pat, tmp;
7184 int *last_set_in, *already_set;
7185 struct ls_expr * ptr, **prev_next_ptr_ptr;
7187 max_gcse_regno = max_reg_num ();
7189 reg_set_in_block = sbitmap_vector_alloc (last_basic_block,
7190 max_gcse_regno);
7191 sbitmap_vector_zero (reg_set_in_block, last_basic_block);
7192 pre_ldst_mems = 0;
7193 last_set_in = xcalloc (max_gcse_regno, sizeof (int));
7194 already_set = xmalloc (sizeof (int) * max_gcse_regno);
7196 /* Find all the stores we care about. */
7197 FOR_EACH_BB (bb)
7199 /* First compute the registers set in this block. */
7200 regvec = last_set_in;
7202 for (insn = BB_HEAD (bb);
7203 insn != NEXT_INSN (BB_END (bb));
7204 insn = NEXT_INSN (insn))
7206 if (! INSN_P (insn))
7207 continue;
7209 if (GET_CODE (insn) == CALL_INSN)
7211 bool clobbers_all = false;
7212 #ifdef NON_SAVING_SETJMP
7213 if (NON_SAVING_SETJMP
7214 && find_reg_note (insn, REG_SETJMP, NULL_RTX))
7215 clobbers_all = true;
7216 #endif
7218 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7219 if (clobbers_all
7220 || TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
7222 last_set_in[regno] = INSN_UID (insn);
7223 SET_BIT (reg_set_in_block[bb->index], regno);
7227 pat = PATTERN (insn);
7228 compute_store_table_current_insn = insn;
7229 note_stores (pat, reg_set_info, reg_set_in_block[bb->index]);
7232 /* Now find the stores. */
7233 memset (already_set, 0, sizeof (int) * max_gcse_regno);
7234 regvec = already_set;
7235 for (insn = BB_HEAD (bb);
7236 insn != NEXT_INSN (BB_END (bb));
7237 insn = NEXT_INSN (insn))
7239 if (! INSN_P (insn))
7240 continue;
7242 if (GET_CODE (insn) == CALL_INSN)
7244 bool clobbers_all = false;
7245 #ifdef NON_SAVING_SETJMP
7246 if (NON_SAVING_SETJMP
7247 && find_reg_note (insn, REG_SETJMP, NULL_RTX))
7248 clobbers_all = true;
7249 #endif
7251 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7252 if (clobbers_all
7253 || TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
7254 already_set[regno] = 1;
7257 pat = PATTERN (insn);
7258 note_stores (pat, reg_set_info, NULL);
7260 /* Now that we've marked regs, look for stores. */
7261 find_moveable_store (insn, already_set, last_set_in);
7263 /* Unmark regs that are no longer set. */
7264 compute_store_table_current_insn = insn;
7265 note_stores (pat, reg_clear_last_set, last_set_in);
7266 if (GET_CODE (insn) == CALL_INSN)
7268 bool clobbers_all = false;
7269 #ifdef NON_SAVING_SETJMP
7270 if (NON_SAVING_SETJMP
7271 && find_reg_note (insn, REG_SETJMP, NULL_RTX))
7272 clobbers_all = true;
7273 #endif
7275 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7276 if ((clobbers_all
7277 || TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
7278 && last_set_in[regno] == INSN_UID (insn))
7279 last_set_in[regno] = 0;
7283 #ifdef ENABLE_CHECKING
7284 /* last_set_in should now be all-zero. */
7285 for (regno = 0; regno < max_gcse_regno; regno++)
7286 if (last_set_in[regno] != 0)
7287 abort ();
7288 #endif
7290 /* Clear temporary marks. */
7291 for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
7293 LAST_AVAIL_CHECK_FAILURE(ptr) = NULL_RTX;
7294 if (ANTIC_STORE_LIST (ptr)
7295 && (tmp = XEXP (ANTIC_STORE_LIST (ptr), 0)) == NULL_RTX)
7296 ANTIC_STORE_LIST (ptr) = XEXP (ANTIC_STORE_LIST (ptr), 1);
7300 /* Remove the stores that are not available anywhere, as there will
7301 be no opportunity to optimize them. */
7302 for (ptr = pre_ldst_mems, prev_next_ptr_ptr = &pre_ldst_mems;
7303 ptr != NULL;
7304 ptr = *prev_next_ptr_ptr)
7306 if (!AVAIL_STORE_LIST (ptr))
7308 *prev_next_ptr_ptr = ptr->next;
7309 free_ldst_entry (ptr);
7311 else
7312 prev_next_ptr_ptr = &ptr->next;
7315 ret = enumerate_ldsts ();
7317 if (gcse_file)
7319 fprintf (gcse_file, "ST_avail and ST_antic (shown under loads..)\n");
7320 print_ldst_list (gcse_file);
7323 free (last_set_in);
7324 free (already_set);
7325 return ret;
7328 /* Check to see if the load X is aliased with STORE_PATTERN.
7329 AFTER is true if we are checking the case when STORE_PATTERN occurs
7330 after the X. */
7332 static bool
7333 load_kills_store (rtx x, rtx store_pattern, int after)
7335 if (after)
7336 return anti_dependence (x, store_pattern);
7337 else
7338 return true_dependence (store_pattern, GET_MODE (store_pattern), x,
7339 rtx_addr_varies_p);
7342 /* Go through the entire insn X, looking for any loads which might alias
7343 STORE_PATTERN. Return true if found.
7344 AFTER is true if we are checking the case when STORE_PATTERN occurs
7345 after the insn X. */
7347 static bool
7348 find_loads (rtx x, rtx store_pattern, int after)
7350 const char * fmt;
7351 int i, j;
7352 int ret = false;
7354 if (!x)
7355 return false;
7357 if (GET_CODE (x) == SET)
7358 x = SET_SRC (x);
7360 if (GET_CODE (x) == MEM)
7362 if (load_kills_store (x, store_pattern, after))
7363 return true;
7366 /* Recursively process the insn. */
7367 fmt = GET_RTX_FORMAT (GET_CODE (x));
7369 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0 && !ret; i--)
7371 if (fmt[i] == 'e')
7372 ret |= find_loads (XEXP (x, i), store_pattern, after);
7373 else if (fmt[i] == 'E')
7374 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
7375 ret |= find_loads (XVECEXP (x, i, j), store_pattern, after);
7377 return ret;
7380 /* Check if INSN kills the store pattern X (is aliased with it).
7381 AFTER is true if we are checking the case when store X occurs
7382 after the insn. Return true if it it does. */
7384 static bool
7385 store_killed_in_insn (rtx x, rtx x_regs, rtx insn, int after)
7387 rtx reg, base, note;
7389 if (!INSN_P (insn))
7390 return false;
7392 if (GET_CODE (insn) == CALL_INSN)
7394 /* A normal or pure call might read from pattern,
7395 but a const call will not. */
7396 if (! CONST_OR_PURE_CALL_P (insn) || pure_call_p (insn))
7397 return true;
7399 /* But even a const call reads its parameters. Check whether the
7400 base of some of registers used in mem is stack pointer. */
7401 for (reg = x_regs; reg; reg = XEXP (reg, 1))
7403 base = find_base_term (XEXP (reg, 0));
7404 if (!base
7405 || (GET_CODE (base) == ADDRESS
7406 && GET_MODE (base) == Pmode
7407 && XEXP (base, 0) == stack_pointer_rtx))
7408 return true;
7411 return false;
7414 if (GET_CODE (PATTERN (insn)) == SET)
7416 rtx pat = PATTERN (insn);
7417 rtx dest = SET_DEST (pat);
7419 if (GET_CODE (dest) == SIGN_EXTRACT
7420 || GET_CODE (dest) == ZERO_EXTRACT)
7421 dest = XEXP (dest, 0);
7423 /* Check for memory stores to aliased objects. */
7424 if (GET_CODE (dest) == MEM
7425 && !expr_equiv_p (dest, x))
7427 if (after)
7429 if (output_dependence (dest, x))
7430 return true;
7432 else
7434 if (output_dependence (x, dest))
7435 return true;
7438 if (find_loads (SET_SRC (pat), x, after))
7439 return true;
7441 else if (find_loads (PATTERN (insn), x, after))
7442 return true;
7444 /* If this insn has a REG_EQUAL or REG_EQUIV note referencing a memory
7445 location aliased with X, then this insn kills X. */
7446 note = find_reg_equal_equiv_note (insn);
7447 if (! note)
7448 return false;
7449 note = XEXP (note, 0);
7451 /* However, if the note represents a must alias rather than a may
7452 alias relationship, then it does not kill X. */
7453 if (expr_equiv_p (note, x))
7454 return false;
7456 /* See if there are any aliased loads in the note. */
7457 return find_loads (note, x, after);
7460 /* Returns true if the expression X is loaded or clobbered on or after INSN
7461 within basic block BB. REGS_SET_AFTER is bitmap of registers set in
7462 or after the insn. X_REGS is list of registers mentioned in X. If the store
7463 is killed, return the last insn in that it occurs in FAIL_INSN. */
7465 static bool
7466 store_killed_after (rtx x, rtx x_regs, rtx insn, basic_block bb,
7467 int *regs_set_after, rtx *fail_insn)
7469 rtx last = BB_END (bb), act;
7471 if (!store_ops_ok (x_regs, regs_set_after))
7473 /* We do not know where it will happen. */
7474 if (fail_insn)
7475 *fail_insn = NULL_RTX;
7476 return true;
7479 /* Scan from the end, so that fail_insn is determined correctly. */
7480 for (act = last; act != PREV_INSN (insn); act = PREV_INSN (act))
7481 if (store_killed_in_insn (x, x_regs, act, false))
7483 if (fail_insn)
7484 *fail_insn = act;
7485 return true;
7488 return false;
7491 /* Returns true if the expression X is loaded or clobbered on or before INSN
7492 within basic block BB. X_REGS is list of registers mentioned in X.
7493 REGS_SET_BEFORE is bitmap of registers set before or in this insn. */
7494 static bool
7495 store_killed_before (rtx x, rtx x_regs, rtx insn, basic_block bb,
7496 int *regs_set_before)
7498 rtx first = BB_HEAD (bb);
7500 if (!store_ops_ok (x_regs, regs_set_before))
7501 return true;
7503 for ( ; insn != PREV_INSN (first); insn = PREV_INSN (insn))
7504 if (store_killed_in_insn (x, x_regs, insn, true))
7505 return true;
7507 return false;
7510 /* Fill in available, anticipatable, transparent and kill vectors in
7511 STORE_DATA, based on lists of available and anticipatable stores. */
7512 static void
7513 build_store_vectors (void)
7515 basic_block bb;
7516 int *regs_set_in_block;
7517 rtx insn, st;
7518 struct ls_expr * ptr;
7519 unsigned regno;
7521 /* Build the gen_vector. This is any store in the table which is not killed
7522 by aliasing later in its block. */
7523 ae_gen = sbitmap_vector_alloc (last_basic_block, num_stores);
7524 sbitmap_vector_zero (ae_gen, last_basic_block);
7526 st_antloc = sbitmap_vector_alloc (last_basic_block, num_stores);
7527 sbitmap_vector_zero (st_antloc, last_basic_block);
7529 for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
7531 for (st = AVAIL_STORE_LIST (ptr); st != NULL; st = XEXP (st, 1))
7533 insn = XEXP (st, 0);
7534 bb = BLOCK_FOR_INSN (insn);
7536 /* If we've already seen an available expression in this block,
7537 we can delete this one (It occurs earlier in the block). We'll
7538 copy the SRC expression to an unused register in case there
7539 are any side effects. */
7540 if (TEST_BIT (ae_gen[bb->index], ptr->index))
7542 rtx r = gen_reg_rtx (GET_MODE (ptr->pattern));
7543 if (gcse_file)
7544 fprintf (gcse_file, "Removing redundant store:\n");
7545 replace_store_insn (r, XEXP (st, 0), bb, ptr);
7546 continue;
7548 SET_BIT (ae_gen[bb->index], ptr->index);
7551 for (st = ANTIC_STORE_LIST (ptr); st != NULL; st = XEXP (st, 1))
7553 insn = XEXP (st, 0);
7554 bb = BLOCK_FOR_INSN (insn);
7555 SET_BIT (st_antloc[bb->index], ptr->index);
7559 ae_kill = sbitmap_vector_alloc (last_basic_block, num_stores);
7560 sbitmap_vector_zero (ae_kill, last_basic_block);
7562 transp = sbitmap_vector_alloc (last_basic_block, num_stores);
7563 sbitmap_vector_zero (transp, last_basic_block);
7564 regs_set_in_block = xmalloc (sizeof (int) * max_gcse_regno);
7566 FOR_EACH_BB (bb)
7568 for (regno = 0; regno < max_gcse_regno; regno++)
7569 regs_set_in_block[regno] = TEST_BIT (reg_set_in_block[bb->index], regno);
7571 for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
7573 if (store_killed_after (ptr->pattern, ptr->pattern_regs, BB_HEAD (bb),
7574 bb, regs_set_in_block, NULL))
7576 /* It should not be necessary to consider the expression
7577 killed if it is both anticipatable and available. */
7578 if (!TEST_BIT (st_antloc[bb->index], ptr->index)
7579 || !TEST_BIT (ae_gen[bb->index], ptr->index))
7580 SET_BIT (ae_kill[bb->index], ptr->index);
7582 else
7583 SET_BIT (transp[bb->index], ptr->index);
7587 free (regs_set_in_block);
7589 if (gcse_file)
7591 dump_sbitmap_vector (gcse_file, "st_antloc", "", st_antloc, last_basic_block);
7592 dump_sbitmap_vector (gcse_file, "st_kill", "", ae_kill, last_basic_block);
7593 dump_sbitmap_vector (gcse_file, "Transpt", "", transp, last_basic_block);
7594 dump_sbitmap_vector (gcse_file, "st_avloc", "", ae_gen, last_basic_block);
7598 /* Insert an instruction at the beginning of a basic block, and update
7599 the BB_HEAD if needed. */
7601 static void
7602 insert_insn_start_bb (rtx insn, basic_block bb)
7604 /* Insert at start of successor block. */
7605 rtx prev = PREV_INSN (BB_HEAD (bb));
7606 rtx before = BB_HEAD (bb);
7607 while (before != 0)
7609 if (GET_CODE (before) != CODE_LABEL
7610 && (GET_CODE (before) != NOTE
7611 || NOTE_LINE_NUMBER (before) != NOTE_INSN_BASIC_BLOCK))
7612 break;
7613 prev = before;
7614 if (prev == BB_END (bb))
7615 break;
7616 before = NEXT_INSN (before);
7619 insn = emit_insn_after (insn, prev);
7621 if (gcse_file)
7623 fprintf (gcse_file, "STORE_MOTION insert store at start of BB %d:\n",
7624 bb->index);
7625 print_inline_rtx (gcse_file, insn, 6);
7626 fprintf (gcse_file, "\n");
7630 /* This routine will insert a store on an edge. EXPR is the ldst entry for
7631 the memory reference, and E is the edge to insert it on. Returns nonzero
7632 if an edge insertion was performed. */
7634 static int
7635 insert_store (struct ls_expr * expr, edge e)
7637 rtx reg, insn;
7638 basic_block bb;
7639 edge tmp;
7641 /* We did all the deleted before this insert, so if we didn't delete a
7642 store, then we haven't set the reaching reg yet either. */
7643 if (expr->reaching_reg == NULL_RTX)
7644 return 0;
7646 if (e->flags & EDGE_FAKE)
7647 return 0;
7649 reg = expr->reaching_reg;
7650 insn = gen_move_insn (copy_rtx (expr->pattern), reg);
7652 /* If we are inserting this expression on ALL predecessor edges of a BB,
7653 insert it at the start of the BB, and reset the insert bits on the other
7654 edges so we don't try to insert it on the other edges. */
7655 bb = e->dest;
7656 for (tmp = e->dest->pred; tmp ; tmp = tmp->pred_next)
7657 if (!(tmp->flags & EDGE_FAKE))
7659 int index = EDGE_INDEX (edge_list, tmp->src, tmp->dest);
7660 if (index == EDGE_INDEX_NO_EDGE)
7661 abort ();
7662 if (! TEST_BIT (pre_insert_map[index], expr->index))
7663 break;
7666 /* If tmp is NULL, we found an insertion on every edge, blank the
7667 insertion vector for these edges, and insert at the start of the BB. */
7668 if (!tmp && bb != EXIT_BLOCK_PTR)
7670 for (tmp = e->dest->pred; tmp ; tmp = tmp->pred_next)
7672 int index = EDGE_INDEX (edge_list, tmp->src, tmp->dest);
7673 RESET_BIT (pre_insert_map[index], expr->index);
7675 insert_insn_start_bb (insn, bb);
7676 return 0;
7679 /* We can't insert on this edge, so we'll insert at the head of the
7680 successors block. See Morgan, sec 10.5. */
7681 if ((e->flags & EDGE_ABNORMAL) == EDGE_ABNORMAL)
7683 insert_insn_start_bb (insn, bb);
7684 return 0;
7687 insert_insn_on_edge (insn, e);
7689 if (gcse_file)
7691 fprintf (gcse_file, "STORE_MOTION insert insn on edge (%d, %d):\n",
7692 e->src->index, e->dest->index);
7693 print_inline_rtx (gcse_file, insn, 6);
7694 fprintf (gcse_file, "\n");
7697 return 1;
7700 /* Remove any REG_EQUAL or REG_EQUIV notes containing a reference to the
7701 memory location in SMEXPR set in basic block BB.
7703 This could be rather expensive. */
7705 static void
7706 remove_reachable_equiv_notes (basic_block bb, struct ls_expr *smexpr)
7708 edge *stack = xmalloc (sizeof (edge) * n_basic_blocks), act;
7709 sbitmap visited = sbitmap_alloc (last_basic_block);
7710 int stack_top = 0;
7711 rtx last, insn, note;
7712 rtx mem = smexpr->pattern;
7714 sbitmap_zero (visited);
7715 act = bb->succ;
7717 while (1)
7719 if (!act)
7721 if (!stack_top)
7723 free (stack);
7724 sbitmap_free (visited);
7725 return;
7727 act = stack[--stack_top];
7729 bb = act->dest;
7731 if (bb == EXIT_BLOCK_PTR
7732 || TEST_BIT (visited, bb->index)
7733 || TEST_BIT (ae_kill[bb->index], smexpr->index))
7735 act = act->succ_next;
7736 continue;
7738 SET_BIT (visited, bb->index);
7740 if (TEST_BIT (st_antloc[bb->index], smexpr->index))
7742 for (last = ANTIC_STORE_LIST (smexpr);
7743 BLOCK_FOR_INSN (XEXP (last, 0)) != bb;
7744 last = XEXP (last, 1))
7745 continue;
7746 last = XEXP (last, 0);
7748 else
7749 last = NEXT_INSN (BB_END (bb));
7751 for (insn = BB_HEAD (bb); insn != last; insn = NEXT_INSN (insn))
7752 if (INSN_P (insn))
7754 note = find_reg_equal_equiv_note (insn);
7755 if (!note || !expr_equiv_p (XEXP (note, 0), mem))
7756 continue;
7758 if (gcse_file)
7759 fprintf (gcse_file, "STORE_MOTION drop REG_EQUAL note at insn %d:\n",
7760 INSN_UID (insn));
7761 remove_note (insn, note);
7763 act = act->succ_next;
7764 if (bb->succ)
7766 if (act)
7767 stack[stack_top++] = act;
7768 act = bb->succ;
7773 /* This routine will replace a store with a SET to a specified register. */
7775 static void
7776 replace_store_insn (rtx reg, rtx del, basic_block bb, struct ls_expr *smexpr)
7778 rtx insn, mem, note, set, ptr;
7780 mem = smexpr->pattern;
7781 insn = gen_move_insn (reg, SET_SRC (single_set (del)));
7782 insn = emit_insn_after (insn, del);
7784 if (gcse_file)
7786 fprintf (gcse_file,
7787 "STORE_MOTION delete insn in BB %d:\n ", bb->index);
7788 print_inline_rtx (gcse_file, del, 6);
7789 fprintf (gcse_file, "\nSTORE MOTION replaced with insn:\n ");
7790 print_inline_rtx (gcse_file, insn, 6);
7791 fprintf (gcse_file, "\n");
7794 for (ptr = ANTIC_STORE_LIST (smexpr); ptr; ptr = XEXP (ptr, 1))
7795 if (XEXP (ptr, 0) == del)
7797 XEXP (ptr, 0) = insn;
7798 break;
7800 delete_insn (del);
7802 /* Now we must handle REG_EQUAL notes whose contents is equal to the mem;
7803 they are no longer accurate provided that they are reached by this
7804 definition, so drop them. */
7805 for (; insn != NEXT_INSN (BB_END (bb)); insn = NEXT_INSN (insn))
7806 if (INSN_P (insn))
7808 set = single_set (insn);
7809 if (!set)
7810 continue;
7811 if (expr_equiv_p (SET_DEST (set), mem))
7812 return;
7813 note = find_reg_equal_equiv_note (insn);
7814 if (!note || !expr_equiv_p (XEXP (note, 0), mem))
7815 continue;
7817 if (gcse_file)
7818 fprintf (gcse_file, "STORE_MOTION drop REG_EQUAL note at insn %d:\n",
7819 INSN_UID (insn));
7820 remove_note (insn, note);
7822 remove_reachable_equiv_notes (bb, smexpr);
7826 /* Delete a store, but copy the value that would have been stored into
7827 the reaching_reg for later storing. */
7829 static void
7830 delete_store (struct ls_expr * expr, basic_block bb)
7832 rtx reg, i, del;
7834 if (expr->reaching_reg == NULL_RTX)
7835 expr->reaching_reg = gen_reg_rtx (GET_MODE (expr->pattern));
7837 reg = expr->reaching_reg;
7839 for (i = AVAIL_STORE_LIST (expr); i; i = XEXP (i, 1))
7841 del = XEXP (i, 0);
7842 if (BLOCK_FOR_INSN (del) == bb)
7844 /* We know there is only one since we deleted redundant
7845 ones during the available computation. */
7846 replace_store_insn (reg, del, bb, expr);
7847 break;
7852 /* Free memory used by store motion. */
7854 static void
7855 free_store_memory (void)
7857 free_ldst_mems ();
7859 if (ae_gen)
7860 sbitmap_vector_free (ae_gen);
7861 if (ae_kill)
7862 sbitmap_vector_free (ae_kill);
7863 if (transp)
7864 sbitmap_vector_free (transp);
7865 if (st_antloc)
7866 sbitmap_vector_free (st_antloc);
7867 if (pre_insert_map)
7868 sbitmap_vector_free (pre_insert_map);
7869 if (pre_delete_map)
7870 sbitmap_vector_free (pre_delete_map);
7871 if (reg_set_in_block)
7872 sbitmap_vector_free (reg_set_in_block);
7874 ae_gen = ae_kill = transp = st_antloc = NULL;
7875 pre_insert_map = pre_delete_map = reg_set_in_block = NULL;
7878 /* Perform store motion. Much like gcse, except we move expressions the
7879 other way by looking at the flowgraph in reverse. */
7881 static void
7882 store_motion (void)
7884 basic_block bb;
7885 int x;
7886 struct ls_expr * ptr;
7887 int update_flow = 0;
7889 if (gcse_file)
7891 fprintf (gcse_file, "before store motion\n");
7892 print_rtl (gcse_file, get_insns ());
7895 init_alias_analysis ();
7897 /* Find all the available and anticipatable stores. */
7898 num_stores = compute_store_table ();
7899 if (num_stores == 0)
7901 sbitmap_vector_free (reg_set_in_block);
7902 end_alias_analysis ();
7903 return;
7906 /* Now compute kill & transp vectors. */
7907 build_store_vectors ();
7908 add_noreturn_fake_exit_edges ();
7909 connect_infinite_loops_to_exit ();
7911 edge_list = pre_edge_rev_lcm (gcse_file, num_stores, transp, ae_gen,
7912 st_antloc, ae_kill, &pre_insert_map,
7913 &pre_delete_map);
7915 /* Now we want to insert the new stores which are going to be needed. */
7916 for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
7918 FOR_EACH_BB (bb)
7919 if (TEST_BIT (pre_delete_map[bb->index], ptr->index))
7920 delete_store (ptr, bb);
7922 for (x = 0; x < NUM_EDGES (edge_list); x++)
7923 if (TEST_BIT (pre_insert_map[x], ptr->index))
7924 update_flow |= insert_store (ptr, INDEX_EDGE (edge_list, x));
7927 if (update_flow)
7928 commit_edge_insertions ();
7930 free_store_memory ();
7931 free_edge_list (edge_list);
7932 remove_fake_edges ();
7933 end_alias_analysis ();
7937 /* Entry point for jump bypassing optimization pass. */
7940 bypass_jumps (FILE *file)
7942 int changed;
7944 /* We do not construct an accurate cfg in functions which call
7945 setjmp, so just punt to be safe. */
7946 if (current_function_calls_setjmp)
7947 return 0;
7949 /* For calling dump_foo fns from gdb. */
7950 debug_stderr = stderr;
7951 gcse_file = file;
7953 /* Identify the basic block information for this function, including
7954 successors and predecessors. */
7955 max_gcse_regno = max_reg_num ();
7957 if (file)
7958 dump_flow_info (file);
7960 /* Return if there's nothing to do, or it is too expensive */
7961 if (n_basic_blocks <= 1 || is_too_expensive (_ ("jump bypassing disabled")))
7962 return 0;
7964 gcc_obstack_init (&gcse_obstack);
7965 bytes_used = 0;
7967 /* We need alias. */
7968 init_alias_analysis ();
7970 /* Record where pseudo-registers are set. This data is kept accurate
7971 during each pass. ??? We could also record hard-reg information here
7972 [since it's unchanging], however it is currently done during hash table
7973 computation.
7975 It may be tempting to compute MEM set information here too, but MEM sets
7976 will be subject to code motion one day and thus we need to compute
7977 information about memory sets when we build the hash tables. */
7979 alloc_reg_set_mem (max_gcse_regno);
7980 compute_sets (get_insns ());
7982 max_gcse_regno = max_reg_num ();
7983 alloc_gcse_mem (get_insns ());
7984 changed = one_cprop_pass (1, 1, 1);
7985 free_gcse_mem ();
7987 if (file)
7989 fprintf (file, "BYPASS of %s: %d basic blocks, ",
7990 current_function_name, n_basic_blocks);
7991 fprintf (file, "%d bytes\n\n", bytes_used);
7994 obstack_free (&gcse_obstack, NULL);
7995 free_reg_set_mem ();
7997 /* We are finished with alias. */
7998 end_alias_analysis ();
7999 allocate_reg_info (max_reg_num (), FALSE, FALSE);
8001 return changed;
8004 /* Return true if the graph is too expensive to optimize. PASS is the
8005 optimization about to be performed. */
8007 static bool
8008 is_too_expensive (const char *pass)
8010 /* Trying to perform global optimizations on flow graphs which have
8011 a high connectivity will take a long time and is unlikely to be
8012 particularly useful.
8014 In normal circumstances a cfg should have about twice as many
8015 edges as blocks. But we do not want to punish small functions
8016 which have a couple switch statements. Rather than simply
8017 threshold the number of blocks, uses something with a more
8018 graceful degradation. */
8019 if (n_edges > 20000 + n_basic_blocks * 4)
8021 if (warn_disabled_optimization)
8022 warning ("%s: %d basic blocks and %d edges/basic block",
8023 pass, n_basic_blocks, n_edges / n_basic_blocks);
8025 return true;
8028 /* If allocating memory for the cprop bitmap would take up too much
8029 storage it's better just to disable the optimization. */
8030 if ((n_basic_blocks
8031 * SBITMAP_SET_SIZE (max_reg_num ())
8032 * sizeof (SBITMAP_ELT_TYPE)) > MAX_GCSE_MEMORY)
8034 if (warn_disabled_optimization)
8035 warning ("%s: %d basic blocks and %d registers",
8036 pass, n_basic_blocks, max_reg_num ());
8038 return true;
8041 return false;
8044 #include "gt-gcse.h"