/* Global common subexpression elimination/Partial redundancy elimination
   and global constant/copy propagation for GNU compiler.
   Copyright (C) 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

/* TODO
   - reordering of memory allocation and freeing to be more space efficient
   - do rough calc of how many regs are needed in each block, and a rough
     calc of how many regs are available in each class and use that to
     throttle back the code in cases where RTX_COST is minimal.
   - a store to the same address as a load does not kill the load if the
     source of the store is also the destination of the load.  Handling this
     allows more load motion, particularly out of loops.  (See the sketch
     just after this comment.)
   - ability to realloc sbitmap vectors would allow one initial computation
     of reg_set_in_block with only subsequent additions, rather than
     recomputing it for each pass.
*/
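
/* An illustrative RTL sketch of the store/load item above (not from the
   original source): given

     (set (reg 100) (mem (reg 50)))     ; load
     (set (mem (reg 50)) (reg 100))     ; store of the value just loaded

   the store writes back exactly the value the load produced, so a later
   occurrence of (mem (reg 50)) still sees the same value and the load
   need not be treated as killed by the store.  */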

/* References searched while implementing this.

   Compilers: Principles, Techniques, and Tools
   Aho, Sethi, Ullman
   Addison-Wesley, 1988

   Global Optimization by Suppression of Partial Redundancies
   E. Morel, C. Renvoise
   Communications of the ACM, Vol. 22, Num. 2, Feb. 1979

   A Portable Machine-Independent Global Optimizer - Design and Measurements
   Frederick Chow
   Stanford Ph.D. thesis, Dec. 1983

   A Fast Algorithm for Code Movement Optimization
   D.M. Dhamdhere
   SIGPLAN Notices, Vol. 23, Num. 10, Oct. 1988

   A Solution to a Problem with Morel and Renvoise's
   Global Optimization by Suppression of Partial Redundancies
   K-H Drechsler, M.P. Stadel
   ACM TOPLAS, Vol. 10, Num. 4, Oct. 1988

   Practical Adaptation of the Global Optimization
   Algorithm of Morel and Renvoise
   D.M. Dhamdhere
   ACM TOPLAS, Vol. 13, Num. 2, Apr. 1991

   Efficiently Computing Static Single Assignment Form and the Control
   Dependence Graph
   R. Cytron, J. Ferrante, B.K. Rosen, M.N. Wegman, and F.K. Zadeck
   ACM TOPLAS, Vol. 13, Num. 4, Oct. 1991

   Lazy Code Motion
   J. Knoop, O. Ruthing, B. Steffen
   ACM SIGPLAN Notices Vol. 27, Num. 7, Jul. 1992, '92 Conference on PLDI

   What's In a Region?  Or Computing Control Dependence Regions in Near-Linear
   Time for Reducible Flow Control
   Thomas Ball
   ACM Letters on Programming Languages and Systems,
   Vol. 2, Num. 1-4, Mar-Dec 1993

   An Efficient Representation for Sparse Sets
   Preston Briggs, Linda Torczon
   ACM Letters on Programming Languages and Systems,
   Vol. 2, Num. 1-4, Mar-Dec 1993

   A Variation of Knoop, Ruthing, and Steffen's Lazy Code Motion
   K-H Drechsler, M.P. Stadel
   ACM SIGPLAN Notices, Vol. 28, Num. 5, May 1993

   Partial Dead Code Elimination
   J. Knoop, O. Ruthing, B. Steffen
   ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994

   Effective Partial Redundancy Elimination
   P. Briggs, K.D. Cooper
   ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994

   The Program Structure Tree: Computing Control Regions in Linear Time
   R. Johnson, D. Pearson, K. Pingali
   ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994

   Optimal Code Motion: Theory and Practice
   J. Knoop, O. Ruthing, B. Steffen
   ACM TOPLAS, Vol. 16, Num. 4, Jul. 1994

   The power of assignment motion
   J. Knoop, O. Ruthing, B. Steffen
   ACM SIGPLAN Notices Vol. 30, Num. 6, Jun. 1995, '95 Conference on PLDI

   Global code motion / global value numbering
   C. Click
   ACM SIGPLAN Notices Vol. 30, Num. 6, Jun. 1995, '95 Conference on PLDI

   Value Driven Redundancy Elimination
   L.T. Simpson
   Rice University Ph.D. thesis, Apr. 1996

   Value Numbering
   L.T. Simpson
   Massively Scalar Compiler Project, Rice University, Sep. 1996

   High Performance Compilers for Parallel Computing
   Michael Wolfe
   Addison-Wesley, 1996

   Advanced Compiler Design and Implementation
   Steven Muchnick
   Morgan Kaufmann, 1997

   Building an Optimizing Compiler
   Robert Morgan
   Digital Press, 1998

   People wishing to speed up the code here should read:
     Elimination Algorithms for Data Flow Analysis
     B.G. Ryder, M.C. Paull
     ACM Computing Surveys, Vol. 18, Num. 3, Sep. 1986

     How to Analyze Large Programs Efficiently and Informatively
     D.M. Dhamdhere, B.K. Rosen, F.K. Zadeck
     ACM SIGPLAN Notices Vol. 27, Num. 7, Jul. 1992, '92 Conference on PLDI

   People wishing to do something different can find various possibilities
   in the above papers and elsewhere.
*/

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "toplev.h"

#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "flags.h"
#include "real.h"
#include "insn-config.h"
#include "recog.h"
#include "basic-block.h"
#include "output.h"
#include "function.h"
#include "expr.h"
#include "except.h"
#include "ggc.h"
#include "params.h"
#include "cselib.h"
#include "intl.h"
#include "obstack.h"

/* Propagate flow information through back edges and thus enable PRE's
   moving loop invariant calculations out of loops.

   Originally this tended to create worse overall code, but several
   improvements during the development of PRE seem to have made following
   back edges generally a win.

   Note much of the loop invariant code motion done here would normally
   be done by loop.c, which has more heuristics for when to move invariants
   out of loops.  At some point we might need to move some of those
   heuristics into gcse.c.  */

/* We support GCSE via Partial Redundancy Elimination.  PRE optimizations
   are a superset of those done by GCSE.

   We perform the following steps:

   1) Compute basic block information.

   2) Compute table of places where registers are set.

   3) Perform copy/constant propagation.

   4) Perform global cse.

   5) Perform another pass of copy/constant propagation.

   Two passes of copy/constant propagation are done because the first one
   enables more GCSE and the second one helps to clean up the copies that
   GCSE creates.  This is needed more for PRE than for Classic because Classic
   GCSE will try to use an existing register containing the common
   subexpression rather than create a new one.  This is harder to do for PRE
   because of the code motion (which Classic GCSE doesn't do).

   Expressions we are interested in GCSE-ing are of the form
   (set (pseudo-reg) (expression)).
   Function want_to_gcse_p says what these are.

   PRE handles moving invariant expressions out of loops (by treating them as
   partially redundant).

   Eventually it would be nice to replace cse.c/gcse.c with SSA (static single
   assignment) based GVN (global value numbering).  L. T. Simpson's paper
   (Rice University) on value numbering is a useful reference for this.

   **********************

   We used to support multiple passes but there are diminishing returns in
   doing so.  The first pass usually makes 90% of the changes that are doable.
   A second pass can make a few more changes made possible by the first pass.
   Experiments show any further passes don't make enough changes to justify
   the expense.

   A study of spec92 using an unlimited number of passes:
   [1 pass] = 1208 substitutions, [2] = 577, [3] = 202, [4] = 192, [5] = 83,
   [6] = 34, [7] = 17, [8] = 9, [9] = 4, [10] = 4, [11] = 2,
   [12] = 2, [13] = 1, [15] = 1, [16] = 2, [41] = 1

   It was found that doing copy propagation between each pass enables further
   substitutions.

   PRE is quite expensive in complicated functions because the DFA can take
   a while to converge.  Hence we only perform one pass.  The parameter
   max-gcse-passes can be modified if one wants to experiment, as in the
   example below.
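
     For instance, an illustrative invocation (not from the original
     comment) raising the limit for one compilation:

       gcc -O2 --param max-gcse-passes=3 foo.c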

   **********************

   The steps for PRE are:

   1) Build the hash table of expressions we wish to GCSE (expr_hash_table).

   2) Perform the data flow analysis for PRE.

   3) Delete the redundant instructions.

   4) Insert the required copies [if any] that make the partially
      redundant instructions fully redundant.

   5) For other reaching expressions, insert an instruction to copy the value
      to a newly created pseudo that will reach the redundant instruction.

   The deletion is done first so that when we do insertions we
   know which pseudo reg to use.
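
   As an illustrative sketch (not part of the original comment): suppose
   block B1 computes

     (set (reg 100) (plus (reg 1) (reg 2)))

   and block B3, reachable from both B1 and another block B2, computes

     (set (reg 101) (plus (reg 1) (reg 2)))

   The second computation is partially redundant.  PRE allocates a new
   pseudo, say (reg 200), copies the B1 result with
   (set (reg 200) (reg 100)), inserts
   (set (reg 200) (plus (reg 1) (reg 2))) on the edge from B2, and
   replaces the redundant computation with (set (reg 101) (reg 200)).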

   Various papers have argued that PRE DFA is expensive (O(n^2)) and others
   argue it is not.  The number of iterations for the algorithm to converge
   is typically 2-4 so I don't view it as that expensive (relatively speaking).

   PRE GCSE depends heavily on the second CSE pass to clean up the copies
   we create.  To make an expression reach the place where it's redundant,
   the result of the expression is copied to a new register, and the redundant
   expression is deleted by replacing it with this new register.  Classic GCSE
   doesn't have this problem as much as it computes the reaching defs of
   each register in each block and thus can try to use an existing register.

   **********************

   A fair amount of simplicity comes from creating small functions for
   simple tasks, even when the function is only called in one place.  This
   may measurably slow things down [or may not] by creating more function
   call overhead than is necessary.  The source is laid out so that it's
   trivial to make the affected functions inline, so that one can measure
   what speed up, if any, can be achieved; perhaps later, when things
   settle, things can be rearranged.

   Help stamp out big monolithic functions!  */

/* GCSE global vars.  */

/* -dG dump file.  */
static FILE *gcse_file;

/* Note whether or not we should run jump optimization after gcse.  We
   want to do this for two cases.

    * If we changed any jumps via cprop.

    * If we added any labels via edge splitting.  */
static int run_jump_opt_after_gcse;

/* Bitmaps are normally not included in debugging dumps.
   However it's useful to be able to print them from GDB.
   We could create special functions for this, but it's simpler to
   just allow passing stderr to the dump_foo fns.  Since stderr can
   be a macro, we store a copy here.  */
static FILE *debug_stderr;
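
/* For example, from GDB one might print the expression hash table with a
   call such as (illustrative, using the dump function declared below):

     (gdb) call dump_hash_table (debug_stderr, "expr", &expr_hash_table)  */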

/* An obstack for our working variables.  */
static struct obstack gcse_obstack;

struct reg_use {rtx reg_rtx; };

/* Hash table of expressions.  */

struct expr
{
  /* The expression (SET_SRC for expressions, PATTERN for assignments).  */
  rtx expr;
  /* Index in the available expression bitmaps.  */
  int bitmap_index;
  /* Next entry with the same hash.  */
  struct expr *next_same_hash;
  /* List of anticipatable occurrences in basic blocks in the function.
     An "anticipatable occurrence" is one that is the first occurrence in the
     basic block, the operands are not modified in the basic block prior
     to the occurrence and the output is not used between the start of
     the block and the occurrence.  */
  struct occr *antic_occr;
  /* List of available occurrences in basic blocks in the function.
     An "available occurrence" is one that is the last occurrence in the
     basic block and the operands are not modified by following statements in
     the basic block [including this insn].  */
  struct occr *avail_occr;
  /* Non-null if the computation is PRE redundant.
     The value is the newly created pseudo-reg to record a copy of the
     expression in all the places that reach the redundant copy.  */
  rtx reaching_reg;
};

/* Occurrence of an expression.
   There is one per basic block.  If a pattern appears more than once the
   last appearance is used [or first for anticipatable expressions].  */

struct occr
{
  /* Next occurrence of this expression.  */
  struct occr *next;
  /* The insn that computes the expression.  */
  rtx insn;
  /* Nonzero if this [anticipatable] occurrence has been deleted.  */
  char deleted_p;
  /* Nonzero if this [available] occurrence has been copied to
     reaching_reg.  */
  /* ??? This is mutually exclusive with deleted_p, so they could share
     the same byte.  */
  char copied_p;
};

/* Expression and copy propagation hash tables.
   Each hash table is an array of buckets.
   ??? It is known that if it were an array of entries, structure elements
   `next_same_hash' and `bitmap_index' wouldn't be necessary.  However, it is
   not clear whether in the final analysis a sufficient amount of memory would
   be saved as the size of the available expression bitmaps would be larger
   [one could build a mapping table without holes afterwards though].
   Someday I'll perform the computation and figure it out.  */

struct hash_table
{
  /* The table itself.
     This is an array of `expr_hash_table_size' elements.  */
  struct expr **table;

  /* Size of the hash table, in elements.  */
  unsigned int size;

  /* Number of hash table elements.  */
  unsigned int n_elems;

  /* Nonzero if this is the set (copy propagation) hash table rather than
     the expression hash table.  */
  int set_p;
};

/* Expression hash table.  */
static struct hash_table expr_hash_table;

/* Copy propagation hash table.  */
static struct hash_table set_hash_table;

/* Mapping of uids to cuids.
   Only real insns get cuids.  */
static int *uid_cuid;

/* Highest UID in UID_CUID.  */
static int max_uid;

/* Get the cuid of an insn.  */
#ifdef ENABLE_CHECKING
#define INSN_CUID(INSN) \
  (INSN_UID (INSN) > max_uid ? (abort (), 0) : uid_cuid[INSN_UID (INSN)])
#else
#define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])
#endif

/* Number of cuids.  */
static int max_cuid;

/* Mapping of cuids to insns.  */
static rtx *cuid_insn;

/* Get insn from cuid.  */
#define CUID_INSN(CUID) (cuid_insn[CUID])
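
/* Illustrative example (not from the original source): if the insn stream
   is  insn (uid 5), note (uid 9), insn (uid 12), then alloc_gcse_mem below
   sets uid_cuid[5] = 0, uid_cuid[9] = 1 (a note shares the cuid of the
   following real insn), uid_cuid[12] = 1, and cuid_insn[0] and cuid_insn[1]
   point back at the two real insns.  */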

/* Maximum register number in function prior to doing gcse + 1.
   Registers created during this pass have regno >= max_gcse_regno.
   This is named with "gcse" to not collide with global of same name.  */
static unsigned int max_gcse_regno;

/* Table of registers that are modified.

   For each register, each element is a list of places where the pseudo-reg
   is set.

   For simplicity, GCSE is done on sets of pseudo-regs only.  PRE GCSE only
   requires knowledge of which blocks kill which regs [and thus could use
   a bitmap instead of the lists `reg_set_table' uses].

   `reg_set_table' could be turned into an array of bitmaps (num-bbs x
   num-regs) [however perhaps it may be useful to keep the data as is].  One
   advantage of recording things this way is that `reg_set_table' is fairly
   sparse with respect to pseudo regs but for hard regs could be fairly dense
   [relatively speaking].  And recording sets of pseudo-regs in lists speeds
   up functions like compute_transp since in the case of pseudo-regs we only
   need to iterate over the number of times a pseudo-reg is set, not over the
   number of basic blocks [clearly there is a bit of a slow down in the cases
   where a pseudo is set more than once in a block, however it is believed
   that the net effect is to speed things up].  This isn't done for hard-regs
   because recording call-clobbered hard-regs in `reg_set_table' at each
   function call can consume a fair bit of memory, and iterating over
   hard-regs stored this way in compute_transp will be more expensive.  */

typedef struct reg_set
{
  /* The next setting of this register.  */
  struct reg_set *next;
  /* The insn where it was set.  */
  rtx insn;
} reg_set;

static reg_set **reg_set_table;

/* Size of `reg_set_table'.
   The table starts out at max_gcse_regno + slop, and is enlarged as
   necessary.  */
static int reg_set_table_size;

/* Amount to grow `reg_set_table' by when it's full.  */
#define REG_SET_TABLE_SLOP 100

/* This is a list of expressions which are MEMs and will be used by load
   or store motion.
   Load motion tracks MEMs which aren't killed by
   anything except itself (i.e., loads and stores to a single location).
   We can then allow movement of these MEM refs with a little special
   allowance (all stores copy the same value to the reaching reg used
   for the loads).  This means all values used to store into memory must have
   no side effects so we can re-issue the setter value.
   Store Motion uses this structure as an expression table to track stores
   which look interesting, and might be movable towards the exit block.  */
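
/* An illustrative sketch (not from the original source): if two arms of a
   conditional end with

     (set (mem (symbol_ref "x")) (reg 100))
     (set (mem (symbol_ref "x")) (reg 101))

   store motion can rewrite each store as a copy into the shared
   reaching_reg, say (set (reg 200) (reg 100)) and (set (reg 200) (reg 101)),
   and insert a single (set (mem (symbol_ref "x")) (reg 200)) on the path
   towards the exit block.  */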

struct ls_expr
{
  struct expr *expr;            /* Gcse expression reference for LM.  */
  rtx pattern;                  /* Pattern of this mem.  */
  rtx pattern_regs;             /* List of registers mentioned by the mem.  */
  rtx loads;                    /* INSN list of loads seen.  */
  rtx stores;                   /* INSN list of stores seen.  */
  struct ls_expr *next;         /* Next in the list.  */
  int invalid;                  /* Invalid for some reason.  */
  int index;                    /* If it maps to a bitmap index.  */
  unsigned int hash_index;      /* Index when in a hash table.  */
  rtx reaching_reg;             /* Register to use when re-writing.  */
};

/* Array of implicit set patterns indexed by basic block index.  */
static rtx *implicit_sets;

/* Head of the list of load/store memory refs.  */
static struct ls_expr *pre_ldst_mems = NULL;

/* Bitmap containing one bit for each register in the program.
   Used when performing GCSE to track which registers have been set since
   the start of the basic block.  */
static regset reg_set_bitmap;

/* For each block, a bitmap of registers set in the block.
   This is used by expr_killed_p and compute_transp.
   It is computed during hash table computation and not by compute_sets
   as it includes registers added since the last pass (or between cprop and
   gcse) and it's currently not easy to realloc sbitmap vectors.  */
static sbitmap *reg_set_in_block;

/* Array, indexed by basic block number, of lists of insns which modify
   memory within the block.  */
static rtx *modify_mem_list;
bitmap modify_mem_list_set;

/* This array parallels modify_mem_list, but is kept canonicalized.  */
static rtx *canon_modify_mem_list;
bitmap canon_modify_mem_list_set;

/* Various variables for statistics gathering.  */

/* Memory used in a pass.
   This isn't intended to be absolutely precise.  Its intent is only
   to keep an eye on memory usage.  */
static int bytes_used;

/* GCSE substitutions made.  */
static int gcse_subst_count;
/* Number of copy instructions created.  */
static int gcse_create_count;
/* Number of constants propagated.  */
static int const_prop_count;
/* Number of copies propagated.  */
static int copy_prop_count;

/* These variables are used by classic GCSE.
   Normally they'd be defined a bit later, but `rd_gen' needs to
   be declared sooner.  */

/* Each block has a bitmap of each type.
   The length of each block's bitmap is:

       max_cuid  - for reaching definitions
       n_exprs   - for available expressions

   Thus we view the bitmaps as 2 dimensional arrays, i.e.:

       rd_kill[block_num][cuid_num]
       ae_kill[block_num][expr_num]  */

/* For reaching defs */
static sbitmap *rd_kill, *rd_gen, *reaching_defs, *rd_out;

/* for available exprs */
static sbitmap *ae_kill, *ae_gen, *ae_in, *ae_out;

/* Objects of this type are passed around by the null-pointer check
   removal routines.  */
struct null_pointer_info
{
  /* The basic block being processed.  */
  basic_block current_block;
  /* The first register to be handled in this pass.  */
  unsigned int min_reg;
  /* One greater than the last register to be handled in this pass.  */
  unsigned int max_reg;
  sbitmap *nonnull_local;
  sbitmap *nonnull_killed;
};

static void compute_can_copy (void);
static void *gmalloc (size_t) ATTRIBUTE_MALLOC;
static void *gcalloc (size_t, size_t) ATTRIBUTE_MALLOC;
static void *grealloc (void *, size_t);
static void *gcse_alloc (unsigned long);
static void alloc_gcse_mem (rtx);
static void free_gcse_mem (void);
static void alloc_reg_set_mem (int);
static void free_reg_set_mem (void);
static int get_bitmap_width (int, int, int);
static void record_one_set (int, rtx);
static void replace_one_set (int, rtx, rtx);
static void record_set_info (rtx, rtx, void *);
static void compute_sets (rtx);
static void hash_scan_insn (rtx, struct hash_table *, int);
static void hash_scan_set (rtx, rtx, struct hash_table *);
static void hash_scan_clobber (rtx, rtx, struct hash_table *);
static void hash_scan_call (rtx, rtx, struct hash_table *);
static int want_to_gcse_p (rtx);
static bool can_assign_to_reg_p (rtx);
static bool gcse_constant_p (rtx);
static int oprs_unchanged_p (rtx, rtx, int);
static int oprs_anticipatable_p (rtx, rtx);
static int oprs_available_p (rtx, rtx);
static void insert_expr_in_table (rtx, enum machine_mode, rtx, int, int,
				  struct hash_table *);
static void insert_set_in_table (rtx, rtx, struct hash_table *);
static unsigned int hash_expr (rtx, enum machine_mode, int *, int);
static unsigned int hash_expr_1 (rtx, enum machine_mode, int *);
static unsigned int hash_string_1 (const char *);
static unsigned int hash_set (int, int);
static int expr_equiv_p (rtx, rtx);
static void record_last_reg_set_info (rtx, int);
static void record_last_mem_set_info (rtx);
static void record_last_set_info (rtx, rtx, void *);
static void compute_hash_table (struct hash_table *);
static void alloc_hash_table (int, struct hash_table *, int);
static void free_hash_table (struct hash_table *);
static void compute_hash_table_work (struct hash_table *);
static void dump_hash_table (FILE *, const char *, struct hash_table *);
static struct expr *lookup_expr (rtx, struct hash_table *);
static struct expr *lookup_set (unsigned int, struct hash_table *);
static struct expr *next_set (unsigned int, struct expr *);
static void reset_opr_set_tables (void);
static int oprs_not_set_p (rtx, rtx);
static void mark_call (rtx);
static void mark_set (rtx, rtx);
static void mark_clobber (rtx, rtx);
static void mark_oprs_set (rtx);
static void alloc_cprop_mem (int, int);
static void free_cprop_mem (void);
static void compute_transp (rtx, int, sbitmap *, int);
static void compute_transpout (void);
static void compute_local_properties (sbitmap *, sbitmap *, sbitmap *,
				      struct hash_table *);
static void compute_cprop_data (void);
static void find_used_regs (rtx *, void *);
static int try_replace_reg (rtx, rtx, rtx);
static struct expr *find_avail_set (int, rtx);
static int cprop_jump (basic_block, rtx, rtx, rtx, rtx);
static void mems_conflict_for_gcse_p (rtx, rtx, void *);
static int load_killed_in_block_p (basic_block, int, rtx, int);
static void canon_list_insert (rtx, rtx, void *);
static int cprop_insn (rtx, int);
static int cprop (int);
static void find_implicit_sets (void);
static int one_cprop_pass (int, int, int);
static bool constprop_register (rtx, rtx, rtx, int);
static struct expr *find_bypass_set (int, int);
static bool reg_killed_on_edge (rtx, edge);
static int bypass_block (basic_block, rtx, rtx);
static int bypass_conditional_jumps (void);
static void alloc_pre_mem (int, int);
static void free_pre_mem (void);
static void compute_pre_data (void);
static int pre_expr_reaches_here_p (basic_block, struct expr *,
				    basic_block);
static void insert_insn_end_bb (struct expr *, basic_block, int);
static void pre_insert_copy_insn (struct expr *, rtx);
static void pre_insert_copies (void);
static int pre_delete (void);
static int pre_gcse (void);
static int one_pre_gcse_pass (int);
static void add_label_notes (rtx, rtx);
static void alloc_code_hoist_mem (int, int);
static void free_code_hoist_mem (void);
static void compute_code_hoist_vbeinout (void);
static void compute_code_hoist_data (void);
static int hoist_expr_reaches_here_p (basic_block, int, basic_block, char *);
static void hoist_code (void);
static int one_code_hoisting_pass (void);
static void alloc_rd_mem (int, int);
static void free_rd_mem (void);
static void handle_rd_kill_set (rtx, int, basic_block);
static void compute_kill_rd (void);
static void compute_rd (void);
static void alloc_avail_expr_mem (int, int);
static void free_avail_expr_mem (void);
static void compute_ae_gen (struct hash_table *);
static int expr_killed_p (rtx, basic_block);
static void compute_ae_kill (sbitmap *, sbitmap *, struct hash_table *);
static int expr_reaches_here_p (struct occr *, struct expr *, basic_block,
				int);
static rtx computing_insn (struct expr *, rtx);
static int def_reaches_here_p (rtx, rtx);
static int can_disregard_other_sets (struct reg_set **, rtx, int);
static int handle_avail_expr (rtx, struct expr *);
static int classic_gcse (void);
static int one_classic_gcse_pass (int);
static void invalidate_nonnull_info (rtx, rtx, void *);
static int delete_null_pointer_checks_1 (unsigned int *, sbitmap *, sbitmap *,
					 struct null_pointer_info *);
static rtx process_insert_insn (struct expr *);
static int pre_edge_insert (struct edge_list *, struct expr **);
static int expr_reaches_here_p_work (struct occr *, struct expr *,
				     basic_block, int, char *);
static int pre_expr_reaches_here_p_work (basic_block, struct expr *,
					 basic_block, char *);
static struct ls_expr *ldst_entry (rtx);
static void free_ldst_entry (struct ls_expr *);
static void free_ldst_mems (void);
static void print_ldst_list (FILE *);
static struct ls_expr *find_rtx_in_ldst (rtx);
static int enumerate_ldsts (void);
static inline struct ls_expr *first_ls_expr (void);
static inline struct ls_expr *next_ls_expr (struct ls_expr *);
static int simple_mem (rtx);
static void invalidate_any_buried_refs (rtx);
static void compute_ld_motion_mems (void);
static void trim_ld_motion_mems (void);
static void update_ld_motion_stores (struct expr *);
static void reg_set_info (rtx, rtx, void *);
static void reg_clear_last_set (rtx, rtx, void *);
static bool store_ops_ok (rtx, int *);
static rtx extract_mentioned_regs (rtx);
static rtx extract_mentioned_regs_helper (rtx, rtx);
static void find_moveable_store (rtx, int *, int *);
static int compute_store_table (void);
static bool load_kills_store (rtx, rtx, int);
static bool find_loads (rtx, rtx, int);
static bool store_killed_in_insn (rtx, rtx, rtx, int);
static bool store_killed_after (rtx, rtx, rtx, basic_block, int *, rtx *);
static bool store_killed_before (rtx, rtx, rtx, basic_block, int *);
static void build_store_vectors (void);
static void insert_insn_start_bb (rtx, basic_block);
static int insert_store (struct ls_expr *, edge);
static void remove_reachable_equiv_notes (basic_block, struct ls_expr *);
static void replace_store_insn (rtx, rtx, basic_block, struct ls_expr *);
static void delete_store (struct ls_expr *, basic_block);
static void free_store_memory (void);
static void store_motion (void);
static void free_insn_expr_list_list (rtx *);
static void clear_modify_mem_tables (void);
static void free_modify_mem_tables (void);
static rtx gcse_emit_move_after (rtx, rtx, rtx);
static void local_cprop_find_used_regs (rtx *, void *);
static bool do_local_cprop (rtx, rtx, int, rtx *);
static bool adjust_libcall_notes (rtx, rtx, rtx, rtx *);
static void local_cprop_pass (int);
static bool is_too_expensive (const char *);

/* Entry point for global common subexpression elimination.
   F is the first instruction in the function.  */

int
gcse_main (rtx f, FILE *file)
{
  int changed, pass;
  /* Bytes used at start of pass.  */
  int initial_bytes_used;
  /* Maximum number of bytes used by a pass.  */
  int max_pass_bytes;
  /* Point to release obstack data from for each pass.  */
  char *gcse_obstack_bottom;

  /* We do not construct an accurate cfg in functions which call
     setjmp, so just punt to be safe.  */
  if (current_function_calls_setjmp)
    return 0;

  /* Assume that we do not need to run jump optimizations after gcse.  */
  run_jump_opt_after_gcse = 0;

  /* For calling dump_foo fns from gdb.  */
  debug_stderr = stderr;
  gcse_file = file;

  /* Identify the basic block information for this function, including
     successors and predecessors.  */
  max_gcse_regno = max_reg_num ();

  if (file)
    dump_flow_info (file);

  /* Return if there's nothing to do, or it is too expensive.  */
  if (n_basic_blocks <= 1 || is_too_expensive (_("GCSE disabled")))
    return 0;

  gcc_obstack_init (&gcse_obstack);
  bytes_used = 0;

  /* We need alias.  */
  init_alias_analysis ();
  /* Record where pseudo-registers are set.  This data is kept accurate
     during each pass.  ??? We could also record hard-reg information here
     [since it's unchanging], however it is currently done during hash table
     computation.

     It may be tempting to compute MEM set information here too, but MEM sets
     will be subject to code motion one day and thus we need to compute
     information about memory sets when we build the hash tables.  */

  alloc_reg_set_mem (max_gcse_regno);
  compute_sets (f);

  pass = 0;
  initial_bytes_used = bytes_used;
  max_pass_bytes = 0;
  gcse_obstack_bottom = gcse_alloc (1);
  changed = 1;
  while (changed && pass < MAX_GCSE_PASSES)
    {
      changed = 0;
      if (file)
        fprintf (file, "GCSE pass %d\n\n", pass + 1);

      /* Initialize bytes_used to the space for the pred/succ lists,
         and the reg_set_table data.  */
      bytes_used = initial_bytes_used;

      /* Each pass may create new registers, so recalculate each time.  */
      max_gcse_regno = max_reg_num ();

      alloc_gcse_mem (f);

      /* Don't allow constant propagation to modify jumps
         during this pass.  */
      changed = one_cprop_pass (pass + 1, 0, 0);

      if (optimize_size)
        changed |= one_classic_gcse_pass (pass + 1);
      else
        {
          changed |= one_pre_gcse_pass (pass + 1);
          /* We may have just created new basic blocks.  Release and
             recompute various things which are sized on the number of
             basic blocks.  */
          if (changed)
            {
              free_modify_mem_tables ();
              modify_mem_list = gcalloc (last_basic_block, sizeof (rtx));
              canon_modify_mem_list = gcalloc (last_basic_block, sizeof (rtx));
            }
          free_reg_set_mem ();
          alloc_reg_set_mem (max_reg_num ());
          compute_sets (f);
          run_jump_opt_after_gcse = 1;
        }

      if (max_pass_bytes < bytes_used)
        max_pass_bytes = bytes_used;

      /* Free up memory, then reallocate for code hoisting.  We can
         not re-use the existing allocated memory because the tables
         will not have info for the insns or registers created by
         partial redundancy elimination.  */
      free_gcse_mem ();

      /* It does not make sense to run code hoisting unless we are optimizing
         for code size -- it rarely makes programs faster, and can make
         them bigger if we did partial redundancy elimination (when optimizing
         for space, we use a classic gcse algorithm instead of partial
         redundancy algorithms).  */
      if (optimize_size)
        {
          max_gcse_regno = max_reg_num ();
          alloc_gcse_mem (f);
          changed |= one_code_hoisting_pass ();
          free_gcse_mem ();

          if (max_pass_bytes < bytes_used)
            max_pass_bytes = bytes_used;
        }

      if (file)
        {
          fprintf (file, "\n");
          fflush (file);
        }

      obstack_free (&gcse_obstack, gcse_obstack_bottom);
      pass++;
    }

  /* Do one last pass of copy propagation, including cprop into
     conditional jumps.  */

  max_gcse_regno = max_reg_num ();
  alloc_gcse_mem (f);
  /* This time, go ahead and allow cprop to alter jumps.  */
  one_cprop_pass (pass + 1, 1, 0);
  free_gcse_mem ();

  if (file)
    {
      fprintf (file, "GCSE of %s: %d basic blocks, ",
               current_function_name (), n_basic_blocks);
      fprintf (file, "%d pass%s, %d bytes\n\n",
               pass, pass > 1 ? "es" : "", max_pass_bytes);
    }

  obstack_free (&gcse_obstack, NULL);
  free_reg_set_mem ();
  /* We are finished with alias.  */
  end_alias_analysis ();
  allocate_reg_info (max_reg_num (), FALSE, FALSE);

  if (!optimize_size && flag_gcse_sm)
    store_motion ();

  return run_jump_opt_after_gcse;
}

/* Misc. utilities.  */

/* Nonzero for each mode that supports (set (reg) (reg)).
   This is trivially true for integer and floating point values.
   It may or may not be true for condition codes.  */
static char can_copy[(int) NUM_MACHINE_MODES];

/* Compute which modes support reg/reg copy operations.  */

static void
compute_can_copy (void)
{
  int i;
#ifndef AVOID_CCMODE_COPIES
  rtx reg, insn;
#endif
  memset (can_copy, 0, NUM_MACHINE_MODES);

  start_sequence ();
  for (i = 0; i < NUM_MACHINE_MODES; i++)
    if (GET_MODE_CLASS (i) == MODE_CC)
      {
#ifdef AVOID_CCMODE_COPIES
        can_copy[i] = 0;
#else
        reg = gen_rtx_REG ((enum machine_mode) i, LAST_VIRTUAL_REGISTER + 1);
        insn = emit_insn (gen_rtx_SET (VOIDmode, reg, reg));
        if (recog (PATTERN (insn), insn, NULL) >= 0)
          can_copy[i] = 1;
#endif
      }
    else
      can_copy[i] = 1;

  end_sequence ();
}

/* Returns whether the mode supports reg/reg copy operations.  */

bool
can_copy_p (enum machine_mode mode)
{
  static bool can_copy_init_p = false;

  if (! can_copy_init_p)
    {
      compute_can_copy ();
      can_copy_init_p = true;
    }

  return can_copy[mode] != 0;
}

/* Cover function to xmalloc to record bytes allocated.  */

static void *
gmalloc (size_t size)
{
  bytes_used += size;
  return xmalloc (size);
}

/* Cover function to xcalloc to record bytes allocated.  */

static void *
gcalloc (size_t nelem, size_t elsize)
{
  bytes_used += nelem * elsize;
  return xcalloc (nelem, elsize);
}

/* Cover function to xrealloc.
   We don't record the additional size since we don't know it.
   It won't affect memory usage stats much anyway.  */

static void *
grealloc (void *ptr, size_t size)
{
  return xrealloc (ptr, size);
}

/* Cover function to obstack_alloc.  */

static void *
gcse_alloc (unsigned long size)
{
  bytes_used += size;
  return obstack_alloc (&gcse_obstack, size);
}

/* Allocate memory for the cuid mapping array,
   and reg/memory set tracking tables.

   This is called at the start of each pass.  */

static void
alloc_gcse_mem (rtx f)
{
  int i;
  rtx insn;

  /* Find the largest UID and create a mapping from UIDs to CUIDs.
     CUIDs are like UIDs except they increase monotonically, have no gaps,
     and only apply to real insns.  */

  max_uid = get_max_uid ();
  uid_cuid = gcalloc (max_uid + 1, sizeof (int));
  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    {
      if (INSN_P (insn))
        uid_cuid[INSN_UID (insn)] = i++;
      else
        uid_cuid[INSN_UID (insn)] = i;
    }

  /* Create a table mapping cuids to insns.  */

  max_cuid = i;
  cuid_insn = gcalloc (max_cuid + 1, sizeof (rtx));
  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      CUID_INSN (i++) = insn;

  /* Allocate vars to track sets of regs.  */
  reg_set_bitmap = BITMAP_XMALLOC ();

  /* Allocate vars to track sets of regs, memory per block.  */
  reg_set_in_block = sbitmap_vector_alloc (last_basic_block, max_gcse_regno);
  /* Allocate array to keep a list of insns which modify memory in each
     basic block.  */
  modify_mem_list = gcalloc (last_basic_block, sizeof (rtx));
  canon_modify_mem_list = gcalloc (last_basic_block, sizeof (rtx));
  modify_mem_list_set = BITMAP_XMALLOC ();
  canon_modify_mem_list_set = BITMAP_XMALLOC ();
}

/* Free memory allocated by alloc_gcse_mem.  */

static void
free_gcse_mem (void)
{
  free (uid_cuid);
  free (cuid_insn);

  BITMAP_XFREE (reg_set_bitmap);

  sbitmap_vector_free (reg_set_in_block);
  free_modify_mem_tables ();
  BITMAP_XFREE (modify_mem_list_set);
  BITMAP_XFREE (canon_modify_mem_list_set);
}

/* Many of the global optimization algorithms work by solving dataflow
   equations for various expressions.  Initially, some local value is
   computed for each expression in each block.  Then, the values across the
   various blocks are combined (by following flow graph edges) to arrive at
   global values.  Conceptually, each set of equations is independent.  We
   may therefore solve all the equations in parallel, solve them one at a
   time, or pick any intermediate approach.

   When you're going to need N two-dimensional bitmaps, each X (say, the
   number of blocks) by Y (say, the number of expressions), call this
   function.  It's not important what X and Y represent; only that Y
   correspond to the things that can be done in parallel.  This function will
   return an appropriate chunking factor C; you should solve C sets of
   equations in parallel.  By going through this function, we can easily
   trade space against time; by solving fewer equations in parallel we use
   less space.  */

static int
get_bitmap_width (int n, int x, int y)
{
  /* It's not really worth figuring out *exactly* how much memory will
     be used by a particular choice.  The important thing is to get
     something approximately right.  */
  size_t max_bitmap_memory = 10 * 1024 * 1024;

  /* The number of bytes we'd use for a single column of minimum
     width.  */
  size_t column_size = n * x * sizeof (SBITMAP_ELT_TYPE);

  /* Often, it's reasonable just to solve all the equations in
     parallel.  */
  if (column_size * SBITMAP_SET_SIZE (y) <= max_bitmap_memory)
    return y;

  /* Otherwise, pick the largest width we can, without going over the
     limit.  */
  return SBITMAP_ELT_BITS * ((max_bitmap_memory + column_size - 1)
                             / column_size);
}
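
/* An illustrative use (not from the original source): a caller needing two
   last_basic_block x n_exprs bitmaps might chunk the problem as

     int width = get_bitmap_width (2, last_basic_block, n_exprs);
     for (first = 0; first < n_exprs; first += width)
       ... allocate the bitmaps WIDTH columns wide and solve the dataflow
           equations for expressions FIRST through FIRST + WIDTH - 1 ...

   Solving fewer equations in parallel narrows each bitmap row and so
   trades compile time for space.  */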

/* Compute the local properties of each recorded expression.

   Local properties are those that are defined by the block, irrespective of
   other blocks.

   An expression is transparent in a block if its operands are not modified
   in the block.

   An expression is computed (locally available) in a block if it is computed
   at least once and expression would contain the same value if the
   computation was moved to the end of the block.

   An expression is locally anticipatable in a block if it is computed at
   least once and expression would contain the same value if the computation
   was moved to the beginning of the block.

   We call this routine for cprop, pre and code hoisting.  They all compute
   basically the same information and thus can easily share this code.

   TRANSP, COMP, and ANTLOC are destination sbitmaps for recording local
   properties.  If NULL, then it is not necessary to compute or record that
   particular property.

   TABLE controls which hash table to look at.  If it is the set hash table,
   additionally, TRANSP is computed as ~TRANSP, since this is really cprop's
   ABSALTERED.  */

static void
compute_local_properties (sbitmap *transp, sbitmap *comp, sbitmap *antloc,
                          struct hash_table *table)
{
  unsigned int i;

  /* Initialize any bitmaps that were passed in.  */
  if (transp)
    {
      if (table->set_p)
        sbitmap_vector_zero (transp, last_basic_block);
      else
        sbitmap_vector_ones (transp, last_basic_block);
    }

  if (comp)
    sbitmap_vector_zero (comp, last_basic_block);
  if (antloc)
    sbitmap_vector_zero (antloc, last_basic_block);

  for (i = 0; i < table->size; i++)
    {
      struct expr *expr;

      for (expr = table->table[i]; expr != NULL; expr = expr->next_same_hash)
        {
          int indx = expr->bitmap_index;
          struct occr *occr;

          /* The expression is transparent in this block if it is not killed.
             We start by assuming all are transparent [none are killed], and
             then reset the bits for those that are.  */
          if (transp)
            compute_transp (expr->expr, indx, transp, table->set_p);

          /* The occurrences recorded in antic_occr are exactly those that
             we want to set to nonzero in ANTLOC.  */
          if (antloc)
            for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
              {
                SET_BIT (antloc[BLOCK_NUM (occr->insn)], indx);

                /* While we're scanning the table, this is a good place to
                   initialize this.  */
                occr->deleted_p = 0;
              }

          /* The occurrences recorded in avail_occr are exactly those that
             we want to set to nonzero in COMP.  */
          if (comp)
            for (occr = expr->avail_occr; occr != NULL; occr = occr->next)
              {
                SET_BIT (comp[BLOCK_NUM (occr->insn)], indx);

                /* While we're scanning the table, this is a good place to
                   initialize this.  */
                occr->copied_p = 0;
              }

          /* While we're scanning the table, this is a good place to
             initialize this.  */
          expr->reaching_reg = 0;
        }
    }
}

/* Register set information.

   `reg_set_table' records where each register is set or otherwise
   modified.  */

static struct obstack reg_set_obstack;

static void
alloc_reg_set_mem (int n_regs)
{
  reg_set_table_size = n_regs + REG_SET_TABLE_SLOP;
  reg_set_table = gcalloc (reg_set_table_size, sizeof (struct reg_set *));

  gcc_obstack_init (&reg_set_obstack);
}

static void
free_reg_set_mem (void)
{
  free (reg_set_table);
  obstack_free (&reg_set_obstack, NULL);
}

/* An OLD_INSN that used to set REGNO was replaced by NEW_INSN.
   Update the corresponding `reg_set_table' entry accordingly.
   We assume that NEW_INSN is not already recorded in reg_set_table[regno].  */

static void
replace_one_set (int regno, rtx old_insn, rtx new_insn)
{
  struct reg_set *reg_info;
  if (regno >= reg_set_table_size)
    return;
  for (reg_info = reg_set_table[regno]; reg_info; reg_info = reg_info->next)
    if (reg_info->insn == old_insn)
      {
        reg_info->insn = new_insn;
        break;
      }
}

/* Record REGNO in the reg_set table.  */

static void
record_one_set (int regno, rtx insn)
{
  /* Allocate a new reg_set element and link it onto the list.  */
  struct reg_set *new_reg_info;

  /* If the table isn't big enough, enlarge it.  */
  if (regno >= reg_set_table_size)
    {
      int new_size = regno + REG_SET_TABLE_SLOP;

      reg_set_table = grealloc (reg_set_table,
                                new_size * sizeof (struct reg_set *));
      memset (reg_set_table + reg_set_table_size, 0,
              (new_size - reg_set_table_size) * sizeof (struct reg_set *));
      reg_set_table_size = new_size;
    }

  new_reg_info = obstack_alloc (&reg_set_obstack, sizeof (struct reg_set));
  bytes_used += sizeof (struct reg_set);
  new_reg_info->insn = insn;
  new_reg_info->next = reg_set_table[regno];
  reg_set_table[regno] = new_reg_info;
}

/* Called from compute_sets via note_stores to handle one SET or CLOBBER in
   an insn.  The DATA is really the instruction in which the SET is
   occurring.  */

static void
record_set_info (rtx dest, rtx setter ATTRIBUTE_UNUSED, void *data)
{
  rtx record_set_insn = (rtx) data;

  if (GET_CODE (dest) == REG && REGNO (dest) >= FIRST_PSEUDO_REGISTER)
    record_one_set (REGNO (dest), record_set_insn);
}

/* Scan the function and record each set of each pseudo-register.

   This is called once, at the start of the gcse pass.  See the comments for
   `reg_set_table' for further documentation.  */

static void
compute_sets (rtx f)
{
  rtx insn;

  for (insn = f; insn != 0; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      note_stores (PATTERN (insn), record_set_info, insn);
}

/* Hash table support.  */

struct reg_avail_info
{
  basic_block last_bb;
  int first_set;
  int last_set;
};

static struct reg_avail_info *reg_avail_info;
static basic_block current_bb;

/* See whether X, the source of a set, is something we want to consider for
   GCSE.  */

static int
want_to_gcse_p (rtx x)
{
  switch (GET_CODE (x))
    {
    case REG:
    case SUBREG:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CALL:
    case CONSTANT_P_RTX:
      return 0;

    default:
      return can_assign_to_reg_p (x);
    }
}

/* Used internally by can_assign_to_reg_p.  */

static GTY(()) rtx test_insn;

/* Return true if we can assign X to a pseudo register.  */

static bool
can_assign_to_reg_p (rtx x)
{
  int num_clobbers = 0;
  int icode;

  /* If this is a valid operand, we are OK.  If it's VOIDmode, we aren't.  */
  if (general_operand (x, GET_MODE (x)))
    return 1;
  else if (GET_MODE (x) == VOIDmode)
    return 0;

  /* Otherwise, check if we can make a valid insn from it.  First initialize
     our test insn if we haven't already.  */
  if (test_insn == 0)
    {
      test_insn
        = make_insn_raw (gen_rtx_SET (VOIDmode,
                                      gen_rtx_REG (word_mode,
                                                   FIRST_PSEUDO_REGISTER * 2),
                                      const0_rtx));
      NEXT_INSN (test_insn) = PREV_INSN (test_insn) = 0;
    }

  /* Now make an insn like the one we would make when GCSE'ing and see if
     valid.  */
  PUT_MODE (SET_DEST (PATTERN (test_insn)), GET_MODE (x));
  SET_SRC (PATTERN (test_insn)) = x;
  return ((icode = recog (PATTERN (test_insn), test_insn, &num_clobbers)) >= 0
          && (num_clobbers == 0 || ! added_clobbers_hard_reg_p (icode)));
}

/* Return nonzero if the operands of expression X are unchanged from the
   start of INSN's basic block up to but not including INSN (if AVAIL_P == 0),
   or from INSN to the end of INSN's basic block (if AVAIL_P != 0).  */

static int
oprs_unchanged_p (rtx x, rtx insn, int avail_p)
{
  int i, j;
  enum rtx_code code;
  const char *fmt;

  if (x == 0)
    return 1;

  code = GET_CODE (x);
  switch (code)
    {
    case REG:
      {
        struct reg_avail_info *info = &reg_avail_info[REGNO (x)];

        if (info->last_bb != current_bb)
          return 1;
        if (avail_p)
          return info->last_set < INSN_CUID (insn);
        else
          return info->first_set >= INSN_CUID (insn);
      }

    case MEM:
      if (load_killed_in_block_p (current_bb, INSN_CUID (insn),
                                  x, avail_p))
        return 0;
      else
        return oprs_unchanged_p (XEXP (x, 0), insn, avail_p);

    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
    case PRE_MODIFY:
    case POST_MODIFY:
      return 0;

    case PC:
    case CC0: /*FIXME*/
    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return 1;

    default:
      break;
    }

  for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          /* If we are about to do the last recursive call needed at this
             level, change it into iteration.  This function is called enough
             to be worth it.  */
          if (i == 0)
            return oprs_unchanged_p (XEXP (x, i), insn, avail_p);

          else if (! oprs_unchanged_p (XEXP (x, i), insn, avail_p))
            return 0;
        }
      else if (fmt[i] == 'E')
        for (j = 0; j < XVECLEN (x, i); j++)
          if (! oprs_unchanged_p (XVECEXP (x, i, j), insn, avail_p))
            return 0;
    }

  return 1;
}

/* Used for communication between mems_conflict_for_gcse_p and
   load_killed_in_block_p.  Nonzero if mems_conflict_for_gcse_p finds a
   conflict between two memory references.  */
static int gcse_mems_conflict_p;

/* Used for communication between mems_conflict_for_gcse_p and
   load_killed_in_block_p.  A memory reference for a load instruction;
   mems_conflict_for_gcse_p sees whether a memory store conflicts with
   this memory load.  */
static rtx gcse_mem_operand;

/* DEST is the output of an instruction.  If it is a memory reference, and
   possibly conflicts with the load found in gcse_mem_operand, then set
   gcse_mems_conflict_p to a nonzero value.  */

static void
mems_conflict_for_gcse_p (rtx dest, rtx setter ATTRIBUTE_UNUSED,
                          void *data ATTRIBUTE_UNUSED)
{
  while (GET_CODE (dest) == SUBREG
         || GET_CODE (dest) == ZERO_EXTRACT
         || GET_CODE (dest) == SIGN_EXTRACT
         || GET_CODE (dest) == STRICT_LOW_PART)
    dest = XEXP (dest, 0);

  /* If DEST is not a MEM, then it will not conflict with the load.  Note
     that function calls are assumed to clobber memory, but are handled
     elsewhere.  */
  if (GET_CODE (dest) != MEM)
    return;

  /* If we are setting a MEM in our list of specially recognized MEMs,
     don't mark as killed this time.  */

  if (expr_equiv_p (dest, gcse_mem_operand) && pre_ldst_mems != NULL)
    {
      if (!find_rtx_in_ldst (dest))
        gcse_mems_conflict_p = 1;
      return;
    }

  if (true_dependence (dest, GET_MODE (dest), gcse_mem_operand,
                       rtx_addr_varies_p))
    gcse_mems_conflict_p = 1;
}

/* Return nonzero if the expression in X (a memory reference) is killed
   in block BB before or after the insn with the CUID in UID_LIMIT.
   AVAIL_P is nonzero for kills after UID_LIMIT, and zero for kills
   before UID_LIMIT.

   To check the entire block, set UID_LIMIT to max_uid + 1 and
   AVAIL_P to 0.  */

static int
load_killed_in_block_p (basic_block bb, int uid_limit, rtx x, int avail_p)
{
  rtx list_entry = modify_mem_list[bb->index];
  while (list_entry)
    {
      rtx setter;
      /* Ignore entries in the list that do not apply.  */
      if ((avail_p
           && INSN_CUID (XEXP (list_entry, 0)) < uid_limit)
          || (! avail_p
              && INSN_CUID (XEXP (list_entry, 0)) > uid_limit))
        {
          list_entry = XEXP (list_entry, 1);
          continue;
        }

      setter = XEXP (list_entry, 0);

      /* If SETTER is a call everything is clobbered.  Note that calls
         to pure functions are never put on the list, so we need not
         worry about them.  */
      if (GET_CODE (setter) == CALL_INSN)
        return 1;

      /* SETTER must be an INSN of some kind that sets memory.  Call
         note_stores to examine each hunk of memory that is modified.

         The note_stores interface is pretty limited, so we have to
         communicate via global variables.  Yuk.  */
      gcse_mem_operand = x;
      gcse_mems_conflict_p = 0;
      note_stores (PATTERN (setter), mems_conflict_for_gcse_p, NULL);
      if (gcse_mems_conflict_p)
        return 1;
      list_entry = XEXP (list_entry, 1);
    }
  return 0;
}

/* Return nonzero if the operands of expression X are unchanged from
   the start of INSN's basic block up to but not including INSN.  */

static int
oprs_anticipatable_p (rtx x, rtx insn)
{
  return oprs_unchanged_p (x, insn, 0);
}

/* Return nonzero if the operands of expression X are unchanged from
   INSN to the end of INSN's basic block.  */

static int
oprs_available_p (rtx x, rtx insn)
{
  return oprs_unchanged_p (x, insn, 1);
}

/* Hash expression X.

   MODE is only used if X is a CONST_INT.  DO_NOT_RECORD_P is a boolean
   indicating if a volatile operand is found or if the expression contains
   something we don't want to insert in the table.  HASH_TABLE_SIZE is
   the current size of the hash table to be probed.

   ??? One might want to merge this with canon_hash.  Later.  */

static unsigned int
hash_expr (rtx x, enum machine_mode mode, int *do_not_record_p,
           int hash_table_size)
{
  unsigned int hash;

  *do_not_record_p = 0;

  hash = hash_expr_1 (x, mode, do_not_record_p);
  return hash % hash_table_size;
}

/* Hash a string.  Just add its bytes up.  */

static inline unsigned
hash_string_1 (const char *ps)
{
  unsigned hash = 0;
  const unsigned char *p = (const unsigned char *) ps;

  if (p)
    while (*p)
      hash += *p++;

  return hash;
}
1560 /* Subroutine of hash_expr to do the actual work. */
1562 static unsigned int
1563 hash_expr_1 (rtx x, enum machine_mode mode, int *do_not_record_p)
1565 int i, j;
1566 unsigned hash = 0;
1567 enum rtx_code code;
1568 const char *fmt;
1570 /* Used to turn recursion into iteration. We can't rely on GCC's
1571 tail-recursion elimination since we need to keep accumulating values
1572 in HASH. */
1574 if (x == 0)
1575 return hash;
1577 repeat:
1578 code = GET_CODE (x);
1579 switch (code)
1581 case REG:
1582 hash += ((unsigned int) REG << 7) + REGNO (x);
1583 return hash;
1585 case CONST_INT:
1586 hash += (((unsigned int) CONST_INT << 7) + (unsigned int) mode
1587 + (unsigned int) INTVAL (x));
1588 return hash;
1590 case CONST_DOUBLE:
1591 /* This is like the general case, except that it only counts
1592 the integers representing the constant. */
1593 hash += (unsigned int) code + (unsigned int) GET_MODE (x);
1594 if (GET_MODE (x) != VOIDmode)
1595 for (i = 2; i < GET_RTX_LENGTH (CONST_DOUBLE); i++)
1596 hash += (unsigned int) XWINT (x, i);
1597 else
1598 hash += ((unsigned int) CONST_DOUBLE_LOW (x)
1599 + (unsigned int) CONST_DOUBLE_HIGH (x));
1600 return hash;
1602 case CONST_VECTOR:
1604 int units;
1605 rtx elt;
1607 units = CONST_VECTOR_NUNITS (x);
1609 for (i = 0; i < units; ++i)
1611 elt = CONST_VECTOR_ELT (x, i);
1612 hash += hash_expr_1 (elt, GET_MODE (elt), do_not_record_p);
1615 return hash;
1618 /* Assume there is only one rtx object for any given label. */
1619 case LABEL_REF:
1620 /* We don't hash on the address of the CODE_LABEL to avoid bootstrap
1621 differences and differences between each stage's debugging dumps. */
1622 hash += (((unsigned int) LABEL_REF << 7)
1623 + CODE_LABEL_NUMBER (XEXP (x, 0)));
1624 return hash;
1626 case SYMBOL_REF:
1628 /* Don't hash on the symbol's address to avoid bootstrap differences.
1629 Different hash values may cause expressions to be recorded in
1630 different orders and thus different registers to be used in the
1631 final assembler. This also avoids differences in the dump files
1632 between various stages. */
1633 unsigned int h = 0;
1634 const unsigned char *p = (const unsigned char *) XSTR (x, 0);
1636 while (*p)
1637 h += (h << 7) + *p++; /* ??? revisit */
1639 hash += ((unsigned int) SYMBOL_REF << 7) + h;
1640 return hash;
1643 case MEM:
1644 if (MEM_VOLATILE_P (x))
1646 *do_not_record_p = 1;
1647 return 0;
1650 hash += (unsigned int) MEM;
1651 /* We used alias set for hashing, but this is not good, since the alias
1652 set may differ in -fprofile-arcs and -fbranch-probabilities compilation
1653 causing the profiles to fail to match. */
1654 x = XEXP (x, 0);
1655 goto repeat;
1657 case PRE_DEC:
1658 case PRE_INC:
1659 case POST_DEC:
1660 case POST_INC:
1661 case PC:
1662 case CC0:
1663 case CALL:
1664 case UNSPEC_VOLATILE:
1665 *do_not_record_p = 1;
1666 return 0;
1668 case ASM_OPERANDS:
1669 if (MEM_VOLATILE_P (x))
1671 *do_not_record_p = 1;
1672 return 0;
1674 else
1676 /* We don't want to take the filename and line into account. */
1677 hash += (unsigned) code + (unsigned) GET_MODE (x)
1678 + hash_string_1 (ASM_OPERANDS_TEMPLATE (x))
1679 + hash_string_1 (ASM_OPERANDS_OUTPUT_CONSTRAINT (x))
1680 + (unsigned) ASM_OPERANDS_OUTPUT_IDX (x);
1682 if (ASM_OPERANDS_INPUT_LENGTH (x))
1684 for (i = 1; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
1686 hash += (hash_expr_1 (ASM_OPERANDS_INPUT (x, i),
1687 GET_MODE (ASM_OPERANDS_INPUT (x, i)),
1688 do_not_record_p)
1689 + hash_string_1 (ASM_OPERANDS_INPUT_CONSTRAINT
1690 (x, i)));
1693 hash += hash_string_1 (ASM_OPERANDS_INPUT_CONSTRAINT (x, 0));
1694 x = ASM_OPERANDS_INPUT (x, 0);
1695 mode = GET_MODE (x);
1696 goto repeat;
1698 return hash;
1701 default:
1702 break;
1705 hash += (unsigned) code + (unsigned) GET_MODE (x);
1706 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
1708 if (fmt[i] == 'e')
1710 /* If we are about to do the last recursive call
1711 needed at this level, change it into iteration.
1712 This function is called enough to be worth it. */
1713 if (i == 0)
1715 x = XEXP (x, i);
1716 goto repeat;
1719 hash += hash_expr_1 (XEXP (x, i), 0, do_not_record_p);
1720 if (*do_not_record_p)
1721 return 0;
1724 else if (fmt[i] == 'E')
1725 for (j = 0; j < XVECLEN (x, i); j++)
1727 hash += hash_expr_1 (XVECEXP (x, i, j), 0, do_not_record_p);
1728 if (*do_not_record_p)
1729 return 0;
1732 else if (fmt[i] == 's')
1733 hash += hash_string_1 (XSTR (x, i));
1734 else if (fmt[i] == 'i')
1735 hash += (unsigned int) XINT (x, i);
1736 else
1737 abort ();
1740 return hash;
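/* Illustration (editor's sketch, not part of GCC; all names below are
   hypothetical): the `goto repeat' pattern used throughout this file turns
   the last recursive call at each level into iteration.  The same trick on
   a generic binary tree looks like this.  */
#if 0
#include <stddef.h>

struct toy_node { int value; struct toy_node *left, *right; };

static unsigned int
toy_hash (const struct toy_node *n)
{
  unsigned int hash = 0;

  while (n != NULL)
    {
      hash += (unsigned int) n->value;
      if (n->left != NULL)
        hash += toy_hash (n->left);   /* non-final child: real recursion */
      n = n->right;                   /* final child: loop instead */
    }

  return hash;
}
#endif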
1743 /* Hash a set of register REGNO.
1745 Sets are hashed on the register that is set. This simplifies the PRE copy
1746 propagation code.
1748 ??? May need to make things more elaborate. Later, as necessary. */
1750 static unsigned int
1751 hash_set (int regno, int hash_table_size)
1753 unsigned int hash;
1755 hash = regno;
1756 return hash % hash_table_size;
1759 /* Return nonzero if exp1 is equivalent to exp2.
1760 ??? Borrowed from cse.c. Might want to remerge with cse.c. Later. */
1762 static int
1763 expr_equiv_p (rtx x, rtx y)
1765 int i, j;
1766 enum rtx_code code;
1767 const char *fmt;
1769 if (x == y)
1770 return 1;
1772 if (x == 0 || y == 0)
1773 return 0;
1775 code = GET_CODE (x);
1776 if (code != GET_CODE (y))
1777 return 0;
1779 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
1780 if (GET_MODE (x) != GET_MODE (y))
1781 return 0;
1783 switch (code)
1785 case PC:
1786 case CC0:
1787 case CONST_INT:
1788 return 0;
1790 case LABEL_REF:
1791 return XEXP (x, 0) == XEXP (y, 0);
1793 case SYMBOL_REF:
1794 return XSTR (x, 0) == XSTR (y, 0);
1796 case REG:
1797 return REGNO (x) == REGNO (y);
1799 case MEM:
1800 /* Can't merge two expressions in different alias sets, since we can
1801 decide that the expression is transparent in a block when it isn't,
1802 due to it being set with the different alias set. */
1803 if (MEM_ALIAS_SET (x) != MEM_ALIAS_SET (y))
1804 return 0;
1806 /* A volatile mem should not be considered equivalent to any other. */
1807 if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
1808 return 0;
1809 break;
1811 /* For commutative operations, check both orders. */
1812 case PLUS:
1813 case MULT:
1814 case AND:
1815 case IOR:
1816 case XOR:
1817 case NE:
1818 case EQ:
1819 return ((expr_equiv_p (XEXP (x, 0), XEXP (y, 0))
1820 && expr_equiv_p (XEXP (x, 1), XEXP (y, 1)))
1821 || (expr_equiv_p (XEXP (x, 0), XEXP (y, 1))
1822 && expr_equiv_p (XEXP (x, 1), XEXP (y, 0))));
1824 case ASM_OPERANDS:
1825 /* We don't use the generic code below because we want to
1826 disregard filename and line numbers. */
1828 /* A volatile asm isn't equivalent to any other. */
1829 if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
1830 return 0;
1832 if (GET_MODE (x) != GET_MODE (y)
1833 || strcmp (ASM_OPERANDS_TEMPLATE (x), ASM_OPERANDS_TEMPLATE (y))
1834 || strcmp (ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
1835 ASM_OPERANDS_OUTPUT_CONSTRAINT (y))
1836 || ASM_OPERANDS_OUTPUT_IDX (x) != ASM_OPERANDS_OUTPUT_IDX (y)
1837 || ASM_OPERANDS_INPUT_LENGTH (x) != ASM_OPERANDS_INPUT_LENGTH (y))
1838 return 0;
1840 if (ASM_OPERANDS_INPUT_LENGTH (x))
1842 for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
1843 if (! expr_equiv_p (ASM_OPERANDS_INPUT (x, i),
1844 ASM_OPERANDS_INPUT (y, i))
1845 || strcmp (ASM_OPERANDS_INPUT_CONSTRAINT (x, i),
1846 ASM_OPERANDS_INPUT_CONSTRAINT (y, i)))
1847 return 0;
1850 return 1;
1852 default:
1853 break;
1856 /* Compare the elements. If any pair of corresponding elements
1857 fail to match, return 0 for the whole thing. */
1859 fmt = GET_RTX_FORMAT (code);
1860 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1862 switch (fmt[i])
1864 case 'e':
1865 if (! expr_equiv_p (XEXP (x, i), XEXP (y, i)))
1866 return 0;
1867 break;
1869 case 'E':
1870 if (XVECLEN (x, i) != XVECLEN (y, i))
1871 return 0;
1872 for (j = 0; j < XVECLEN (x, i); j++)
1873 if (! expr_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j)))
1874 return 0;
1875 break;
1877 case 's':
1878 if (strcmp (XSTR (x, i), XSTR (y, i)))
1879 return 0;
1880 break;
1882 case 'i':
1883 if (XINT (x, i) != XINT (y, i))
1884 return 0;
1885 break;
1887 case 'w':
1888 if (XWINT (x, i) != XWINT (y, i))
1889 return 0;
1890 break;
1892 case '0':
1893 break;
1895 default:
1896 abort ();
1900 return 1;
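/* Illustration (editor's sketch, not part of GCC; the toy type is
   hypothetical): the commutative cases above accept either operand order,
   so (plus a b) is equivalent to (plus b a).  A standalone model of that
   check:  */
#if 0
#include <stddef.h>

struct toy_expr { int code; struct toy_expr *op0, *op1; };

static int
toy_equiv_p (const struct toy_expr *x, const struct toy_expr *y)
{
  if (x == y)
    return 1;
  if (x == NULL || y == NULL || x->code != y->code)
    return 0;

  /* Accept either operand order, as expr_equiv_p does for PLUS, MULT,
     AND, IOR, XOR, NE and EQ.  */
  return ((toy_equiv_p (x->op0, y->op0) && toy_equiv_p (x->op1, y->op1))
          || (toy_equiv_p (x->op0, y->op1) && toy_equiv_p (x->op1, y->op0)));
}
#endif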
1903 /* Insert expression X in INSN in the hash TABLE.
1904 If it is already present, record it as the last occurrence in INSN's
1905 basic block.
1907 MODE is the mode of the value X is being stored into.
1908 It is only used if X is a CONST_INT.
1910 ANTIC_P is nonzero if X is an anticipatable expression.
1911 AVAIL_P is nonzero if X is an available expression. */
1913 static void
1914 insert_expr_in_table (rtx x, enum machine_mode mode, rtx insn, int antic_p,
1915 int avail_p, struct hash_table *table)
1917 int found, do_not_record_p;
1918 unsigned int hash;
1919 struct expr *cur_expr, *last_expr = NULL;
1920 struct occr *antic_occr, *avail_occr;
1921 struct occr *last_occr = NULL;
1923 hash = hash_expr (x, mode, &do_not_record_p, table->size);
1925 /* Do not insert expression in table if it contains volatile operands,
1926 or if hash_expr determines the expression is something we don't want
1927 to or can't handle. */
1928 if (do_not_record_p)
1929 return;
1931 cur_expr = table->table[hash];
1932 found = 0;
1934 while (cur_expr && 0 == (found = expr_equiv_p (cur_expr->expr, x)))
1936 /* If the expression isn't found, save a pointer to the end of
1937 the list. */
1938 last_expr = cur_expr;
1939 cur_expr = cur_expr->next_same_hash;
1942 if (! found)
1944 cur_expr = gcse_alloc (sizeof (struct expr));
1945 bytes_used += sizeof (struct expr);
1946 if (table->table[hash] == NULL)
1947 /* This is the first pattern that hashed to this index. */
1948 table->table[hash] = cur_expr;
1949 else
1950 /* Add EXPR to end of this hash chain. */
1951 last_expr->next_same_hash = cur_expr;
1953 /* Set the fields of the expr element. */
1954 cur_expr->expr = x;
1955 cur_expr->bitmap_index = table->n_elems++;
1956 cur_expr->next_same_hash = NULL;
1957 cur_expr->antic_occr = NULL;
1958 cur_expr->avail_occr = NULL;
1961 /* Now record the occurrence(s). */
1962 if (antic_p)
1964 antic_occr = cur_expr->antic_occr;
1966 /* Search for another occurrence in the same basic block. */
1967 while (antic_occr && BLOCK_NUM (antic_occr->insn) != BLOCK_NUM (insn))
1969 /* If an occurrence isn't found, save a pointer to the end of
1970 the list. */
1971 last_occr = antic_occr;
1972 antic_occr = antic_occr->next;
1975 if (antic_occr)
1976 /* Found another instance of the expression in the same basic block.
1977 Prefer the currently recorded one. We want the first one in the
1978 block and the block is scanned from start to end. */
1979 ; /* nothing to do */
1980 else
1982 /* First occurrence of this expression in this basic block. */
1983 antic_occr = gcse_alloc (sizeof (struct occr));
1984 bytes_used += sizeof (struct occr);
1985 /* First occurrence of this expression in any block? */
1986 if (cur_expr->antic_occr == NULL)
1987 cur_expr->antic_occr = antic_occr;
1988 else
1989 last_occr->next = antic_occr;
1991 antic_occr->insn = insn;
1992 antic_occr->next = NULL;
1993 antic_occr->deleted_p = 0;
1997 if (avail_p)
1999 avail_occr = cur_expr->avail_occr;
2001 /* Search for another occurrence in the same basic block. */
2002 while (avail_occr && BLOCK_NUM (avail_occr->insn) != BLOCK_NUM (insn))
2004 /* If an occurrence isn't found, save a pointer to the end of
2005 the list. */
2006 last_occr = avail_occr;
2007 avail_occr = avail_occr->next;
2010 if (avail_occr)
2011 /* Found another instance of the expression in the same basic block.
2012 Prefer this occurrence to the currently recorded one. We want
2013 the last one in the block and the block is scanned from start
2014 to end. */
2015 avail_occr->insn = insn;
2016 else
2018 /* First occurrence of this expression in this basic block. */
2019 avail_occr = gcse_alloc (sizeof (struct occr));
2020 bytes_used += sizeof (struct occr);
2022 /* First occurrence of this expression in any block? */
2023 if (cur_expr->avail_occr == NULL)
2024 cur_expr->avail_occr = avail_occr;
2025 else
2026 last_occr->next = avail_occr;
2028 avail_occr->insn = insn;
2029 avail_occr->next = NULL;
2030 avail_occr->deleted_p = 0;
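/* Illustration (editor's sketch, not part of GCC; names are hypothetical):
   both insert_expr_in_table and insert_set_in_table use the same
   find-or-append walk over a separate-chaining bucket, carrying a trailing
   pointer so a miss can be appended at the tail.  Note also the opposite
   per-block policies above: antic_occr keeps the FIRST computing insn in a
   block, avail_occr keeps the LAST one.  */
#if 0
#include <stdlib.h>

struct toy_entry { int key; struct toy_entry *next_same_hash; };

static struct toy_entry *
find_or_insert (struct toy_entry **bucket, int key)
{
  struct toy_entry *cur = *bucket, *last = NULL;

  while (cur != NULL && cur->key != key)
    {
      last = cur;                       /* remember the tail for appending */
      cur = cur->next_same_hash;
    }

  if (cur == NULL)
    {
      cur = calloc (1, sizeof *cur);    /* no error check: sketch only */
      cur->key = key;
      if (last == NULL)
        *bucket = cur;                  /* first entry in this bucket */
      else
        last->next_same_hash = cur;     /* append at end of chain */
    }

  return cur;
}
#endif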
2035 /* Insert pattern X in INSN in the hash table.
2036 X is a SET of a reg to either another reg or a constant.
2037 If it is already present, record it as the last occurrence in INSN's
2038 basic block. */
2040 static void
2041 insert_set_in_table (rtx x, rtx insn, struct hash_table *table)
2043 int found;
2044 unsigned int hash;
2045 struct expr *cur_expr, *last_expr = NULL;
2046 struct occr *cur_occr, *last_occr = NULL;
2048 if (GET_CODE (x) != SET
2049 || GET_CODE (SET_DEST (x)) != REG)
2050 abort ();
2052 hash = hash_set (REGNO (SET_DEST (x)), table->size);
2054 cur_expr = table->table[hash];
2055 found = 0;
2057 while (cur_expr && 0 == (found = expr_equiv_p (cur_expr->expr, x)))
2059 /* If the expression isn't found, save a pointer to the end of
2060 the list. */
2061 last_expr = cur_expr;
2062 cur_expr = cur_expr->next_same_hash;
2065 if (! found)
2067 cur_expr = gcse_alloc (sizeof (struct expr));
2068 bytes_used += sizeof (struct expr);
2069 if (table->table[hash] == NULL)
2070 /* This is the first pattern that hashed to this index. */
2071 table->table[hash] = cur_expr;
2072 else
2073 /* Add EXPR to end of this hash chain. */
2074 last_expr->next_same_hash = cur_expr;
2076 /* Set the fields of the expr element.
2077 We must copy X because it can be modified when copy propagation is
2078 performed on its operands. */
2079 cur_expr->expr = copy_rtx (x);
2080 cur_expr->bitmap_index = table->n_elems++;
2081 cur_expr->next_same_hash = NULL;
2082 cur_expr->antic_occr = NULL;
2083 cur_expr->avail_occr = NULL;
2086 /* Now record the occurrence. */
2087 cur_occr = cur_expr->avail_occr;
2089 /* Search for another occurrence in the same basic block. */
2090 while (cur_occr && BLOCK_NUM (cur_occr->insn) != BLOCK_NUM (insn))
2092 /* If an occurrence isn't found, save a pointer to the end of
2093 the list. */
2094 last_occr = cur_occr;
2095 cur_occr = cur_occr->next;
2098 if (cur_occr)
2099 /* Found another instance of the expression in the same basic block.
2100 Prefer this occurrence to the currently recorded one. We want the
2101 last one in the block and the block is scanned from start to end. */
2102 cur_occr->insn = insn;
2103 else
2105 /* First occurrence of this expression in this basic block. */
2106 cur_occr = gcse_alloc (sizeof (struct occr));
2107 bytes_used += sizeof (struct occr);
2109 /* First occurrence of this expression in any block? */
2110 if (cur_expr->avail_occr == NULL)
2111 cur_expr->avail_occr = cur_occr;
2112 else
2113 last_occr->next = cur_occr;
2115 cur_occr->insn = insn;
2116 cur_occr->next = NULL;
2117 cur_occr->deleted_p = 0;
2121 /* Determine whether the rtx X should be treated as a constant for
2122 the purposes of GCSE's constant propagation. */
2124 static bool
2125 gcse_constant_p (rtx x)
2127 /* Consider a COMPARE of two integers constant. */
2128 if (GET_CODE (x) == COMPARE
2129 && GET_CODE (XEXP (x, 0)) == CONST_INT
2130 && GET_CODE (XEXP (x, 1)) == CONST_INT)
2131 return true;
2134 /* Consider a COMPARE of the same registers to be constant
2135 if they are not floating point registers. */
2136 if (GET_CODE (x) == COMPARE
2137 && GET_CODE (XEXP (x, 0)) == REG
2138 && GET_CODE (XEXP (x, 1)) == REG
2139 && REGNO (XEXP (x, 0)) == REGNO (XEXP (x, 1))
2140 && ! FLOAT_MODE_P (GET_MODE (XEXP (x, 0)))
2141 && ! FLOAT_MODE_P (GET_MODE (XEXP (x, 1))))
2142 return true;
2144 if (GET_CODE (x) == CONSTANT_P_RTX)
2145 return false;
2147 return CONSTANT_P (x);
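/* Editor's note: the floating point exclusion above is what one would
   expect from IEEE semantics, where a NaN compares unequal to itself, so
   even (COMPARE reg reg) is not a known value in a float mode.  A
   standalone demonstration (not part of GCC):  */
#if 0
#include <math.h>

static int
self_compare_not_constant (void)
{
  double d = nan ("");
  return d == d;   /* yields 0, not 1: reg == reg is not foldable */
}
#endif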
2150 /* Scan pattern PAT of INSN and add an entry to the hash TABLE (set or
2151 expression one). */
2153 static void
2154 hash_scan_set (rtx pat, rtx insn, struct hash_table *table)
2156 rtx src = SET_SRC (pat);
2157 rtx dest = SET_DEST (pat);
2158 rtx note;
2160 if (GET_CODE (src) == CALL)
2161 hash_scan_call (src, insn, table);
2163 else if (GET_CODE (dest) == REG)
2165 unsigned int regno = REGNO (dest);
2166 rtx tmp;
2168 /* If this is a single set and we are doing constant propagation,
2169 see if a REG_NOTE shows it to be equivalent to a constant. */
2170 if (table->set_p && (note = find_reg_equal_equiv_note (insn)) != 0
2171 && gcse_constant_p (XEXP (note, 0)))
2172 src = XEXP (note, 0), pat = gen_rtx_SET (VOIDmode, dest, src);
2174 /* Only record sets of pseudo-regs in the hash table. */
2175 if (! table->set_p
2176 && regno >= FIRST_PSEUDO_REGISTER
2177 /* Don't GCSE something if we can't do a reg/reg copy. */
2178 && can_copy_p (GET_MODE (dest))
2179 /* GCSE commonly inserts instructions after the insn. We can't
2180 do that easily for EH_REGION notes so disable GCSE on these
2181 for now. */
2182 && !find_reg_note (insn, REG_EH_REGION, NULL_RTX)
2183 /* Is SET_SRC something we want to gcse? */
2184 && want_to_gcse_p (src)
2185 /* Don't CSE a nop. */
2186 && ! set_noop_p (pat)
2187 /* Don't GCSE if it has an attached REG_EQUIV note.
2188 At this point only function parameters should have
2189 REG_EQUIV notes, and if the argument slot is used somewhere
2190 explicitly, it means the address of the parameter has been taken,
2191 so we should not extend the lifetime of the pseudo. */
2192 && ((note = find_reg_note (insn, REG_EQUIV, NULL_RTX)) == 0
2193 || GET_CODE (XEXP (note, 0)) != MEM))
2195 /* An expression is not anticipatable if its operands are
2196 modified before this insn or if this is not the only SET in
2197 this insn. */
2198 int antic_p = oprs_anticipatable_p (src, insn) && single_set (insn);
2199 /* An expression is not available if its operands are
2200 subsequently modified, including this insn. It's also not
2201 available if this is a branch, because we can't insert
2202 a set after the branch. */
2203 int avail_p = (oprs_available_p (src, insn)
2204 && ! JUMP_P (insn));
2206 insert_expr_in_table (src, GET_MODE (dest), insn, antic_p, avail_p, table);
2209 /* Record sets for constant/copy propagation. */
2210 else if (table->set_p
2211 && regno >= FIRST_PSEUDO_REGISTER
2212 && ((GET_CODE (src) == REG
2213 && REGNO (src) >= FIRST_PSEUDO_REGISTER
2214 && can_copy_p (GET_MODE (dest))
2215 && REGNO (src) != regno)
2216 || gcse_constant_p (src))
2217 /* A copy is not available if its src or dest is subsequently
2218 modified. Here we want to search from INSN+1 on, but
2219 oprs_available_p searches from INSN on. */
2220 && (insn == BB_END (BLOCK_FOR_INSN (insn))
2221 || ((tmp = next_nonnote_insn (insn)) != NULL_RTX
2222 && oprs_available_p (pat, tmp))))
2223 insert_set_in_table (pat, insn, table);
2225 /* In the case of a store we want to consider the memory value as
2226 available in the REG stored in that memory. This makes it possible to
2227 remove redundant loads that follow stores to the same location. */
2228 else if (flag_gcse_las && GET_CODE (src) == REG && GET_CODE (dest) == MEM)
2230 unsigned int regno = REGNO (src);
2232 /* Do not do this for constant/copy propagation. */
2233 if (! table->set_p
2234 /* Only record sets of pseudo-regs in the hash table. */
2235 && regno >= FIRST_PSEUDO_REGISTER
2236 /* Don't GCSE something if we can't do a reg/reg copy. */
2237 && can_copy_p (GET_MODE (src))
2238 /* GCSE commonly inserts instructions after the insn. We can't
2239 do that easily for EH_REGION notes so disable GCSE on these
2240 for now. */
2241 && ! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
2242 /* Is SET_DEST something we want to gcse? */
2243 && want_to_gcse_p (dest)
2244 /* Don't CSE a nop. */
2245 && ! set_noop_p (pat)
2246 /* Don't GCSE if it has an attached REG_EQUIV note.
2247 At this point only function parameters should have
2248 REG_EQUIV notes, and if the argument slot is used somewhere
2249 explicitly, it means the address of the parameter has been taken,
2250 so we should not extend the lifetime of the pseudo. */
2251 && ((note = find_reg_note (insn, REG_EQUIV, NULL_RTX)) == 0
2252 || GET_CODE (XEXP (note, 0)) != MEM))
2254 /* Stores are never anticipatable. */
2255 int antic_p = 0;
2256 /* An expression is not available if its operands are
2257 subsequently modified, including this insn. It's also not
2258 available if this is a branch, because we can't insert
2259 a set after the branch. */
2260 int avail_p = oprs_available_p (dest, insn)
2261 && ! JUMP_P (insn);
2263 /* Record the memory expression (DEST) in the hash table. */
2264 insert_expr_in_table (dest, GET_MODE (dest), insn,
2265 antic_p, avail_p, table);
2270 static void
2271 hash_scan_clobber (rtx x ATTRIBUTE_UNUSED, rtx insn ATTRIBUTE_UNUSED,
2272 struct hash_table *table ATTRIBUTE_UNUSED)
2274 /* Currently nothing to do. */
2277 static void
2278 hash_scan_call (rtx x ATTRIBUTE_UNUSED, rtx insn ATTRIBUTE_UNUSED,
2279 struct hash_table *table ATTRIBUTE_UNUSED)
2281 /* Currently nothing to do. */
2284 /* Process INSN and add hash table entries as appropriate.
2286 Only available expressions that set a single pseudo-reg are recorded.
2288 Single sets in a PARALLEL could be handled, but it's an extra complication
2289 that isn't dealt with right now. The trick is handling the CLOBBERs that
2290 are also in the PARALLEL. Later.
2292 If SET_P is nonzero, this is for the assignment hash table,
2293 otherwise it is for the expression hash table.
2294 If IN_LIBCALL_BLOCK is nonzero, we are in a libcall block and should
2295 not record any expressions. */
2297 static void
2298 hash_scan_insn (rtx insn, struct hash_table *table, int in_libcall_block)
2300 rtx pat = PATTERN (insn);
2301 int i;
2303 if (in_libcall_block)
2304 return;
2306 /* Pick out the sets of INSN and for other forms of instructions record
2307 what's been modified. */
2309 if (GET_CODE (pat) == SET)
2310 hash_scan_set (pat, insn, table);
2311 else if (GET_CODE (pat) == PARALLEL)
2312 for (i = 0; i < XVECLEN (pat, 0); i++)
2314 rtx x = XVECEXP (pat, 0, i);
2316 if (GET_CODE (x) == SET)
2317 hash_scan_set (x, insn, table);
2318 else if (GET_CODE (x) == CLOBBER)
2319 hash_scan_clobber (x, insn, table);
2320 else if (GET_CODE (x) == CALL)
2321 hash_scan_call (x, insn, table);
2324 else if (GET_CODE (pat) == CLOBBER)
2325 hash_scan_clobber (pat, insn, table);
2326 else if (GET_CODE (pat) == CALL)
2327 hash_scan_call (pat, insn, table);
2330 static void
2331 dump_hash_table (FILE *file, const char *name, struct hash_table *table)
2333 int i;
2334 /* Flattened out table, so it's printed in proper order. */
2335 struct expr **flat_table;
2336 unsigned int *hash_val;
2337 struct expr *expr;
2339 flat_table = xcalloc (table->n_elems, sizeof (struct expr *));
2340 hash_val = xmalloc (table->n_elems * sizeof (unsigned int));
2342 for (i = 0; i < (int) table->size; i++)
2343 for (expr = table->table[i]; expr != NULL; expr = expr->next_same_hash)
2345 flat_table[expr->bitmap_index] = expr;
2346 hash_val[expr->bitmap_index] = i;
2349 fprintf (file, "%s hash table (%d buckets, %d entries)\n",
2350 name, table->size, table->n_elems);
2352 for (i = 0; i < (int) table->n_elems; i++)
2353 if (flat_table[i] != 0)
2355 expr = flat_table[i];
2356 fprintf (file, "Index %d (hash value %d)\n ",
2357 expr->bitmap_index, hash_val[i]);
2358 print_rtl (file, expr->expr);
2359 fprintf (file, "\n");
2362 fprintf (file, "\n");
2364 free (flat_table);
2365 free (hash_val);
2368 /* Record register first/last/block set information for REGNO in INSN.
2370 first_set records the first place in the block where the register
2371 is set and is used to compute "anticipatability".
2373 last_set records the last place in the block where the register
2374 is set and is used to compute "availability".
2376 last_bb records the block for which first_set and last_set are
2377 valid, as a quick test to invalidate them.
2379 reg_set_in_block records whether the register is set in the block
2380 and is used to compute "transparency". */
2382 static void
2383 record_last_reg_set_info (rtx insn, int regno)
2385 struct reg_avail_info *info = &reg_avail_info[regno];
2386 int cuid = INSN_CUID (insn);
2388 info->last_set = cuid;
2389 if (info->last_bb != current_bb)
2391 info->last_bb = current_bb;
2392 info->first_set = cuid;
2393 SET_BIT (reg_set_in_block[current_bb->index], regno);
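/* Illustration (editor's sketch, not part of GCC; names are hypothetical):
   the last_bb field acts as a validity stamp, so per-register records need
   not be cleared between blocks; a stale record is detected and restarted
   on first use in a new block.  */
#if 0
struct toy_avail { int first_set, last_set, last_bb; };

static void
toy_record_set (struct toy_avail *info, int cuid, int bb)
{
  info->last_set = cuid;
  if (info->last_bb != bb)
    {
      info->last_bb = bb;      /* record was stale: revalidate for BB */
      info->first_set = cuid;  /* earliest set seen so far in BB */
    }
}
#endif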
2398 /* Record all of the canonicalized MEMs of record_last_mem_set_info's insn.
2399 Note we store a pair of elements in the list, so they have to be
2400 taken off pairwise. */
2402 static void
2403 canon_list_insert (rtx dest ATTRIBUTE_UNUSED, rtx unused1 ATTRIBUTE_UNUSED,
2404 void * v_insn)
2406 rtx dest_addr, insn;
2407 int bb;
2409 while (GET_CODE (dest) == SUBREG
2410 || GET_CODE (dest) == ZERO_EXTRACT
2411 || GET_CODE (dest) == SIGN_EXTRACT
2412 || GET_CODE (dest) == STRICT_LOW_PART)
2413 dest = XEXP (dest, 0);
2415 /* If DEST is not a MEM, then it will not conflict with a load. Note
2416 that function calls are assumed to clobber memory, but are handled
2417 elsewhere. */
2419 if (GET_CODE (dest) != MEM)
2420 return;
2422 dest_addr = get_addr (XEXP (dest, 0));
2423 dest_addr = canon_rtx (dest_addr);
2424 insn = (rtx) v_insn;
2425 bb = BLOCK_NUM (insn);
2427 canon_modify_mem_list[bb] =
2428 alloc_EXPR_LIST (VOIDmode, dest_addr, canon_modify_mem_list[bb]);
2429 canon_modify_mem_list[bb] =
2430 alloc_EXPR_LIST (VOIDmode, dest, canon_modify_mem_list[bb]);
2431 bitmap_set_bit (canon_modify_mem_list_set, bb);
2434 /* Record memory modification information for INSN. We do not actually care
2435 about the memory location(s) that are set, or even how they are set (consider
2436 a CALL_INSN). We merely need to record which insns modify memory. */
2438 static void
2439 record_last_mem_set_info (rtx insn)
2441 int bb = BLOCK_NUM (insn);
2443 /* load_killed_in_block_p will handle the case of calls clobbering
2444 everything. */
2445 modify_mem_list[bb] = alloc_INSN_LIST (insn, modify_mem_list[bb]);
2446 bitmap_set_bit (modify_mem_list_set, bb);
2448 if (GET_CODE (insn) == CALL_INSN)
2450 /* Note that traversals of this list (other than for freeing)
2451 will break after encountering a CALL_INSN. So there's no
2452 need to insert a pair of items, as canon_list_insert does. */
2453 canon_modify_mem_list[bb] =
2454 alloc_INSN_LIST (insn, canon_modify_mem_list[bb]);
2455 bitmap_set_bit (canon_modify_mem_list_set, bb);
2457 else
2458 note_stores (PATTERN (insn), canon_list_insert, (void*) insn);
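/* Illustration (editor's sketch, not part of GCC; names are hypothetical):
   canon_list_insert pushes dest_addr and then dest, so a consumer such as
   compute_transp pops dest first and dest_addr second, two nodes per
   recorded store; a lone CALL_INSN node acts as a "clobbers everything"
   sentinel and terminates the walk early.  */
#if 0
#include <stdlib.h>

struct toy_list { void *payload; struct toy_list *next; };

static struct toy_list *
toy_cons (void *payload, struct toy_list *rest)
{
  struct toy_list *node = malloc (sizeof *node);   /* sketch: unchecked */

  node->payload = payload;
  node->next = rest;
  return node;
}

static struct toy_list *
toy_push_pair (struct toy_list *list, void *dest, void *dest_addr)
{
  list = toy_cons (dest_addr, list);   /* pushed first, popped second */
  return toy_cons (dest, list);        /* pushed last, popped first */
}
#endif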
2461 /* Called from compute_hash_table via note_stores to handle one
2462 SET or CLOBBER in an insn. DATA is really the instruction in which
2463 the SET is taking place. */
2465 static void
2466 record_last_set_info (rtx dest, rtx setter ATTRIBUTE_UNUSED, void *data)
2468 rtx last_set_insn = (rtx) data;
2470 if (GET_CODE (dest) == SUBREG)
2471 dest = SUBREG_REG (dest);
2473 if (GET_CODE (dest) == REG)
2474 record_last_reg_set_info (last_set_insn, REGNO (dest));
2475 else if (GET_CODE (dest) == MEM
2476 /* Ignore pushes, they clobber nothing. */
2477 && ! push_operand (dest, GET_MODE (dest)))
2478 record_last_mem_set_info (last_set_insn);
2481 /* Top level function to create an expression or assignment hash table.
2483 Expression entries are placed in the hash table if
2484 - they are of the form (set (pseudo-reg) src),
2485 - src is something we want to perform GCSE on,
2486 - none of the operands are subsequently modified in the block
2488 Assignment entries are placed in the hash table if
2489 - they are of the form (set (pseudo-reg) src),
2490 - src is something we want to perform const/copy propagation on,
2491 - none of the operands or target are subsequently modified in the block
2493 Currently src must be a pseudo-reg or a const_int.
2495 TABLE is the table computed. */
2497 static void
2498 compute_hash_table_work (struct hash_table *table)
2500 unsigned int i;
2502 /* While we compute the hash table we also compute a bit array of which
2503 registers are set in which blocks.
2504 ??? This isn't needed during const/copy propagation, but it's cheap to
2505 compute. Later. */
2506 sbitmap_vector_zero (reg_set_in_block, last_basic_block);
2508 /* Re-cache any INSN_LIST nodes we have allocated. */
2509 clear_modify_mem_tables ();
2510 /* Some working arrays used to track first and last set in each block. */
2511 reg_avail_info = gmalloc (max_gcse_regno * sizeof (struct reg_avail_info));
2513 for (i = 0; i < max_gcse_regno; ++i)
2514 reg_avail_info[i].last_bb = NULL;
2516 FOR_EACH_BB (current_bb)
2518 rtx insn;
2519 unsigned int regno;
2520 int in_libcall_block;
2522 /* First pass over the instructions records information used to
2523 determine when registers and memory are first and last set.
2524 ??? hard-reg reg_set_in_block computation
2525 could be moved to compute_sets since they currently don't change. */
2527 for (insn = BB_HEAD (current_bb);
2528 insn && insn != NEXT_INSN (BB_END (current_bb));
2529 insn = NEXT_INSN (insn))
2531 if (! INSN_P (insn))
2532 continue;
2534 if (GET_CODE (insn) == CALL_INSN)
2536 bool clobbers_all = false;
2537 #ifdef NON_SAVING_SETJMP
2538 if (NON_SAVING_SETJMP
2539 && find_reg_note (insn, REG_SETJMP, NULL_RTX))
2540 clobbers_all = true;
2541 #endif
2543 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2544 if (clobbers_all
2545 || TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
2546 record_last_reg_set_info (insn, regno);
2548 mark_call (insn);
2551 note_stores (PATTERN (insn), record_last_set_info, insn);
2554 /* Insert implicit sets in the hash table. */
2555 if (table->set_p
2556 && implicit_sets[current_bb->index] != NULL_RTX)
2557 hash_scan_set (implicit_sets[current_bb->index],
2558 BB_HEAD (current_bb), table);
2560 /* The next pass builds the hash table. */
2562 for (insn = BB_HEAD (current_bb), in_libcall_block = 0;
2563 insn && insn != NEXT_INSN (BB_END (current_bb));
2564 insn = NEXT_INSN (insn))
2565 if (INSN_P (insn))
2567 if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
2568 in_libcall_block = 1;
2569 else if (table->set_p && find_reg_note (insn, REG_RETVAL, NULL_RTX))
2570 in_libcall_block = 0;
2571 hash_scan_insn (insn, table, in_libcall_block);
2572 if (!table->set_p && find_reg_note (insn, REG_RETVAL, NULL_RTX))
2573 in_libcall_block = 0;
2577 free (reg_avail_info);
2578 reg_avail_info = NULL;
2581 /* Allocate space for the set/expr hash TABLE.
2582 N_INSNS is the number of instructions in the function.
2583 It is used to determine the number of buckets to use.
2584 SET_P determines whether set or expression table will
2585 be created. */
2587 static void
2588 alloc_hash_table (int n_insns, struct hash_table *table, int set_p)
2590 int n;
2592 table->size = n_insns / 4;
2593 if (table->size < 11)
2594 table->size = 11;
2596 /* Attempt to maintain efficient use of hash table.
2597 Making it an odd number is simplest for now.
2598 ??? Later take some measurements. */
2599 table->size |= 1;
2600 n = table->size * sizeof (struct expr *);
2601 table->table = gmalloc (n);
2602 table->set_p = set_p;
2605 /* Free things allocated by alloc_hash_table. */
2607 static void
2608 free_hash_table (struct hash_table *table)
2610 free (table->table);
2613 /* Compute the hash TABLE, either the set table for copy/const
2614 propagation or the expression table for GCSE. */
2616 static void
2617 compute_hash_table (struct hash_table *table)
2619 /* Initialize count of number of entries in hash table. */
2620 table->n_elems = 0;
2621 memset (table->table, 0, table->size * sizeof (struct expr *));
2623 compute_hash_table_work (table);
2626 /* Expression tracking support. */
2628 /* Lookup pattern PAT in the expression TABLE.
2629 The result is a pointer to the table entry, or NULL if not found. */
2631 static struct expr *
2632 lookup_expr (rtx pat, struct hash_table *table)
2634 int do_not_record_p;
2635 unsigned int hash = hash_expr (pat, GET_MODE (pat), &do_not_record_p,
2636 table->size);
2637 struct expr *expr;
2639 if (do_not_record_p)
2640 return NULL;
2642 expr = table->table[hash];
2644 while (expr && ! expr_equiv_p (expr->expr, pat))
2645 expr = expr->next_same_hash;
2647 return expr;
2650 /* Lookup REGNO in the set TABLE. The result is a pointer to the
2651 table entry, or NULL if not found. */
2653 static struct expr *
2654 lookup_set (unsigned int regno, struct hash_table *table)
2656 unsigned int hash = hash_set (regno, table->size);
2657 struct expr *expr;
2659 expr = table->table[hash];
2661 while (expr && REGNO (SET_DEST (expr->expr)) != regno)
2662 expr = expr->next_same_hash;
2664 return expr;
2667 /* Return the next entry for REGNO in list EXPR. */
2669 static struct expr *
2670 next_set (unsigned int regno, struct expr *expr)
2673 expr = expr->next_same_hash;
2674 while (expr && REGNO (SET_DEST (expr->expr)) != regno);
2676 return expr;
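/* Editor's note: lookup_set and next_set are meant to be used together to
   enumerate every recorded assignment to a register.  A typical traversal
   (illustrative only; `process' is a placeholder):  */
#if 0
struct expr *set;

for (set = lookup_set (regno, table); set != NULL; set = next_set (regno, set))
  process (set);
#endif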
2679 /* Like free_INSN_LIST_list or free_EXPR_LIST_list, except that the node
2680 types may be mixed. */
2682 static void
2683 free_insn_expr_list_list (rtx *listp)
2685 rtx list, next;
2687 for (list = *listp; list ; list = next)
2689 next = XEXP (list, 1);
2690 if (GET_CODE (list) == EXPR_LIST)
2691 free_EXPR_LIST_node (list);
2692 else
2693 free_INSN_LIST_node (list);
2696 *listp = NULL;
2699 /* Clear canon_modify_mem_list and modify_mem_list tables. */
2700 static void
2701 clear_modify_mem_tables (void)
2703 int i;
2705 EXECUTE_IF_SET_IN_BITMAP
2706 (modify_mem_list_set, 0, i, free_INSN_LIST_list (modify_mem_list + i));
2707 bitmap_clear (modify_mem_list_set);
2709 EXECUTE_IF_SET_IN_BITMAP
2710 (canon_modify_mem_list_set, 0, i,
2711 free_insn_expr_list_list (canon_modify_mem_list + i));
2712 bitmap_clear (canon_modify_mem_list_set);
2715 /* Release memory used by modify_mem_list_set and canon_modify_mem_list_set. */
2717 static void
2718 free_modify_mem_tables (void)
2720 clear_modify_mem_tables ();
2721 free (modify_mem_list);
2722 free (canon_modify_mem_list);
2723 modify_mem_list = 0;
2724 canon_modify_mem_list = 0;
2727 /* Reset tables used to keep track of what's still available [since the
2728 start of the block]. */
2730 static void
2731 reset_opr_set_tables (void)
2733 /* Maintain a bitmap of which regs have been set since beginning of
2734 the block. */
2735 CLEAR_REG_SET (reg_set_bitmap);
2737 /* Also keep a record of the last instruction to modify memory.
2738 For now this is very trivial, we only record whether any memory
2739 location has been modified. */
2740 clear_modify_mem_tables ();
2743 /* Return nonzero if the operands of X are not set before INSN in
2744 INSN's basic block. */
2746 static int
2747 oprs_not_set_p (rtx x, rtx insn)
2749 int i, j;
2750 enum rtx_code code;
2751 const char *fmt;
2753 if (x == 0)
2754 return 1;
2756 code = GET_CODE (x);
2757 switch (code)
2759 case PC:
2760 case CC0:
2761 case CONST:
2762 case CONST_INT:
2763 case CONST_DOUBLE:
2764 case CONST_VECTOR:
2765 case SYMBOL_REF:
2766 case LABEL_REF:
2767 case ADDR_VEC:
2768 case ADDR_DIFF_VEC:
2769 return 1;
2771 case MEM:
2772 if (load_killed_in_block_p (BLOCK_FOR_INSN (insn),
2773 INSN_CUID (insn), x, 0))
2774 return 0;
2775 else
2776 return oprs_not_set_p (XEXP (x, 0), insn);
2778 case REG:
2779 return ! REGNO_REG_SET_P (reg_set_bitmap, REGNO (x));
2781 default:
2782 break;
2785 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
2787 if (fmt[i] == 'e')
2789 /* If we are about to do the last recursive call
2790 needed at this level, change it into iteration.
2791 This function is called enough to be worth it. */
2792 if (i == 0)
2793 return oprs_not_set_p (XEXP (x, i), insn);
2795 if (! oprs_not_set_p (XEXP (x, i), insn))
2796 return 0;
2798 else if (fmt[i] == 'E')
2799 for (j = 0; j < XVECLEN (x, i); j++)
2800 if (! oprs_not_set_p (XVECEXP (x, i, j), insn))
2801 return 0;
2804 return 1;
2807 /* Mark things set by a CALL. */
2809 static void
2810 mark_call (rtx insn)
2812 if (! CONST_OR_PURE_CALL_P (insn))
2813 record_last_mem_set_info (insn);
2816 /* Mark things set by a SET. */
2818 static void
2819 mark_set (rtx pat, rtx insn)
2821 rtx dest = SET_DEST (pat);
2823 while (GET_CODE (dest) == SUBREG
2824 || GET_CODE (dest) == ZERO_EXTRACT
2825 || GET_CODE (dest) == SIGN_EXTRACT
2826 || GET_CODE (dest) == STRICT_LOW_PART)
2827 dest = XEXP (dest, 0);
2829 if (GET_CODE (dest) == REG)
2830 SET_REGNO_REG_SET (reg_set_bitmap, REGNO (dest));
2831 else if (GET_CODE (dest) == MEM)
2832 record_last_mem_set_info (insn);
2834 if (GET_CODE (SET_SRC (pat)) == CALL)
2835 mark_call (insn);
2838 /* Record things set by a CLOBBER. */
2840 static void
2841 mark_clobber (rtx pat, rtx insn)
2843 rtx clob = XEXP (pat, 0);
2845 while (GET_CODE (clob) == SUBREG || GET_CODE (clob) == STRICT_LOW_PART)
2846 clob = XEXP (clob, 0);
2848 if (GET_CODE (clob) == REG)
2849 SET_REGNO_REG_SET (reg_set_bitmap, REGNO (clob));
2850 else
2851 record_last_mem_set_info (insn);
2854 /* Record things set by INSN.
2855 This data is used by oprs_not_set_p. */
2857 static void
2858 mark_oprs_set (rtx insn)
2860 rtx pat = PATTERN (insn);
2861 int i;
2863 if (GET_CODE (pat) == SET)
2864 mark_set (pat, insn);
2865 else if (GET_CODE (pat) == PARALLEL)
2866 for (i = 0; i < XVECLEN (pat, 0); i++)
2868 rtx x = XVECEXP (pat, 0, i);
2870 if (GET_CODE (x) == SET)
2871 mark_set (x, insn);
2872 else if (GET_CODE (x) == CLOBBER)
2873 mark_clobber (x, insn);
2874 else if (GET_CODE (x) == CALL)
2875 mark_call (insn);
2878 else if (GET_CODE (pat) == CLOBBER)
2879 mark_clobber (pat, insn);
2880 else if (GET_CODE (pat) == CALL)
2881 mark_call (insn);
2885 /* Classic GCSE reaching definition support. */
2887 /* Allocate reaching def variables. */
2889 static void
2890 alloc_rd_mem (int n_blocks, int n_insns)
2892 rd_kill = sbitmap_vector_alloc (n_blocks, n_insns);
2893 sbitmap_vector_zero (rd_kill, n_blocks);
2895 rd_gen = sbitmap_vector_alloc (n_blocks, n_insns);
2896 sbitmap_vector_zero (rd_gen, n_blocks);
2898 reaching_defs = sbitmap_vector_alloc (n_blocks, n_insns);
2899 sbitmap_vector_zero (reaching_defs, n_blocks);
2901 rd_out = sbitmap_vector_alloc (n_blocks, n_insns);
2902 sbitmap_vector_zero (rd_out, n_blocks);
2905 /* Free reaching def variables. */
2907 static void
2908 free_rd_mem (void)
2910 sbitmap_vector_free (rd_kill);
2911 sbitmap_vector_free (rd_gen);
2912 sbitmap_vector_free (reaching_defs);
2913 sbitmap_vector_free (rd_out);
2916 /* Add INSN to the kills of BB. REGNO, set in BB, is killed by INSN. */
2918 static void
2919 handle_rd_kill_set (rtx insn, int regno, basic_block bb)
2921 struct reg_set *this_reg;
2923 for (this_reg = reg_set_table[regno]; this_reg; this_reg = this_reg ->next)
2924 if (BLOCK_NUM (this_reg->insn) != BLOCK_NUM (insn))
2925 SET_BIT (rd_kill[bb->index], INSN_CUID (this_reg->insn));
2930 /* Compute the set of kills for reaching definitions. */
2930 static void
2931 compute_kill_rd (void)
2933 int cuid;
2934 unsigned int regno;
2935 int i;
2936 basic_block bb;
2938 /* For each block
2939 For each set bit in `gen' of the block (i.e. each insn which
2940 generates a definition in the block)
2941 Call the reg set by the insn corresponding to that bit regx
2942 Look at the linked list starting at reg_set_table[regx]
2943 For each setting of regx in the linked list, which is not in
2944 this block
2945 Set the bit in `kill' corresponding to that insn. */
2946 FOR_EACH_BB (bb)
2947 for (cuid = 0; cuid < max_cuid; cuid++)
2948 if (TEST_BIT (rd_gen[bb->index], cuid))
2950 rtx insn = CUID_INSN (cuid);
2951 rtx pat = PATTERN (insn);
2953 if (GET_CODE (insn) == CALL_INSN)
2955 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2956 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
2957 handle_rd_kill_set (insn, regno, bb);
2960 if (GET_CODE (pat) == PARALLEL)
2962 for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
2964 enum rtx_code code = GET_CODE (XVECEXP (pat, 0, i));
2966 if ((code == SET || code == CLOBBER)
2967 && GET_CODE (XEXP (XVECEXP (pat, 0, i), 0)) == REG)
2968 handle_rd_kill_set (insn,
2969 REGNO (XEXP (XVECEXP (pat, 0, i), 0)),
2970 bb);
2973 else if (GET_CODE (pat) == SET && GET_CODE (SET_DEST (pat)) == REG)
2974 /* Each setting of this register outside of this block
2975 must be marked in the set of kills in this block. */
2976 handle_rd_kill_set (insn, REGNO (SET_DEST (pat)), bb);
2980 /* Compute the reaching definitions as in
2981 Compilers Principles, Techniques, and Tools. Aho, Sethi, Ullman,
2982 Chapter 10. It is the same algorithm as used for computing available
2983 expressions but applied to the gens and kills of reaching definitions. */
2985 static void
2986 compute_rd (void)
2988 int changed, passes;
2989 basic_block bb;
2991 FOR_EACH_BB (bb)
2992 sbitmap_copy (rd_out[bb->index] /*dst*/, rd_gen[bb->index] /*src*/);
2994 passes = 0;
2995 changed = 1;
2996 while (changed)
2998 changed = 0;
2999 FOR_EACH_BB (bb)
3001 sbitmap_union_of_preds (reaching_defs[bb->index], rd_out, bb->index);
3002 changed |= sbitmap_union_of_diff_cg (rd_out[bb->index], rd_gen[bb->index],
3003 reaching_defs[bb->index], rd_kill[bb->index]);
3005 passes++;
3008 if (gcse_file)
3009 fprintf (gcse_file, "reaching def computation: %d passes\n", passes);
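/* Illustration (editor's sketch, not part of GCC; names are hypothetical):
   compute_rd is the classic round-robin fixed point
       in[b]  = union over predecessors p of out[p]
       out[b] = gen[b] | (in[b] & ~kill[b])
   iterated until no out[] changes.  The same solver on plain word-sized
   bitsets:  */
#if 0
#include <string.h>

#define NBLOCKS 8

static void
toy_compute_rd (unsigned gen[NBLOCKS], unsigned kill[NBLOCKS],
                unsigned in[NBLOCKS], unsigned out[NBLOCKS],
                int pred[NBLOCKS][NBLOCKS])   /* each row ends with -1 */
{
  int changed = 1;

  memcpy (out, gen, sizeof (unsigned) * NBLOCKS);   /* out := gen */

  while (changed)
    {
      int b, p;

      changed = 0;
      for (b = 0; b < NBLOCKS; b++)
        {
          unsigned new_in = 0, new_out;

          for (p = 0; pred[b][p] >= 0; p++)
            new_in |= out[pred[b][p]];   /* union of predecessor outs */

          new_out = gen[b] | (new_in & ~kill[b]);
          in[b] = new_in;
          if (new_out != out[b])
            {
              out[b] = new_out;
              changed = 1;   /* not yet a fixed point */
            }
        }
    }
}
#endif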
3012 /* Classic GCSE available expression support. */
3014 /* Allocate memory for available expression computation. */
3016 static void
3017 alloc_avail_expr_mem (int n_blocks, int n_exprs)
3019 ae_kill = sbitmap_vector_alloc (n_blocks, n_exprs);
3020 sbitmap_vector_zero (ae_kill, n_blocks);
3022 ae_gen = sbitmap_vector_alloc (n_blocks, n_exprs);
3023 sbitmap_vector_zero (ae_gen, n_blocks);
3025 ae_in = sbitmap_vector_alloc (n_blocks, n_exprs);
3026 sbitmap_vector_zero (ae_in, n_blocks);
3028 ae_out = sbitmap_vector_alloc (n_blocks, n_exprs);
3029 sbitmap_vector_zero (ae_out, n_blocks);
3032 static void
3033 free_avail_expr_mem (void)
3035 sbitmap_vector_free (ae_kill);
3036 sbitmap_vector_free (ae_gen);
3037 sbitmap_vector_free (ae_in);
3038 sbitmap_vector_free (ae_out);
3041 /* Compute the set of available expressions generated in each basic block. */
3043 static void
3044 compute_ae_gen (struct hash_table *expr_hash_table)
3046 unsigned int i;
3047 struct expr *expr;
3048 struct occr *occr;
3050 /* For each recorded occurrence of each expression, set ae_gen[bb][expr].
3051 This is all we have to do because an expression is not recorded if it
3052 is not available, and the only expressions we want to work with are the
3053 ones that are recorded. */
3054 for (i = 0; i < expr_hash_table->size; i++)
3055 for (expr = expr_hash_table->table[i]; expr != 0; expr = expr->next_same_hash)
3056 for (occr = expr->avail_occr; occr != 0; occr = occr->next)
3057 SET_BIT (ae_gen[BLOCK_NUM (occr->insn)], expr->bitmap_index);
3060 /* Return nonzero if expression X is killed in BB. */
3062 static int
3063 expr_killed_p (rtx x, basic_block bb)
3065 int i, j;
3066 enum rtx_code code;
3067 const char *fmt;
3069 if (x == 0)
3070 return 1;
3072 code = GET_CODE (x);
3073 switch (code)
3075 case REG:
3076 return TEST_BIT (reg_set_in_block[bb->index], REGNO (x));
3078 case MEM:
3079 if (load_killed_in_block_p (bb, get_max_uid () + 1, x, 0))
3080 return 1;
3081 else
3082 return expr_killed_p (XEXP (x, 0), bb);
3084 case PC:
3085 case CC0: /*FIXME*/
3086 case CONST:
3087 case CONST_INT:
3088 case CONST_DOUBLE:
3089 case CONST_VECTOR:
3090 case SYMBOL_REF:
3091 case LABEL_REF:
3092 case ADDR_VEC:
3093 case ADDR_DIFF_VEC:
3094 return 0;
3096 default:
3097 break;
3100 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
3102 if (fmt[i] == 'e')
3104 /* If we are about to do the last recursive call
3105 needed at this level, change it into iteration.
3106 This function is called enough to be worth it. */
3107 if (i == 0)
3108 return expr_killed_p (XEXP (x, i), bb);
3109 else if (expr_killed_p (XEXP (x, i), bb))
3110 return 1;
3112 else if (fmt[i] == 'E')
3113 for (j = 0; j < XVECLEN (x, i); j++)
3114 if (expr_killed_p (XVECEXP (x, i, j), bb))
3115 return 1;
3118 return 0;
3121 /* Compute the set of available expressions killed in each basic block. */
3123 static void
3124 compute_ae_kill (sbitmap *ae_gen, sbitmap *ae_kill,
3125 struct hash_table *expr_hash_table)
3127 basic_block bb;
3128 unsigned int i;
3129 struct expr *expr;
3131 FOR_EACH_BB (bb)
3132 for (i = 0; i < expr_hash_table->size; i++)
3133 for (expr = expr_hash_table->table[i]; expr; expr = expr->next_same_hash)
3135 /* Skip EXPR if generated in this block. */
3136 if (TEST_BIT (ae_gen[bb->index], expr->bitmap_index))
3137 continue;
3139 if (expr_killed_p (expr->expr, bb))
3140 SET_BIT (ae_kill[bb->index], expr->bitmap_index);
3144 /* Actually perform the Classic GCSE optimizations. */
3146 /* Return nonzero if occurrence OCCR of expression EXPR reaches block BB.
3148 CHECK_SELF_LOOP is nonzero if we should consider a block reaching itself
3149 as a positive reach. We want to do this when there are two computations
3150 of the expression in the block.
3152 VISITED is a pointer to a working buffer for tracking which BB's have
3153 been visited. It is NULL for the top-level call.
3155 We treat reaching expressions that go through blocks containing the same
3156 reaching expression as "not reaching". E.g. if EXPR is generated in blocks
3157 2 and 3, INSN is in block 4, and 2->3->4, we treat the expression in block
3158 2 as not reaching. The intent is to improve the probability of finding
3159 only one reaching expression and to reduce register lifetimes by picking
3160 the closest such expression. */
3162 static int
3163 expr_reaches_here_p_work (struct occr *occr, struct expr *expr,
3164 basic_block bb, int check_self_loop, char *visited)
3166 edge pred;
3168 for (pred = bb->pred; pred != NULL; pred = pred->pred_next)
3170 basic_block pred_bb = pred->src;
3172 if (visited[pred_bb->index])
3173 /* This predecessor has already been visited. Nothing to do. */
3175 else if (pred_bb == bb)
3177 /* BB loops on itself. */
3178 if (check_self_loop
3179 && TEST_BIT (ae_gen[pred_bb->index], expr->bitmap_index)
3180 && BLOCK_NUM (occr->insn) == pred_bb->index)
3181 return 1;
3183 visited[pred_bb->index] = 1;
3186 /* Ignore this predecessor if it kills the expression. */
3187 else if (TEST_BIT (ae_kill[pred_bb->index], expr->bitmap_index))
3188 visited[pred_bb->index] = 1;
3190 /* Does this predecessor generate this expression? */
3191 else if (TEST_BIT (ae_gen[pred_bb->index], expr->bitmap_index))
3193 /* Is this the occurrence we're looking for?
3194 Note that there's only one generating occurrence per block
3195 so we just need to check the block number. */
3196 if (BLOCK_NUM (occr->insn) == pred_bb->index)
3197 return 1;
3199 visited[pred_bb->index] = 1;
3202 /* Neither gen nor kill. */
3203 else
3205 visited[pred_bb->index] = 1;
3206 if (expr_reaches_here_p_work (occr, expr, pred_bb, check_self_loop,
3207 visited))
3209 return 1;
3213 /* All paths have been checked. */
3214 return 0;
3217 /* This wrapper for expr_reaches_here_p_work() is to ensure that any
3218 memory allocated for that function is returned. */
3220 static int
3221 expr_reaches_here_p (struct occr *occr, struct expr *expr, basic_block bb,
3222 int check_self_loop)
3224 int rval;
3225 char *visited = xcalloc (last_basic_block, 1);
3227 rval = expr_reaches_here_p_work (occr, expr, bb, check_self_loop, visited);
3229 free (visited);
3230 return rval;
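/* Illustration (editor's sketch, not part of GCC; names are hypothetical):
   the wrapper-plus-worker split above keeps the recursive worker free of
   allocation while guaranteeing the visited buffer is freed exactly once.
   A generic backward-reachability version of the same shape:  */
#if 0
#include <stdlib.h>

struct toy_bb { int index, n_preds; struct toy_bb **preds; };

static int
toy_reaches_work (struct toy_bb *bb, struct toy_bb *target, char *visited)
{
  int i;

  if (bb == target)
    return 1;
  if (visited[bb->index])
    return 0;
  visited[bb->index] = 1;

  for (i = 0; i < bb->n_preds; i++)
    if (toy_reaches_work (bb->preds[i], target, visited))
      return 1;

  return 0;
}

static int
toy_reaches_p (struct toy_bb *from, struct toy_bb *to, int n_blocks)
{
  char *visited = calloc (n_blocks, 1);   /* sketch: unchecked */
  int rval = toy_reaches_work (from, to, visited);

  free (visited);
  return rval;
}
#endif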
3233 /* Return the instruction that computes EXPR that reaches INSN's basic block.
3234 If there is more than one such instruction, return NULL.
3236 Called only by handle_avail_expr. */
3238 static rtx
3239 computing_insn (struct expr *expr, rtx insn)
3241 basic_block bb = BLOCK_FOR_INSN (insn);
3243 if (expr->avail_occr->next == NULL)
3245 if (BLOCK_FOR_INSN (expr->avail_occr->insn) == bb)
3246 /* The available expression is actually itself
3247 (i.e. a loop in the flow graph) so do nothing. */
3248 return NULL;
3250 /* (FIXME) Case where we found a pattern that was created by
3251 a substitution that took place. */
3252 return expr->avail_occr->insn;
3254 else
3256 /* Pattern is computed more than once.
3257 Search backwards from this insn to see how many of these
3258 computations actually reach this insn. */
3259 struct occr *occr;
3260 rtx insn_computes_expr = NULL;
3261 int can_reach = 0;
3263 for (occr = expr->avail_occr; occr != NULL; occr = occr->next)
3265 if (BLOCK_FOR_INSN (occr->insn) == bb)
3267 /* The expression is generated in this block.
3268 The only time we care about this is when the expression
3269 is generated later in the block [and thus there's a loop].
3270 We let the normal cse pass handle the other cases. */
3271 if (INSN_CUID (insn) < INSN_CUID (occr->insn)
3272 && expr_reaches_here_p (occr, expr, bb, 1))
3274 can_reach++;
3275 if (can_reach > 1)
3276 return NULL;
3278 insn_computes_expr = occr->insn;
3281 else if (expr_reaches_here_p (occr, expr, bb, 0))
3283 can_reach++;
3284 if (can_reach > 1)
3285 return NULL;
3287 insn_computes_expr = occr->insn;
3291 if (insn_computes_expr == NULL)
3292 abort ();
3294 return insn_computes_expr;
3298 /* Return nonzero if the definition in DEF_INSN can reach INSN.
3299 Only called by can_disregard_other_sets. */
3301 static int
3302 def_reaches_here_p (rtx insn, rtx def_insn)
3304 rtx reg;
3306 if (TEST_BIT (reaching_defs[BLOCK_NUM (insn)], INSN_CUID (def_insn)))
3307 return 1;
3309 if (BLOCK_NUM (insn) == BLOCK_NUM (def_insn))
3311 if (INSN_CUID (def_insn) < INSN_CUID (insn))
3313 if (GET_CODE (PATTERN (def_insn)) == PARALLEL)
3314 return 1;
3315 else if (GET_CODE (PATTERN (def_insn)) == CLOBBER)
3316 reg = XEXP (PATTERN (def_insn), 0);
3317 else if (GET_CODE (PATTERN (def_insn)) == SET)
3318 reg = SET_DEST (PATTERN (def_insn));
3319 else
3320 abort ();
3322 return ! reg_set_between_p (reg, NEXT_INSN (def_insn), insn);
3324 else
3325 return 0;
3328 return 0;
3331 /* Return nonzero if *ADDR_THIS_REG can only have one value at INSN. The
3332 value returned is the number of definitions that reach INSN. Returning a
3333 value of zero means that [maybe] more than one definition reaches INSN and
3334 the caller can't perform whatever optimization it is trying to do; i.e. it is
3335 always safe to return zero. */
3337 static int
3338 can_disregard_other_sets (struct reg_set **addr_this_reg, rtx insn, int for_combine)
3340 int number_of_reaching_defs = 0;
3341 struct reg_set *this_reg;
3343 for (this_reg = *addr_this_reg; this_reg != 0; this_reg = this_reg->next)
3344 if (def_reaches_here_p (insn, this_reg->insn))
3346 number_of_reaching_defs++;
3347 /* Ignore parallels for now. */
3348 if (GET_CODE (PATTERN (this_reg->insn)) == PARALLEL)
3349 return 0;
3351 if (!for_combine
3352 && (GET_CODE (PATTERN (this_reg->insn)) == CLOBBER
3353 || ! rtx_equal_p (SET_SRC (PATTERN (this_reg->insn)),
3354 SET_SRC (PATTERN (insn)))))
3355 /* A setting of the reg to a different value reaches INSN. */
3356 return 0;
3358 if (number_of_reaching_defs > 1)
3360 /* If, in this setting, the value the register is being set to is
3361 equal to the previous value it was set to, and this setting
3362 reaches the insn we are trying to do the substitution on,
3363 then we are OK. */
3364 if (GET_CODE (PATTERN (this_reg->insn)) == CLOBBER)
3365 return 0;
3366 else if (! rtx_equal_p (SET_SRC (PATTERN (this_reg->insn)),
3367 SET_SRC (PATTERN (insn))))
3368 return 0;
3371 *addr_this_reg = this_reg;
3374 return number_of_reaching_defs;
3377 /* Expression computed by insn is available and the substitution is legal,
3378 so try to perform the substitution.
3380 The result is nonzero if any changes were made. */
3382 static int
3383 handle_avail_expr (rtx insn, struct expr *expr)
3385 rtx pat, insn_computes_expr, expr_set;
3386 rtx to;
3387 struct reg_set *this_reg;
3388 int found_setting, use_src;
3389 int changed = 0;
3391 /* We only handle the case where one computation of the expression
3392 reaches this instruction. */
3393 insn_computes_expr = computing_insn (expr, insn);
3394 if (insn_computes_expr == NULL)
3395 return 0;
3396 expr_set = single_set (insn_computes_expr);
3397 /* The set might be in a parallel with multiple sets; we could
3398 probably handle that, but there's currently no easy way to find
3399 the relevant sub-expression. */
3400 if (!expr_set)
3401 return 0;
3403 found_setting = 0;
3404 use_src = 0;
3406 /* At this point we know only one computation of EXPR outside of this
3407 block reaches this insn. Now try to find a register that the
3408 expression is computed into. */
3409 if (GET_CODE (SET_SRC (expr_set)) == REG)
3411 /* This is the case when the available expression that reaches
3412 here has already been handled as an available expression. */
3413 unsigned int regnum_for_replacing
3414 = REGNO (SET_SRC (expr_set));
3416 /* If the register was created by GCSE we can't use `reg_set_table',
3417 however we know it's set only once. */
3418 if (regnum_for_replacing >= max_gcse_regno
3419 /* If the register the expression is computed into is set only once,
3420 or only one set reaches this insn, we can use it. */
3421 || (((this_reg = reg_set_table[regnum_for_replacing]),
3422 this_reg->next == NULL)
3423 || can_disregard_other_sets (&this_reg, insn, 0)))
3425 use_src = 1;
3426 found_setting = 1;
3430 if (!found_setting)
3432 unsigned int regnum_for_replacing
3433 = REGNO (SET_DEST (expr_set));
3435 /* This shouldn't happen. */
3436 if (regnum_for_replacing >= max_gcse_regno)
3437 abort ();
3439 this_reg = reg_set_table[regnum_for_replacing];
3441 /* If the register the expression is computed into is set only once,
3442 or only one set reaches this insn, use it. */
3443 if (this_reg->next == NULL
3444 || can_disregard_other_sets (&this_reg, insn, 0))
3445 found_setting = 1;
3448 if (found_setting)
3450 pat = PATTERN (insn);
3451 if (use_src)
3452 to = SET_SRC (expr_set);
3453 else
3454 to = SET_DEST (expr_set);
3455 changed = validate_change (insn, &SET_SRC (pat), to, 0);
3457 /* We should be able to ignore the return code from validate_change but
3458 to play it safe we check. */
3459 if (changed)
3461 gcse_subst_count++;
3462 if (gcse_file != NULL)
3464 fprintf (gcse_file, "GCSE: Replacing the source in insn %d with",
3465 INSN_UID (insn));
3466 fprintf (gcse_file, " reg %d %s insn %d\n",
3467 REGNO (to), use_src ? "from" : "set in",
3468 INSN_UID (insn_computes_expr));
3473 /* The register that the expr is computed into is set more than once. */
3474 else if (1 /*expensive_op(this_pattern->op) && do_expensive_gcse)*/)
3476 /* Insert an insn after the computing insn that copies the reg set there
3477 into a new pseudo register; call this new register REGN.
3478 From then until the end of the basic block, or until REGB is set,
3479 replace all uses of REGB with REGN. */
3480 rtx new_insn;
3482 to = gen_reg_rtx (GET_MODE (SET_DEST (expr_set)));
3484 /* Generate the new insn. */
3485 /* ??? If the change fails, we return 0, even though we created
3486 an insn. I think this is ok. */
3487 new_insn
3488 = emit_insn_after (gen_rtx_SET (VOIDmode, to,
3489 SET_DEST (expr_set)),
3490 insn_computes_expr);
3492 /* Keep register set table up to date. */
3493 record_one_set (REGNO (to), new_insn);
3495 gcse_create_count++;
3496 if (gcse_file != NULL)
3498 fprintf (gcse_file, "GCSE: Creating insn %d to copy value of reg %d",
3499 INSN_UID (NEXT_INSN (insn_computes_expr)),
3500 REGNO (SET_SRC (PATTERN (NEXT_INSN (insn_computes_expr)))));
3501 fprintf (gcse_file, ", computed in insn %d,\n",
3502 INSN_UID (insn_computes_expr));
3503 fprintf (gcse_file, " into newly allocated reg %d\n",
3504 REGNO (to));
3507 pat = PATTERN (insn);
3509 /* Do register replacement for INSN. */
3510 changed = validate_change (insn, &SET_SRC (pat),
3511 SET_DEST (PATTERN
3512 (NEXT_INSN (insn_computes_expr))),
3515 /* We should be able to ignore the return code from validate_change but
3516 to play it safe we check. */
3517 if (changed)
3519 gcse_subst_count++;
3520 if (gcse_file != NULL)
3522 fprintf (gcse_file,
3523 "GCSE: Replacing the source in insn %d with reg %d ",
3524 INSN_UID (insn),
3525 REGNO (SET_DEST (PATTERN (NEXT_INSN
3526 (insn_computes_expr)))));
3527 fprintf (gcse_file, "set in insn %d\n",
3528 INSN_UID (insn_computes_expr));
3533 return changed;
3536 /* Perform classic GCSE. This is called by one_classic_gcse_pass after all
3537 the dataflow analysis has been done.
3539 The result is nonzero if a change was made. */
3541 static int
3542 classic_gcse (void)
3544 int changed;
3545 rtx insn;
3546 basic_block bb;
3548 /* Note we start at block 1. */
3550 if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
3551 return 0;
3553 changed = 0;
3554 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb->next_bb, EXIT_BLOCK_PTR, next_bb)
3556 /* Reset tables used to keep track of what's still valid [since the
3557 start of the block]. */
3558 reset_opr_set_tables ();
3560 for (insn = BB_HEAD (bb);
3561 insn != NULL && insn != NEXT_INSN (BB_END (bb));
3562 insn = NEXT_INSN (insn))
3564 /* Is insn of form (set (pseudo-reg) ...)? */
3565 if (GET_CODE (insn) == INSN
3566 && GET_CODE (PATTERN (insn)) == SET
3567 && GET_CODE (SET_DEST (PATTERN (insn))) == REG
3568 && REGNO (SET_DEST (PATTERN (insn))) >= FIRST_PSEUDO_REGISTER)
3570 rtx pat = PATTERN (insn);
3571 rtx src = SET_SRC (pat);
3572 struct expr *expr;
3574 if (want_to_gcse_p (src)
3575 /* Is the expression recorded? */
3576 && ((expr = lookup_expr (src, &expr_hash_table)) != NULL)
3577 /* Is the expression available [at the start of the
3578 block]? */
3579 && TEST_BIT (ae_in[bb->index], expr->bitmap_index)
3580 /* Are the operands unchanged since the start of the
3581 block? */
3582 && oprs_not_set_p (src, insn))
3583 changed |= handle_avail_expr (insn, expr);
3586 /* Keep track of everything modified by this insn. */
3587 /* ??? Need to be careful w.r.t. mods done to INSN. */
3588 if (INSN_P (insn))
3589 mark_oprs_set (insn);
3593 return changed;
3596 /* Top level routine to perform one classic GCSE pass.
3598 Return nonzero if a change was made. */
3600 static int
3601 one_classic_gcse_pass (int pass)
3603 int changed = 0;
3605 gcse_subst_count = 0;
3606 gcse_create_count = 0;
3608 alloc_hash_table (max_cuid, &expr_hash_table, 0);
3609 alloc_rd_mem (last_basic_block, max_cuid);
3610 compute_hash_table (&expr_hash_table);
3611 if (gcse_file)
3612 dump_hash_table (gcse_file, "Expression", &expr_hash_table);
3614 if (expr_hash_table.n_elems > 0)
3616 compute_kill_rd ();
3617 compute_rd ();
3618 alloc_avail_expr_mem (last_basic_block, expr_hash_table.n_elems);
3619 compute_ae_gen (&expr_hash_table);
3620 compute_ae_kill (ae_gen, ae_kill, &expr_hash_table);
3621 compute_available (ae_gen, ae_kill, ae_out, ae_in);
3622 changed = classic_gcse ();
3623 free_avail_expr_mem ();
3626 free_rd_mem ();
3627 free_hash_table (&expr_hash_table);
3629 if (gcse_file)
3631 fprintf (gcse_file, "\n");
3632 fprintf (gcse_file, "GCSE of %s, pass %d: %d bytes needed, %d substs,",
3633 current_function_name (), pass, bytes_used, gcse_subst_count);
3634 fprintf (gcse_file, "%d insns created\n", gcse_create_count);
3637 return changed;
3640 /* Compute copy/constant propagation working variables. */
3642 /* Local properties of assignments. */
3643 static sbitmap *cprop_pavloc;
3644 static sbitmap *cprop_absaltered;
3646 /* Global properties of assignments (computed from the local properties). */
3647 static sbitmap *cprop_avin;
3648 static sbitmap *cprop_avout;
3650 /* Allocate vars used for copy/const propagation. N_BLOCKS is the number of
3651 basic blocks. N_SETS is the number of sets. */
3653 static void
3654 alloc_cprop_mem (int n_blocks, int n_sets)
3656 cprop_pavloc = sbitmap_vector_alloc (n_blocks, n_sets);
3657 cprop_absaltered = sbitmap_vector_alloc (n_blocks, n_sets);
3659 cprop_avin = sbitmap_vector_alloc (n_blocks, n_sets);
3660 cprop_avout = sbitmap_vector_alloc (n_blocks, n_sets);
3663 /* Free vars used by copy/const propagation. */
3665 static void
3666 free_cprop_mem (void)
3668 sbitmap_vector_free (cprop_pavloc);
3669 sbitmap_vector_free (cprop_absaltered);
3670 sbitmap_vector_free (cprop_avin);
3671 sbitmap_vector_free (cprop_avout);
3674 /* For each block, compute whether X is transparent. X is either an
3675 expression or an assignment [though we don't care which, for this context
3676 an assignment is treated as an expression]. For each block where an
3677 element of X is modified, set (SET_P == 1) or reset (SET_P == 0) the INDX
3678 bit in BMAP. */
3680 static void
3681 compute_transp (rtx x, int indx, sbitmap *bmap, int set_p)
3683 int i, j;
3684 basic_block bb;
3685 enum rtx_code code;
3686 reg_set *r;
3687 const char *fmt;
3689 /* repeat is used to turn tail-recursion into iteration since GCC
3690 can't do it when there's no return value. */
3691 repeat:
3693 if (x == 0)
3694 return;
3696 code = GET_CODE (x);
3697 switch (code)
3699 case REG:
3700 if (set_p)
3702 if (REGNO (x) < FIRST_PSEUDO_REGISTER)
3704 FOR_EACH_BB (bb)
3705 if (TEST_BIT (reg_set_in_block[bb->index], REGNO (x)))
3706 SET_BIT (bmap[bb->index], indx);
3708 else
3710 for (r = reg_set_table[REGNO (x)]; r != NULL; r = r->next)
3711 SET_BIT (bmap[BLOCK_NUM (r->insn)], indx);
3714 else
3716 if (REGNO (x) < FIRST_PSEUDO_REGISTER)
3718 FOR_EACH_BB (bb)
3719 if (TEST_BIT (reg_set_in_block[bb->index], REGNO (x)))
3720 RESET_BIT (bmap[bb->index], indx);
3722 else
3724 for (r = reg_set_table[REGNO (x)]; r != NULL; r = r->next)
3725 RESET_BIT (bmap[BLOCK_NUM (r->insn)], indx);
3729 return;
3731 case MEM:
3732 FOR_EACH_BB (bb)
3734 rtx list_entry = canon_modify_mem_list[bb->index];
3736 while (list_entry)
3738 rtx dest, dest_addr;
3740 if (GET_CODE (XEXP (list_entry, 0)) == CALL_INSN)
3742 if (set_p)
3743 SET_BIT (bmap[bb->index], indx);
3744 else
3745 RESET_BIT (bmap[bb->index], indx);
3746 break;
3748 /* LIST_ENTRY must be an INSN of some kind that sets memory.
3749 Examine each hunk of memory that is modified. */
3751 dest = XEXP (list_entry, 0);
3752 list_entry = XEXP (list_entry, 1);
3753 dest_addr = XEXP (list_entry, 0);
3755 if (canon_true_dependence (dest, GET_MODE (dest), dest_addr,
3756 x, rtx_addr_varies_p))
3758 if (set_p)
3759 SET_BIT (bmap[bb->index], indx);
3760 else
3761 RESET_BIT (bmap[bb->index], indx);
3762 break;
3764 list_entry = XEXP (list_entry, 1);
3768 x = XEXP (x, 0);
3769 goto repeat;
3771 case PC:
3772 case CC0: /*FIXME*/
3773 case CONST:
3774 case CONST_INT:
3775 case CONST_DOUBLE:
3776 case CONST_VECTOR:
3777 case SYMBOL_REF:
3778 case LABEL_REF:
3779 case ADDR_VEC:
3780 case ADDR_DIFF_VEC:
3781 return;
3783 default:
3784 break;
3787 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
3789 if (fmt[i] == 'e')
3791 /* If we are about to do the last recursive call
3792 needed at this level, change it into iteration.
3793 This function is called enough to be worth it. */
3794 if (i == 0)
3796 x = XEXP (x, i);
3797 goto repeat;
3800 compute_transp (XEXP (x, i), indx, bmap, set_p);
3802 else if (fmt[i] == 'E')
3803 for (j = 0; j < XVECLEN (x, i); j++)
3804 compute_transp (XVECEXP (x, i, j), indx, bmap, set_p);
3808 /* Top level routine to do the dataflow analysis needed by copy/const
3809 propagation. */
3811 static void
3812 compute_cprop_data (void)
3814 compute_local_properties (cprop_absaltered, cprop_pavloc, NULL, &set_hash_table);
3815 compute_available (cprop_pavloc, cprop_absaltered,
3816 cprop_avout, cprop_avin);
3819 /* Copy/constant propagation. */
3821 /* Maximum number of register uses in an insn that we handle. */
3822 #define MAX_USES 8
3824 /* Table of uses found in an insn.
3825 Allocated statically to avoid alloc/free complexity and overhead. */
3826 static struct reg_use reg_use_table[MAX_USES];
3828 /* Index into `reg_use_table' while building it. */
3829 static int reg_use_count;
3831 /* Set up a list of register numbers used in INSN. The found uses are stored
3832 in `reg_use_table'. `reg_use_count' is initialized to zero before entry,
3833 and contains the number of uses in the table upon exit.
3835 ??? If a register appears multiple times we will record it multiple times.
3836 This doesn't hurt anything but it will slow things down. */
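/* For example (hypothetical register numbers), when note_uses walks
   (set (reg 60) (plus:SI (reg 61) (reg 62)))
   this function is handed the source operands and records reg 61 and
   reg 62, leaving reg_use_count == 2.  */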
3838 static void
3839 find_used_regs (rtx *xptr, void *data ATTRIBUTE_UNUSED)
3841 int i, j;
3842 enum rtx_code code;
3843 const char *fmt;
3844 rtx x = *xptr;
3846 /* repeat is used to turn tail-recursion into iteration since GCC
3847 can't do it when there's no return value. */
3848 repeat:
3849 if (x == 0)
3850 return;
3852 code = GET_CODE (x);
3853 if (REG_P (x))
3855 if (reg_use_count == MAX_USES)
3856 return;
3858 reg_use_table[reg_use_count].reg_rtx = x;
3859 reg_use_count++;
3862 /* Recursively scan the operands of this expression. */
3864 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
3866 if (fmt[i] == 'e')
3868 /* If we are about to do the last recursive call
3869 needed at this level, change it into iteration.
3870 This function is called enough to be worth it. */
3871 if (i == 0)
3873 x = XEXP (x, 0);
3874 goto repeat;
3877 find_used_regs (&XEXP (x, i), data);
3879 else if (fmt[i] == 'E')
3880 for (j = 0; j < XVECLEN (x, i); j++)
3881 find_used_regs (&XVECEXP (x, i, j), data);
3885 /* Try to replace all non-SET_DEST occurrences of FROM in INSN with TO.
3886 Returns nonzero if successful. */
3888 static int
3889 try_replace_reg (rtx from, rtx to, rtx insn)
3891 rtx note = find_reg_equal_equiv_note (insn);
3892 rtx src = 0;
3893 int success = 0;
3894 rtx set = single_set (insn);
3896 validate_replace_src_group (from, to, insn);
3897 if (num_changes_pending () && apply_change_group ())
3898 success = 1;
3900 /* Try to simplify SET_SRC if we have substituted a constant. */
3901 if (success && set && CONSTANT_P (to))
3903 src = simplify_rtx (SET_SRC (set));
3905 if (src)
3906 validate_change (insn, &SET_SRC (set), src, 0);
3909 /* If there is already a NOTE, update the expression in it with our
3910 replacement. */
3911 if (note != 0)
3912 XEXP (note, 0) = simplify_replace_rtx (XEXP (note, 0), from, to);
3914 if (!success && set && reg_mentioned_p (from, SET_SRC (set)))
3916 /* If above failed and this is a single set, try to simplify the source of
3917 the set given our substitution. We could perhaps try this for multiple
3918 SETs, but it probably won't buy us anything. */
3919 src = simplify_replace_rtx (SET_SRC (set), from, to);
3921 if (!rtx_equal_p (src, SET_SRC (set))
3922 && validate_change (insn, &SET_SRC (set), src, 0))
3923 success = 1;
3925 /* If we've failed to do replacement, have a single SET, don't already
3926 have a note, and have no special SET, add a REG_EQUAL note to not
3927 lose information. */
3928 if (!success && note == 0 && set != 0
3929 && GET_CODE (XEXP (set, 0)) != ZERO_EXTRACT
3930 && GET_CODE (XEXP (set, 0)) != SIGN_EXTRACT)
3931 note = set_unique_reg_note (insn, REG_EQUAL, copy_rtx (src));
3934 /* A REG_EQUAL note may get simplified into a plain register.
3935 We don't allow that, so remove the note. This ought not to
3936 happen, because previous code ought to have synthesized a
3937 reg-reg move, but be on the safe side. */
3938 if (note && REG_P (XEXP (note, 0)))
3939 remove_note (insn, note);
3941 return success;
3944 /* Find a set of register REGNO that is available on entry to INSN's
3945 block. Returns NULL if no such set is found. */
3947 static struct expr *
3948 find_avail_set (int regno, rtx insn)
3950 /* SET1 contains the last set found that can be returned to the caller for
3951 use in a substitution. */
3952 struct expr *set1 = 0;
3954 /* Loops are not possible here. To get a loop we would need two sets
3955 available at the start of the block containing INSN, i.e. we would
3956 need two sets like this available at the start of the block:
3958 (set (reg X) (reg Y))
3959 (set (reg Y) (reg X))
3961 This cannot happen since the set of (reg Y) would have killed the
3962 set of (reg X), making it unavailable at the start of this block. */
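/* Sketch of following the copy chain (hypothetical registers): given
   (set (reg 80) (const_int 42))
   (set (reg 81) (reg 80))
   a lookup for reg 81 first finds the copy from reg 80; because its
   source is a register we iterate with REGNO = 80, and if the constant
   set is also available (and unmodified in this block) it becomes SET1,
   turning a copy propagation opportunity into a constant one.  */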
3963 while (1)
3965 rtx src;
3966 struct expr *set = lookup_set (regno, &set_hash_table);
3968 /* Find a set that is available at the start of the block
3969 which contains INSN. */
3970 while (set)
3972 if (TEST_BIT (cprop_avin[BLOCK_NUM (insn)], set->bitmap_index))
3973 break;
3974 set = next_set (regno, set);
3977 /* If no available set was found we've reached the end of the
3978 (possibly empty) copy chain. */
3979 if (set == 0)
3980 break;
3982 if (GET_CODE (set->expr) != SET)
3983 abort ();
3985 src = SET_SRC (set->expr);
3987 /* We know the set is available.
3988 Now check that SRC is ANTLOC (i.e. none of the source operands
3989 have changed since the start of the block).
3991 If the source operands have changed, we may still use it for the next
3992 iteration of this loop, but we may not use it for substitutions. */
3994 if (gcse_constant_p (src) || oprs_not_set_p (src, insn))
3995 set1 = set;
3997 /* If the source of the set is anything except a register, then
3998 we have reached the end of the copy chain. */
3999 if (GET_CODE (src) != REG)
4000 break;
4002 /* Follow the copy chain, i.e. start another iteration of the loop
4003 and see if we have an available copy into SRC. */
4004 regno = REGNO (src);
4007 /* SET1 holds the last set that was available and anticipatable at
4008 INSN. */
4009 return set1;
4012 /* Subroutine of cprop_insn that tries to propagate constants into
4013 JUMP_INSNS. JUMP must be a conditional jump. If SETCC is non-NULL
4014 it is the instruction that immediately precedes JUMP, and must be a
4015 single SET of a register. FROM is what we will try to replace,
4016 SRC is the constant we will try to substitute for it. Returns nonzero
4017 if a change was made. */
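/* For instance (hypothetical RTL), with FROM = (reg 90) and
   SRC = (const_int 0), a jump source of
   (if_then_else (eq (reg 90) (const_int 0)) (label_ref 23) (pc))
   simplifies to (label_ref 23) and the jump becomes unconditional;
   had the comparison folded to false it would simplify to pc_rtx and
   the jump would be deleted outright.  */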
4019 static int
4020 cprop_jump (basic_block bb, rtx setcc, rtx jump, rtx from, rtx src)
4022 rtx new, set_src, note_src;
4023 rtx set = pc_set (jump);
4024 rtx note = find_reg_equal_equiv_note (jump);
4026 if (note)
4028 note_src = XEXP (note, 0);
4029 if (GET_CODE (note_src) == EXPR_LIST)
4030 note_src = NULL_RTX;
4032 else note_src = NULL_RTX;
4034 /* Prefer REG_EQUAL notes except those containing EXPR_LISTs. */
4035 set_src = note_src ? note_src : SET_SRC (set);
4037 /* First substitute the SETCC condition into the JUMP instruction,
4038 then substitute the given values into this expanded JUMP. */
4039 if (setcc != NULL_RTX
4040 && !modified_between_p (from, setcc, jump)
4041 && !modified_between_p (src, setcc, jump))
4043 rtx setcc_src;
4044 rtx setcc_set = single_set (setcc);
4045 rtx setcc_note = find_reg_equal_equiv_note (setcc);
4046 setcc_src = (setcc_note && GET_CODE (XEXP (setcc_note, 0)) != EXPR_LIST)
4047 ? XEXP (setcc_note, 0) : SET_SRC (setcc_set);
4048 set_src = simplify_replace_rtx (set_src, SET_DEST (setcc_set),
4049 setcc_src);
4051 else
4052 setcc = NULL_RTX;
4054 new = simplify_replace_rtx (set_src, from, src);
4056 /* If no simplification can be made, then try the next register. */
4057 if (rtx_equal_p (new, SET_SRC (set)))
4058 return 0;
4060 /* If this is now a no-op delete it, otherwise this must be a valid insn. */
4061 if (new == pc_rtx)
4062 delete_insn (jump);
4063 else
4065 /* Ensure that the value computed inside the jump insn is equivalent
4066 to the one computed by setcc. */
4067 if (setcc && modified_in_p (new, setcc))
4068 return 0;
4069 if (! validate_change (jump, &SET_SRC (set), new, 0))
4071 /* When (some) constants are not valid in a comparison, and there
4072 are two registers to be replaced by constants before the entire
4073 comparison can be folded into a constant, we need to keep
4074 intermediate information in REG_EQUAL notes. For targets with
4075 separate compare insns, such notes are added by try_replace_reg.
4076 When we have a combined compare-and-branch instruction, however,
4077 we need to attach a note to the branch itself to make this
4078 optimization work. */
4080 if (!rtx_equal_p (new, note_src))
4081 set_unique_reg_note (jump, REG_EQUAL, copy_rtx (new));
4082 return 0;
4085 /* Remove REG_EQUAL note after simplification. */
4086 if (note_src)
4087 remove_note (jump, note);
4089 /* If this has turned into an unconditional jump,
4090 then put a barrier after it so that the unreachable
4091 code will be deleted. */
4092 if (GET_CODE (SET_SRC (set)) == LABEL_REF)
4093 emit_barrier_after (jump);
4096 #ifdef HAVE_cc0
4097 /* Delete the cc0 setter. */
4098 if (setcc != NULL && CC0_P (SET_DEST (single_set (setcc))))
4099 delete_insn (setcc);
4100 #endif
4102 run_jump_opt_after_gcse = 1;
4104 const_prop_count++;
4105 if (gcse_file != NULL)
4107 fprintf (gcse_file,
4108 "CONST-PROP: Replacing reg %d in jump_insn %d with constant ",
4109 REGNO (from), INSN_UID (jump));
4110 print_rtl (gcse_file, src);
4111 fprintf (gcse_file, "\n");
4113 purge_dead_edges (bb);
4115 return 1;
4118 static bool
4119 constprop_register (rtx insn, rtx from, rtx to, int alter_jumps)
4121 rtx sset;
4123 /* Check for reg or cc0 setting instructions followed by
4124 conditional branch instructions first. */
4125 if (alter_jumps
4126 && (sset = single_set (insn)) != NULL
4127 && NEXT_INSN (insn)
4128 && any_condjump_p (NEXT_INSN (insn)) && onlyjump_p (NEXT_INSN (insn)))
4130 rtx dest = SET_DEST (sset);
4131 if ((REG_P (dest) || CC0_P (dest))
4132 && cprop_jump (BLOCK_FOR_INSN (insn), insn, NEXT_INSN (insn), from, to))
4133 return 1;
4136 /* Handle normal insns next. */
4137 if (GET_CODE (insn) == INSN
4138 && try_replace_reg (from, to, insn))
4139 return 1;
4141 /* Try to propagate a CONST_INT into a conditional jump.
4142 We're pretty specific about what we will handle in this
4143 code; we can extend this as necessary over time.
4145 Right now the insn in question must look like
4146 (set (pc) (if_then_else ...)) */
4147 else if (alter_jumps && any_condjump_p (insn) && onlyjump_p (insn))
4148 return cprop_jump (BLOCK_FOR_INSN (insn), NULL, insn, from, to);
4149 return 0;
4152 /* Perform constant and copy propagation on INSN.
4153 The result is nonzero if a change was made. */
4155 static int
4156 cprop_insn (rtx insn, int alter_jumps)
4158 struct reg_use *reg_used;
4159 int changed = 0;
4160 rtx note;
4162 if (!INSN_P (insn))
4163 return 0;
4165 reg_use_count = 0;
4166 note_uses (&PATTERN (insn), find_used_regs, NULL);
4168 note = find_reg_equal_equiv_note (insn);
4170 /* We may win even when propagating constants into notes. */
4171 if (note)
4172 find_used_regs (&XEXP (note, 0), NULL);
4174 for (reg_used = &reg_use_table[0]; reg_use_count > 0;
4175 reg_used++, reg_use_count--)
4177 unsigned int regno = REGNO (reg_used->reg_rtx);
4178 rtx pat, src;
4179 struct expr *set;
4181 /* Ignore registers created by GCSE; our tables were sized from
4182 max_gcse_regno before this pass created them, so they have no entries. */
4183 if (regno >= max_gcse_regno)
4184 continue;
4186 /* If the register has already been set in this block, there's
4187 nothing we can do. */
4188 if (! oprs_not_set_p (reg_used->reg_rtx, insn))
4189 continue;
4191 /* Find an assignment that sets reg_used and is available
4192 at the start of the block. */
4193 set = find_avail_set (regno, insn);
4194 if (! set)
4195 continue;
4197 pat = set->expr;
4198 /* ??? We might be able to handle PARALLELs. Later. */
4199 if (GET_CODE (pat) != SET)
4200 abort ();
4202 src = SET_SRC (pat);
4204 /* Constant propagation. */
4205 if (gcse_constant_p (src))
4207 if (constprop_register (insn, reg_used->reg_rtx, src, alter_jumps))
4209 changed = 1;
4210 const_prop_count++;
4211 if (gcse_file != NULL)
4213 fprintf (gcse_file, "GLOBAL CONST-PROP: Replacing reg %d in ", regno);
4214 fprintf (gcse_file, "insn %d with constant ", INSN_UID (insn));
4215 print_rtl (gcse_file, src);
4216 fprintf (gcse_file, "\n");
4218 if (INSN_DELETED_P (insn))
4219 return 1;
4222 else if (GET_CODE (src) == REG
4223 && REGNO (src) >= FIRST_PSEUDO_REGISTER
4224 && REGNO (src) != regno)
4226 if (try_replace_reg (reg_used->reg_rtx, src, insn))
4228 changed = 1;
4229 copy_prop_count++;
4230 if (gcse_file != NULL)
4232 fprintf (gcse_file, "GLOBAL COPY-PROP: Replacing reg %d in insn %d",
4233 regno, INSN_UID (insn));
4234 fprintf (gcse_file, " with reg %d\n", REGNO (src));
4237 /* The original insn setting reg_used may or may not now be
4238 deletable. We leave the deletion to flow. */
4239 /* FIXME: If it turns out that the insn isn't deletable,
4240 then we may have unnecessarily extended register lifetimes
4241 and made things worse. */
4246 return changed;
4249 /* Like find_used_regs, but avoid recording uses that appear in
4250 input-output contexts such as zero_extract or pre_dec. This
4251 restricts the cases we consider to those for which local cprop
4252 can legitimately make replacements. */
4254 static void
4255 local_cprop_find_used_regs (rtx *xptr, void *data)
4257 rtx x = *xptr;
4259 if (x == 0)
4260 return;
4262 switch (GET_CODE (x))
4264 case ZERO_EXTRACT:
4265 case SIGN_EXTRACT:
4266 case STRICT_LOW_PART:
4267 return;
4269 case PRE_DEC:
4270 case PRE_INC:
4271 case POST_DEC:
4272 case POST_INC:
4273 case PRE_MODIFY:
4274 case POST_MODIFY:
4275 /* Can only legitimately appear this early in the context of
4276 stack pushes for function arguments, but handle all of the
4277 codes nonetheless. */
4278 return;
4280 case SUBREG:
4281 /* Setting a subreg of a register larger than word_mode leaves
4282 the non-written words unchanged. */
4283 if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) > BITS_PER_WORD)
4284 return;
4285 break;
4287 default:
4288 break;
4291 find_used_regs (xptr, data);
4294 /* LIBCALL_SP is a zero-terminated array of insns at the end of a libcall;
4295 their REG_EQUAL notes need updating. */
4297 static bool
4298 do_local_cprop (rtx x, rtx insn, int alter_jumps, rtx *libcall_sp)
4300 rtx newreg = NULL, newcnst = NULL;
4302 /* Rule out USE instructions and ASM statements as we don't want to
4303 change the hard registers mentioned. */
4304 if (GET_CODE (x) == REG
4305 && (REGNO (x) >= FIRST_PSEUDO_REGISTER
4306 || (GET_CODE (PATTERN (insn)) != USE
4307 && asm_noperands (PATTERN (insn)) < 0)))
4309 cselib_val *val = cselib_lookup (x, GET_MODE (x), 0);
4310 struct elt_loc_list *l;
4312 if (!val)
4313 return false;
4314 for (l = val->locs; l; l = l->next)
4316 rtx this_rtx = l->loc;
4317 rtx note;
4319 if (l->in_libcall)
4320 continue;
4322 if (gcse_constant_p (this_rtx))
4323 newcnst = this_rtx;
4324 if (REG_P (this_rtx) && REGNO (this_rtx) >= FIRST_PSEUDO_REGISTER
4325 /* Don't copy propagate if the register has a REG_EQUIV note
4326 attached. At this point only function parameters should have
4327 REG_EQUIV notes, and if the argument slot is used somewhere
4328 explicitly, it means the address of the parameter has been
4329 taken, so we should not extend the lifetime of the pseudo. */
4330 && (!(note = find_reg_note (l->setting_insn, REG_EQUIV, NULL_RTX))
4331 || GET_CODE (XEXP (note, 0)) != MEM))
4332 newreg = this_rtx;
4334 if (newcnst && constprop_register (insn, x, newcnst, alter_jumps))
4336 /* If we find a case where we can't fix the retval REG_EQUAL notes to
4337 match the new register, we either have to abandon this replacement
4338 or fix delete_trivially_dead_insns to preserve the setting insn,
4339 or make it delete the REG_EQUAL note, and fix up all passes that
4340 require the REG_EQUAL note there. */
4341 if (!adjust_libcall_notes (x, newcnst, insn, libcall_sp))
4342 abort ();
4343 if (gcse_file != NULL)
4345 fprintf (gcse_file, "LOCAL CONST-PROP: Replacing reg %d in ",
4346 REGNO (x));
4347 fprintf (gcse_file, "insn %d with constant ",
4348 INSN_UID (insn));
4349 print_rtl (gcse_file, newcnst);
4350 fprintf (gcse_file, "\n");
4352 const_prop_count++;
4353 return true;
4355 else if (newreg && newreg != x && try_replace_reg (x, newreg, insn))
4357 adjust_libcall_notes (x, newreg, insn, libcall_sp);
4358 if (gcse_file != NULL)
4360 fprintf (gcse_file,
4361 "LOCAL COPY-PROP: Replacing reg %d in insn %d",
4362 REGNO (x), INSN_UID (insn));
4363 fprintf (gcse_file, " with reg %d\n", REGNO (newreg));
4365 copy_prop_count++;
4366 return true;
4369 return false;
4372 /* LIBCALL_SP is a zero-terminated array of insns at the end of a libcall;
4373 their REG_EQUAL notes need updating to reflect that OLDREG has been
4374 replaced with NEWVAL in INSN. Return true if all substitutions could
4375 be made. */
4376 static bool
4377 adjust_libcall_notes (rtx oldreg, rtx newval, rtx insn, rtx *libcall_sp)
4379 rtx end;
4381 while ((end = *libcall_sp++))
4383 rtx note = find_reg_equal_equiv_note (end);
4385 if (! note)
4386 continue;
4388 if (REG_P (newval))
4390 if (reg_set_between_p (newval, PREV_INSN (insn), end))
4392 do
4394 note = find_reg_equal_equiv_note (end);
4395 if (! note)
4396 continue;
4397 if (reg_mentioned_p (newval, XEXP (note, 0)))
4398 return false;
4400 while ((end = *libcall_sp++));
4401 return true;
4404 XEXP (note, 0) = replace_rtx (XEXP (note, 0), oldreg, newval);
4405 insn = end;
4407 return true;
4410 #define MAX_NESTED_LIBCALLS 9
4412 static void
4413 local_cprop_pass (int alter_jumps)
4415 rtx insn;
4416 struct reg_use *reg_used;
4417 rtx libcall_stack[MAX_NESTED_LIBCALLS + 1], *libcall_sp;
4418 bool changed = false;
4420 cselib_init (false);
4421 libcall_sp = &libcall_stack[MAX_NESTED_LIBCALLS];
4422 *libcall_sp = 0;
4423 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4425 if (INSN_P (insn))
4427 rtx note = find_reg_note (insn, REG_LIBCALL, NULL_RTX);
4429 if (note)
4431 if (libcall_sp == libcall_stack)
4432 abort ();
4433 *--libcall_sp = XEXP (note, 0);
4435 note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
4436 if (note)
4437 libcall_sp++;
4438 note = find_reg_equal_equiv_note (insn);
4439 do
4441 reg_use_count = 0;
4442 note_uses (&PATTERN (insn), local_cprop_find_used_regs, NULL);
4443 if (note)
4444 local_cprop_find_used_regs (&XEXP (note, 0), NULL);
4446 for (reg_used = &reg_use_table[0]; reg_use_count > 0;
4447 reg_used++, reg_use_count--)
4448 if (do_local_cprop (reg_used->reg_rtx, insn, alter_jumps,
4449 libcall_sp))
4451 changed = true;
4452 break;
4454 if (INSN_DELETED_P (insn))
4455 break;
4457 while (reg_use_count);
4459 cselib_process_insn (insn);
4461 cselib_finish ();
4462 /* Global analysis may get into infinite loops for unreachable blocks. */
4463 if (changed && alter_jumps)
4465 delete_unreachable_blocks ();
4466 free_reg_set_mem ();
4467 alloc_reg_set_mem (max_reg_num ());
4468 compute_sets (get_insns ());
4472 /* Forward propagate copies. This includes copies and constants. Return
4473 nonzero if a change was made. */
4475 static int
4476 cprop (int alter_jumps)
4478 int changed;
4479 basic_block bb;
4480 rtx insn;
4482 /* Note we start at block 1. */
4483 if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
4485 if (gcse_file != NULL)
4486 fprintf (gcse_file, "\n");
4487 return 0;
4490 changed = 0;
4491 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb->next_bb, EXIT_BLOCK_PTR, next_bb)
4493 /* Reset tables used to keep track of what's still valid [since the
4494 start of the block]. */
4495 reset_opr_set_tables ();
4497 for (insn = BB_HEAD (bb);
4498 insn != NULL && insn != NEXT_INSN (BB_END (bb));
4499 insn = NEXT_INSN (insn))
4500 if (INSN_P (insn))
4502 changed |= cprop_insn (insn, alter_jumps);
4504 /* Keep track of everything modified by this insn. */
4505 /* ??? Need to be careful w.r.t. mods done to INSN. Don't
4506 call mark_oprs_set if we turned the insn into a NOTE. */
4507 if (GET_CODE (insn) != NOTE)
4508 mark_oprs_set (insn);
4512 if (gcse_file != NULL)
4513 fprintf (gcse_file, "\n");
4515 return changed;
4518 /* Similar to get_condition, only the resulting condition must be
4519 valid at JUMP, instead of at EARLIEST.
4521 This differs from noce_get_condition in ifcvt.c in that we prefer not to
4522 settle for the condition variable in the jump instruction being integral.
4523 We prefer to be able to record the value of a user variable, rather than
4524 the value of a temporary used in a condition. This could be solved by
4525 recording the value of *every* register scanned by canonicalize_condition,
4526 but this would require some code reorganization. */
4528 static rtx
4529 fis_get_condition (rtx jump)
4531 rtx cond, set, tmp, insn, earliest;
4532 bool reverse;
4534 if (! any_condjump_p (jump))
4535 return NULL_RTX;
4537 set = pc_set (jump);
4538 cond = XEXP (SET_SRC (set), 0);
4540 /* If this branches to JUMP_LABEL when the condition is false,
4541 reverse the condition. */
4542 reverse = (GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
4543 && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (jump));
4545 /* Use canonicalize_condition to do the dirty work of manipulating
4546 MODE_CC values and COMPARE rtx codes. */
4547 tmp = canonicalize_condition (jump, cond, reverse, &earliest, NULL_RTX,
4548 false);
4549 if (!tmp)
4550 return NULL_RTX;
4552 /* Verify that the given condition is valid at JUMP by virtue of not
4553 having been modified since EARLIEST. */
4554 for (insn = earliest; insn != jump; insn = NEXT_INSN (insn))
4555 if (INSN_P (insn) && modified_in_p (tmp, insn))
4556 break;
4557 if (insn == jump)
4558 return tmp;
4560 /* The condition was modified. See if we can get a partial result
4561 that doesn't follow all the reversals. Perhaps combine can fold
4562 them together later. */
4563 tmp = XEXP (tmp, 0);
4564 if (!REG_P (tmp) || GET_MODE_CLASS (GET_MODE (tmp)) != MODE_INT)
4565 return NULL_RTX;
4566 tmp = canonicalize_condition (jump, cond, reverse, &earliest, tmp,
4567 false);
4568 if (!tmp)
4569 return NULL_RTX;
4571 /* For sanity's sake, re-validate the new result. */
4572 for (insn = earliest; insn != jump; insn = NEXT_INSN (insn))
4573 if (INSN_P (insn) && modified_in_p (tmp, insn))
4574 return NULL_RTX;
4576 return tmp;
4579 /* Check the comparison COND to see if we can safely form an implicit set from
4580 it. COND is either an EQ or NE comparison. */
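/* For example, under IEEE semantics "if (x == 0.0)" does not let us
   assume x is +0.0 in the then-arm, because -0.0 compares equal to
   +0.0; this is why HONOR_SIGNED_ZEROS is checked below.  */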
4582 static bool
4583 implicit_set_cond_p (rtx cond)
4585 enum machine_mode mode = GET_MODE (XEXP (cond, 0));
4586 rtx cst = XEXP (cond, 1);
4588 /* We can't perform this optimization if either operand might be or might
4589 contain a signed zero. */
4590 if (HONOR_SIGNED_ZEROS (mode))
4592 /* It is sufficient to check if CST is or contains a zero. We must
4593 handle float, complex, and vector. If any subpart is a zero, then
4594 the optimization can't be performed. */
4595 /* ??? The complex and vector checks are not implemented yet. We just
4596 always return zero for them. */
4597 if (GET_CODE (cst) == CONST_DOUBLE)
4599 REAL_VALUE_TYPE d;
4600 REAL_VALUE_FROM_CONST_DOUBLE (d, cst);
4601 if (REAL_VALUES_EQUAL (d, dconst0))
4602 return 0;
4604 else
4605 return 0;
4608 return gcse_constant_p (cst);
4611 /* Find the implicit sets of a function. An "implicit set" is a constraint
4612 on the value of a variable, implied by a conditional jump. For example,
4613 following "if (x == 2)", the then branch may be optimized as though the
4614 conditional performed an "explicit set", in this example, "x = 2". This
4615 function records the set patterns that are implicit at the start of each
4616 basic block. */
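/* Continuing the example above (hypothetical pseudo X), the pattern
   recorded is simply (set (reg X) (const_int 2)), attached to the
   branch target for an EQ comparison or to the fallthru block for NE,
   and only when that block has a single predecessor.  */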
4618 static void
4619 find_implicit_sets (void)
4621 basic_block bb, dest;
4622 unsigned int count;
4623 rtx cond, new;
4625 count = 0;
4626 FOR_EACH_BB (bb)
4627 /* Check for more than one successor. */
4628 if (bb->succ && bb->succ->succ_next)
4630 cond = fis_get_condition (BB_END (bb));
4632 if (cond
4633 && (GET_CODE (cond) == EQ || GET_CODE (cond) == NE)
4634 && GET_CODE (XEXP (cond, 0)) == REG
4635 && REGNO (XEXP (cond, 0)) >= FIRST_PSEUDO_REGISTER
4636 && implicit_set_cond_p (cond))
4638 dest = GET_CODE (cond) == EQ ? BRANCH_EDGE (bb)->dest
4639 : FALLTHRU_EDGE (bb)->dest;
4641 if (dest && ! dest->pred->pred_next
4642 && dest != EXIT_BLOCK_PTR)
4644 new = gen_rtx_SET (VOIDmode, XEXP (cond, 0),
4645 XEXP (cond, 1));
4646 implicit_sets[dest->index] = new;
4647 if (gcse_file)
4649 fprintf (gcse_file, "Implicit set of reg %d in ",
4650 REGNO (XEXP (cond, 0)));
4651 fprintf (gcse_file, "basic block %d\n", dest->index);
4653 count++;
4658 if (gcse_file)
4659 fprintf (gcse_file, "Found %d implicit sets\n", count);
4662 /* Perform one copy/constant propagation pass.
4663 PASS is the pass count. If CPROP_JUMPS is true, perform constant
4664 propagation into conditional jumps. If BYPASS_JUMPS is true,
4665 perform conditional jump bypassing optimizations. */
4667 static int
4668 one_cprop_pass (int pass, int cprop_jumps, int bypass_jumps)
4670 int changed = 0;
4672 const_prop_count = 0;
4673 copy_prop_count = 0;
4675 local_cprop_pass (cprop_jumps);
4677 /* Determine implicit sets. */
4678 implicit_sets = xcalloc (last_basic_block, sizeof (rtx));
4679 find_implicit_sets ();
4681 alloc_hash_table (max_cuid, &set_hash_table, 1);
4682 compute_hash_table (&set_hash_table);
4684 /* Free implicit_sets before peak usage. */
4685 free (implicit_sets);
4686 implicit_sets = NULL;
4688 if (gcse_file)
4689 dump_hash_table (gcse_file, "SET", &set_hash_table);
4690 if (set_hash_table.n_elems > 0)
4692 alloc_cprop_mem (last_basic_block, set_hash_table.n_elems);
4693 compute_cprop_data ();
4694 changed = cprop (cprop_jumps);
4695 if (bypass_jumps)
4696 changed |= bypass_conditional_jumps ();
4697 free_cprop_mem ();
4700 free_hash_table (&set_hash_table);
4702 if (gcse_file)
4704 fprintf (gcse_file, "CPROP of %s, pass %d: %d bytes needed, ",
4705 current_function_name (), pass, bytes_used);
4706 fprintf (gcse_file, "%d const props, %d copy props\n\n",
4707 const_prop_count, copy_prop_count);
4709 /* Global analysis may get into infinite loops for unreachable blocks. */
4710 if (changed && cprop_jumps)
4711 delete_unreachable_blocks ();
4713 return changed;
4716 /* Bypass conditional jumps. */
4718 /* The value of last_basic_block at the beginning of the jump_bypass
4719 pass. The use of redirect_edge_and_branch_force may introduce new
4720 basic blocks, but the data flow analysis is only valid for basic
4721 block indices less than bypass_last_basic_block. */
4723 static int bypass_last_basic_block;
4725 /* Find a set of REGNO to a constant that is available at the end of basic
4726 block BB. Returns NULL if no such set is found. Based heavily upon
4727 find_avail_set. */
4729 static struct expr *
4730 find_bypass_set (int regno, int bb)
4732 struct expr *result = 0;
4734 for (;;)
4736 rtx src;
4737 struct expr *set = lookup_set (regno, &set_hash_table);
4739 while (set)
4741 if (TEST_BIT (cprop_avout[bb], set->bitmap_index))
4742 break;
4743 set = next_set (regno, set);
4746 if (set == 0)
4747 break;
4749 if (GET_CODE (set->expr) != SET)
4750 abort ();
4752 src = SET_SRC (set->expr);
4753 if (gcse_constant_p (src))
4754 result = set;
4756 if (GET_CODE (src) != REG)
4757 break;
4759 regno = REGNO (src);
4761 return result;
4765 /* Subroutine of bypass_block that checks whether a pseudo is killed by
4766 any of the instructions inserted on an edge. Jump bypassing places
4767 condition code setters on CFG edges using insert_insn_on_edge. This
4768 function is required to check that our data flow analysis is still
4769 valid prior to commit_edge_insertions. */
4771 static bool
4772 reg_killed_on_edge (rtx reg, edge e)
4774 rtx insn;
4776 for (insn = e->insns; insn; insn = NEXT_INSN (insn))
4777 if (INSN_P (insn) && reg_set_p (reg, insn))
4778 return true;
4780 return false;
4783 /* Subroutine of bypass_conditional_jumps that attempts to bypass the given
4784 basic block BB which has more than one predecessor. If not NULL, SETCC
4785 is the first instruction of BB, which is immediately followed by JUMP_INSN
4786 JUMP. Otherwise, SETCC is NULL, and JUMP is the first insn of BB.
4787 Returns nonzero if a change was made.
4789 During the jump bypassing pass, we may place copies of SETCC instructions
4790 on CFG edges. The following routine must be careful to pay attention to
4791 these inserted insns when performing its transformations. */
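/* Sketch of the transformation (hypothetical CFG and registers): if
   predecessor P reaches BB with (set (reg 95) (const_int 0)) available
   at its end, and BB ends in
   (set (pc) (if_then_else (ne (reg 95) (const_int 0)) (label_ref L1) (pc)))
   then substituting the constant folds the condition to false, the
   source simplifies to pc_rtx, and the edge P->BB can be redirected
   straight to BB's fallthru successor.  */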
4793 static int
4794 bypass_block (basic_block bb, rtx setcc, rtx jump)
4796 rtx insn, note;
4797 edge e, enext, edest;
4798 int i, change;
4799 int may_be_loop_header;
4801 insn = (setcc != NULL) ? setcc : jump;
4803 /* Determine set of register uses in INSN. */
4804 reg_use_count = 0;
4805 note_uses (&PATTERN (insn), find_used_regs, NULL);
4806 note = find_reg_equal_equiv_note (insn);
4807 if (note)
4808 find_used_regs (&XEXP (note, 0), NULL);
4810 may_be_loop_header = false;
4811 for (e = bb->pred; e; e = e->pred_next)
4812 if (e->flags & EDGE_DFS_BACK)
4814 may_be_loop_header = true;
4815 break;
4818 change = 0;
4819 for (e = bb->pred; e; e = enext)
4821 enext = e->pred_next;
4822 if (e->flags & EDGE_COMPLEX)
4823 continue;
4825 /* We can't redirect edges from new basic blocks. */
4826 if (e->src->index >= bypass_last_basic_block)
4827 continue;
4829 /* The irreducible loops created by redirecting edges entering the
4830 loop from outside would decrease the effectiveness of some of the
4831 following optimizations, so prevent this. */
4832 if (may_be_loop_header
4833 && !(e->flags & EDGE_DFS_BACK))
4834 continue;
4836 for (i = 0; i < reg_use_count; i++)
4838 struct reg_use *reg_used = &reg_use_table[i];
4839 unsigned int regno = REGNO (reg_used->reg_rtx);
4840 basic_block dest, old_dest;
4841 struct expr *set;
4842 rtx src, new;
4844 if (regno >= max_gcse_regno)
4845 continue;
4847 set = find_bypass_set (regno, e->src->index);
4849 if (! set)
4850 continue;
4852 /* Check the data flow is valid after edge insertions. */
4853 if (e->insns && reg_killed_on_edge (reg_used->reg_rtx, e))
4854 continue;
4856 src = SET_SRC (pc_set (jump));
4858 if (setcc != NULL)
4859 src = simplify_replace_rtx (src,
4860 SET_DEST (PATTERN (setcc)),
4861 SET_SRC (PATTERN (setcc)));
4863 new = simplify_replace_rtx (src, reg_used->reg_rtx,
4864 SET_SRC (set->expr));
4866 /* Jump bypassing may have already placed instructions on
4867 edges of the CFG. We can't bypass an outgoing edge that
4868 has instructions associated with it, as these insns won't
4869 get executed if the incoming edge is redirected. */
4871 if (new == pc_rtx)
4873 edest = FALLTHRU_EDGE (bb);
4874 dest = edest->insns ? NULL : edest->dest;
4876 else if (GET_CODE (new) == LABEL_REF)
4878 dest = BLOCK_FOR_INSN (XEXP (new, 0));
4879 /* Don't bypass edges containing instructions. */
4880 for (edest = bb->succ; edest; edest = edest->succ_next)
4881 if (edest->dest == dest && edest->insns)
4883 dest = NULL;
4884 break;
4887 else
4888 dest = NULL;
4890 /* Avoid unification of the edge with other edges from the original
4891 branch. We would end up emitting the instruction on "both"
4892 edges. */
4894 if (dest && setcc && !CC0_P (SET_DEST (PATTERN (setcc))))
4896 edge e2;
4897 for (e2 = e->src->succ; e2; e2 = e2->succ_next)
4898 if (e2->dest == dest)
4900 dest = NULL;
4901 break;
4905 old_dest = e->dest;
4906 if (dest != NULL
4907 && dest != old_dest
4908 && dest != EXIT_BLOCK_PTR)
4910 redirect_edge_and_branch_force (e, dest);
4912 /* Copy the register setter to the redirected edge.
4913 Don't copy CC0 setters, as CC0 is dead after the jump. */
4914 if (setcc)
4916 rtx pat = PATTERN (setcc);
4917 if (!CC0_P (SET_DEST (pat)))
4918 insert_insn_on_edge (copy_insn (pat), e);
4921 if (gcse_file != NULL)
4923 fprintf (gcse_file, "JUMP-BYPASS: Proved reg %d in jump_insn %d equals constant ",
4924 regno, INSN_UID (jump));
4925 print_rtl (gcse_file, SET_SRC (set->expr));
4926 fprintf (gcse_file, "\nBypass edge from %d->%d to %d\n",
4927 e->src->index, old_dest->index, dest->index);
4929 change = 1;
4930 break;
4934 return change;
4937 /* Find basic blocks with more than one predecessor that only contain a
4938 single conditional jump. If the result of the comparison is known at
4939 compile-time from any incoming edge, redirect that edge to the
4940 appropriate target. Returns nonzero if a change was made.
4942 This function is now mis-named, because we also handle indirect jumps. */
4944 static int
4945 bypass_conditional_jumps (void)
4947 basic_block bb;
4948 int changed;
4949 rtx setcc;
4950 rtx insn;
4951 rtx dest;
4953 /* Note we start at block 1. */
4954 if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
4955 return 0;
4957 bypass_last_basic_block = last_basic_block;
4958 mark_dfs_back_edges ();
4960 changed = 0;
4961 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb->next_bb,
4962 EXIT_BLOCK_PTR, next_bb)
4964 /* Check for more than one predecessor. */
4965 if (bb->pred && bb->pred->pred_next)
4967 setcc = NULL_RTX;
4968 for (insn = BB_HEAD (bb);
4969 insn != NULL && insn != NEXT_INSN (BB_END (bb));
4970 insn = NEXT_INSN (insn))
4971 if (GET_CODE (insn) == INSN)
4973 if (setcc)
4974 break;
4975 if (GET_CODE (PATTERN (insn)) != SET)
4976 break;
4978 dest = SET_DEST (PATTERN (insn));
4979 if (REG_P (dest) || CC0_P (dest))
4980 setcc = insn;
4981 else
4982 break;
4984 else if (GET_CODE (insn) == JUMP_INSN)
4986 if ((any_condjump_p (insn) || computed_jump_p (insn))
4987 && onlyjump_p (insn))
4988 changed |= bypass_block (bb, setcc, insn);
4989 break;
4991 else if (INSN_P (insn))
4992 break;
4996 /* If we bypassed any register setting insns, we inserted a
4997 copy on the redirected edge. These need to be committed. */
4998 if (changed)
4999 commit_edge_insertions ();
5001 return changed;
5004 /* Compute PRE+LCM working variables. */
5006 /* Local properties of expressions. */
5007 /* Nonzero for expressions that are transparent in the block. */
5008 static sbitmap *transp;
5010 /* Nonzero for expressions that are transparent at the end of the block.
5011 This is only zero for expressions killed by an abnormal critical
5012 edge created by a call. */
5013 static sbitmap *transpout;
5015 /* Nonzero for expressions that are computed (available) in the block. */
5016 static sbitmap *comp;
5018 /* Nonzero for expressions that are locally anticipatable in the block. */
5019 static sbitmap *antloc;
5021 /* Nonzero for expressions where this block is an optimal computation
5022 point. */
5023 static sbitmap *pre_optimal;
5025 /* Nonzero for expressions which are redundant in a particular block. */
5026 static sbitmap *pre_redundant;
5028 /* Nonzero for expressions which should be inserted on a specific edge. */
5029 static sbitmap *pre_insert_map;
5031 /* Nonzero for expressions which should be deleted in a specific block. */
5032 static sbitmap *pre_delete_map;
5034 /* Contains the edge_list returned by pre_edge_lcm. */
5035 static struct edge_list *edge_list;
5037 /* Redundant insns. */
5038 static sbitmap pre_redundant_insns;
5040 /* Allocate vars used for PRE analysis. */
5042 static void
5043 alloc_pre_mem (int n_blocks, int n_exprs)
5045 transp = sbitmap_vector_alloc (n_blocks, n_exprs);
5046 comp = sbitmap_vector_alloc (n_blocks, n_exprs);
5047 antloc = sbitmap_vector_alloc (n_blocks, n_exprs);
5049 pre_optimal = NULL;
5050 pre_redundant = NULL;
5051 pre_insert_map = NULL;
5052 pre_delete_map = NULL;
5053 ae_in = NULL;
5054 ae_out = NULL;
5055 ae_kill = sbitmap_vector_alloc (n_blocks, n_exprs);
5057 /* pre_insert and pre_delete are allocated later. */
5060 /* Free vars used for PRE analysis. */
5062 static void
5063 free_pre_mem (void)
5065 sbitmap_vector_free (transp);
5066 sbitmap_vector_free (comp);
5068 /* ANTLOC and AE_KILL are freed just after pre_lcm finishes. */
5070 if (pre_optimal)
5071 sbitmap_vector_free (pre_optimal);
5072 if (pre_redundant)
5073 sbitmap_vector_free (pre_redundant);
5074 if (pre_insert_map)
5075 sbitmap_vector_free (pre_insert_map);
5076 if (pre_delete_map)
5077 sbitmap_vector_free (pre_delete_map);
5078 if (ae_in)
5079 sbitmap_vector_free (ae_in);
5080 if (ae_out)
5081 sbitmap_vector_free (ae_out);
5083 transp = comp = NULL;
5084 pre_optimal = pre_redundant = pre_insert_map = pre_delete_map = NULL;
5085 ae_in = ae_out = NULL;
5088 /* Top level routine to do the dataflow analysis needed by PRE. */
5090 static void
5091 compute_pre_data (void)
5093 sbitmap trapping_expr;
5094 basic_block bb;
5095 unsigned int ui;
5097 compute_local_properties (transp, comp, antloc, &expr_hash_table);
5098 sbitmap_vector_zero (ae_kill, last_basic_block);
5100 /* Collect expressions which might trap. */
5101 trapping_expr = sbitmap_alloc (expr_hash_table.n_elems);
5102 sbitmap_zero (trapping_expr);
5103 for (ui = 0; ui < expr_hash_table.size; ui++)
5105 struct expr *e;
5106 for (e = expr_hash_table.table[ui]; e != NULL; e = e->next_same_hash)
5107 if (may_trap_p (e->expr))
5108 SET_BIT (trapping_expr, e->bitmap_index);
5111 /* Compute ae_kill for each basic block using:
5113 ~(TRANSP | COMP)
5115 This is significantly faster than compute_ae_kill, and correct: an
expression is available at a block's end iff it is computed in the
block (COMP) or passes through unmodified (TRANSP); everything else
is killed. */
5117 FOR_EACH_BB (bb)
5119 edge e;
5121 /* If the current block is the destination of an abnormal edge, we
5122 kill all trapping expressions because we won't be able to properly
5123 place the instruction on the edge. So make them neither
5124 anticipatable nor transparent. This is fairly conservative. */
5125 for (e = bb->pred; e ; e = e->pred_next)
5126 if (e->flags & EDGE_ABNORMAL)
5128 sbitmap_difference (antloc[bb->index], antloc[bb->index], trapping_expr);
5129 sbitmap_difference (transp[bb->index], transp[bb->index], trapping_expr);
5130 break;
5133 sbitmap_a_or_b (ae_kill[bb->index], transp[bb->index], comp[bb->index]);
5134 sbitmap_not (ae_kill[bb->index], ae_kill[bb->index]);
5137 edge_list = pre_edge_lcm (gcse_file, expr_hash_table.n_elems, transp, comp, antloc,
5138 ae_kill, &pre_insert_map, &pre_delete_map);
5139 sbitmap_vector_free (antloc);
5140 antloc = NULL;
5141 sbitmap_vector_free (ae_kill);
5142 ae_kill = NULL;
5143 sbitmap_free (trapping_expr);
5146 /* PRE utilities */
5148 /* Return nonzero if an occurrence of expression EXPR in OCCR_BB would reach
5149 block BB.
5151 VISITED is a pointer to a working buffer for tracking which BB's have
5152 been visited. It is NULL for the top-level call.
5154 We treat reaching expressions that go through blocks containing the same
5155 reaching expression as "not reaching". E.g. if EXPR is generated in blocks
5156 2 and 3, INSN is in block 4, and 2->3->4, we treat the expression in block
5157 2 as not reaching. The intent is to improve the probability of finding
5158 only one reaching expression and to reduce register lifetimes by picking
5159 the closest such expression. */
5161 static int
5162 pre_expr_reaches_here_p_work (basic_block occr_bb, struct expr *expr, basic_block bb, char *visited)
5164 edge pred;
5166 for (pred = bb->pred; pred != NULL; pred = pred->pred_next)
5168 basic_block pred_bb = pred->src;
5170 if (pred->src == ENTRY_BLOCK_PTR
5171 /* Has this predecessor already been visited? */
5172 || visited[pred_bb->index])
5173 ; /* Nothing to do. */
5175 /* Does this predecessor generate this expression? */
5176 else if (TEST_BIT (comp[pred_bb->index], expr->bitmap_index))
5178 /* Is this the occurrence we're looking for?
5179 Note that there's only one generating occurrence per block
5180 so we just need to check the block number. */
5181 if (occr_bb == pred_bb)
5182 return 1;
5184 visited[pred_bb->index] = 1;
5186 /* Ignore this predecessor if it kills the expression. */
5187 else if (! TEST_BIT (transp[pred_bb->index], expr->bitmap_index))
5188 visited[pred_bb->index] = 1;
5190 /* Neither gen nor kill. */
5191 else
5193 visited[pred_bb->index] = 1;
5194 if (pre_expr_reaches_here_p_work (occr_bb, expr, pred_bb, visited))
5195 return 1;
5199 /* All paths have been checked. */
5200 return 0;
5203 /* The wrapper for pre_expr_reaches_here_p_work that ensures that any
5204 memory allocated for that function is returned. */
5206 static int
5207 pre_expr_reaches_here_p (basic_block occr_bb, struct expr *expr, basic_block bb)
5209 int rval;
5210 char *visited = xcalloc (last_basic_block, 1);
5212 rval = pre_expr_reaches_here_p_work (occr_bb, expr, bb, visited);
5214 free (visited);
5215 return rval;
5219 /* Given an expr, generate RTL which we can insert at the end of a BB,
5220 or on an edge. Set the block number of any insns generated to
5221 the value of BB. */
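/* For example (hypothetical registers), if EXPR->EXPR is
   (plus:SI (reg 66) (const_int 4)) and EXPR->REACHING_REG is (reg 110),
   the generated sequence is the single insn
   (set (reg 110) (plus:SI (reg 66) (const_int 4)))
   with any CLOBBERs required by the target added when the insn is
   recognized.  */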
5223 static rtx
5224 process_insert_insn (struct expr *expr)
5226 rtx reg = expr->reaching_reg;
5227 rtx exp = copy_rtx (expr->expr);
5228 rtx pat;
5230 start_sequence ();
5232 /* If the expression is something that's an operand, like a constant,
5233 just copy it to a register. */
5234 if (general_operand (exp, GET_MODE (reg)))
5235 emit_move_insn (reg, exp);
5237 /* Otherwise, make a new insn to compute this expression and make sure the
5238 insn will be recognized (this also adds any needed CLOBBERs). Copy the
5239 expression to make sure we don't have any sharing issues. */
5240 else if (insn_invalid_p (emit_insn (gen_rtx_SET (VOIDmode, reg, exp))))
5241 abort ();
5243 pat = get_insns ();
5244 end_sequence ();
5246 return pat;
5249 /* Add EXPR to the end of basic block BB.
5251 This is used by both the PRE and code hoisting.
5253 For PRE, we want to verify that the expr is either transparent
5254 or locally anticipatable in the target block. This check makes
5255 no sense for code hoisting. */
5257 static void
5258 insert_insn_end_bb (struct expr *expr, basic_block bb, int pre)
5260 rtx insn = BB_END (bb);
5261 rtx new_insn;
5262 rtx reg = expr->reaching_reg;
5263 int regno = REGNO (reg);
5264 rtx pat, pat_end;
5266 pat = process_insert_insn (expr);
5267 if (pat == NULL_RTX || ! INSN_P (pat))
5268 abort ();
5270 pat_end = pat;
5271 while (NEXT_INSN (pat_end) != NULL_RTX)
5272 pat_end = NEXT_INSN (pat_end);
5274 /* If the last insn is a jump, insert EXPR in front [taking care to
5275 handle cc0, etc. properly]. Similarly we need to take care of
5276 trapping instructions in the presence of non-call exceptions. */
5278 if (GET_CODE (insn) == JUMP_INSN
5279 || (GET_CODE (insn) == INSN
5280 && (bb->succ->succ_next || (bb->succ->flags & EDGE_ABNORMAL))))
5282 #ifdef HAVE_cc0
5283 rtx note;
5284 #endif
5285 /* It should always be the case that we can put these instructions
5286 anywhere in the basic block when performing PRE optimizations.
5287 Check this. */
5288 if (GET_CODE (insn) == INSN && pre
5289 && !TEST_BIT (antloc[bb->index], expr->bitmap_index)
5290 && !TEST_BIT (transp[bb->index], expr->bitmap_index))
5291 abort ();
5293 /* If this is a jump table, then we can't insert stuff here. Since
5294 we know the previous real insn must be the tablejump, we insert
5295 the new instruction just before the tablejump. */
5296 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
5297 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
5298 insn = prev_real_insn (insn);
5300 #ifdef HAVE_cc0
5301 /* FIXME: 'twould be nice to call prev_cc0_setter here but it aborts
5302 if cc0 isn't set. */
5303 note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
5304 if (note)
5305 insn = XEXP (note, 0);
5306 else
5308 rtx maybe_cc0_setter = prev_nonnote_insn (insn);
5309 if (maybe_cc0_setter
5310 && INSN_P (maybe_cc0_setter)
5311 && sets_cc0_p (PATTERN (maybe_cc0_setter)))
5312 insn = maybe_cc0_setter;
5314 #endif
5315 /* FIXME: What if something in cc0/jump uses value set in new insn? */
5316 new_insn = emit_insn_before (pat, insn);
5319 /* Likewise if the last insn is a call, as will happen in the presence
5320 of exception handling. */
5321 else if (GET_CODE (insn) == CALL_INSN
5322 && (bb->succ->succ_next || (bb->succ->flags & EDGE_ABNORMAL)))
5324 /* Keeping in mind SMALL_REGISTER_CLASSES and parameters in registers,
5325 we search backward and place the instructions before the first
5326 parameter is loaded. Do this for everyone for consistency and a
5327 presumption that we'll get better code elsewhere as well.
5329 It should always be the case that we can put these instructions
5330 anywhere in the basic block when performing PRE optimizations.
5331 Check this. */
5333 if (pre
5334 && !TEST_BIT (antloc[bb->index], expr->bitmap_index)
5335 && !TEST_BIT (transp[bb->index], expr->bitmap_index))
5336 abort ();
5338 /* Since different machines initialize their parameter registers
5339 in different orders, assume nothing. Collect the set of all
5340 parameter registers. */
5341 insn = find_first_parameter_load (insn, BB_HEAD (bb));
5343 /* If we found all the parameter loads, then we want to insert
5344 before the first parameter load.
5346 If we did not find all the parameter loads, then we might have
5347 stopped on the head of the block, which could be a CODE_LABEL.
5348 If we inserted before the CODE_LABEL, then we would be putting
5349 the insn in the wrong basic block. In that case, put the insn
5350 after the CODE_LABEL. Also, respect NOTE_INSN_BASIC_BLOCK. */
5351 while (GET_CODE (insn) == CODE_LABEL
5352 || NOTE_INSN_BASIC_BLOCK_P (insn))
5353 insn = NEXT_INSN (insn);
5355 new_insn = emit_insn_before (pat, insn);
5357 else
5358 new_insn = emit_insn_after (pat, insn);
5360 while (1)
5362 if (INSN_P (pat))
5364 add_label_notes (PATTERN (pat), new_insn);
5365 note_stores (PATTERN (pat), record_set_info, pat);
5367 if (pat == pat_end)
5368 break;
5369 pat = NEXT_INSN (pat);
5372 gcse_create_count++;
5374 if (gcse_file)
5376 fprintf (gcse_file, "PRE/HOIST: end of bb %d, insn %d, ",
5377 bb->index, INSN_UID (new_insn));
5378 fprintf (gcse_file, "copying expression %d to reg %d\n",
5379 expr->bitmap_index, regno);
5383 /* Insert partially redundant expressions on edges in the CFG to make
5384 the expressions fully redundant. */
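/* Illustrative sketch (hypothetical CFG): if "a + b" reaches block 4
   from predecessor block 2 but not from predecessor block 3, LCM marks
   the edge 3->4 in pre_insert_map; inserting "reaching_reg = a + b"
   there makes the occurrence in block 4 fully redundant, so it can be
   replaced by a copy from reaching_reg (see pre_delete).  */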
5386 static int
5387 pre_edge_insert (struct edge_list *edge_list, struct expr **index_map)
5389 int e, i, j, num_edges, set_size, did_insert = 0;
5390 sbitmap *inserted;
5392 /* Where PRE_INSERT_MAP is nonzero, we add the expression on that edge
5393 if it reaches any of the deleted expressions. */
5395 set_size = pre_insert_map[0]->size;
5396 num_edges = NUM_EDGES (edge_list);
5397 inserted = sbitmap_vector_alloc (num_edges, expr_hash_table.n_elems);
5398 sbitmap_vector_zero (inserted, num_edges);
5400 for (e = 0; e < num_edges; e++)
5402 int indx;
5403 basic_block bb = INDEX_EDGE_PRED_BB (edge_list, e);
5405 for (i = indx = 0; i < set_size; i++, indx += SBITMAP_ELT_BITS)
5407 SBITMAP_ELT_TYPE insert = pre_insert_map[e]->elms[i];
5409 for (j = indx; insert && j < (int) expr_hash_table.n_elems; j++, insert >>= 1)
5410 if ((insert & 1) != 0 && index_map[j]->reaching_reg != NULL_RTX)
5412 struct expr *expr = index_map[j];
5413 struct occr *occr;
5415 /* Now look at each deleted occurrence of this expression. */
5416 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
5418 if (! occr->deleted_p)
5419 continue;
5421 /* Insert this expression on this edge if it would
5422 reach the deleted occurrence in BB. */
5423 if (!TEST_BIT (inserted[e], j))
5425 rtx insn;
5426 edge eg = INDEX_EDGE (edge_list, e);
5428 /* We can't insert anything on an abnormal and
5429 critical edge, so we insert the insn at the end of
5430 the previous block. There are several alternatives
5431 detailed in Morgan's book, p. 277 (sec. 10.5), for
5432 handling this situation. This one is easiest for
5433 now. */
5435 if ((eg->flags & EDGE_ABNORMAL) == EDGE_ABNORMAL)
5436 insert_insn_end_bb (index_map[j], bb, 0);
5437 else
5439 insn = process_insert_insn (index_map[j]);
5440 insert_insn_on_edge (insn, eg);
5443 if (gcse_file)
5445 fprintf (gcse_file, "PRE/HOIST: edge (%d,%d), ",
5446 bb->index,
5447 INDEX_EDGE_SUCC_BB (edge_list, e)->index);
5448 fprintf (gcse_file, "copy expression %d\n",
5449 expr->bitmap_index);
5452 update_ld_motion_stores (expr);
5453 SET_BIT (inserted[e], j);
5454 did_insert = 1;
5455 gcse_create_count++;
5462 sbitmap_vector_free (inserted);
5463 return did_insert;
5466 /* Copy the result of EXPR->EXPR generated by INSN to EXPR->REACHING_REG.
5467 Given "old_reg <- expr" (INSN), instead of adding after it
5468 reaching_reg <- old_reg
5469 it's better to do the following:
5470 reaching_reg <- expr
5471 old_reg <- reaching_reg
5472 because this way copy propagation can discover additional PRE
5473 opportunities. But if this fails, we try the old way.
5474 When "expr" is a store, i.e.
5475 given "MEM <- old_reg", instead of adding after it
5476 reaching_reg <- old_reg
5477 it's better to add it before as follows:
5478 reaching_reg <- old_reg
5479 MEM <- reaching_reg. */
5481 static void
5482 pre_insert_copy_insn (struct expr *expr, rtx insn)
5484 rtx reg = expr->reaching_reg;
5485 int regno = REGNO (reg);
5486 int indx = expr->bitmap_index;
5487 rtx pat = PATTERN (insn);
5488 rtx set, new_insn;
5489 rtx old_reg;
5490 int i;
5492 /* This block matches the logic in hash_scan_insn. */
5493 if (GET_CODE (pat) == SET)
5494 set = pat;
5495 else if (GET_CODE (pat) == PARALLEL)
5497 /* Search through the parallel looking for the set whose
5498 source was the expression that we're interested in. */
5499 set = NULL_RTX;
5500 for (i = 0; i < XVECLEN (pat, 0); i++)
5502 rtx x = XVECEXP (pat, 0, i);
5503 if (GET_CODE (x) == SET
5504 && expr_equiv_p (SET_SRC (x), expr->expr))
5506 set = x;
5507 break;
5511 else
5512 abort ();
5514 if (GET_CODE (SET_DEST (set)) == REG)
5516 old_reg = SET_DEST (set);
5517 /* Check if we can modify the set destination in the original insn. */
5518 if (validate_change (insn, &SET_DEST (set), reg, 0))
5520 new_insn = gen_move_insn (old_reg, reg);
5521 new_insn = emit_insn_after (new_insn, insn);
5523 /* Keep register set table up to date. */
5524 replace_one_set (REGNO (old_reg), insn, new_insn);
5525 record_one_set (regno, insn);
5527 else
5529 new_insn = gen_move_insn (reg, old_reg);
5530 new_insn = emit_insn_after (new_insn, insn);
5532 /* Keep register set table up to date. */
5533 record_one_set (regno, new_insn);
5536 else /* This is possible only in case of a store to memory. */
5538 old_reg = SET_SRC (set);
5539 new_insn = gen_move_insn (reg, old_reg);
5541 /* Check if we can modify the set source in the original insn. */
5542 if (validate_change (insn, &SET_SRC (set), reg, 0))
5543 new_insn = emit_insn_before (new_insn, insn);
5544 else
5545 new_insn = emit_insn_after (new_insn, insn);
5547 /* Keep register set table up to date. */
5548 record_one_set (regno, new_insn);
5551 gcse_create_count++;
5553 if (gcse_file)
5554 fprintf (gcse_file,
5555 "PRE: bb %d, insn %d, copy expression %d in insn %d to reg %d\n",
5556 BLOCK_NUM (insn), INSN_UID (new_insn), indx,
5557 INSN_UID (insn), regno);
5560 /* Copy available expressions that reach the redundant expression
5561 to `reaching_reg'. */
5563 static void
5564 pre_insert_copies (void)
5566 unsigned int i, added_copy;
5567 struct expr *expr;
5568 struct occr *occr;
5569 struct occr *avail;
5571 /* For each available expression in the table, copy the result to
5572 `reaching_reg' if the expression reaches a deleted one.
5574 ??? The current algorithm is rather brute force.
5575 Need to do some profiling. */
5577 for (i = 0; i < expr_hash_table.size; i++)
5578 for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash)
5580 /* If the basic block isn't reachable, PPOUT will be TRUE. However,
5581 we don't want to insert a copy here because the expression may not
5582 really be redundant. So only insert an insn if the expression was
5583 deleted. This test also avoids further processing if the
5584 expression wasn't deleted anywhere. */
5585 if (expr->reaching_reg == NULL)
5586 continue;
5588 /* Set when we add a copy for that expression. */
5589 added_copy = 0;
5591 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
5593 if (! occr->deleted_p)
5594 continue;
5596 for (avail = expr->avail_occr; avail != NULL; avail = avail->next)
5598 rtx insn = avail->insn;
5600 /* No need to handle this one if handled already. */
5601 if (avail->copied_p)
5602 continue;
5604 /* Don't handle this one if it's a redundant one. */
5605 if (TEST_BIT (pre_redundant_insns, INSN_CUID (insn)))
5606 continue;
5608 /* Or if the expression doesn't reach the deleted one. */
5609 if (! pre_expr_reaches_here_p (BLOCK_FOR_INSN (avail->insn),
5610 expr,
5611 BLOCK_FOR_INSN (occr->insn)))
5612 continue;
5614 added_copy = 1;
5616 /* Copy the result of avail to reaching_reg. */
5617 pre_insert_copy_insn (expr, insn);
5618 avail->copied_p = 1;
5622 if (added_copy)
5623 update_ld_motion_stores (expr);
5627 /* Emit a move from SRC to DEST, noting the equivalence with the
5628 expression computed in INSN. */
5629 static rtx
5630 gcse_emit_move_after (rtx src, rtx dest, rtx insn)
5632 rtx new;
5633 rtx set = single_set (insn), set2;
5634 rtx note;
5635 rtx eqv;
5637 /* This should never fail since we're creating a reg->reg copy
5638 we've verified to be valid. */
5640 new = emit_insn_after (gen_move_insn (dest, src), insn);
5642 /* Note the equivalence for local CSE pass. */
5643 set2 = single_set (new);
5644 if (!set2 || !rtx_equal_p (SET_DEST (set2), dest))
5645 return new;
5646 if ((note = find_reg_equal_equiv_note (insn)))
5647 eqv = XEXP (note, 0);
5648 else
5649 eqv = SET_SRC (set);
5651 set_unique_reg_note (new, REG_EQUAL, copy_insn_1 (eqv));
5653 return new;
5656 /* Delete redundant computations.
5657 Deletion is done by changing the insn to copy the `reaching_reg' of
5658 the expression into the result of the SET. It is left to later passes
5659 (cprop, cse2, flow, combine, regmove) to propagate the copy or eliminate it.
5661 Returns nonzero if a change is made. */
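/* For example (hypothetical registers), a redundant
   (set (reg 66) (plus:SI (reg 70) (reg 71)))
   is replaced by the copy (set (reg 66) (reg 110)), where reg 110 is
   the expression's reaching_reg; later passes propagate or eliminate
   the copy.  */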
5663 static int
5664 pre_delete (void)
5666 unsigned int i;
5667 int changed;
5668 struct expr *expr;
5669 struct occr *occr;
5671 changed = 0;
5672 for (i = 0; i < expr_hash_table.size; i++)
5673 for (expr = expr_hash_table.table[i];
5674 expr != NULL;
5675 expr = expr->next_same_hash)
5677 int indx = expr->bitmap_index;
5679 /* We only need to search antic_occr since we require
5680 ANTLOC != 0. */
5682 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
5684 rtx insn = occr->insn;
5685 rtx set;
5686 basic_block bb = BLOCK_FOR_INSN (insn);
5688 /* We only delete insns that have a single_set. */
5689 if (TEST_BIT (pre_delete_map[bb->index], indx)
5690 && (set = single_set (insn)) != 0)
5692 /* Create a pseudo-reg to store the result of reaching
5693 expressions into. Get the mode for the new pseudo from
5694 the mode of the original destination pseudo. */
5695 if (expr->reaching_reg == NULL)
5696 expr->reaching_reg
5697 = gen_reg_rtx (GET_MODE (SET_DEST (set)));
5699 gcse_emit_move_after (expr->reaching_reg, SET_DEST (set), insn);
5700 delete_insn (insn);
5701 occr->deleted_p = 1;
5702 SET_BIT (pre_redundant_insns, INSN_CUID (insn));
5703 changed = 1;
5704 gcse_subst_count++;
5706 if (gcse_file)
5708 fprintf (gcse_file,
5709 "PRE: redundant insn %d (expression %d) in ",
5710 INSN_UID (insn), indx);
5711 fprintf (gcse_file, "bb %d, reaching reg is %d\n",
5712 bb->index, REGNO (expr->reaching_reg));
5718 return changed;
5721 /* Perform GCSE optimizations using PRE.
5722 This is called by one_pre_gcse_pass after all the dataflow analysis
5723 has been done.
5725 This is based on the original Morel-Renvoise paper, Fred Chow's thesis, and
5726 lazy code motion from Knoop, Ruthing and Steffen as described in Advanced
5727 Compiler Design and Implementation.
5729 ??? A new pseudo reg is created to hold the reaching expression. The nice
5730 thing about the classical approach is that it would try to use an existing
5731 reg. If the register can't be adequately optimized [i.e. we introduce
5732 reload problems], one could add a pass here to propagate the new register
5733 through the block.
5735 ??? We don't handle single sets in PARALLELs because we're [currently] not
5736 able to copy the rest of the parallel when we insert copies to create full
5737 redundancies from partial redundancies. However, there's no reason why we
5738 can't handle PARALLELs in the cases where there are no partial
5739 redundancies. */
5741 static int
5742 pre_gcse (void)
5744 unsigned int i;
5745 int did_insert, changed;
5746 struct expr **index_map;
5747 struct expr *expr;
5749 /* Compute a mapping from expression number (`bitmap_index') to
5750 hash table entry. */
5752 index_map = xcalloc (expr_hash_table.n_elems, sizeof (struct expr *));
5753 for (i = 0; i < expr_hash_table.size; i++)
5754 for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash)
5755 index_map[expr->bitmap_index] = expr;
5757 /* Reset bitmap used to track which insns are redundant. */
5758 pre_redundant_insns = sbitmap_alloc (max_cuid);
5759 sbitmap_zero (pre_redundant_insns);
5761 /* Delete the redundant insns first so that
5762 - we know what register to use for the new insns and for the other
5763 ones with reaching expressions
5764 - we know which insns are redundant when we go to create copies */
5766 changed = pre_delete ();
5768 did_insert = pre_edge_insert (edge_list, index_map);
5770 /* In other places with reaching expressions, copy the expression to the
5771 specially allocated pseudo-reg that reaches the redundant expr. */
5772 pre_insert_copies ();
5773 if (did_insert)
5775 commit_edge_insertions ();
5776 changed = 1;
5779 free (index_map);
5780 sbitmap_free (pre_redundant_insns);
5781 return changed;
5784 /* Top level routine to perform one PRE GCSE pass.
5786 Return nonzero if a change was made. */
5788 static int
5789 one_pre_gcse_pass (int pass)
5791 int changed = 0;
5793 gcse_subst_count = 0;
5794 gcse_create_count = 0;
5796 alloc_hash_table (max_cuid, &expr_hash_table, 0);
5797 add_noreturn_fake_exit_edges ();
5798 if (flag_gcse_lm)
5799 compute_ld_motion_mems ();
5801 compute_hash_table (&expr_hash_table);
5802 trim_ld_motion_mems ();
5803 if (gcse_file)
5804 dump_hash_table (gcse_file, "Expression", &expr_hash_table);
5806 if (expr_hash_table.n_elems > 0)
5808 alloc_pre_mem (last_basic_block, expr_hash_table.n_elems);
5809 compute_pre_data ();
5810 changed |= pre_gcse ();
5811 free_edge_list (edge_list);
5812 free_pre_mem ();
5815 free_ldst_mems ();
5816 remove_fake_edges ();
5817 free_hash_table (&expr_hash_table);
5819 if (gcse_file)
5821 fprintf (gcse_file, "\nPRE GCSE of %s, pass %d: %d bytes needed, ",
5822 current_function_name (), pass, bytes_used);
5823 fprintf (gcse_file, "%d substs, %d insns created\n",
5824 gcse_subst_count, gcse_create_count);
5827 return changed;
5830 /* If X contains any LABEL_REF's, add REG_LABEL notes for them to INSN.
5831 If notes are added to an insn which references a CODE_LABEL, the
5832 LABEL_NUSES count is incremented. We have to add REG_LABEL notes,
5833 because the following loop optimization pass requires them. */
5835 /* ??? This is very similar to the loop.c add_label_notes function. We
5836 could probably share code here. */
5838 /* ??? If there was a jump optimization pass after gcse and before loop,
5839 then we would not need to do this here, because jump would add the
5840 necessary REG_LABEL notes. */
5842 static void
5843 add_label_notes (rtx x, rtx insn)
5845 enum rtx_code code = GET_CODE (x);
5846 int i, j;
5847 const char *fmt;
5849 if (code == LABEL_REF && !LABEL_REF_NONLOCAL_P (x))
5851 /* This code used to ignore labels that referred to dispatch tables to
5852 avoid flow generating (slightly) worse code.
5854 We no longer ignore such label references (see LABEL_REF handling in
5855 mark_jump_label for additional information). */
5857 REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_LABEL, XEXP (x, 0),
5858 REG_NOTES (insn));
5859 if (LABEL_P (XEXP (x, 0)))
5860 LABEL_NUSES (XEXP (x, 0))++;
5861 return;
5864 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
5866 if (fmt[i] == 'e')
5867 add_label_notes (XEXP (x, i), insn);
5868 else if (fmt[i] == 'E')
5869 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
5870 add_label_notes (XVECEXP (x, i, j), insn);
5874 /* Compute transparent outgoing information for each block.
5876 An expression is transparent to an edge unless it is killed by
5877 the edge itself. This can only happen with abnormal control flow,
5878 when the edge is traversed through a call. This happens with
5879 non-local labels and exceptions.
5881 This would not be necessary if we split the edge. While this is
5882 normally impossible for abnormal critical edges, with some effort
5883 it should be possible with exception handling, since we still have
5884 control over which handler should be invoked. But due to increased
5885 EH table sizes, this may not be worthwhile. */
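/* As a sketch of the intent: an expression such as (mem (symbol_ref "x"))
   is not transparent at the exit of a block ending in a call, since the
   callee may store to x when the block is left through an abnormal edge.
   Constant pool references are exempted below because constant pool
   entries are never modified. */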
5887 static void
5888 compute_transpout (void)
5890 basic_block bb;
5891 unsigned int i;
5892 struct expr *expr;
5894 sbitmap_vector_ones (transpout, last_basic_block);
5896 FOR_EACH_BB (bb)
5898 /* Note that flow inserted a nop at the end of basic blocks that
5899 end in call instructions for reasons other than abnormal
5900 control flow. */
5901 if (GET_CODE (BB_END (bb)) != CALL_INSN)
5902 continue;
5904 for (i = 0; i < expr_hash_table.size; i++)
5905 for (expr = expr_hash_table.table[i]; expr ; expr = expr->next_same_hash)
5906 if (GET_CODE (expr->expr) == MEM)
5908 if (GET_CODE (XEXP (expr->expr, 0)) == SYMBOL_REF
5909 && CONSTANT_POOL_ADDRESS_P (XEXP (expr->expr, 0)))
5910 continue;
5912 /* ??? Optimally, we would use interprocedural alias
5913 analysis to determine if this mem is actually killed
5914 by this call. */
5915 RESET_BIT (transpout[bb->index], expr->bitmap_index);
5920 /* Removal of useless null pointer checks */
5922 /* Called via note_stores. X is set by SETTER. If X is a register we must
5923 invalidate nonnull_local and set nonnull_killed. DATA is really a
5924 `null_pointer_info *'.
5926 We ignore hard registers. */
5928 static void
5929 invalidate_nonnull_info (rtx x, rtx setter ATTRIBUTE_UNUSED, void *data)
5931 unsigned int regno;
5932 struct null_pointer_info *npi = (struct null_pointer_info *) data;
5934 while (GET_CODE (x) == SUBREG)
5935 x = SUBREG_REG (x);
5937 /* Ignore anything that is not a register or is a hard register. */
5938 if (GET_CODE (x) != REG
5939 || REGNO (x) < npi->min_reg
5940 || REGNO (x) >= npi->max_reg)
5941 return;
5943 regno = REGNO (x) - npi->min_reg;
5945 RESET_BIT (npi->nonnull_local[npi->current_block->index], regno);
5946 SET_BIT (npi->nonnull_killed[npi->current_block->index], regno);
5949 /* Do null-pointer check elimination for the registers indicated in
5950 NPI. NONNULL_AVIN and NONNULL_AVOUT are pre-allocated sbitmaps;
5951 they are not our responsibility to free. */
5953 static int
5954 delete_null_pointer_checks_1 (unsigned int *block_reg, sbitmap *nonnull_avin,
5955 sbitmap *nonnull_avout,
5956 struct null_pointer_info *npi)
5958 basic_block bb, current_block;
5959 sbitmap *nonnull_local = npi->nonnull_local;
5960 sbitmap *nonnull_killed = npi->nonnull_killed;
5961 int something_changed = 0;
5963 /* Compute local properties, nonnull and killed. A register will have
5964 the nonnull property if at the end of the current block its value is
5965 known to be nonnull. The killed property indicates that somewhere in
5966 the block any information we had about the register is killed.
5968 Note that a register can have both properties in a single block. That
5969 indicates that it's killed, then later in the block a new value is
5970 computed. */
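/* An illustrative sketch (register numbers invented): in a block such as

	(set (reg 105) (reg 106))		kills what was known about reg 105
	(set (reg 107) (mem (reg 105)))		reg 105 must be nonnull here

   reg 105 gets both bits: nonnull_killed because it is set, and
   nonnull_local because a later use as a memory address establishes a
   new nonnull value. */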
5971 sbitmap_vector_zero (nonnull_local, last_basic_block);
5972 sbitmap_vector_zero (nonnull_killed, last_basic_block);
5974 FOR_EACH_BB (current_block)
5976 rtx insn, stop_insn;
5978 /* Set the current block for invalidate_nonnull_info. */
5979 npi->current_block = current_block;
5981 /* Scan each insn in the basic block looking for memory references and
5982 register sets. */
5983 stop_insn = NEXT_INSN (BB_END (current_block));
5984 for (insn = BB_HEAD (current_block);
5985 insn != stop_insn;
5986 insn = NEXT_INSN (insn))
5988 rtx set;
5989 rtx reg;
5991 /* Ignore anything that is not a normal insn. */
5992 if (! INSN_P (insn))
5993 continue;
5995 /* Basically ignore anything that is not a simple SET. We do have
5996 to make sure to invalidate nonnull_local and set nonnull_killed
5997 for such insns though. */
5998 set = single_set (insn);
5999 if (!set)
6001 note_stores (PATTERN (insn), invalidate_nonnull_info, npi);
6002 continue;
6005 /* See if we've got a usable memory load. We handle it first
6006 in case it uses its address register as a dest (which kills
6007 the nonnull property). */
6008 if (GET_CODE (SET_SRC (set)) == MEM
6009 && GET_CODE ((reg = XEXP (SET_SRC (set), 0))) == REG
6010 && REGNO (reg) >= npi->min_reg
6011 && REGNO (reg) < npi->max_reg)
6012 SET_BIT (nonnull_local[current_block->index],
6013 REGNO (reg) - npi->min_reg);
6015 /* Now invalidate stuff clobbered by this insn. */
6016 note_stores (PATTERN (insn), invalidate_nonnull_info, npi);
6018 /* And handle stores; we do these last since any sets in INSN cannot
6019 kill the nonnull property if it is derived from a MEM
6020 appearing in a SET_DEST. */
6021 if (GET_CODE (SET_DEST (set)) == MEM
6022 && GET_CODE ((reg = XEXP (SET_DEST (set), 0))) == REG
6023 && REGNO (reg) >= npi->min_reg
6024 && REGNO (reg) < npi->max_reg)
6025 SET_BIT (nonnull_local[current_block->index],
6026 REGNO (reg) - npi->min_reg);
6030 /* Now compute global properties based on the local properties. This
6031 is a classic global availability algorithm. */
6032 compute_available (nonnull_local, nonnull_killed,
6033 nonnull_avout, nonnull_avin);
6035 /* Now look at each bb and see if it ends with a compare of a value
6036 against zero. */
6037 FOR_EACH_BB (bb)
6039 rtx last_insn = BB_END (bb);
6040 rtx condition, earliest;
6041 int compare_and_branch;
6043 /* Since MIN_REG is always at least FIRST_PSEUDO_REGISTER, and
6044 since BLOCK_REG[BB] is zero if this block did not end with a
6045 comparison against zero, this condition works. */
6046 if (block_reg[bb->index] < npi->min_reg
6047 || block_reg[bb->index] >= npi->max_reg)
6048 continue;
6050 /* LAST_INSN is a conditional jump. Get its condition. */
6051 condition = get_condition (last_insn, &earliest, false);
6053 /* If we can't determine the condition then skip. */
6054 if (! condition)
6055 continue;
6057 /* Is the register known to have a nonzero value? */
6058 if (!TEST_BIT (nonnull_avout[bb->index], block_reg[bb->index] - npi->min_reg))
6059 continue;
6061 /* Try to compute whether the compare/branch at the end of the block is one
6062 or two instructions. */
6063 if (earliest == last_insn)
6064 compare_and_branch = 1;
6065 else if (earliest == prev_nonnote_insn (last_insn))
6066 compare_and_branch = 2;
6067 else
6068 continue;
6070 /* We know the register in this comparison is nonnull at exit from
6071 this block. We can optimize this comparison. */
6072 if (GET_CODE (condition) == NE)
6074 rtx new_jump;
6076 new_jump = emit_jump_insn_after (gen_jump (JUMP_LABEL (last_insn)),
6077 last_insn);
6078 JUMP_LABEL (new_jump) = JUMP_LABEL (last_insn);
6079 LABEL_NUSES (JUMP_LABEL (new_jump))++;
6080 emit_barrier_after (new_jump);
6083 something_changed = 1;
6084 delete_insn (last_insn);
6085 #ifdef HAVE_cc0
6086 if (compare_and_branch == 2)
6087 delete_insn (earliest);
6088 #endif
6089 purge_dead_edges (bb);
6091 /* Don't check this block again. (Note that BB_END is
6092 invalid here; we deleted the last instruction in the
6093 block.) */
6094 block_reg[bb->index] = 0;
6097 return something_changed;
6100 /* Find EQ/NE comparisons against zero which can be (indirectly) evaluated
6101 at compile time.
6103 This is conceptually similar to global constant/copy propagation and
6104 classic global CSE (it even uses the same dataflow equations as cprop).
6106 If a register is used as a memory address with the form (mem (reg)), then we
6107 know that REG cannot be zero at that point in the program. Any instruction
6108 which sets REG "kills" this property.
6110 So, if every path leading to a conditional branch has an available memory
6111 reference of that form, then we know the register cannot have the value
6112 zero at the conditional branch.
6114 So we merely need to compute the local properties and propagate that data
6115 around the cfg, then optimize where possible.
6117 We run this pass twice: once before CSE, then again after CSE. This
6118 has proven to be the most profitable approach. It is rare for new
6119 optimization opportunities of this nature to appear after the first CSE
6120 pass.
6122 This could probably be integrated with global cprop with a little work. */
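/* A sketch of the transformation on source-level code:

	tmp = *p;	p is used as (mem (reg p)), so p != 0 afterwards
	...		assuming nothing assigns to p in between
	if (p == 0)	this test can never be true
	  abort ();

   The compare/branch is deleted; for an NE comparison it is replaced by
   an unconditional jump instead. */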
6124 int
6125 delete_null_pointer_checks (rtx f ATTRIBUTE_UNUSED)
6127 sbitmap *nonnull_avin, *nonnull_avout;
6128 unsigned int *block_reg;
6129 basic_block bb;
6130 int reg;
6131 int regs_per_pass;
6132 int max_reg = max_reg_num ();
6133 struct null_pointer_info npi;
6134 int something_changed = 0;
6136 /* If we have only a single block, or it is too expensive, give up. */
6137 if (n_basic_blocks <= 1
6138 || is_too_expensive (_("NULL pointer checks disabled")))
6139 return 0;
6141 /* We need four bitmaps, each with a bit for each register in each
6142 basic block. */
6143 regs_per_pass = get_bitmap_width (4, last_basic_block, max_reg);
6145 /* Allocate bitmaps to hold local and global properties. */
6146 npi.nonnull_local = sbitmap_vector_alloc (last_basic_block, regs_per_pass);
6147 npi.nonnull_killed = sbitmap_vector_alloc (last_basic_block, regs_per_pass);
6148 nonnull_avin = sbitmap_vector_alloc (last_basic_block, regs_per_pass);
6149 nonnull_avout = sbitmap_vector_alloc (last_basic_block, regs_per_pass);
6151 /* Go through the basic blocks, seeing whether or not each block
6152 ends with a conditional branch whose condition is a comparison
6153 against zero. Record the register compared in BLOCK_REG. */
6154 block_reg = xcalloc (last_basic_block, sizeof (int));
6155 FOR_EACH_BB (bb)
6157 rtx last_insn = BB_END (bb);
6158 rtx condition, earliest, reg;
6160 /* We only want conditional branches. */
6161 if (GET_CODE (last_insn) != JUMP_INSN
6162 || !any_condjump_p (last_insn)
6163 || !onlyjump_p (last_insn))
6164 continue;
6166 /* LAST_INSN is a conditional jump. Get its condition. */
6167 condition = get_condition (last_insn, &earliest, false);
6169 /* If we were unable to get the condition, or it is not an equality
6170 comparison against zero then there's nothing we can do. */
6171 if (!condition
6172 || (GET_CODE (condition) != NE && GET_CODE (condition) != EQ)
6173 || GET_CODE (XEXP (condition, 1)) != CONST_INT
6174 || (XEXP (condition, 1)
6175 != CONST0_RTX (GET_MODE (XEXP (condition, 0)))))
6176 continue;
6178 /* We must be checking a register against zero. */
6179 reg = XEXP (condition, 0);
6180 if (GET_CODE (reg) != REG)
6181 continue;
6183 block_reg[bb->index] = REGNO (reg);
6186 /* Go through the algorithm for each block of registers. */
6187 for (reg = FIRST_PSEUDO_REGISTER; reg < max_reg; reg += regs_per_pass)
6189 npi.min_reg = reg;
6190 npi.max_reg = MIN (reg + regs_per_pass, max_reg);
6191 something_changed |= delete_null_pointer_checks_1 (block_reg,
6192 nonnull_avin,
6193 nonnull_avout,
6194 &npi);
6197 /* Free the table of registers compared at the end of every block. */
6198 free (block_reg);
6200 /* Free bitmaps. */
6201 sbitmap_vector_free (npi.nonnull_local);
6202 sbitmap_vector_free (npi.nonnull_killed);
6203 sbitmap_vector_free (nonnull_avin);
6204 sbitmap_vector_free (nonnull_avout);
6206 return something_changed;
6209 /* Code Hoisting variables and subroutines. */
6211 /* Very busy expressions. */
6212 static sbitmap *hoist_vbein;
6213 static sbitmap *hoist_vbeout;
6215 /* Hoistable expressions. */
6216 static sbitmap *hoist_exprs;
6218 /* ??? We could compute post dominators and run this algorithm in
6219 reverse to perform tail merging; doing so would probably be
6220 more effective than the tail merging code in jump.c.
6222 It's unclear if tail merging could be run in parallel with
6223 code hoisting. It would be nice. */
6225 /* Allocate vars used for code hoisting analysis. */
6227 static void
6228 alloc_code_hoist_mem (int n_blocks, int n_exprs)
6230 antloc = sbitmap_vector_alloc (n_blocks, n_exprs);
6231 transp = sbitmap_vector_alloc (n_blocks, n_exprs);
6232 comp = sbitmap_vector_alloc (n_blocks, n_exprs);
6234 hoist_vbein = sbitmap_vector_alloc (n_blocks, n_exprs);
6235 hoist_vbeout = sbitmap_vector_alloc (n_blocks, n_exprs);
6236 hoist_exprs = sbitmap_vector_alloc (n_blocks, n_exprs);
6237 transpout = sbitmap_vector_alloc (n_blocks, n_exprs);
6240 /* Free vars used for code hoisting analysis. */
6242 static void
6243 free_code_hoist_mem (void)
6245 sbitmap_vector_free (antloc);
6246 sbitmap_vector_free (transp);
6247 sbitmap_vector_free (comp);
6249 sbitmap_vector_free (hoist_vbein);
6250 sbitmap_vector_free (hoist_vbeout);
6251 sbitmap_vector_free (hoist_exprs);
6252 sbitmap_vector_free (transpout);
6254 free_dominance_info (CDI_DOMINATORS);
6257 /* Compute the very busy expressions at entry/exit from each block.
6259 An expression is very busy if all paths from a given point
6260 compute the expression. */
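/* For example (a sketch, not taken from any test case), in

	if (c)
	  x = a + b;
	else
	  y = a + b;

   the expression a + b is computed on every path leaving the test of c,
   so it is very busy at the end of that block; code hoisting can then
   evaluate a + b once, ahead of the branch. */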
6262 static void
6263 compute_code_hoist_vbeinout (void)
6265 int changed, passes;
6266 basic_block bb;
6268 sbitmap_vector_zero (hoist_vbeout, last_basic_block);
6269 sbitmap_vector_zero (hoist_vbein, last_basic_block);
6271 passes = 0;
6272 changed = 1;
6274 while (changed)
6276 changed = 0;
6278 /* We scan the blocks in reverse order to speed up
6279 the convergence. */
6280 FOR_EACH_BB_REVERSE (bb)
6282 changed |= sbitmap_a_or_b_and_c_cg (hoist_vbein[bb->index], antloc[bb->index],
6283 hoist_vbeout[bb->index], transp[bb->index]);
6284 if (bb->next_bb != EXIT_BLOCK_PTR)
6285 sbitmap_intersection_of_succs (hoist_vbeout[bb->index], hoist_vbein, bb->index);
6288 passes++;
6291 if (gcse_file)
6292 fprintf (gcse_file, "hoisting vbeinout computation: %d passes\n", passes);
6295 /* Top level routine to do the dataflow analysis needed by code hoisting. */
6297 static void
6298 compute_code_hoist_data (void)
6300 compute_local_properties (transp, comp, antloc, &expr_hash_table);
6301 compute_transpout ();
6302 compute_code_hoist_vbeinout ();
6303 calculate_dominance_info (CDI_DOMINATORS);
6304 if (gcse_file)
6305 fprintf (gcse_file, "\n");
6308 /* Determine if the expression identified by EXPR_INDEX would
6309 reach BB unimpaired if it was placed at the end of EXPR_BB.
6311 It's unclear exactly what Muchnick meant by "unimpaired". It seems
6312 to me that the expression must either be computed or transparent in
6313 *every* block in the path(s) from EXPR_BB to BB. Any other definition
6314 would allow the expression to be hoisted out of loops, even if
6315 the expression wasn't a loop invariant.
6317 Contrast this to reachability for PRE where an expression is
6318 considered reachable if *any* path reaches instead of *all*
6319 paths. */
6321 static int
6322 hoist_expr_reaches_here_p (basic_block expr_bb, int expr_index, basic_block bb, char *visited)
6324 edge pred;
6325 int visited_allocated_locally = 0;
6328 if (visited == NULL)
6330 visited_allocated_locally = 1;
6331 visited = xcalloc (last_basic_block, 1);
6334 for (pred = bb->pred; pred != NULL; pred = pred->pred_next)
6336 basic_block pred_bb = pred->src;
6338 if (pred->src == ENTRY_BLOCK_PTR)
6339 break;
6340 else if (pred_bb == expr_bb)
6341 continue;
6342 else if (visited[pred_bb->index])
6343 continue;
6345 /* Does this predecessor generate this expression? */
6346 else if (TEST_BIT (comp[pred_bb->index], expr_index))
6347 break;
6348 else if (! TEST_BIT (transp[pred_bb->index], expr_index))
6349 break;
6351 /* Not killed. */
6352 else
6354 visited[pred_bb->index] = 1;
6355 if (! hoist_expr_reaches_here_p (expr_bb, expr_index,
6356 pred_bb, visited))
6357 break;
6360 if (visited_allocated_locally)
6361 free (visited);
6363 return (pred == NULL);
6366 /* Actually perform code hoisting. */
6368 static void
6369 hoist_code (void)
6371 basic_block bb, dominated;
6372 basic_block *domby;
6373 unsigned int domby_len;
6374 unsigned int i,j;
6375 struct expr **index_map;
6376 struct expr *expr;
6378 sbitmap_vector_zero (hoist_exprs, last_basic_block);
6380 /* Compute a mapping from expression number (`bitmap_index') to
6381 hash table entry. */
6383 index_map = xcalloc (expr_hash_table.n_elems, sizeof (struct expr *));
6384 for (i = 0; i < expr_hash_table.size; i++)
6385 for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash)
6386 index_map[expr->bitmap_index] = expr;
6388 /* Walk over each basic block looking for potentially hoistable
6389 expressions; nothing gets hoisted from the entry block. */
6390 FOR_EACH_BB (bb)
6392 int found = 0;
6393 int insn_inserted_p;
6395 domby_len = get_dominated_by (CDI_DOMINATORS, bb, &domby);
6396 /* Examine each expression that is very busy at the exit of this
6397 block. These are the potentially hoistable expressions. */
6398 for (i = 0; i < hoist_vbeout[bb->index]->n_bits; i++)
6400 int hoistable = 0;
6402 if (TEST_BIT (hoist_vbeout[bb->index], i)
6403 && TEST_BIT (transpout[bb->index], i))
6405 /* We've found a potentially hoistable expression, now
6406 we look at every block BB dominates to see if it
6407 computes the expression. */
6408 for (j = 0; j < domby_len; j++)
6410 dominated = domby[j];
6411 /* Ignore self dominance. */
6412 if (bb == dominated)
6413 continue;
6414 /* We've found a dominated block, now see if it computes
6415 the busy expression and whether or not moving that
6416 expression to the "beginning" of that block is safe. */
6417 if (!TEST_BIT (antloc[dominated->index], i))
6418 continue;
6420 /* Note if the expression would reach the dominated block
6421 unimpaired if it was placed at the end of BB.
6423 Keep track of how many times this expression is hoistable
6424 from a dominated block into BB. */
6425 if (hoist_expr_reaches_here_p (bb, i, dominated, NULL))
6426 hoistable++;
6429 /* If we found more than one hoistable occurrence of this
6430 expression, then note it in the bitmap of expressions to
6431 hoist. It makes no sense to hoist things which are computed
6432 in only one BB, and doing so tends to pessimize register
6433 allocation. One could increase this value to try harder
6434 to avoid any possible code expansion due to register
6435 allocation issues; however, experiments have shown that
6436 the vast majority of hoistable expressions are only movable
6437 from two successors, so raising this threshold is likely
6438 to nullify any benefit we get from code hoisting. */
6439 if (hoistable > 1)
6441 SET_BIT (hoist_exprs[bb->index], i);
6442 found = 1;
6446 /* If we found nothing to hoist, then quit now. */
6447 if (! found)
6449 free (domby);
6450 continue;
6453 /* Loop over all the hoistable expressions. */
6454 for (i = 0; i < hoist_exprs[bb->index]->n_bits; i++)
6456 /* We want to insert the expression into BB only once, so
6457 note when we've inserted it. */
6458 insn_inserted_p = 0;
6460 /* These tests should be the same as the tests above. */
6461 if (TEST_BIT (hoist_vbeout[bb->index], i))
6463 /* We've found a potentially hoistable expression, now
6464 we look at every block BB dominates to see if it
6465 computes the expression. */
6466 for (j = 0; j < domby_len; j++)
6468 dominated = domby[j];
6469 /* Ignore self dominance. */
6470 if (bb == dominated)
6471 continue;
6473 /* We've found a dominated block, now see if it computes
6474 the busy expression and whether or not moving that
6475 expression to the "beginning" of that block is safe. */
6476 if (!TEST_BIT (antloc[dominated->index], i))
6477 continue;
6479 /* The expression is computed in the dominated block and
6480 it would be safe to compute it at the start of the
6481 dominated block. Now we have to determine if the
6482 expression would reach the dominated block if it was
6483 placed at the end of BB. */
6484 if (hoist_expr_reaches_here_p (bb, i, dominated, NULL))
6486 struct expr *expr = index_map[i];
6487 struct occr *occr = expr->antic_occr;
6488 rtx insn;
6489 rtx set;
6491 /* Find the right occurrence of this expression. */
6492 while (occr && BLOCK_FOR_INSN (occr->insn) != dominated)
6493 occr = occr->next;
6495 /* Should never happen. */
6496 if (!occr)
6497 abort ();
6499 insn = occr->insn;
6501 set = single_set (insn);
6502 if (! set)
6503 abort ();
6505 /* Create a pseudo-reg to store the result of reaching
6506 expressions into. Get the mode for the new pseudo
6507 from the mode of the original destination pseudo. */
6508 if (expr->reaching_reg == NULL)
6509 expr->reaching_reg
6510 = gen_reg_rtx (GET_MODE (SET_DEST (set)));
6512 gcse_emit_move_after (expr->reaching_reg, SET_DEST (set), insn);
6513 delete_insn (insn);
6514 occr->deleted_p = 1;
6515 if (!insn_inserted_p)
6517 insert_insn_end_bb (index_map[i], bb, 0);
6518 insn_inserted_p = 1;
6524 free (domby);
6527 free (index_map);
6530 /* Top level routine to perform one code hoisting (aka unification) pass
6532 Return nonzero if a change was made. */
6534 static int
6535 one_code_hoisting_pass (void)
6537 int changed = 0;
6539 alloc_hash_table (max_cuid, &expr_hash_table, 0);
6540 compute_hash_table (&expr_hash_table);
6541 if (gcse_file)
6542 dump_hash_table (gcse_file, "Code Hoisting Expressions", &expr_hash_table);
6544 if (expr_hash_table.n_elems > 0)
6546 alloc_code_hoist_mem (last_basic_block, expr_hash_table.n_elems);
6547 compute_code_hoist_data ();
6548 hoist_code ();
6549 free_code_hoist_mem ();
6552 free_hash_table (&expr_hash_table);
6554 return changed;
6557 /* Here we provide the things required to do store motion towards
6558 the exit. In order for this to be effective, gcse also needed to
6559 be taught how to move a load when it is killed only by a store to itself.
6561 int i;
6562 float a[10];
6564 void foo(float scale)
6566 for (i=0; i<10; i++)
6567 a[i] *= scale;
6570 'i' is both loaded and stored to in the loop. Normally, gcse cannot move
6571 the load out since it's live around the loop, and stored at the bottom
6572 of the loop.
6574 The 'Load Motion' referred to and implemented in this file is
6575 an enhancement to gcse which when using edge based lcm, recognizes
6576 this situation and allows gcse to move the load out of the loop.
6578 Once gcse has hoisted the load, store motion can then push this
6579 load towards the exit, and we end up with no loads or stores of 'i'
6580 in the loop. */
6582 /* This will search the ldst list for a matching expression. If it
6583 doesn't find one, we create one and initialize it. */
6585 static struct ls_expr *
6586 ldst_entry (rtx x)
6588 int do_not_record_p = 0;
6589 struct ls_expr * ptr;
6590 unsigned int hash;
6592 hash = hash_expr_1 (x, GET_MODE (x), & do_not_record_p);
6594 for (ptr = pre_ldst_mems; ptr != NULL; ptr = ptr->next)
6595 if (ptr->hash_index == hash && expr_equiv_p (ptr->pattern, x))
6596 return ptr;
6598 ptr = xmalloc (sizeof (struct ls_expr));
6600 ptr->next = pre_ldst_mems;
6601 ptr->expr = NULL;
6602 ptr->pattern = x;
6603 ptr->pattern_regs = NULL_RTX;
6604 ptr->loads = NULL_RTX;
6605 ptr->stores = NULL_RTX;
6606 ptr->reaching_reg = NULL_RTX;
6607 ptr->invalid = 0;
6608 ptr->index = 0;
6609 ptr->hash_index = hash;
6610 pre_ldst_mems = ptr;
6612 return ptr;
6615 /* Free up an individual ldst entry. */
6617 static void
6618 free_ldst_entry (struct ls_expr * ptr)
6620 free_INSN_LIST_list (& ptr->loads);
6621 free_INSN_LIST_list (& ptr->stores);
6623 free (ptr);
6626 /* Free up all memory associated with the ldst list. */
6628 static void
6629 free_ldst_mems (void)
6631 while (pre_ldst_mems)
6633 struct ls_expr * tmp = pre_ldst_mems;
6635 pre_ldst_mems = pre_ldst_mems->next;
6637 free_ldst_entry (tmp);
6640 pre_ldst_mems = NULL;
6643 /* Dump debugging info about the ldst list. */
6645 static void
6646 print_ldst_list (FILE * file)
6648 struct ls_expr * ptr;
6650 fprintf (file, "LDST list: \n");
6652 for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
6654 fprintf (file, " Pattern (%3d): ", ptr->index);
6656 print_rtl (file, ptr->pattern);
6658 fprintf (file, "\n Loads : ");
6660 if (ptr->loads)
6661 print_rtl (file, ptr->loads);
6662 else
6663 fprintf (file, "(nil)");
6665 fprintf (file, "\n Stores : ");
6667 if (ptr->stores)
6668 print_rtl (file, ptr->stores);
6669 else
6670 fprintf (file, "(nil)");
6672 fprintf (file, "\n\n");
6675 fprintf (file, "\n");
6678 /* Returns 1 if X is in the list of ldst only expressions. */
6680 static struct ls_expr *
6681 find_rtx_in_ldst (rtx x)
6683 struct ls_expr * ptr;
6685 for (ptr = pre_ldst_mems; ptr != NULL; ptr = ptr->next)
6686 if (expr_equiv_p (ptr->pattern, x) && ! ptr->invalid)
6687 return ptr;
6689 return NULL;
6692 /* Assign each element of the list of mems a monotonically increasing value. */
6694 static int
6695 enumerate_ldsts (void)
6697 struct ls_expr * ptr;
6698 int n = 0;
6700 for (ptr = pre_ldst_mems; ptr != NULL; ptr = ptr->next)
6701 ptr->index = n++;
6703 return n;
6706 /* Return first item in the list. */
6708 static inline struct ls_expr *
6709 first_ls_expr (void)
6711 return pre_ldst_mems;
6714 /* Return the next item in the list after the specified one. */
6716 static inline struct ls_expr *
6717 next_ls_expr (struct ls_expr * ptr)
6719 return ptr->next;
6722 /* Load Motion for loads which only kill themselves. */
6724 /* Return true if x is a simple MEM operation, with no registers or
6725 side effects. These are the types of loads we consider for the
6726 ld_motion list; otherwise we let the usual aliasing take care of it. */
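/* Illustrative examples of the tests below: (mem:SI (symbol_ref "x"))
   qualifies as a simple MEM, whereas a volatile MEM, a BLKmode MEM, or
   a reference such as (mem:SI (plus (reg sp) (const_int 4))), which
   mentions the stack pointer, is rejected. */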
6728 static int
6729 simple_mem (rtx x)
6731 if (GET_CODE (x) != MEM)
6732 return 0;
6734 if (MEM_VOLATILE_P (x))
6735 return 0;
6737 if (GET_MODE (x) == BLKmode)
6738 return 0;
6740 /* If we are handling exceptions, we must be careful with memory references
6741 that may trap. If we are not, the behavior is undefined, so we may just
6742 continue. */
6743 if (flag_non_call_exceptions && may_trap_p (x))
6744 return 0;
6746 if (side_effects_p (x))
6747 return 0;
6749 /* Do not consider function arguments passed on stack. */
6750 if (reg_mentioned_p (stack_pointer_rtx, x))
6751 return 0;
6753 if (flag_float_store && FLOAT_MODE_P (GET_MODE (x)))
6754 return 0;
6756 return 1;
6759 /* Make sure there isn't a buried reference in this pattern anywhere.
6760 If there is, invalidate the entry for it since we're not capable
6761 of fixing it up just yet. We have to be sure we know about ALL
6762 loads since the aliasing code will allow all entries in the
6763 ld_motion list to not alias one another. If we miss a load, we will get
6764 the wrong value since gcse might common it and we won't know to
6765 fix it up. */
6767 static void
6768 invalidate_any_buried_refs (rtx x)
6770 const char * fmt;
6771 int i, j;
6772 struct ls_expr * ptr;
6774 /* Invalidate it in the list. */
6775 if (GET_CODE (x) == MEM && simple_mem (x))
6777 ptr = ldst_entry (x);
6778 ptr->invalid = 1;
6781 /* Recursively process the insn. */
6782 fmt = GET_RTX_FORMAT (GET_CODE (x));
6784 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
6786 if (fmt[i] == 'e')
6787 invalidate_any_buried_refs (XEXP (x, i));
6788 else if (fmt[i] == 'E')
6789 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
6790 invalidate_any_buried_refs (XVECEXP (x, i, j));
6794 /* Find all the 'simple' MEMs which are used in LOADs and STORES. Simple
6795 being defined as MEM loads and stores to symbols, with no side effects
6796 and no registers in the expression. For a MEM destination, we also
6797 check that the insn is still valid if we replace the destination with a
6798 REG, as is done in update_ld_motion_stores. If there are any uses/defs
6799 which don't match these criteria, they are invalidated and trimmed out
6800 later. */
6802 static void
6803 compute_ld_motion_mems (void)
6805 struct ls_expr * ptr;
6806 basic_block bb;
6807 rtx insn;
6809 pre_ldst_mems = NULL;
6811 FOR_EACH_BB (bb)
6813 for (insn = BB_HEAD (bb);
6814 insn && insn != NEXT_INSN (BB_END (bb));
6815 insn = NEXT_INSN (insn))
6817 if (INSN_P (insn))
6819 if (GET_CODE (PATTERN (insn)) == SET)
6821 rtx src = SET_SRC (PATTERN (insn));
6822 rtx dest = SET_DEST (PATTERN (insn));
6824 /* Check for a simple LOAD... */
6825 if (GET_CODE (src) == MEM && simple_mem (src))
6827 ptr = ldst_entry (src);
6828 if (GET_CODE (dest) == REG)
6829 ptr->loads = alloc_INSN_LIST (insn, ptr->loads);
6830 else
6831 ptr->invalid = 1;
6833 else
6835 /* Make sure there isn't a buried load somewhere. */
6836 invalidate_any_buried_refs (src);
6839 /* Check for stores. Don't worry about aliased ones, they
6840 will block any movement we might do later. We only care
6841 about this exact pattern since those are the only
6842 circumstances in which we will ignore the aliasing info. */
6843 if (GET_CODE (dest) == MEM && simple_mem (dest))
6845 ptr = ldst_entry (dest);
6847 if (GET_CODE (src) != MEM
6848 && GET_CODE (src) != ASM_OPERANDS
6849 /* Check for REG manually since want_to_gcse_p
6850 returns 0 for all REGs. */
6851 && can_assign_to_reg_p (src))
6852 ptr->stores = alloc_INSN_LIST (insn, ptr->stores);
6853 else
6854 ptr->invalid = 1;
6857 else
6858 invalidate_any_buried_refs (PATTERN (insn));
6864 /* Remove any references that have been either invalidated or are not in the
6865 expression list for pre gcse. */
6867 static void
6868 trim_ld_motion_mems (void)
6870 struct ls_expr * * last = & pre_ldst_mems;
6871 struct ls_expr * ptr = pre_ldst_mems;
6873 while (ptr != NULL)
6875 struct expr * expr;
6877 /* Delete if entry has been made invalid. */
6878 if (! ptr->invalid)
6880 /* Delete if we cannot find this mem in the expression list. */
6881 unsigned int hash = ptr->hash_index % expr_hash_table.size;
6883 for (expr = expr_hash_table.table[hash];
6884 expr != NULL;
6885 expr = expr->next_same_hash)
6886 if (expr_equiv_p (expr->expr, ptr->pattern))
6887 break;
6889 else
6890 expr = (struct expr *) 0;
6892 if (expr)
6894 /* Set the expression field if we are keeping it. */
6895 ptr->expr = expr;
6896 last = & ptr->next;
6897 ptr = ptr->next;
6899 else
6901 *last = ptr->next;
6902 free_ldst_entry (ptr);
6903 ptr = * last;
6907 /* Show the world what we've found. */
6908 if (gcse_file && pre_ldst_mems != NULL)
6909 print_ldst_list (gcse_file);
6912 /* This routine will take an expression which we are replacing with
6913 a reaching register, and update any stores that are needed if
6914 that expression is in the ld_motion list. Stores are updated by
6915 copying their SRC to the reaching register, and then storing
6916 the reaching register into the store location. This keeps the
6917 correct value in the reaching register for the loads. */
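/* Schematically (a sketch, with t standing for the register that
   replaced the loads of 'i'): a store

	i = j + 1;

   is rewritten as

	t = j + 1;
	i = t;

   so the value cached in t stays correct for the moved loads. */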
6919 static void
6920 update_ld_motion_stores (struct expr * expr)
6922 struct ls_expr * mem_ptr;
6924 if ((mem_ptr = find_rtx_in_ldst (expr->expr)))
6926 /* We can try to find just the REACHED stores, but it shouldn't
6927 matter to set the reaching reg everywhere... some might be
6928 dead and should be eliminated later. */
6930 /* We replace (set mem expr) with (set reg expr) (set mem reg)
6931 where reg is the reaching reg used in the load. We checked in
6932 compute_ld_motion_mems that we can replace (set mem expr) with
6933 (set reg expr) in that insn. */
6934 rtx list = mem_ptr->stores;
6936 for ( ; list != NULL_RTX; list = XEXP (list, 1))
6938 rtx insn = XEXP (list, 0);
6939 rtx pat = PATTERN (insn);
6940 rtx src = SET_SRC (pat);
6941 rtx reg = expr->reaching_reg;
6942 rtx copy, new;
6944 /* If we've already copied it, continue. */
6945 if (expr->reaching_reg == src)
6946 continue;
6948 if (gcse_file)
6950 fprintf (gcse_file, "PRE: store updated with reaching reg ");
6951 print_rtl (gcse_file, expr->reaching_reg);
6952 fprintf (gcse_file, ":\n ");
6953 print_inline_rtx (gcse_file, insn, 8);
6954 fprintf (gcse_file, "\n");
6957 copy = gen_move_insn ( reg, copy_rtx (SET_SRC (pat)));
6958 new = emit_insn_before (copy, insn);
6959 record_one_set (REGNO (reg), new);
6960 SET_SRC (pat) = reg;
6962 /* Un-recognize this pattern since it's probably different now. */
6963 INSN_CODE (insn) = -1;
6964 gcse_create_count++;
6969 /* Store motion code. */
6971 #define ANTIC_STORE_LIST(x) ((x)->loads)
6972 #define AVAIL_STORE_LIST(x) ((x)->stores)
6973 #define LAST_AVAIL_CHECK_FAILURE(x) ((x)->reaching_reg)
6975 /* This is used to communicate the target bitvector we want to use in the
6976 reg_set_info routine when called via the note_stores mechanism. */
6977 static int * regvec;
6979 /* And current insn, for the same routine. */
6980 static rtx compute_store_table_current_insn;
6982 /* Used in computing the reverse edge graph bit vectors. */
6983 static sbitmap * st_antloc;
6985 /* Global holding the number of store expressions we are dealing with. */
6986 static int num_stores;
6988 /* Check to see if we need to mark a register set. Called from
6989 note_stores. */
6991 static void
6992 reg_set_info (rtx dest, rtx setter ATTRIBUTE_UNUSED,
6993 void *data)
6995 sbitmap bb_reg = data;
6997 if (GET_CODE (dest) == SUBREG)
6998 dest = SUBREG_REG (dest);
7000 if (GET_CODE (dest) == REG)
7002 regvec[REGNO (dest)] = INSN_UID (compute_store_table_current_insn);
7003 if (bb_reg)
7004 SET_BIT (bb_reg, REGNO (dest));
7008 /* Clear any mark that says that this insn sets dest. Called from
7009 note_stores. */
7011 static void
7012 reg_clear_last_set (rtx dest, rtx setter ATTRIBUTE_UNUSED,
7013 void *data)
7015 int *dead_vec = data;
7017 if (GET_CODE (dest) == SUBREG)
7018 dest = SUBREG_REG (dest);
7020 if (GET_CODE (dest) == REG
7021 && dead_vec[REGNO (dest)] == INSN_UID (compute_store_table_current_insn))
7022 dead_vec[REGNO (dest)] = 0;
7025 /* Return false if some of the registers in list X are killed
7026 due to a set of registers recorded in array REGS_SET. */
7028 static bool
7029 store_ops_ok (rtx x, int *regs_set)
7031 rtx reg;
7033 for (; x; x = XEXP (x, 1))
7035 reg = XEXP (x, 0);
7036 if (regs_set[REGNO (reg)])
7037 return false;
7040 return true;
7043 /* Returns a list of registers mentioned in X. */
7044 static rtx
7045 extract_mentioned_regs (rtx x)
7047 return extract_mentioned_regs_helper (x, NULL_RTX);
7050 /* Helper for extract_mentioned_regs; ACCUM is used to accumulate used
7051 registers. */
7052 static rtx
7053 extract_mentioned_regs_helper (rtx x, rtx accum)
7055 int i;
7056 enum rtx_code code;
7057 const char * fmt;
7059 /* Repeat is used to turn tail-recursion into iteration. */
7060 repeat:
7062 if (x == 0)
7063 return accum;
7065 code = GET_CODE (x);
7066 switch (code)
7068 case REG:
7069 return alloc_EXPR_LIST (0, x, accum);
7071 case MEM:
7072 x = XEXP (x, 0);
7073 goto repeat;
7075 case PRE_DEC:
7076 case PRE_INC:
7077 case POST_DEC:
7078 case POST_INC:
7079 /* We do not run this function with arguments having side effects. */
7080 abort ();
7082 case PC:
7083 case CC0: /*FIXME*/
7084 case CONST:
7085 case CONST_INT:
7086 case CONST_DOUBLE:
7087 case CONST_VECTOR:
7088 case SYMBOL_REF:
7089 case LABEL_REF:
7090 case ADDR_VEC:
7091 case ADDR_DIFF_VEC:
7092 return accum;
7094 default:
7095 break;
7098 i = GET_RTX_LENGTH (code) - 1;
7099 fmt = GET_RTX_FORMAT (code);
7101 for (; i >= 0; i--)
7103 if (fmt[i] == 'e')
7105 rtx tem = XEXP (x, i);
7107 /* If we are about to do the last recursive call
7108 needed at this level, change it into iteration. */
7109 if (i == 0)
7111 x = tem;
7112 goto repeat;
7115 accum = extract_mentioned_regs_helper (tem, accum);
7117 else if (fmt[i] == 'E')
7119 int j;
7121 for (j = 0; j < XVECLEN (x, i); j++)
7122 accum = extract_mentioned_regs_helper (XVECEXP (x, i, j), accum);
7126 return accum;
7129 /* Determine whether INSN is a MEM store pattern that we will consider moving.
7130 REGS_SET_BEFORE is bitmap of registers set before (and including) the
7131 current insn, REGS_SET_AFTER is bitmap of registers set after (and
7132 including) the insn in this basic block. We must be passing through BB from
7133 head to end, as we are using this fact to speed things up.
7135 The results are stored this way:
7137 -- the first anticipatable expression is added into ANTIC_STORE_LIST
7138 -- if the processed expression is not anticipatable, NULL_RTX is added
7139 there instead, so that we can use it as indicator that no further
7140 expression of this type may be anticipatable
7141 -- if the expression is available, it is added as head of AVAIL_STORE_LIST;
7142 consequently, all of them but this head are dead and may be deleted.
7143 -- if the expression is not available, the insn due to that it fails to be
7144 available is stored in reaching_reg.
7146 Things are complicated a bit by the fact that there may already be stores
7147 to the same MEM from other blocks; also, the caller must take care of the
7148 necessary cleanup of the temporary markers after the end of the basic block. */
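/* An illustrative sketch of the bookkeeping (invented code): in a block

	*p = x;		anticipatable: nothing earlier in the block kills it
	y = *p;		a load of *p, which kills the availability of the
			store above
	*p = z;		available: nothing after it in the block kills it

   only the first store to *p is anticipatable and only the last one is
   available at the block end. */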
7151 static void
7152 find_moveable_store (rtx insn, int *regs_set_before, int *regs_set_after)
7154 struct ls_expr * ptr;
7155 rtx dest, set, tmp;
7156 int check_anticipatable, check_available;
7157 basic_block bb = BLOCK_FOR_INSN (insn);
7159 set = single_set (insn);
7160 if (!set)
7161 return;
7163 dest = SET_DEST (set);
7165 if (GET_CODE (dest) != MEM || MEM_VOLATILE_P (dest)
7166 || GET_MODE (dest) == BLKmode)
7167 return;
7169 if (side_effects_p (dest))
7170 return;
7172 /* If we are handling exceptions, we must be careful with memory references
7173 that may trap. If we are not, the behavior is undefined, so we may just
7174 continue. */
7175 if (flag_non_call_exceptions && may_trap_p (dest))
7176 return;
7178 /* Even if the destination cannot trap, the source may. In this case we'd
7179 need to handle updating the REG_EH_REGION note. */
7180 if (find_reg_note (insn, REG_EH_REGION, NULL_RTX))
7181 return;
7183 ptr = ldst_entry (dest);
7184 if (!ptr->pattern_regs)
7185 ptr->pattern_regs = extract_mentioned_regs (dest);
7187 /* Do not check for anticipatability if we either found one anticipatable
7188 store already, or tested for one and found out that it was killed. */
7189 check_anticipatable = 0;
7190 if (!ANTIC_STORE_LIST (ptr))
7191 check_anticipatable = 1;
7192 else
7194 tmp = XEXP (ANTIC_STORE_LIST (ptr), 0);
7195 if (tmp != NULL_RTX
7196 && BLOCK_FOR_INSN (tmp) != bb)
7197 check_anticipatable = 1;
7199 if (check_anticipatable)
7201 if (store_killed_before (dest, ptr->pattern_regs, insn, bb, regs_set_before))
7202 tmp = NULL_RTX;
7203 else
7204 tmp = insn;
7205 ANTIC_STORE_LIST (ptr) = alloc_INSN_LIST (tmp,
7206 ANTIC_STORE_LIST (ptr));
7209 /* It is not necessary to check whether the store is available if we did
7210 it successfully before; if we failed before, do not bother to check
7211 until we reach the insn that caused us to fail. */
7212 check_available = 0;
7213 if (!AVAIL_STORE_LIST (ptr))
7214 check_available = 1;
7215 else
7217 tmp = XEXP (AVAIL_STORE_LIST (ptr), 0);
7218 if (BLOCK_FOR_INSN (tmp) != bb)
7219 check_available = 1;
7221 if (check_available)
7223 /* Check that we have already reached the insn at which the check
7224 failed last time. */
7225 if (LAST_AVAIL_CHECK_FAILURE (ptr))
7227 for (tmp = BB_END (bb);
7228 tmp != insn && tmp != LAST_AVAIL_CHECK_FAILURE (ptr);
7229 tmp = PREV_INSN (tmp))
7230 continue;
7231 if (tmp == insn)
7232 check_available = 0;
7234 else
7235 check_available = store_killed_after (dest, ptr->pattern_regs, insn,
7236 bb, regs_set_after,
7237 &LAST_AVAIL_CHECK_FAILURE (ptr));
7239 if (!check_available)
7240 AVAIL_STORE_LIST (ptr) = alloc_INSN_LIST (insn, AVAIL_STORE_LIST (ptr));
7243 /* Find available and anticipatable stores. */
7245 static int
7246 compute_store_table (void)
7248 int ret;
7249 basic_block bb;
7250 unsigned regno;
7251 rtx insn, pat, tmp;
7252 int *last_set_in, *already_set;
7253 struct ls_expr * ptr, **prev_next_ptr_ptr;
7255 max_gcse_regno = max_reg_num ();
7257 reg_set_in_block = sbitmap_vector_alloc (last_basic_block,
7258 max_gcse_regno);
7259 sbitmap_vector_zero (reg_set_in_block, last_basic_block);
7260 pre_ldst_mems = 0;
7261 last_set_in = xcalloc (max_gcse_regno, sizeof (int));
7262 already_set = xmalloc (sizeof (int) * max_gcse_regno);
7264 /* Find all the stores we care about. */
7265 FOR_EACH_BB (bb)
7267 /* First compute the registers set in this block. */
7268 regvec = last_set_in;
7270 for (insn = BB_HEAD (bb);
7271 insn != NEXT_INSN (BB_END (bb));
7272 insn = NEXT_INSN (insn))
7274 if (! INSN_P (insn))
7275 continue;
7277 if (GET_CODE (insn) == CALL_INSN)
7279 bool clobbers_all = false;
7280 #ifdef NON_SAVING_SETJMP
7281 if (NON_SAVING_SETJMP
7282 && find_reg_note (insn, REG_SETJMP, NULL_RTX))
7283 clobbers_all = true;
7284 #endif
7286 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7287 if (clobbers_all
7288 || TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
7290 last_set_in[regno] = INSN_UID (insn);
7291 SET_BIT (reg_set_in_block[bb->index], regno);
7295 pat = PATTERN (insn);
7296 compute_store_table_current_insn = insn;
7297 note_stores (pat, reg_set_info, reg_set_in_block[bb->index]);
7300 /* Now find the stores. */
7301 memset (already_set, 0, sizeof (int) * max_gcse_regno);
7302 regvec = already_set;
7303 for (insn = BB_HEAD (bb);
7304 insn != NEXT_INSN (BB_END (bb));
7305 insn = NEXT_INSN (insn))
7307 if (! INSN_P (insn))
7308 continue;
7310 if (GET_CODE (insn) == CALL_INSN)
7312 bool clobbers_all = false;
7313 #ifdef NON_SAVING_SETJMP
7314 if (NON_SAVING_SETJMP
7315 && find_reg_note (insn, REG_SETJMP, NULL_RTX))
7316 clobbers_all = true;
7317 #endif
7319 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7320 if (clobbers_all
7321 || TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
7322 already_set[regno] = 1;
7325 pat = PATTERN (insn);
7326 note_stores (pat, reg_set_info, NULL);
7328 /* Now that we've marked regs, look for stores. */
7329 find_moveable_store (insn, already_set, last_set_in);
7331 /* Unmark regs that are no longer set. */
7332 compute_store_table_current_insn = insn;
7333 note_stores (pat, reg_clear_last_set, last_set_in);
7334 if (GET_CODE (insn) == CALL_INSN)
7336 bool clobbers_all = false;
7337 #ifdef NON_SAVING_SETJMP
7338 if (NON_SAVING_SETJMP
7339 && find_reg_note (insn, REG_SETJMP, NULL_RTX))
7340 clobbers_all = true;
7341 #endif
7343 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7344 if ((clobbers_all
7345 || TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
7346 && last_set_in[regno] == INSN_UID (insn))
7347 last_set_in[regno] = 0;
7351 #ifdef ENABLE_CHECKING
7352 /* last_set_in should now be all-zero. */
7353 for (regno = 0; regno < max_gcse_regno; regno++)
7354 if (last_set_in[regno] != 0)
7355 abort ();
7356 #endif
7358 /* Clear temporary marks. */
7359 for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
7361 LAST_AVAIL_CHECK_FAILURE (ptr) = NULL_RTX;
7362 if (ANTIC_STORE_LIST (ptr)
7363 && (tmp = XEXP (ANTIC_STORE_LIST (ptr), 0)) == NULL_RTX)
7364 ANTIC_STORE_LIST (ptr) = XEXP (ANTIC_STORE_LIST (ptr), 1);
7368 /* Remove the stores that are not available anywhere, as there will
7369 be no opportunity to optimize them. */
7370 for (ptr = pre_ldst_mems, prev_next_ptr_ptr = &pre_ldst_mems;
7371 ptr != NULL;
7372 ptr = *prev_next_ptr_ptr)
7374 if (!AVAIL_STORE_LIST (ptr))
7376 *prev_next_ptr_ptr = ptr->next;
7377 free_ldst_entry (ptr);
7379 else
7380 prev_next_ptr_ptr = &ptr->next;
7383 ret = enumerate_ldsts ();
7385 if (gcse_file)
7387 fprintf (gcse_file, "ST_avail and ST_antic (shown under loads..)\n");
7388 print_ldst_list (gcse_file);
7391 free (last_set_in);
7392 free (already_set);
7393 return ret;
7396 /* Check to see if the load X is aliased with STORE_PATTERN.
7397 AFTER is true if we are checking the case when STORE_PATTERN occurs
7398 after X. */
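/* Put differently: a store occurring after the load is a write-after-read
   (anti) dependence, while a store occurring before the load is a
   read-after-write (true) dependence, which is what the two calls below
   check. */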
7400 static bool
7401 load_kills_store (rtx x, rtx store_pattern, int after)
7403 if (after)
7404 return anti_dependence (x, store_pattern);
7405 else
7406 return true_dependence (store_pattern, GET_MODE (store_pattern), x,
7407 rtx_addr_varies_p);
7410 /* Go through the entire insn X, looking for any loads which might alias
7411 STORE_PATTERN. Return true if found.
7412 AFTER is true if we are checking the case when STORE_PATTERN occurs
7413 after the insn X. */
7415 static bool
7416 find_loads (rtx x, rtx store_pattern, int after)
7418 const char * fmt;
7419 int i, j;
7420 int ret = false;
7422 if (!x)
7423 return false;
7425 if (GET_CODE (x) == SET)
7426 x = SET_SRC (x);
7428 if (GET_CODE (x) == MEM)
7430 if (load_kills_store (x, store_pattern, after))
7431 return true;
7434 /* Recursively process the insn. */
7435 fmt = GET_RTX_FORMAT (GET_CODE (x));
7437 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0 && !ret; i--)
7439 if (fmt[i] == 'e')
7440 ret |= find_loads (XEXP (x, i), store_pattern, after);
7441 else if (fmt[i] == 'E')
7442 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
7443 ret |= find_loads (XVECEXP (x, i, j), store_pattern, after);
7445 return ret;
7448 /* Check if INSN kills the store pattern X (is aliased with it).
7449 AFTER is true if we are checking the case when store X occurs
7450 after the insn. Return true if it does. */
7452 static bool
7453 store_killed_in_insn (rtx x, rtx x_regs, rtx insn, int after)
7455 rtx reg, base, note;
7457 if (!INSN_P (insn))
7458 return false;
7460 if (GET_CODE (insn) == CALL_INSN)
7462 /* A normal or pure call might read from the pattern,
7463 but a const call will not. */
7464 if (! CONST_OR_PURE_CALL_P (insn) || pure_call_p (insn))
7465 return true;
7467 /* But even a const call reads its parameters. Check whether the
7468 base of some of the registers used in the mem is the stack pointer. */
7469 for (reg = x_regs; reg; reg = XEXP (reg, 1))
7471 base = find_base_term (XEXP (reg, 0));
7472 if (!base
7473 || (GET_CODE (base) == ADDRESS
7474 && GET_MODE (base) == Pmode
7475 && XEXP (base, 0) == stack_pointer_rtx))
7476 return true;
7479 return false;
7482 if (GET_CODE (PATTERN (insn)) == SET)
7484 rtx pat = PATTERN (insn);
7485 rtx dest = SET_DEST (pat);
7487 if (GET_CODE (dest) == SIGN_EXTRACT
7488 || GET_CODE (dest) == ZERO_EXTRACT)
7489 dest = XEXP (dest, 0);
7491 /* Check for memory stores to aliased objects. */
7492 if (GET_CODE (dest) == MEM
7493 && !expr_equiv_p (dest, x))
7495 if (after)
7497 if (output_dependence (dest, x))
7498 return true;
7500 else
7502 if (output_dependence (x, dest))
7503 return true;
7506 if (find_loads (SET_SRC (pat), x, after))
7507 return true;
7509 else if (find_loads (PATTERN (insn), x, after))
7510 return true;
7512 /* If this insn has a REG_EQUAL or REG_EQUIV note referencing a memory
7513 location aliased with X, then this insn kills X. */
7514 note = find_reg_equal_equiv_note (insn);
7515 if (! note)
7516 return false;
7517 note = XEXP (note, 0);
7519 /* However, if the note represents a must alias rather than a may
7520 alias relationship, then it does not kill X. */
7521 if (expr_equiv_p (note, x))
7522 return false;
7524 /* See if there are any aliased loads in the note. */
7525 return find_loads (note, x, after);
7528 /* Returns true if the expression X is loaded or clobbered on or after INSN
7529 within basic block BB. REGS_SET_AFTER is a bitmap of registers set in
7530 or after the insn. X_REGS is the list of registers mentioned in X. If
7531 the store is killed, return in FAIL_INSN the last insn that kills it. */
7533 static bool
7534 store_killed_after (rtx x, rtx x_regs, rtx insn, basic_block bb,
7535 int *regs_set_after, rtx *fail_insn)
7537 rtx last = BB_END (bb), act;
7539 if (!store_ops_ok (x_regs, regs_set_after))
7541 /* We do not know where it will happen. */
7542 if (fail_insn)
7543 *fail_insn = NULL_RTX;
7544 return true;
7547 /* Scan from the end, so that fail_insn is determined correctly. */
7548 for (act = last; act != PREV_INSN (insn); act = PREV_INSN (act))
7549 if (store_killed_in_insn (x, x_regs, act, false))
7551 if (fail_insn)
7552 *fail_insn = act;
7553 return true;
7556 return false;
7559 /* Returns true if the expression X is loaded or clobbered on or before INSN
7560 within basic block BB. X_REGS is the list of registers mentioned in X.
7561 REGS_SET_BEFORE is a bitmap of registers set before or in this insn. */
7562 static bool
7563 store_killed_before (rtx x, rtx x_regs, rtx insn, basic_block bb,
7564 int *regs_set_before)
7566 rtx first = BB_HEAD (bb);
7568 if (!store_ops_ok (x_regs, regs_set_before))
7569 return true;
7571 for ( ; insn != PREV_INSN (first); insn = PREV_INSN (insn))
7572 if (store_killed_in_insn (x, x_regs, insn, true))
7573 return true;
7575 return false;
7578 /* Fill in available, anticipatable, transparent and kill vectors in
7579 STORE_DATA, based on lists of available and anticipatable stores. */
7580 static void
7581 build_store_vectors (void)
7583 basic_block bb;
7584 int *regs_set_in_block;
7585 rtx insn, st;
7586 struct ls_expr * ptr;
7587 unsigned regno;
7589 /* Build the gen_vector. This is any store in the table which is not killed
7590 by aliasing later in its block. */
7591 ae_gen = sbitmap_vector_alloc (last_basic_block, num_stores);
7592 sbitmap_vector_zero (ae_gen, last_basic_block);
7594 st_antloc = sbitmap_vector_alloc (last_basic_block, num_stores);
7595 sbitmap_vector_zero (st_antloc, last_basic_block);
7597 for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
7599 for (st = AVAIL_STORE_LIST (ptr); st != NULL; st = XEXP (st, 1))
7601 insn = XEXP (st, 0);
7602 bb = BLOCK_FOR_INSN (insn);
7604 /* If we've already seen an available expression in this block,
7605 we can delete this one (it occurs earlier in the block). We'll
7606 copy the SRC expression to an unused register in case there
7607 are any side effects. */
7608 if (TEST_BIT (ae_gen[bb->index], ptr->index))
7610 rtx r = gen_reg_rtx (GET_MODE (ptr->pattern));
7611 if (gcse_file)
7612 fprintf (gcse_file, "Removing redundant store:\n");
7613 replace_store_insn (r, XEXP (st, 0), bb, ptr);
7614 continue;
7616 SET_BIT (ae_gen[bb->index], ptr->index);
7619 for (st = ANTIC_STORE_LIST (ptr); st != NULL; st = XEXP (st, 1))
7621 insn = XEXP (st, 0);
7622 bb = BLOCK_FOR_INSN (insn);
7623 SET_BIT (st_antloc[bb->index], ptr->index);
7627 ae_kill = sbitmap_vector_alloc (last_basic_block, num_stores);
7628 sbitmap_vector_zero (ae_kill, last_basic_block);
7630 transp = sbitmap_vector_alloc (last_basic_block, num_stores);
7631 sbitmap_vector_zero (transp, last_basic_block);
7632 regs_set_in_block = xmalloc (sizeof (int) * max_gcse_regno);
7634 FOR_EACH_BB (bb)
7636 for (regno = 0; regno < max_gcse_regno; regno++)
7637 regs_set_in_block[regno] = TEST_BIT (reg_set_in_block[bb->index], regno);
7639 for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
7641 if (store_killed_after (ptr->pattern, ptr->pattern_regs, BB_HEAD (bb),
7642 bb, regs_set_in_block, NULL))
7644 /* It should not be necessary to consider the expression
7645 killed if it is both anticipatable and available. */
7646 if (!TEST_BIT (st_antloc[bb->index], ptr->index)
7647 || !TEST_BIT (ae_gen[bb->index], ptr->index))
7648 SET_BIT (ae_kill[bb->index], ptr->index);
7650 else
7651 SET_BIT (transp[bb->index], ptr->index);
7655 free (regs_set_in_block);
7657 if (gcse_file)
7659 dump_sbitmap_vector (gcse_file, "st_antloc", "", st_antloc, last_basic_block);
7660 dump_sbitmap_vector (gcse_file, "st_kill", "", ae_kill, last_basic_block);
7661 dump_sbitmap_vector (gcse_file, "Transpt", "", transp, last_basic_block);
7662 dump_sbitmap_vector (gcse_file, "st_avloc", "", ae_gen, last_basic_block);
7666 /* Insert an instruction at the beginning of a basic block, and update
7667 the BB_HEAD if needed. */
7669 static void
7670 insert_insn_start_bb (rtx insn, basic_block bb)
7672 /* Insert at start of successor block. */
7673 rtx prev = PREV_INSN (BB_HEAD (bb));
7674 rtx before = BB_HEAD (bb);
7675 while (before != 0)
7677 if (GET_CODE (before) != CODE_LABEL
7678 && (GET_CODE (before) != NOTE
7679 || NOTE_LINE_NUMBER (before) != NOTE_INSN_BASIC_BLOCK))
7680 break;
7681 prev = before;
7682 if (prev == BB_END (bb))
7683 break;
7684 before = NEXT_INSN (before);
7687 insn = emit_insn_after (insn, prev);
7689 if (gcse_file)
7691 fprintf (gcse_file, "STORE_MOTION insert store at start of BB %d:\n",
7692 bb->index);
7693 print_inline_rtx (gcse_file, insn, 6);
7694 fprintf (gcse_file, "\n");
/* This routine will insert a store on an edge.  EXPR is the ldst entry for
   the memory reference, and E is the edge to insert it on.  Returns nonzero
   if an edge insertion was performed.  */

static int
insert_store (struct ls_expr * expr, edge e)
{
  rtx reg, insn;
  basic_block bb;
  edge tmp;

  /* We did all the deletes before this insert, so if we didn't delete a
     store, then we haven't set the reaching reg yet either.  */
  if (expr->reaching_reg == NULL_RTX)
    return 0;

  if (e->flags & EDGE_FAKE)
    return 0;

  reg = expr->reaching_reg;
  insn = gen_move_insn (copy_rtx (expr->pattern), reg);

  /* If we are inserting this expression on ALL predecessor edges of a BB,
     insert it at the start of the BB, and reset the insert bits on the
     remaining edges so we don't insert it twice.  */
  bb = e->dest;
  for (tmp = e->dest->pred; tmp ; tmp = tmp->pred_next)
    if (!(tmp->flags & EDGE_FAKE))
      {
        int index = EDGE_INDEX (edge_list, tmp->src, tmp->dest);
        if (index == EDGE_INDEX_NO_EDGE)
          abort ();
        if (! TEST_BIT (pre_insert_map[index], expr->index))
          break;
      }

  /* If tmp is NULL, we found an insertion on every edge, blank the
     insertion vector for these edges, and insert at the start of the BB.  */
  if (!tmp && bb != EXIT_BLOCK_PTR)
    {
      for (tmp = e->dest->pred; tmp ; tmp = tmp->pred_next)
        {
          int index = EDGE_INDEX (edge_list, tmp->src, tmp->dest);
          RESET_BIT (pre_insert_map[index], expr->index);
        }
      insert_insn_start_bb (insn, bb);
      return 0;
    }

  /* We can't insert on this edge, so we'll insert at the head of the
     successor's block.  See Morgan, sec 10.5.  */
  if ((e->flags & EDGE_ABNORMAL) == EDGE_ABNORMAL)
    {
      insert_insn_start_bb (insn, bb);
      return 0;
    }

  insert_insn_on_edge (insn, e);

  if (gcse_file)
    {
      fprintf (gcse_file, "STORE_MOTION insert insn on edge (%d, %d):\n",
               e->src->index, e->dest->index);
      print_inline_rtx (gcse_file, insn, 6);
      fprintf (gcse_file, "\n");
    }

  return 1;
}
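
/* Illustrative sketch (hypothetical CFG, not part of the pass): if blocks
   B1 and B2 both flow into B3 and LCM requests the same store on both
   edges B1->B3 and B2->B3, the predecessor scan in insert_store notices
   that every real predecessor edge of B3 is marked, clears those bits in
   pre_insert_map, and emits one store at the start of B3 instead of two
   edge copies.  */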
/* Remove any REG_EQUAL or REG_EQUIV notes containing a reference to the
   memory location in SMEXPR set in basic block BB.

   This could be rather expensive.  */

static void
remove_reachable_equiv_notes (basic_block bb, struct ls_expr *smexpr)
{
  edge *stack = xmalloc (sizeof (edge) * n_basic_blocks), act;
  sbitmap visited = sbitmap_alloc (last_basic_block);
  int stack_top = 0;
  rtx last, insn, note;
  rtx mem = smexpr->pattern;

  sbitmap_zero (visited);
  act = bb->succ;

  while (1)
    {
      if (!act)
        {
          if (!stack_top)
            {
              free (stack);
              sbitmap_free (visited);
              return;
            }
          act = stack[--stack_top];
        }
      bb = act->dest;

      if (bb == EXIT_BLOCK_PTR
          || TEST_BIT (visited, bb->index))
        {
          act = act->succ_next;
          continue;
        }
      SET_BIT (visited, bb->index);

      if (TEST_BIT (st_antloc[bb->index], smexpr->index))
        {
          for (last = ANTIC_STORE_LIST (smexpr);
               BLOCK_FOR_INSN (XEXP (last, 0)) != bb;
               last = XEXP (last, 1))
            continue;
          last = XEXP (last, 0);
        }
      else
        last = NEXT_INSN (BB_END (bb));

      for (insn = BB_HEAD (bb); insn != last; insn = NEXT_INSN (insn))
        if (INSN_P (insn))
          {
            note = find_reg_equal_equiv_note (insn);
            if (!note || !expr_equiv_p (XEXP (note, 0), mem))
              continue;

            if (gcse_file)
              fprintf (gcse_file, "STORE_MOTION drop REG_EQUAL note at insn %d:\n",
                       INSN_UID (insn));
            remove_note (insn, note);
          }
      act = act->succ_next;
      if (bb->succ)
        {
          if (act)
            stack[stack_top++] = act;
          act = bb->succ;
        }
    }
}
/* This routine will replace a store with a SET to a specified register.  */

static void
replace_store_insn (rtx reg, rtx del, basic_block bb, struct ls_expr *smexpr)
{
  rtx insn, mem, note, set, ptr;

  mem = smexpr->pattern;
  insn = gen_move_insn (reg, SET_SRC (single_set (del)));
  insn = emit_insn_after (insn, del);

  if (gcse_file)
    {
      fprintf (gcse_file,
               "STORE_MOTION delete insn in BB %d:\n ", bb->index);
      print_inline_rtx (gcse_file, del, 6);
      fprintf (gcse_file, "\nSTORE MOTION replaced with insn:\n ");
      print_inline_rtx (gcse_file, insn, 6);
      fprintf (gcse_file, "\n");
    }

  for (ptr = ANTIC_STORE_LIST (smexpr); ptr; ptr = XEXP (ptr, 1))
    if (XEXP (ptr, 0) == del)
      {
        XEXP (ptr, 0) = insn;
        break;
      }
  delete_insn (del);

  /* Now we must handle REG_EQUAL notes whose contents are equal to the mem;
     they are no longer accurate when reached by this definition, so drop
     them.  */
  for (; insn != NEXT_INSN (BB_END (bb)); insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      {
        set = single_set (insn);
        if (!set)
          continue;
        if (expr_equiv_p (SET_DEST (set), mem))
          return;
        note = find_reg_equal_equiv_note (insn);
        if (!note || !expr_equiv_p (XEXP (note, 0), mem))
          continue;

        if (gcse_file)
          fprintf (gcse_file, "STORE_MOTION drop REG_EQUAL note at insn %d:\n",
                   INSN_UID (insn));
        remove_note (insn, note);
      }
  remove_reachable_equiv_notes (bb, smexpr);
}
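
/* For illustration only (hypothetical RTL, not from a real dump): after a
   store `mem = x' is deleted above, a downstream insn still carrying a
   note such as

     (expr_list:REG_EQUAL (mem:SI (reg/f:SI sp)) ...)

   would claim its destination register equals the memory contents, which
   may no longer hold once the store has been moved; such notes are removed
   both in this block and, via remove_reachable_equiv_notes, in every block
   reachable from it.  */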
/* Delete a store, but copy the value that would have been stored into
   the reaching_reg for later storing.  */

static void
delete_store (struct ls_expr * expr, basic_block bb)
{
  rtx reg, i, del;

  if (expr->reaching_reg == NULL_RTX)
    expr->reaching_reg = gen_reg_rtx (GET_MODE (expr->pattern));

  reg = expr->reaching_reg;

  for (i = AVAIL_STORE_LIST (expr); i; i = XEXP (i, 1))
    {
      del = XEXP (i, 0);
      if (BLOCK_FOR_INSN (del) == bb)
        {
          /* We know there is only one since we deleted redundant
             ones during the available computation.  */
          replace_store_insn (reg, del, bb, expr);
          break;
        }
    }
}
/* Free memory used by store motion.  */

static void
free_store_memory (void)
{
  free_ldst_mems ();

  if (ae_gen)
    sbitmap_vector_free (ae_gen);
  if (ae_kill)
    sbitmap_vector_free (ae_kill);
  if (transp)
    sbitmap_vector_free (transp);
  if (st_antloc)
    sbitmap_vector_free (st_antloc);
  if (pre_insert_map)
    sbitmap_vector_free (pre_insert_map);
  if (pre_delete_map)
    sbitmap_vector_free (pre_delete_map);
  if (reg_set_in_block)
    sbitmap_vector_free (reg_set_in_block);

  ae_gen = ae_kill = transp = st_antloc = NULL;
  pre_insert_map = pre_delete_map = reg_set_in_block = NULL;
}
/* Perform store motion.  Much like gcse, except we move expressions the
   other way by looking at the flowgraph in reverse.  */

static void
store_motion (void)
{
  basic_block bb;
  int x;
  struct ls_expr * ptr;
  int update_flow = 0;

  if (gcse_file)
    {
      fprintf (gcse_file, "before store motion\n");
      print_rtl (gcse_file, get_insns ());
    }

  init_alias_analysis ();

  /* Find all the available and anticipatable stores.  */
  num_stores = compute_store_table ();
  if (num_stores == 0)
    {
      sbitmap_vector_free (reg_set_in_block);
      end_alias_analysis ();
      return;
    }

  /* Now compute kill & transp vectors.  */
  build_store_vectors ();
  add_noreturn_fake_exit_edges ();
  connect_infinite_loops_to_exit ();

  edge_list = pre_edge_rev_lcm (gcse_file, num_stores, transp, ae_gen,
                                st_antloc, ae_kill, &pre_insert_map,
                                &pre_delete_map);

  /* Now we want to insert the new stores which are going to be needed.  */
  for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
    {
      FOR_EACH_BB (bb)
        if (TEST_BIT (pre_delete_map[bb->index], ptr->index))
          delete_store (ptr, bb);

      for (x = 0; x < NUM_EDGES (edge_list); x++)
        if (TEST_BIT (pre_insert_map[x], ptr->index))
          update_flow |= insert_store (ptr, INDEX_EDGE (edge_list, x));
    }

  if (update_flow)
    commit_edge_insertions ();

  free_store_memory ();
  free_edge_list (edge_list);
  remove_fake_edges ();
  end_alias_analysis ();
}
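
/* For illustration only (hypothetical source, not part of the pass):
   store motion sinks a store out of a loop, turning

     while (...)                    while (...)
       mem[x] = f (i);       into     r = f (i);
                                    mem[x] = r;

   The value is kept in expr->reaching_reg inside the loop (delete_store),
   and a single store is placed on the loop exit edge (insert_store).  */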
/* Entry point for jump bypassing optimization pass.  */

int
bypass_jumps (FILE *file)
{
  int changed;

  /* We do not construct an accurate cfg in functions which call
     setjmp, so just punt to be safe.  */
  if (current_function_calls_setjmp)
    return 0;

  /* For calling dump_foo fns from gdb.  */
  debug_stderr = stderr;
  gcse_file = file;

  /* Identify the basic block information for this function, including
     successors and predecessors.  */
  max_gcse_regno = max_reg_num ();

  if (file)
    dump_flow_info (file);

  /* Return if there's nothing to do, or it is too expensive.  */
  if (n_basic_blocks <= 1 || is_too_expensive (_("jump bypassing disabled")))
    return 0;

  gcc_obstack_init (&gcse_obstack);
  bytes_used = 0;

  /* We need alias.  */
  init_alias_analysis ();

  /* Record where pseudo-registers are set.  This data is kept accurate
     during each pass.  ??? We could also record hard-reg information here
     [since it's unchanging], however it is currently done during hash table
     computation.

     It may be tempting to compute MEM set information here too, but MEM sets
     will be subject to code motion one day and thus we need to compute
     information about memory sets when we build the hash tables.  */

  alloc_reg_set_mem (max_gcse_regno);
  compute_sets (get_insns ());

  max_gcse_regno = max_reg_num ();
  alloc_gcse_mem (get_insns ());
  changed = one_cprop_pass (1, 1, 1);
  free_gcse_mem ();

  if (file)
    {
      fprintf (file, "BYPASS of %s: %d basic blocks, ",
               current_function_name (), n_basic_blocks);
      fprintf (file, "%d bytes\n\n", bytes_used);
    }

  obstack_free (&gcse_obstack, NULL);
  free_reg_set_mem ();

  /* We are finished with alias.  */
  end_alias_analysis ();
  allocate_reg_info (max_reg_num (), FALSE, FALSE);

  return changed;
}
/* Return true if the graph is too expensive to optimize.  PASS is the
   optimization about to be performed.  */

static bool
is_too_expensive (const char *pass)
{
  /* Trying to perform global optimizations on flow graphs which have
     a high connectivity will take a long time and is unlikely to be
     particularly useful.

     In normal circumstances a cfg should have about twice as many
     edges as blocks.  But we do not want to punish small functions
     which have a couple of switch statements.  Rather than simply
     thresholding the number of blocks, use something with a more
     graceful degradation.  */
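  /* Worked example (illustrative numbers only): a 10-block function may
     have up to 20000 + 10 * 4 = 20040 edges before this cutoff fires, so
     small switch-heavy functions always pass, while a 100000-block
     function is rejected once it averages more than about 4.2 edges per
     block (20000 + 400000 = 420000 edges).  */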
  if (n_edges > 20000 + n_basic_blocks * 4)
    {
      if (warn_disabled_optimization)
        warning ("%s: %d basic blocks and %d edges/basic block",
                 pass, n_basic_blocks, n_edges / n_basic_blocks);

      return true;
    }

  /* If allocating memory for the cprop bitmap would take up too much
     storage it's better just to disable the optimization.  */
  if ((n_basic_blocks
       * SBITMAP_SET_SIZE (max_reg_num ())
       * sizeof (SBITMAP_ELT_TYPE)) > MAX_GCSE_MEMORY)
    {
      if (warn_disabled_optimization)
        warning ("%s: %d basic blocks and %d registers",
                 pass, n_basic_blocks, max_reg_num ());

      return true;
    }

  return false;
}
/* The following code implements gcse after reload.  The purpose of this
   pass is to clean up redundant loads generated by reload and other
   optimizations that come after gcse.  It searches for simple inter-block
   redundancies and tries to eliminate them by adding moves and loads
   in cold places.  */

/* The following structure holds the information about the occurrences of
   the redundant instructions.  */
struct unoccr
{
  struct unoccr *next;
  edge pred;
  rtx insn;
};

static bool reg_used_on_edge (rtx, edge);
static rtx reg_set_between_after_reload_p (rtx, rtx, rtx);
static rtx reg_used_between_after_reload_p (rtx, rtx, rtx);
static rtx get_avail_load_store_reg (rtx);
static bool is_jump_table_basic_block (basic_block);
static bool bb_has_well_behaved_predecessors (basic_block);
static struct occr* get_bb_avail_insn (basic_block, struct occr *);
static void hash_scan_set_after_reload (rtx, rtx, struct hash_table *);
static void compute_hash_table_after_reload (struct hash_table *);
static void eliminate_partially_redundant_loads (basic_block,
                                                 rtx,
                                                 struct expr *);
static void gcse_after_reload (void);
void gcse_after_reload_main (rtx, FILE *);
/* Check if register REG is used in any insn waiting to be inserted on E.
   Assumes no such insn can be a CALL_INSN; if that assumption ever
   changes, call reg_used_between_p with PREV (insn), NEXT (insn) instead
   of reg_overlap_mentioned_p.  */

static bool
reg_used_on_edge (rtx reg, edge e)
{
  rtx insn;

  for (insn = e->insns; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn) && reg_overlap_mentioned_p (reg, PATTERN (insn)))
      return true;

  return false;
}
/* Return the insn that sets register REG or clobbers it in between
   FROM_INSN and TO_INSN (exclusive of those two).
   Just like reg_set_between but for hard registers and not pseudos.  */

static rtx
reg_set_between_after_reload_p (rtx reg, rtx from_insn, rtx to_insn)
{
  rtx insn;
  int regno;

  if (GET_CODE (reg) != REG)
    abort ();
  regno = REGNO (reg);

  /* We are called after register allocation.  */
  if (regno >= FIRST_PSEUDO_REGISTER)
    abort ();

  if (from_insn == to_insn)
    return NULL_RTX;

  for (insn = NEXT_INSN (from_insn);
       insn != to_insn;
       insn = NEXT_INSN (insn))
    {
      if (INSN_P (insn))
        {
          if (FIND_REG_INC_NOTE (insn, reg)
              || (GET_CODE (insn) == CALL_INSN
                  && call_used_regs[regno])
              || find_reg_fusage (insn, CLOBBER, reg))
            return insn;
        }
      if (set_of (reg, insn) != NULL_RTX)
        return insn;
    }
  return NULL_RTX;
}
/* Return the insn that uses register REG in between FROM_INSN and TO_INSN
   (exclusive of those two).  Similar to reg_used_between but for hard
   registers and not pseudos.  */

static rtx
reg_used_between_after_reload_p (rtx reg, rtx from_insn, rtx to_insn)
{
  rtx insn;
  int regno;

  if (GET_CODE (reg) != REG)
    return to_insn;
  regno = REGNO (reg);

  /* We are called after register allocation.  */
  if (regno >= FIRST_PSEUDO_REGISTER)
    abort ();
  if (from_insn == to_insn)
    return NULL_RTX;

  for (insn = NEXT_INSN (from_insn);
       insn != to_insn;
       insn = NEXT_INSN (insn))
    if (INSN_P (insn)
        && (reg_overlap_mentioned_p (reg, PATTERN (insn))
            || (GET_CODE (insn) == CALL_INSN
                && call_used_regs[regno])
            || find_reg_fusage (insn, USE, reg)
            || find_reg_fusage (insn, CLOBBER, reg)))
      return insn;
  return NULL_RTX;
}
/* Return the loaded/stored register of a load/store instruction.  */

static rtx
get_avail_load_store_reg (rtx insn)
{
  if (GET_CODE (SET_DEST (PATTERN (insn))) == REG)  /* A load.  */
    return SET_DEST (PATTERN (insn));
  if (GET_CODE (SET_SRC (PATTERN (insn))) == REG)   /* A store.  */
    return SET_SRC (PATTERN (insn));
  abort ();
}
/* Return true if BB ends in a jump table; we don't handle such blocks
   (nor ABNORMAL edges).  */

static bool
is_jump_table_basic_block (basic_block bb)
{
  rtx insn = BB_END (bb);

  if (GET_CODE (insn) == JUMP_INSN
      && (GET_CODE (PATTERN (insn)) == ADDR_VEC
          || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
    return true;
  return false;
}
/* Return nonzero if the predecessors of BB are "well behaved".  */

static bool
bb_has_well_behaved_predecessors (basic_block bb)
{
  edge pred;

  if (! bb->pred)
    return false;
  for (pred = bb->pred; pred != NULL; pred = pred->pred_next)
    if (((pred->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (pred))
        || is_jump_table_basic_block (pred->src))
      return false;
  return true;
}
/* Search the available-occurrences list OCCR for an occurrence of the
   expression in BB.  */

static struct occr*
get_bb_avail_insn (basic_block bb, struct occr *occr)
{
  for (; occr != NULL; occr = occr->next)
    if (BLOCK_FOR_INSN (occr->insn)->index == bb->index)
      return occr;
  return NULL;
}
/* Perform a partial GCSE pass after reload, trying to eliminate redundant
   loads created by the reload pass.  We look for fully or partially
   redundant loads fed by one or more loads/stores in predecessor BBs,
   and try adding loads to make them fully redundant.  We also check if
   it's worth adding loads to be able to delete the redundant load.

   Algorithm:
   1. Build available expressions hash table:
      For each load/store instruction, if the loaded/stored memory didn't
      change until the end of the basic block, add this memory expression
      to the hash table.
   2. Perform redundancy elimination:
      For each load instruction, perform partial redundancy elimination:
      check if it's worth adding loads to make the load fully redundant.
      If so, add loads and register copies and delete the load.

   Future enhancement:
     If the loaded register is used/defined between the load and some store,
     look for some other free register between the load and all its stores,
     and replace the load with a copy from this register to the loaded
     register.  */
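
/* For illustration only (hypothetical hard registers, not part of the
   pass): suppose block B3 has predecessors B1 and B2, B1 ends with the
   store `mem[sp+4] = r3', B2 neither loads nor stores that location, and
   B3 begins with the load `r5 = mem[sp+4]'.  The load is partially
   redundant: on the B1->B3 edge it can become the copy `r5 = r3', and to
   make it fully redundant a compensating load `r5 = mem[sp+4]' is placed
   on the B2->B3 edge, after which the load in B3 is deleted.  Whether this
   is actually done depends on the profile-based cost checks below.  */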
/* This handles the case where several stores feed a partially redundant
   load.  It checks if the redundancy elimination is possible and if it's
   worth it.  */

static void
eliminate_partially_redundant_loads (basic_block bb, rtx insn,
                                     struct expr *expr)
{
  edge pred;
  rtx avail_insn = NULL_RTX;
  rtx avail_reg;
  rtx dest, pat;
  struct occr *a_occr;
  struct unoccr *occr, *avail_occrs = NULL;
  struct unoccr *unoccr, *unavail_occrs = NULL;
  int npred_ok = 0;
  gcov_type ok_count = 0;       /* Redundant load execution count.  */
  gcov_type critical_count = 0; /* Execution count of critical edges.  */

  /* The execution count of the loads to be added to make the
     load fully redundant.  */
  gcov_type not_ok_count = 0;
  basic_block pred_bb;

  pat = PATTERN (insn);
  dest = SET_DEST (pat);
  /* Check that the loaded register is not used, set, or killed from the
     beginning of the block.  */
  if (reg_used_between_after_reload_p (dest,
                                       PREV_INSN (BB_HEAD (bb)), insn)
      || reg_set_between_after_reload_p (dest,
                                         PREV_INSN (BB_HEAD (bb)), insn))
    return;

  /* Check potential for replacing load with copy for predecessors.  */
  for (pred = bb->pred; pred; pred = pred->pred_next)
    {
      rtx next_pred_bb_end;

      avail_insn = NULL_RTX;
      pred_bb = pred->src;
      next_pred_bb_end = NEXT_INSN (BB_END (pred_bb));
      for (a_occr = get_bb_avail_insn (pred_bb, expr->avail_occr); a_occr;
           a_occr = get_bb_avail_insn (pred_bb, a_occr->next))
        {
          /* Check if the loaded register is not used.  */
          avail_insn = a_occr->insn;
          if (! (avail_reg = get_avail_load_store_reg (avail_insn)))
            abort ();
          /* Make sure we can generate a move from register avail_reg to
             dest.  */
          extract_insn (gen_move_insn (copy_rtx (dest),
                                       copy_rtx (avail_reg)));
          if (! constrain_operands (1)
              || reg_killed_on_edge (avail_reg, pred)
              || reg_used_on_edge (dest, pred))
            {
              avail_insn = NULL;
              continue;
            }
          if (! reg_set_between_after_reload_p (avail_reg, avail_insn,
                                                next_pred_bb_end))
            /* AVAIL_INSN remains non-null.  */
            break;
          else
            avail_insn = NULL;
        }
      if (avail_insn != NULL_RTX)
        {
          npred_ok++;
          ok_count += pred->count;
          if (EDGE_CRITICAL_P (pred))
            critical_count += pred->count;
          occr = gmalloc (sizeof (struct unoccr));
          occr->insn = avail_insn;
          occr->pred = pred;
          occr->next = avail_occrs;
          avail_occrs = occr;
        }
      else
        {
          not_ok_count += pred->count;
          if (EDGE_CRITICAL_P (pred))
            critical_count += pred->count;
          unoccr = gmalloc (sizeof (struct unoccr));
          unoccr->insn = NULL_RTX;
          unoccr->pred = pred;
          unoccr->next = unavail_occrs;
          unavail_occrs = unoccr;
        }
    }

  if (npred_ok == 0                     /* No load can be replaced by copy.  */
      || (optimize_size && npred_ok > 1)) /* Prevent exploding the code.  */
    return;

  /* Check if it's worth applying the partial redundancy elimination.  */
  if (ok_count < GCSE_AFTER_RELOAD_PARTIAL_FRACTION * not_ok_count)
    return;

  if (ok_count < GCSE_AFTER_RELOAD_CRITICAL_FRACTION * critical_count)
    return;

  /* Generate moves to the loaded register from where
     the memory is available.  */
  for (occr = avail_occrs; occr; occr = occr->next)
    {
      avail_insn = occr->insn;
      pred = occr->pred;
      /* Set avail_reg to be the register having the value of the
         memory.  */
      avail_reg = get_avail_load_store_reg (avail_insn);
      if (! avail_reg)
        abort ();

      insert_insn_on_edge (gen_move_insn (copy_rtx (dest),
                                          copy_rtx (avail_reg)),
                           pred);

      if (gcse_file)
        fprintf (gcse_file,
                 "GCSE AFTER reload generating move from %d to %d "
                 "on edge from %d to %d\n",
                 REGNO (avail_reg),
                 REGNO (dest),
                 pred->src->index,
                 pred->dest->index);
    }

  /* Regenerate loads where the memory is unavailable.  */
  for (unoccr = unavail_occrs; unoccr; unoccr = unoccr->next)
    {
      pred = unoccr->pred;
      insert_insn_on_edge (copy_insn (PATTERN (insn)), pred);

      if (gcse_file)
        fprintf (gcse_file,
                 "GCSE AFTER reload: generating on edge from %d to %d "
                 "a copy of load:\n",
                 pred->src->index,
                 pred->dest->index);
    }

  /* Delete the insn if it is not available in this block and mark it
     for deletion if it is available.  If the insn is available it may
     help discover additional redundancies, so mark it for later
     deletion.  */
  for (a_occr = get_bb_avail_insn (bb, expr->avail_occr);
       a_occr && (a_occr->insn != insn);
       a_occr = get_bb_avail_insn (bb, a_occr->next));

  if (!a_occr)
    delete_insn (insn);
  else
    a_occr->deleted_p = 1;
}
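
/* Worked example of the profitability test above (illustrative counts
   only, and assuming GCSE_AFTER_RELOAD_PARTIAL_FRACTION is 3 for the sake
   of the example): a load whose copy-replaceable predecessor edges execute
   900 times (ok_count) while the edges needing a compensating load execute
   400 times (not_ok_count) fails the test, since 900 < 3 * 400, so the
   load is left as-is rather than made fully redundant.  */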
/* Perform the redundancy elimination as described above.  */

static void
gcse_after_reload (void)
{
  unsigned int i;
  rtx insn;
  basic_block bb;
  struct expr *expr;
  struct occr *occr;

  /* Note we start at block 1.  */

  if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
    return;

  FOR_BB_BETWEEN (bb,
                  ENTRY_BLOCK_PTR->next_bb->next_bb,
                  EXIT_BLOCK_PTR,
                  next_bb)
    {
      if (! bb_has_well_behaved_predecessors (bb))
        continue;

      /* Do not try this optimization on cold basic blocks.  */
      if (probably_cold_bb_p (bb))
        continue;

      reset_opr_set_tables ();

      for (insn = BB_HEAD (bb);
           insn != NULL
           && insn != NEXT_INSN (BB_END (bb));
           insn = NEXT_INSN (insn))
        {
          /* Is it a load - of the form (set (reg) (mem))?  */
          if (GET_CODE (insn) == INSN
              && GET_CODE (PATTERN (insn)) == SET
              && GET_CODE (SET_DEST (PATTERN (insn))) == REG
              && GET_CODE (SET_SRC (PATTERN (insn))) == MEM)
            {
              rtx pat = PATTERN (insn);
              rtx src = SET_SRC (pat);
              struct expr *expr;

              if (general_operand (src, GET_MODE (src))
                  /* Is the expression recorded?  */
                  && (expr = lookup_expr (src, &expr_hash_table)) != NULL
                  /* Are the operands unchanged since the start of the
                     block?  */
                  && oprs_not_set_p (src, insn)
                  && ! MEM_VOLATILE_P (src)
                  && GET_MODE (src) != BLKmode
                  && !(flag_non_call_exceptions && may_trap_p (src))
                  && !side_effects_p (src))
                {
                  /* We now have a load (insn) and an available memory at
                     its BB start (expr).  Try to remove the load if it is
                     redundant.  */
                  eliminate_partially_redundant_loads (bb, insn, expr);
                }
            }

          /* Keep track of everything modified by this insn.  */
          if (INSN_P (insn))
            mark_oprs_set (insn);
        }
    }

  commit_edge_insertions ();

  /* Go over the expression hash table and delete insns that were
     marked for later deletion.  */
  for (i = 0; i < expr_hash_table.size; i++)
    {
      for (expr = expr_hash_table.table[i];
           expr != NULL;
           expr = expr->next_same_hash)
        for (occr = expr->avail_occr; occr; occr = occr->next)
          if (occr->deleted_p)
            delete_insn (occr->insn);
    }
}
/* Scan pattern PAT of INSN and add an entry to the hash TABLE.
   After reload we are interested in loads/stores only.  */

static void
hash_scan_set_after_reload (rtx pat, rtx insn, struct hash_table *table)
{
  rtx src = SET_SRC (pat);
  rtx dest = SET_DEST (pat);

  if (GET_CODE (src) != MEM && GET_CODE (dest) != MEM)
    return;

  if (GET_CODE (dest) == REG)
    {
      if (/* Don't GCSE something if we can't do a reg/reg copy.  */
          can_copy_p (GET_MODE (dest))
          /* GCSE commonly inserts instructions after the insn.  We can't
             do that easily for EH_REGION notes so disable GCSE on these
             for now.  */
          && ! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
          /* Is SET_SRC something we want to gcse?  */
          && general_operand (src, GET_MODE (src))
          /* Don't CSE a nop.  */
          && ! set_noop_p (pat)
          && ! JUMP_P (insn))
        {
          /* An expression is not available if its operands are
             subsequently modified, including this insn.  */
          if (oprs_available_p (src, insn))
            insert_expr_in_table (src, GET_MODE (dest), insn, 0, 1, table);
        }
    }
  else if (GET_CODE (src) == REG)
    {
      /* This is a store to memory; record the stored register.  */
      if (/* Don't GCSE something if we can't do a reg/reg copy.  */
          can_copy_p (GET_MODE (src))
          /* GCSE commonly inserts instructions after the insn.  We can't
             do that easily for EH_REGION notes so disable GCSE on these
             for now.  */
          && ! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
          /* Is SET_DEST something we want to gcse?  */
          && general_operand (dest, GET_MODE (dest))
          /* Don't CSE a nop.  */
          && ! set_noop_p (pat)
          && ! JUMP_P (insn)
          && ! (flag_float_store && FLOAT_MODE_P (GET_MODE (dest)))
          /* Check if the memory expression is killed after insn.  */
          && ! load_killed_in_block_p (BLOCK_FOR_INSN (insn),
                                       INSN_CUID (insn) + 1,
                                       dest,
                                       1)
          && oprs_unchanged_p (XEXP (dest, 0), insn, 1))
        {
          insert_expr_in_table (dest, GET_MODE (dest), insn, 0, 1, table);
        }
    }
}
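
/* For illustration only: the two SET shapes recorded above are

     (set (reg:SI r) (mem:SI addr))   -- a load; SRC is entered in TABLE
     (set (mem:SI addr) (reg:SI r))   -- a store; DEST is entered in TABLE

   Anything that is neither (e.g. reg-reg moves or arithmetic) was already
   rejected by the MEM check at the top of the function.  */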
/* Create hash table of memory expressions available at end of basic
   blocks.  */

static void
compute_hash_table_after_reload (struct hash_table *table)
{
  unsigned int i;

  table->set_p = 0;

  /* Initialize count of number of entries in hash table.  */
  table->n_elems = 0;
  memset ((char *) table->table, 0,
          table->size * sizeof (struct expr *));

  /* While we compute the hash table we also compute a bit array of which
     registers are set in which blocks.  */
  sbitmap_vector_zero (reg_set_in_block, last_basic_block);

  /* Re-cache any INSN_LIST nodes we have allocated.  */
  clear_modify_mem_tables ();

  /* Some working arrays used to track first and last set in each block.  */
  reg_avail_info = gmalloc (max_gcse_regno * sizeof (struct reg_avail_info));

  for (i = 0; i < max_gcse_regno; ++i)
    reg_avail_info[i].last_bb = NULL;

  FOR_EACH_BB (current_bb)
    {
      rtx insn;
      unsigned int regno;

      /* First pass over the instructions records information used to
         determine when registers and memory are first and last set.  */
      for (insn = BB_HEAD (current_bb);
           insn && insn != NEXT_INSN (BB_END (current_bb));
           insn = NEXT_INSN (insn))
        {
          if (! INSN_P (insn))
            continue;

          if (GET_CODE (insn) == CALL_INSN)
            {
              bool clobbers_all = false;

#ifdef NON_SAVING_SETJMP
              if (NON_SAVING_SETJMP
                  && find_reg_note (insn, REG_SETJMP, NULL_RTX))
                clobbers_all = true;
#endif

              for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
                if (clobbers_all
                    || TEST_HARD_REG_BIT (regs_invalidated_by_call,
                                          regno))
                  record_last_reg_set_info (insn, regno);

              mark_call (insn);
            }

          note_stores (PATTERN (insn), record_last_set_info, insn);

          if (GET_CODE (PATTERN (insn)) == SET)
            {
              rtx src, dest;

              src = SET_SRC (PATTERN (insn));
              dest = SET_DEST (PATTERN (insn));
              if (GET_CODE (src) == MEM && auto_inc_p (XEXP (src, 0)))
                {
                  regno = REGNO (XEXP (XEXP (src, 0), 0));
                  record_last_reg_set_info (insn, regno);
                }
              if (GET_CODE (dest) == MEM && auto_inc_p (XEXP (dest, 0)))
                {
                  regno = REGNO (XEXP (XEXP (dest, 0), 0));
                  record_last_reg_set_info (insn, regno);
                }
            }
        }

      /* The next pass builds the hash table.  */
      for (insn = BB_HEAD (current_bb);
           insn && insn != NEXT_INSN (BB_END (current_bb));
           insn = NEXT_INSN (insn))
        if (INSN_P (insn) && GET_CODE (PATTERN (insn)) == SET)
          if (! find_reg_note (insn, REG_LIBCALL, NULL_RTX))
            hash_scan_set_after_reload (PATTERN (insn), insn, table);
    }

  free (reg_avail_info);
  reg_avail_info = NULL;
}
/* Main entry point of the GCSE-after-reload pass - clean up redundant
   loads created by spilling.  */

void
gcse_after_reload_main (rtx f, FILE* file)
{
  gcse_subst_count = 0;
  gcse_create_count = 0;

  gcse_file = file;

  gcc_obstack_init (&gcse_obstack);
  bytes_used = 0;

  /* We need alias.  */
  init_alias_analysis ();

  max_gcse_regno = max_reg_num ();

  alloc_reg_set_mem (max_gcse_regno);
  alloc_gcse_mem (f);
  alloc_hash_table (max_cuid, &expr_hash_table, 0);
  compute_hash_table_after_reload (&expr_hash_table);

  if (gcse_file)
    dump_hash_table (gcse_file, "Expression", &expr_hash_table);

  if (expr_hash_table.n_elems > 0)
    gcse_after_reload ();

  free_hash_table (&expr_hash_table);

  free_gcse_mem ();
  free_reg_set_mem ();

  /* We are finished with alias.  */
  end_alias_analysis ();

  obstack_free (&gcse_obstack, NULL);
}

#include "gt-gcse.h"