1 /* Global common subexpression elimination/Partial redundancy elimination
2 and global constant/copy propagation for GNU compiler.
3 Copyright (C) 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 2, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the Free
20 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
21 02111-1307, USA. */
23 /* TODO
24 - reordering of memory allocation and freeing to be more space efficient
25 - do rough calc of how many regs are needed in each block, and a rough
26 calc of how many regs are available in each class and use that to
27 throttle back the code in cases where RTX_COST is minimal.
28 - a store to the same address as a load does not kill the load if the
29 source of the store is also the destination of the load. Handling this
30 allows more load motion, particularly out of loops.
31 - ability to realloc sbitmap vectors would allow one initial computation
32 of reg_set_in_block with only subsequent additions, rather than
33 recomputing it for each pass
37 /* References searched while implementing this.
39 Compilers Principles, Techniques and Tools
40 Aho, Sethi, Ullman
41 Addison-Wesley, 1988
43 Global Optimization by Suppression of Partial Redundancies
44 E. Morel, C. Renvoise
45 Communications of the ACM, Vol. 22, Num. 2, Feb. 1979
47 A Portable Machine-Independent Global Optimizer - Design and Measurements
48 Frederick Chow
49 Stanford Ph.D. thesis, Dec. 1983
51 A Fast Algorithm for Code Movement Optimization
52 D.M. Dhamdhere
53 SIGPLAN Notices, Vol. 23, Num. 10, Oct. 1988
55 A Solution to a Problem with Morel and Renvoise's
56 Global Optimization by Suppression of Partial Redundancies
57 K-H Drechsler, M.P. Stadel
58 ACM TOPLAS, Vol. 10, Num. 4, Oct. 1988
60 Practical Adaptation of the Global Optimization
61 Algorithm of Morel and Renvoise
62 D.M. Dhamdhere
63 ACM TOPLAS, Vol. 13, Num. 2. Apr. 1991
65 Efficiently Computing Static Single Assignment Form and the Control
66 Dependence Graph
67 R. Cytron, J. Ferrante, B.K. Rosen, M.N. Wegman, and F.K. Zadeck
68 ACM TOPLAS, Vol. 13, Num. 4, Oct. 1991
70 Lazy Code Motion
71 J. Knoop, O. Ruthing, B. Steffen
72 ACM SIGPLAN Notices Vol. 27, Num. 7, Jul. 1992, '92 Conference on PLDI
74 What's In a Region? Or Computing Control Dependence Regions in Near-Linear
75 Time for Reducible Flow Control
76 Thomas Ball
77 ACM Letters on Programming Languages and Systems,
78 Vol. 2, Num. 1-4, Mar-Dec 1993
80 An Efficient Representation for Sparse Sets
81 Preston Briggs, Linda Torczon
82 ACM Letters on Programming Languages and Systems,
83 Vol. 2, Num. 1-4, Mar-Dec 1993
85 A Variation of Knoop, Ruthing, and Steffen's Lazy Code Motion
86 K-H Drechsler, M.P. Stadel
87 ACM SIGPLAN Notices, Vol. 28, Num. 5, May 1993
89 Partial Dead Code Elimination
90 J. Knoop, O. Ruthing, B. Steffen
91 ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994
93 Effective Partial Redundancy Elimination
94 P. Briggs, K.D. Cooper
95 ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994
97 The Program Structure Tree: Computing Control Regions in Linear Time
98 R. Johnson, D. Pearson, K. Pingali
99 ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994
101 Optimal Code Motion: Theory and Practice
102 J. Knoop, O. Ruthing, B. Steffen
103 ACM TOPLAS, Vol. 16, Num. 4, Jul. 1994
105 The power of assignment motion
106 J. Knoop, O. Ruthing, B. Steffen
107 ACM SIGPLAN Notices Vol. 30, Num. 6, Jun. 1995, '95 Conference on PLDI
109 Global code motion / global value numbering
110 C. Click
111 ACM SIGPLAN Notices Vol. 30, Num. 6, Jun. 1995, '95 Conference on PLDI
113 Value Driven Redundancy Elimination
114 L.T. Simpson
115 Rice University Ph.D. thesis, Apr. 1996
117 Value Numbering
118 L.T. Simpson
119 Massively Scalar Compiler Project, Rice University, Sep. 1996
121 High Performance Compilers for Parallel Computing
122 Michael Wolfe
123 Addison-Wesley, 1996
125 Advanced Compiler Design and Implementation
126 Steven Muchnick
127 Morgan Kaufmann, 1997
129 Building an Optimizing Compiler
130 Robert Morgan
131 Digital Press, 1998
133 People wishing to speed up the code here should read:
134 Elimination Algorithms for Data Flow Analysis
135 B.G. Ryder, M.C. Paull
136 ACM Computing Surveys, Vol. 18, Num. 3, Sep. 1986
138 How to Analyze Large Programs Efficiently and Informatively
139 D.M. Dhamdhere, B.K. Rosen, F.K. Zadeck
140 ACM SIGPLAN Notices Vol. 27, Num. 7, Jul. 1992, '92 Conference on PLDI
142 People wishing to do something different can find various possibilities
143 in the above papers and elsewhere.
146 #include "config.h"
147 #include "system.h"
148 #include "coretypes.h"
149 #include "tm.h"
150 #include "toplev.h"
152 #include "rtl.h"
153 #include "tree.h"
154 #include "tm_p.h"
155 #include "regs.h"
156 #include "hard-reg-set.h"
157 #include "flags.h"
158 #include "real.h"
159 #include "insn-config.h"
160 #include "recog.h"
161 #include "basic-block.h"
162 #include "output.h"
163 #include "function.h"
164 #include "expr.h"
165 #include "except.h"
166 #include "ggc.h"
167 #include "params.h"
168 #include "cselib.h"
169 #include "intl.h"
170 #include "obstack.h"
171 #include "timevar.h"
173 /* Propagate flow information through back edges and thus enable PRE's
174 moving loop invariant calculations out of loops.
176 Originally this tended to create worse overall code, but several
177 improvements during the development of PRE seem to have made following
178 back edges generally a win.
180 Note much of the loop invariant code motion done here would normally
181 be done by loop.c, which has more heuristics for when to move invariants
182 out of loops. At some point we might need to move some of those
183 heuristics into gcse.c. */
185 /* We support GCSE via Partial Redundancy Elimination. PRE optimizations
186 are a superset of those done by GCSE.
188 We perform the following steps:
190 1) Compute basic block information.
192 2) Compute table of places where registers are set.
194 3) Perform copy/constant propagation.
196 4) Perform global cse using lazy code motion if not optimizing
197 for size, or code hoisting if we are.
199 5) Perform another pass of copy/constant propagation.
201 Two passes of copy/constant propagation are done because the first one
202 enables more GCSE and the second one helps to clean up the copies that
203 GCSE creates. This is needed more for PRE than for Classic because Classic
204 GCSE will try to use an existing register containing the common
205 subexpression rather than create a new one. This is harder to do for PRE
206 because of the code motion (which Classic GCSE doesn't do).
208 Expressions we are interested in GCSE-ing are of the form
209 (set (pseudo-reg) (expression)).
210 Function want_to_gcse_p says what these are.
212 PRE handles moving invariant expressions out of loops (by treating them as
213 partially redundant).
215 Eventually it would be nice to replace cse.c/gcse.c with SSA (static single
216 assignment) based GVN (global value numbering). L. T. Simpson's paper
217 (Rice University) on value numbering is a useful reference for this.
219 **********************
221 We used to support multiple passes but there are diminishing returns in
222 doing so. The first pass usually makes 90% of the changes that are doable.
223 A second pass can make a few more changes made possible by the first pass.
224 Experiments show any further passes don't make enough changes to justify
225 the expense.
227 A study of spec92 using an unlimited number of passes:
228 [1 pass] = 1208 substitutions, [2] = 577, [3] = 202, [4] = 192, [5] = 83,
229 [6] = 34, [7] = 17, [8] = 9, [9] = 4, [10] = 4, [11] = 2,
230 [12] = 2, [13] = 1, [15] = 1, [16] = 2, [41] = 1
232 It was found that doing copy propagation between each pass enables further
233 substitutions.
235 PRE is quite expensive in complicated functions because the DFA can take
236 a while to converge. Hence we only perform one pass. The parameter
237 max-gcse-passes can be modified if one wants to experiment.
239 **********************
241 The steps for PRE are:
243 1) Build the hash table of expressions we wish to GCSE (expr_hash_table).
245 2) Perform the data flow analysis for PRE.
247 3) Delete the redundant instructions
249 4) Insert the required copies [if any] that make the partially
250 redundant instructions fully redundant.
252 5) For other reaching expressions, insert an instruction to copy the value
253 to a newly created pseudo that will reach the redundant instruction.
255 The deletion is done first so that when we do insertions we
256 know which pseudo reg to use.
258 Various papers have argued that PRE DFA is expensive (O(n^2)) and others
259 argue it is not. The number of iterations for the algorithm to converge
260 is typically 2-4 so I don't view it as that expensive (relatively speaking).
262 PRE GCSE depends heavily on the second CSE pass to clean up the copies
263 we create. To make an expression reach the place where it's redundant,
264 the result of the expression is copied to a new register, and the redundant
265 expression is deleted by replacing it with this new register. Classic GCSE
266 doesn't have this problem as much as it computes the reaching defs of
267 each register in each block and thus can try to use an existing register.
269 **********************
271 A fair bit of simplicity is gained by using small functions for simple
272 tasks, even when a function is only called in one place. This may
273 measurably slow things down [or may not] by creating more function call
274 overhead than is necessary. The source is laid out so that it's trivial
275 to make the affected functions inline, so that one can measure what
276 speedup, if any, can be achieved, and maybe later, when things settle,
277 things can be rearranged.
279 Help stamp out big monolithic functions! */
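/* A small, hypothetical example of the transformation described above (the
   variables and the control flow are invented purely for illustration):

   before PRE:
     if (cond)
       x = a + b;
     else
       ...;
     y = a + b;      <- partially redundant: a + b is available only on
                        the path through the "then" arm

   after PRE:
     if (cond)
       {
         t = a + b;  <- value copied into the new pseudo T
         x = t;
       }
     else
       t = a + b;    <- copy inserted to make the expression fully redundant
     y = t;          <- redundant computation deleted, replaced by a reg copy

   The redundant computation is deleted before the insertions are made, as in
   steps 3-5 above, so the insertions know which pseudo register (T, the
   "reaching reg") to use.  */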
281 /* GCSE global vars. */
283 /* -dG dump file. */
284 static FILE *gcse_file;
286 /* Note whether or not we should run jump optimization after gcse. We
287 want to do this for two cases.
289 * If we changed any jumps via cprop.
291 * If we added any labels via edge splitting. */
292 static int run_jump_opt_after_gcse;
294 /* Bitmaps are normally not included in debugging dumps.
295 However it's useful to be able to print them from GDB.
296 We could create special functions for this, but it's simpler to
297 just allow passing stderr to the dump_foo fns. Since stderr can
298 be a macro, we store a copy here. */
299 static FILE *debug_stderr;
301 /* An obstack for our working variables. */
302 static struct obstack gcse_obstack;
304 struct reg_use {rtx reg_rtx; };
306 /* Hash table of expressions. */
308 struct expr
310 /* The expression (SET_SRC for expressions, PATTERN for assignments). */
311 rtx expr;
312 /* Index in the available expression bitmaps. */
313 int bitmap_index;
314 /* Next entry with the same hash. */
315 struct expr *next_same_hash;
316 /* List of anticipatable occurrences in basic blocks in the function.
317 An "anticipatable occurrence" is one that is the first occurrence in the
318 basic block, the operands are not modified in the basic block prior
319 to the occurrence and the output is not used between the start of
320 the block and the occurrence. */
321 struct occr *antic_occr;
322 /* List of available occurrences in basic blocks in the function.
323 An "available occurrence" is one that is the last occurrence in the
324 basic block and the operands are not modified by following statements in
325 the basic block [including this insn]. */
326 struct occr *avail_occr;
327 /* Non-null if the computation is PRE redundant.
328 The value is the newly created pseudo-reg to record a copy of the
329 expression in all the places that reach the redundant copy. */
330 rtx reaching_reg;
333 /* Occurrence of an expression.
334 There is one per basic block. If a pattern appears more than once the
335 last appearance is used [or first for anticipatable expressions]. */
337 struct occr
339 /* Next occurrence of this expression. */
340 struct occr *next;
341 /* The insn that computes the expression. */
342 rtx insn;
343 /* Nonzero if this [anticipatable] occurrence has been deleted. */
344 char deleted_p;
345 /* Nonzero if this [available] occurrence has been copied to
346 reaching_reg. */
347 /* ??? This is mutually exclusive with deleted_p, so they could share
348 the same byte. */
349 char copied_p;
352 /* Expression and copy propagation hash tables.
353 Each hash table is an array of buckets.
354 ??? It is known that if it were an array of entries, structure elements
355 `next_same_hash' and `bitmap_index' wouldn't be necessary. However, it is
356 not clear whether in the final analysis a sufficient amount of memory would
357 be saved as the size of the available expression bitmaps would be larger
358 [one could build a mapping table without holes afterwards though].
359 Someday I'll perform the computation and figure it out. */
361 struct hash_table
363 /* The table itself.
364 This is an array of `expr_hash_table_size' elements. */
365 struct expr **table;
367 /* Size of the hash table, in elements. */
368 unsigned int size;
370 /* Number of hash table elements. */
371 unsigned int n_elems;
373 /* Whether the table is the expression hash table or the copy propagation one. */
374 int set_p;
377 /* Expression hash table. */
378 static struct hash_table expr_hash_table;
380 /* Copy propagation hash table. */
381 static struct hash_table set_hash_table;
383 /* Mapping of uids to cuids.
384 Only real insns get cuids. */
385 static int *uid_cuid;
387 /* Highest UID in UID_CUID. */
388 static int max_uid;
390 /* Get the cuid of an insn. */
391 #ifdef ENABLE_CHECKING
392 #define INSN_CUID(INSN) \
393 (gcc_assert (INSN_UID (INSN) <= max_uid), uid_cuid[INSN_UID (INSN)])
394 #else
395 #define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])
396 #endif
398 /* Number of cuids. */
399 static int max_cuid;
401 /* Mapping of cuids to insns. */
402 static rtx *cuid_insn;
404 /* Get insn from cuid. */
405 #define CUID_INSN(CUID) (cuid_insn[CUID])
407 /* Maximum register number in function prior to doing gcse + 1.
408 Registers created during this pass have regno >= max_gcse_regno.
409 This is named with "gcse" to not collide with global of same name. */
410 static unsigned int max_gcse_regno;
412 /* Table of registers that are modified.
414 For each register, each element is a list of places where the pseudo-reg
415 is set.
417 For simplicity, GCSE is done on sets of pseudo-regs only. PRE GCSE only
418 requires knowledge of which blocks kill which regs [and thus could use
419 a bitmap instead of the lists `reg_set_table' uses].
421 `reg_set_table' could be turned into an array of bitmaps (num-bbs x
422 num-regs) [however perhaps it may be useful to keep the data as is]. One
423 advantage of recording things this way is that `reg_set_table' is fairly
424 sparse with respect to pseudo regs but for hard regs could be fairly dense
425 [relatively speaking]. And recording sets of pseudo-regs in lists speeds
426 up functions like compute_transp since in the case of pseudo-regs we only
427 need to iterate over the number of times a pseudo-reg is set, not over the
428 number of basic blocks [clearly there is a bit of a slow down in the cases
429 where a pseudo is set more than once in a block, however it is believed
430 that the net effect is to speed things up]. This isn't done for hard-regs
431 because recording call-clobbered hard-regs in `reg_set_table' at each
432 function call can consume a fair bit of memory, and iterating over
433 hard-regs stored this way in compute_transp will be more expensive. */
435 typedef struct reg_set
437 /* The next setting of this register. */
438 struct reg_set *next;
439 /* The insn where it was set. */
440 rtx insn;
441 } reg_set;
443 static reg_set **reg_set_table;
445 /* Size of `reg_set_table'.
446 The table starts out at max_gcse_regno + slop, and is enlarged as
447 necessary. */
448 static int reg_set_table_size;
450 /* Amount to grow `reg_set_table' by when it's full. */
451 #define REG_SET_TABLE_SLOP 100
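/* A minimal sketch, for illustration only, of how the lists above are meant
   to be walked (REGNO and the loop body here are hypothetical):

     struct reg_set *r;
     for (r = reg_set_table[regno]; r != NULL; r = r->next)
       ... r->insn is one insn in which pseudo REGNO is set ...

   record_one_set and replace_one_set below maintain these per-register
   lists.  */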
453 /* This is a list of expressions which are MEMs and will be used by load
454 or store motion.
455 Load motion tracks MEMs which aren't killed by
456 anything except themselves (i.e., loads and stores to a single location).
457 We can then allow movement of these MEM refs with a little special
458 allowance. (all stores copy the same value to the reaching reg used
459 for the loads). This means all values used to store into memory must have
460 no side effects so we can re-issue the setter value.
461 Store Motion uses this structure as an expression table to track stores
462 which look interesting, and might be moveable towards the exit block. */
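/* A hypothetical sketch of the load-motion idea for a single location *P
   that is accessed only directly (the names are invented for this example):

     *p = a;        becomes        r = a; *p = r;
     ...                           ...
     x = *p;                       x = r;    <- load satisfied from R

   Every store to *P copies the stored value into the same reaching register
   R, so later loads of *P can be replaced by copies from R; this is why the
   stored values must have no side effects, as noted above.  */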
464 struct ls_expr
466 struct expr * expr; /* Gcse expression reference for LM. */
467 rtx pattern; /* Pattern of this mem. */
468 rtx pattern_regs; /* List of registers mentioned by the mem. */
469 rtx loads; /* INSN list of loads seen. */
470 rtx stores; /* INSN list of stores seen. */
471 struct ls_expr * next; /* Next in the list. */
472 int invalid; /* Invalid for some reason. */
473 int index; /* If it maps to a bitmap index. */
474 unsigned int hash_index; /* Index when in a hash table. */
475 rtx reaching_reg; /* Register to use when re-writing. */
478 /* Array of implicit set patterns indexed by basic block index. */
479 static rtx *implicit_sets;
481 /* Head of the list of load/store memory refs. */
482 static struct ls_expr * pre_ldst_mems = NULL;
484 /* Bitmap containing one bit for each register in the program.
485 Used when performing GCSE to track which registers have been set since
486 the start of the basic block. */
487 static regset reg_set_bitmap;
489 /* For each block, a bitmap of registers set in the block.
490 This is used by compute_transp.
491 It is computed during hash table computation and not by compute_sets
492 as it includes registers added since the last pass (or between cprop and
493 gcse) and it's currently not easy to realloc sbitmap vectors. */
494 static sbitmap *reg_set_in_block;
496 /* Array, indexed by basic block number for a list of insns which modify
497 memory within that block. */
498 static rtx * modify_mem_list;
499 static bitmap modify_mem_list_set;
501 /* This array parallels modify_mem_list, but is kept canonicalized. */
502 static rtx * canon_modify_mem_list;
503 static bitmap canon_modify_mem_list_set;
505 /* Various variables for statistics gathering. */
507 /* Memory used in a pass.
508 This isn't intended to be absolutely precise. Its intent is only
509 to keep an eye on memory usage. */
510 static int bytes_used;
512 /* GCSE substitutions made. */
513 static int gcse_subst_count;
514 /* Number of copy instructions created. */
515 static int gcse_create_count;
516 /* Number of local constants propagated. */
517 static int local_const_prop_count;
518 /* Number of local copies propagated. */
519 static int local_copy_prop_count;
520 /* Number of global constants propagated. */
521 static int global_const_prop_count;
522 /* Number of global copies propagated. */
523 static int global_copy_prop_count;
525 /* For available exprs */
526 static sbitmap *ae_kill, *ae_gen;
528 /* Objects of this type are passed around by the null-pointer check
529 removal routines. */
530 struct null_pointer_info
532 /* The basic block being processed. */
533 basic_block current_block;
534 /* The first register to be handled in this pass. */
535 unsigned int min_reg;
536 /* One greater than the last register to be handled in this pass. */
537 unsigned int max_reg;
538 sbitmap *nonnull_local;
539 sbitmap *nonnull_killed;
542 static void compute_can_copy (void);
543 static void *gmalloc (size_t) ATTRIBUTE_MALLOC;
544 static void *gcalloc (size_t, size_t) ATTRIBUTE_MALLOC;
545 static void *grealloc (void *, size_t);
546 static void *gcse_alloc (unsigned long);
547 static void alloc_gcse_mem (rtx);
548 static void free_gcse_mem (void);
549 static void alloc_reg_set_mem (int);
550 static void free_reg_set_mem (void);
551 static void record_one_set (int, rtx);
552 static void replace_one_set (int, rtx, rtx);
553 static void record_set_info (rtx, rtx, void *);
554 static void compute_sets (rtx);
555 static void hash_scan_insn (rtx, struct hash_table *, int);
556 static void hash_scan_set (rtx, rtx, struct hash_table *);
557 static void hash_scan_clobber (rtx, rtx, struct hash_table *);
558 static void hash_scan_call (rtx, rtx, struct hash_table *);
559 static int want_to_gcse_p (rtx);
560 static bool can_assign_to_reg_p (rtx);
561 static bool gcse_constant_p (rtx);
562 static int oprs_unchanged_p (rtx, rtx, int);
563 static int oprs_anticipatable_p (rtx, rtx);
564 static int oprs_available_p (rtx, rtx);
565 static void insert_expr_in_table (rtx, enum machine_mode, rtx, int, int,
566 struct hash_table *);
567 static void insert_set_in_table (rtx, rtx, struct hash_table *);
568 static unsigned int hash_expr (rtx, enum machine_mode, int *, int);
569 static unsigned int hash_set (int, int);
570 static int expr_equiv_p (rtx, rtx);
571 static void record_last_reg_set_info (rtx, int);
572 static void record_last_mem_set_info (rtx);
573 static void record_last_set_info (rtx, rtx, void *);
574 static void compute_hash_table (struct hash_table *);
575 static void alloc_hash_table (int, struct hash_table *, int);
576 static void free_hash_table (struct hash_table *);
577 static void compute_hash_table_work (struct hash_table *);
578 static void dump_hash_table (FILE *, const char *, struct hash_table *);
579 static struct expr *lookup_set (unsigned int, struct hash_table *);
580 static struct expr *next_set (unsigned int, struct expr *);
581 static void reset_opr_set_tables (void);
582 static int oprs_not_set_p (rtx, rtx);
583 static void mark_call (rtx);
584 static void mark_set (rtx, rtx);
585 static void mark_clobber (rtx, rtx);
586 static void mark_oprs_set (rtx);
587 static void alloc_cprop_mem (int, int);
588 static void free_cprop_mem (void);
589 static void compute_transp (rtx, int, sbitmap *, int);
590 static void compute_transpout (void);
591 static void compute_local_properties (sbitmap *, sbitmap *, sbitmap *,
592 struct hash_table *);
593 static void compute_cprop_data (void);
594 static void find_used_regs (rtx *, void *);
595 static int try_replace_reg (rtx, rtx, rtx);
596 static struct expr *find_avail_set (int, rtx);
597 static int cprop_jump (basic_block, rtx, rtx, rtx, rtx);
598 static void mems_conflict_for_gcse_p (rtx, rtx, void *);
599 static int load_killed_in_block_p (basic_block, int, rtx, int);
600 static void canon_list_insert (rtx, rtx, void *);
601 static int cprop_insn (rtx, int);
602 static int cprop (int);
603 static void find_implicit_sets (void);
604 static int one_cprop_pass (int, int, int);
605 static bool constprop_register (rtx, rtx, rtx, int);
606 static struct expr *find_bypass_set (int, int);
607 static bool reg_killed_on_edge (rtx, edge);
608 static int bypass_block (basic_block, rtx, rtx);
609 static int bypass_conditional_jumps (void);
610 static void alloc_pre_mem (int, int);
611 static void free_pre_mem (void);
612 static void compute_pre_data (void);
613 static int pre_expr_reaches_here_p (basic_block, struct expr *,
614 basic_block);
615 static void insert_insn_end_bb (struct expr *, basic_block, int);
616 static void pre_insert_copy_insn (struct expr *, rtx);
617 static void pre_insert_copies (void);
618 static int pre_delete (void);
619 static int pre_gcse (void);
620 static int one_pre_gcse_pass (int);
621 static void add_label_notes (rtx, rtx);
622 static void alloc_code_hoist_mem (int, int);
623 static void free_code_hoist_mem (void);
624 static void compute_code_hoist_vbeinout (void);
625 static void compute_code_hoist_data (void);
626 static int hoist_expr_reaches_here_p (basic_block, int, basic_block, char *);
627 static void hoist_code (void);
628 static int one_code_hoisting_pass (void);
629 static rtx process_insert_insn (struct expr *);
630 static int pre_edge_insert (struct edge_list *, struct expr **);
631 static int pre_expr_reaches_here_p_work (basic_block, struct expr *,
632 basic_block, char *);
633 static struct ls_expr * ldst_entry (rtx);
634 static void free_ldst_entry (struct ls_expr *);
635 static void free_ldst_mems (void);
636 static void print_ldst_list (FILE *);
637 static struct ls_expr * find_rtx_in_ldst (rtx);
638 static int enumerate_ldsts (void);
639 static inline struct ls_expr * first_ls_expr (void);
640 static inline struct ls_expr * next_ls_expr (struct ls_expr *);
641 static int simple_mem (rtx);
642 static void invalidate_any_buried_refs (rtx);
643 static void compute_ld_motion_mems (void);
644 static void trim_ld_motion_mems (void);
645 static void update_ld_motion_stores (struct expr *);
646 static void reg_set_info (rtx, rtx, void *);
647 static void reg_clear_last_set (rtx, rtx, void *);
648 static bool store_ops_ok (rtx, int *);
649 static rtx extract_mentioned_regs (rtx);
650 static rtx extract_mentioned_regs_helper (rtx, rtx);
651 static void find_moveable_store (rtx, int *, int *);
652 static int compute_store_table (void);
653 static bool load_kills_store (rtx, rtx, int);
654 static bool find_loads (rtx, rtx, int);
655 static bool store_killed_in_insn (rtx, rtx, rtx, int);
656 static bool store_killed_after (rtx, rtx, rtx, basic_block, int *, rtx *);
657 static bool store_killed_before (rtx, rtx, rtx, basic_block, int *);
658 static void build_store_vectors (void);
659 static void insert_insn_start_bb (rtx, basic_block);
660 static int insert_store (struct ls_expr *, edge);
661 static void remove_reachable_equiv_notes (basic_block, struct ls_expr *);
662 static void replace_store_insn (rtx, rtx, basic_block, struct ls_expr *);
663 static void delete_store (struct ls_expr *, basic_block);
664 static void free_store_memory (void);
665 static void store_motion (void);
666 static void free_insn_expr_list_list (rtx *);
667 static void clear_modify_mem_tables (void);
668 static void free_modify_mem_tables (void);
669 static rtx gcse_emit_move_after (rtx, rtx, rtx);
670 static void local_cprop_find_used_regs (rtx *, void *);
671 static bool do_local_cprop (rtx, rtx, int, rtx*);
672 static bool adjust_libcall_notes (rtx, rtx, rtx, rtx*);
673 static void local_cprop_pass (int);
674 static bool is_too_expensive (const char *);
677 /* Entry point for global common subexpression elimination.
678 F is the first instruction in the function. Return nonzero if a
679 change is made. */
682 gcse_main (rtx f, FILE *file)
684 int changed, pass;
685 /* Bytes used at start of pass. */
686 int initial_bytes_used;
687 /* Maximum number of bytes used by a pass. */
688 int max_pass_bytes;
689 /* Point to release obstack data from for each pass. */
690 char *gcse_obstack_bottom;
692 /* We do not construct an accurate cfg in functions which call
693 setjmp, so just punt to be safe. */
694 if (current_function_calls_setjmp)
695 return 0;
697 /* Assume that we do not need to run jump optimizations after gcse. */
698 run_jump_opt_after_gcse = 0;
700 /* For calling dump_foo fns from gdb. */
701 debug_stderr = stderr;
702 gcse_file = file;
704 /* Identify the basic block information for this function, including
705 successors and predecessors. */
706 max_gcse_regno = max_reg_num ();
708 if (file)
709 dump_flow_info (file);
711 /* Return if there's nothing to do, or it is too expensive. */
712 if (n_basic_blocks <= 1 || is_too_expensive (_("GCSE disabled")))
713 return 0;
715 gcc_obstack_init (&gcse_obstack);
716 bytes_used = 0;
718 /* We need alias. */
719 init_alias_analysis ();
720 /* Record where pseudo-registers are set. This data is kept accurate
721 during each pass. ??? We could also record hard-reg information here
722 [since it's unchanging], however it is currently done during hash table
723 computation.
725 It may be tempting to compute MEM set information here too, but MEM sets
726 will be subject to code motion one day and thus we need to compute
727 information about memory sets when we build the hash tables. */
729 alloc_reg_set_mem (max_gcse_regno);
730 compute_sets (f);
732 pass = 0;
733 initial_bytes_used = bytes_used;
734 max_pass_bytes = 0;
735 gcse_obstack_bottom = gcse_alloc (1);
736 changed = 1;
737 while (changed && pass < MAX_GCSE_PASSES)
739 changed = 0;
740 if (file)
741 fprintf (file, "GCSE pass %d\n\n", pass + 1);
743 /* Initialize bytes_used to the space for the pred/succ lists,
744 and the reg_set_table data. */
745 bytes_used = initial_bytes_used;
747 /* Each pass may create new registers, so recalculate each time. */
748 max_gcse_regno = max_reg_num ();
750 alloc_gcse_mem (f);
752 /* Don't allow constant propagation to modify jumps
753 during this pass. */
754 timevar_push (TV_CPROP1);
755 changed = one_cprop_pass (pass + 1, 0, 0);
756 timevar_pop (TV_CPROP1);
758 if (optimize_size)
759 /* Do nothing. */ ;
760 else
762 timevar_push (TV_PRE);
763 changed |= one_pre_gcse_pass (pass + 1);
764 /* We may have just created new basic blocks. Release and
765 recompute various things which are sized on the number of
766 basic blocks. */
767 if (changed)
769 free_modify_mem_tables ();
770 modify_mem_list = gcalloc (last_basic_block, sizeof (rtx));
771 canon_modify_mem_list = gcalloc (last_basic_block, sizeof (rtx));
773 free_reg_set_mem ();
774 alloc_reg_set_mem (max_reg_num ());
775 compute_sets (f);
776 run_jump_opt_after_gcse = 1;
777 timevar_pop (TV_PRE);
780 if (max_pass_bytes < bytes_used)
781 max_pass_bytes = bytes_used;
783 /* Free up memory, then reallocate for code hoisting. We can
784 not re-use the existing allocated memory because the tables
785 will not have info for the insns or registers created by
786 partial redundancy elimination. */
787 free_gcse_mem ();
789 /* It does not make sense to run code hoisting unless we are optimizing
790 for code size -- it rarely makes programs faster, and can make
791 them bigger if we did partial redundancy elimination (when optimizing
792 for space, we don't run the partial redundancy algorithms). */
793 if (optimize_size)
795 timevar_push (TV_HOIST);
796 max_gcse_regno = max_reg_num ();
797 alloc_gcse_mem (f);
798 changed |= one_code_hoisting_pass ();
799 free_gcse_mem ();
801 if (max_pass_bytes < bytes_used)
802 max_pass_bytes = bytes_used;
803 timevar_pop (TV_HOIST);
806 if (file)
808 fprintf (file, "\n");
809 fflush (file);
812 obstack_free (&gcse_obstack, gcse_obstack_bottom);
813 pass++;
816 /* Do one last pass of copy propagation, including cprop into
817 conditional jumps. */
819 max_gcse_regno = max_reg_num ();
820 alloc_gcse_mem (f);
821 /* This time, go ahead and allow cprop to alter jumps. */
822 timevar_push (TV_CPROP2);
823 one_cprop_pass (pass + 1, 1, 0);
824 timevar_pop (TV_CPROP2);
825 free_gcse_mem ();
827 if (file)
829 fprintf (file, "GCSE of %s: %d basic blocks, ",
830 current_function_name (), n_basic_blocks);
831 fprintf (file, "%d pass%s, %d bytes\n\n",
832 pass, pass > 1 ? "es" : "", max_pass_bytes);
835 obstack_free (&gcse_obstack, NULL);
836 free_reg_set_mem ();
838 /* We are finished with alias. */
839 end_alias_analysis ();
840 allocate_reg_info (max_reg_num (), FALSE, FALSE);
842 if (!optimize_size && flag_gcse_sm)
844 timevar_push (TV_LSM);
845 store_motion ();
846 timevar_pop (TV_LSM);
849 /* Tell the caller whether jump optimization needs to be rerun. */
850 return run_jump_opt_after_gcse;
853 /* Misc. utilities. */
855 /* Nonzero for each mode that supports (set (reg) (reg)).
856 This is trivially true for integer and floating point values.
857 It may or may not be true for condition codes. */
858 static char can_copy[(int) NUM_MACHINE_MODES];
860 /* Compute which modes support reg/reg copy operations. */
862 static void
863 compute_can_copy (void)
865 int i;
866 #ifndef AVOID_CCMODE_COPIES
867 rtx reg, insn;
868 #endif
869 memset (can_copy, 0, NUM_MACHINE_MODES);
871 start_sequence ();
872 for (i = 0; i < NUM_MACHINE_MODES; i++)
873 if (GET_MODE_CLASS (i) == MODE_CC)
875 #ifdef AVOID_CCMODE_COPIES
876 can_copy[i] = 0;
877 #else
878 reg = gen_rtx_REG ((enum machine_mode) i, LAST_VIRTUAL_REGISTER + 1);
879 insn = emit_insn (gen_rtx_SET (VOIDmode, reg, reg));
880 if (recog (PATTERN (insn), insn, NULL) >= 0)
881 can_copy[i] = 1;
882 #endif
884 else
885 can_copy[i] = 1;
887 end_sequence ();
890 /* Returns whether the mode supports reg/reg copy operations. */
892 bool
893 can_copy_p (enum machine_mode mode)
895 static bool can_copy_init_p = false;
897 if (! can_copy_init_p)
899 compute_can_copy ();
900 can_copy_init_p = true;
903 return can_copy[mode] != 0;
906 /* Cover function to xmalloc to record bytes allocated. */
908 static void *
909 gmalloc (size_t size)
911 bytes_used += size;
912 return xmalloc (size);
915 /* Cover function to xcalloc to record bytes allocated. */
917 static void *
918 gcalloc (size_t nelem, size_t elsize)
920 bytes_used += nelem * elsize;
921 return xcalloc (nelem, elsize);
924 /* Cover function to xrealloc.
925 We don't record the additional size since we don't know it.
926 It won't affect memory usage stats much anyway. */
928 static void *
929 grealloc (void *ptr, size_t size)
931 return xrealloc (ptr, size);
934 /* Cover function to obstack_alloc. */
936 static void *
937 gcse_alloc (unsigned long size)
939 bytes_used += size;
940 return obstack_alloc (&gcse_obstack, size);
943 /* Allocate memory for the cuid mapping array,
944 and reg/memory set tracking tables.
946 This is called at the start of each pass. */
948 static void
949 alloc_gcse_mem (rtx f)
951 int i;
952 rtx insn;
954 /* Find the largest UID and create a mapping from UIDs to CUIDs.
955 CUIDs are like UIDs except they increase monotonically, have no gaps,
956 and only apply to real insns. */
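/* Hypothetical example of the mapping built below: for an insn stream
   NOTE (uid 5), INSN (uid 6), NOTE (uid 7), INSN (uid 9), we get
   uid_cuid[6] == 0 and uid_cuid[9] == 1, while each NOTE receives the
   cuid of the next real insn (uid_cuid[5] == 0, uid_cuid[7] == 1).  */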
958 max_uid = get_max_uid ();
959 uid_cuid = gcalloc (max_uid + 1, sizeof (int));
960 for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
962 if (INSN_P (insn))
963 uid_cuid[INSN_UID (insn)] = i++;
964 else
965 uid_cuid[INSN_UID (insn)] = i;
968 /* Create a table mapping cuids to insns. */
970 max_cuid = i;
971 cuid_insn = gcalloc (max_cuid + 1, sizeof (rtx));
972 for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
973 if (INSN_P (insn))
974 CUID_INSN (i++) = insn;
976 /* Allocate vars to track sets of regs. */
977 reg_set_bitmap = BITMAP_XMALLOC ();
979 /* Allocate vars to track sets of regs, memory per block. */
980 reg_set_in_block = sbitmap_vector_alloc (last_basic_block, max_gcse_regno);
981 /* Allocate array to keep a list of insns which modify memory in each
982 basic block. */
983 modify_mem_list = gcalloc (last_basic_block, sizeof (rtx));
984 canon_modify_mem_list = gcalloc (last_basic_block, sizeof (rtx));
985 modify_mem_list_set = BITMAP_XMALLOC ();
986 canon_modify_mem_list_set = BITMAP_XMALLOC ();
989 /* Free memory allocated by alloc_gcse_mem. */
991 static void
992 free_gcse_mem (void)
994 free (uid_cuid);
995 free (cuid_insn);
997 BITMAP_XFREE (reg_set_bitmap);
999 sbitmap_vector_free (reg_set_in_block);
1000 free_modify_mem_tables ();
1001 BITMAP_XFREE (modify_mem_list_set);
1002 BITMAP_XFREE (canon_modify_mem_list_set);
1005 /* Compute the local properties of each recorded expression.
1007 Local properties are those that are defined by the block, irrespective of
1008 other blocks.
1010 An expression is transparent in a block if its operands are not modified
1011 in the block.
1013 An expression is computed (locally available) in a block if it is computed
1014 at least once and the expression would contain the same value if the
1015 computation were moved to the end of the block.
1017 An expression is locally anticipatable in a block if it is computed at
1018 least once and the expression would contain the same value if the
1019 computation were moved to the beginning of the block.
1021 We call this routine for cprop, pre and code hoisting. They all compute
1022 basically the same information and thus can easily share this code.
1024 TRANSP, COMP, and ANTLOC are destination sbitmaps for recording local
1025 properties. If NULL, then it is not necessary to compute or record that
1026 particular property.
1028 TABLE controls which hash table to look at. If it is the set hash table,
1029 then additionally TRANSP is computed as ~TRANSP, since this is really cprop's
1030 ABSALTERED. */
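/* An invented example of the three local properties for the expression
   "a + b" within one basic block:

     x = a + b;   <- first occurrence, operands unmodified before it:
                     the block is locally anticipatable (ANTLOC) for a + b
     a = c;       <- an operand is modified: the block is not transparent
     y = a + b;   <- last occurrence, operands unmodified afterwards:
                     the block computes (COMP) a + b

   Had "a" never been set in this block, TRANSP would hold as well.  */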
1032 static void
1033 compute_local_properties (sbitmap *transp, sbitmap *comp, sbitmap *antloc,
1034 struct hash_table *table)
1036 unsigned int i;
1038 /* Initialize any bitmaps that were passed in. */
1039 if (transp)
1041 if (table->set_p)
1042 sbitmap_vector_zero (transp, last_basic_block);
1043 else
1044 sbitmap_vector_ones (transp, last_basic_block);
1047 if (comp)
1048 sbitmap_vector_zero (comp, last_basic_block);
1049 if (antloc)
1050 sbitmap_vector_zero (antloc, last_basic_block);
1052 for (i = 0; i < table->size; i++)
1054 struct expr *expr;
1056 for (expr = table->table[i]; expr != NULL; expr = expr->next_same_hash)
1058 int indx = expr->bitmap_index;
1059 struct occr *occr;
1061 /* The expression is transparent in this block if it is not killed.
1062 We start by assuming all are transparent [none are killed], and
1063 then reset the bits for those that are. */
1064 if (transp)
1065 compute_transp (expr->expr, indx, transp, table->set_p);
1067 /* The occurrences recorded in antic_occr are exactly those that
1068 we want to set to nonzero in ANTLOC. */
1069 if (antloc)
1070 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
1072 SET_BIT (antloc[BLOCK_NUM (occr->insn)], indx);
1074 /* While we're scanning the table, this is a good place to
1075 initialize this. */
1076 occr->deleted_p = 0;
1079 /* The occurrences recorded in avail_occr are exactly those that
1080 we want to set to nonzero in COMP. */
1081 if (comp)
1082 for (occr = expr->avail_occr; occr != NULL; occr = occr->next)
1084 SET_BIT (comp[BLOCK_NUM (occr->insn)], indx);
1086 /* While we're scanning the table, this is a good place to
1087 initialize this. */
1088 occr->copied_p = 0;
1091 /* While we're scanning the table, this is a good place to
1092 initialize this. */
1093 expr->reaching_reg = 0;
1098 /* Register set information.
1100 `reg_set_table' records where each register is set or otherwise
1101 modified. */
1103 static struct obstack reg_set_obstack;
1105 static void
1106 alloc_reg_set_mem (int n_regs)
1108 reg_set_table_size = n_regs + REG_SET_TABLE_SLOP;
1109 reg_set_table = gcalloc (reg_set_table_size, sizeof (struct reg_set *));
1111 gcc_obstack_init (&reg_set_obstack);
1114 static void
1115 free_reg_set_mem (void)
1117 free (reg_set_table);
1118 obstack_free (&reg_set_obstack, NULL);
1121 /* An OLD_INSN that used to set REGNO was replaced by NEW_INSN.
1122 Update the corresponding `reg_set_table' entry accordingly.
1123 We assume that NEW_INSN is not already recorded in reg_set_table[regno]. */
1125 static void
1126 replace_one_set (int regno, rtx old_insn, rtx new_insn)
1128 struct reg_set *reg_info;
1129 if (regno >= reg_set_table_size)
1130 return;
1131 for (reg_info = reg_set_table[regno]; reg_info; reg_info = reg_info->next)
1132 if (reg_info->insn == old_insn)
1134 reg_info->insn = new_insn;
1135 break;
1139 /* Record REGNO in the reg_set table. */
1141 static void
1142 record_one_set (int regno, rtx insn)
1144 /* Allocate a new reg_set element and link it onto the list. */
1145 struct reg_set *new_reg_info;
1147 /* If the table isn't big enough, enlarge it. */
1148 if (regno >= reg_set_table_size)
1150 int new_size = regno + REG_SET_TABLE_SLOP;
1152 reg_set_table = grealloc (reg_set_table,
1153 new_size * sizeof (struct reg_set *));
1154 memset (reg_set_table + reg_set_table_size, 0,
1155 (new_size - reg_set_table_size) * sizeof (struct reg_set *));
1156 reg_set_table_size = new_size;
1159 new_reg_info = obstack_alloc (&reg_set_obstack, sizeof (struct reg_set));
1160 bytes_used += sizeof (struct reg_set);
1161 new_reg_info->insn = insn;
1162 new_reg_info->next = reg_set_table[regno];
1163 reg_set_table[regno] = new_reg_info;
1166 /* Called from compute_sets via note_stores to handle one SET or CLOBBER in
1167 an insn. The DATA is really the instruction in which the SET is
1168 occurring. */
1170 static void
1171 record_set_info (rtx dest, rtx setter ATTRIBUTE_UNUSED, void *data)
1173 rtx record_set_insn = (rtx) data;
1175 if (REG_P (dest) && REGNO (dest) >= FIRST_PSEUDO_REGISTER)
1176 record_one_set (REGNO (dest), record_set_insn);
1179 /* Scan the function and record each set of each pseudo-register.
1181 This is called once, at the start of the gcse pass. See the comments for
1182 `reg_set_table' for further documentation. */
1184 static void
1185 compute_sets (rtx f)
1187 rtx insn;
1189 for (insn = f; insn != 0; insn = NEXT_INSN (insn))
1190 if (INSN_P (insn))
1191 note_stores (PATTERN (insn), record_set_info, insn);
1194 /* Hash table support. */
1196 struct reg_avail_info
1198 basic_block last_bb;
1199 int first_set;
1200 int last_set;
1203 static struct reg_avail_info *reg_avail_info;
1204 static basic_block current_bb;
1207 /* See whether X, the source of a set, is something we want to consider for
1208 GCSE. */
1210 static int
1211 want_to_gcse_p (rtx x)
1213 switch (GET_CODE (x))
1215 case REG:
1216 case SUBREG:
1217 case CONST_INT:
1218 case CONST_DOUBLE:
1219 case CONST_VECTOR:
1220 case CALL:
1221 return 0;
1223 default:
1224 return can_assign_to_reg_p (x);
1228 /* Used internally by can_assign_to_reg_p. */
1230 static GTY(()) rtx test_insn;
1232 /* Return true if we can assign X to a pseudo register. */
1234 static bool
1235 can_assign_to_reg_p (rtx x)
1237 int num_clobbers = 0;
1238 int icode;
1240 /* If this is a valid operand, we are OK. If it's VOIDmode, we aren't. */
1241 if (general_operand (x, GET_MODE (x)))
1242 return 1;
1243 else if (GET_MODE (x) == VOIDmode)
1244 return 0;
1246 /* Otherwise, check if we can make a valid insn from it. First initialize
1247 our test insn if we haven't already. */
1248 if (test_insn == 0)
1250 test_insn
1251 = make_insn_raw (gen_rtx_SET (VOIDmode,
1252 gen_rtx_REG (word_mode,
1253 FIRST_PSEUDO_REGISTER * 2),
1254 const0_rtx));
1255 NEXT_INSN (test_insn) = PREV_INSN (test_insn) = 0;
1258 /* Now make an insn like the one we would make when GCSE'ing and see if
1259 valid. */
1260 PUT_MODE (SET_DEST (PATTERN (test_insn)), GET_MODE (x));
1261 SET_SRC (PATTERN (test_insn)) = x;
1262 return ((icode = recog (PATTERN (test_insn), test_insn, &num_clobbers)) >= 0
1263 && (num_clobbers == 0 || ! added_clobbers_hard_reg_p (icode)));
1266 /* Return nonzero if the operands of expression X are unchanged from the
1267 start of INSN's basic block up to but not including INSN (if AVAIL_P == 0),
1268 or from INSN to the end of INSN's basic block (if AVAIL_P != 0). */
1270 static int
1271 oprs_unchanged_p (rtx x, rtx insn, int avail_p)
1273 int i, j;
1274 enum rtx_code code;
1275 const char *fmt;
1277 if (x == 0)
1278 return 1;
1280 code = GET_CODE (x);
1281 switch (code)
1283 case REG:
1285 struct reg_avail_info *info = &reg_avail_info[REGNO (x)];
1287 if (info->last_bb != current_bb)
1288 return 1;
1289 if (avail_p)
1290 return info->last_set < INSN_CUID (insn);
1291 else
1292 return info->first_set >= INSN_CUID (insn);
1295 case MEM:
1296 if (load_killed_in_block_p (current_bb, INSN_CUID (insn),
1297 x, avail_p))
1298 return 0;
1299 else
1300 return oprs_unchanged_p (XEXP (x, 0), insn, avail_p);
1302 case PRE_DEC:
1303 case PRE_INC:
1304 case POST_DEC:
1305 case POST_INC:
1306 case PRE_MODIFY:
1307 case POST_MODIFY:
1308 return 0;
1310 case PC:
1311 case CC0: /*FIXME*/
1312 case CONST:
1313 case CONST_INT:
1314 case CONST_DOUBLE:
1315 case CONST_VECTOR:
1316 case SYMBOL_REF:
1317 case LABEL_REF:
1318 case ADDR_VEC:
1319 case ADDR_DIFF_VEC:
1320 return 1;
1322 default:
1323 break;
1326 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
1328 if (fmt[i] == 'e')
1330 /* If we are about to do the last recursive call needed at this
1331 level, change it into iteration. This function is called enough
1332 to be worth it. */
1333 if (i == 0)
1334 return oprs_unchanged_p (XEXP (x, i), insn, avail_p);
1336 else if (! oprs_unchanged_p (XEXP (x, i), insn, avail_p))
1337 return 0;
1339 else if (fmt[i] == 'E')
1340 for (j = 0; j < XVECLEN (x, i); j++)
1341 if (! oprs_unchanged_p (XVECEXP (x, i, j), insn, avail_p))
1342 return 0;
1345 return 1;
1348 /* Used for communication between mems_conflict_for_gcse_p and
1349 load_killed_in_block_p. Nonzero if mems_conflict_for_gcse_p finds a
1350 conflict between two memory references. */
1351 static int gcse_mems_conflict_p;
1353 /* Used for communication between mems_conflict_for_gcse_p and
1354 load_killed_in_block_p. Holds a memory reference for a load instruction;
1355 mems_conflict_for_gcse_p will see if a memory store conflicts with
1356 this memory load. */
1357 static rtx gcse_mem_operand;
1359 /* DEST is the output of an instruction. If it is a memory reference, and
1360 possibly conflicts with the load found in gcse_mem_operand, then set
1361 gcse_mems_conflict_p to a nonzero value. */
1363 static void
1364 mems_conflict_for_gcse_p (rtx dest, rtx setter ATTRIBUTE_UNUSED,
1365 void *data ATTRIBUTE_UNUSED)
1367 while (GET_CODE (dest) == SUBREG
1368 || GET_CODE (dest) == ZERO_EXTRACT
1369 || GET_CODE (dest) == STRICT_LOW_PART)
1370 dest = XEXP (dest, 0);
1372 /* If DEST is not a MEM, then it will not conflict with the load. Note
1373 that function calls are assumed to clobber memory, but are handled
1374 elsewhere. */
1375 if (! MEM_P (dest))
1376 return;
1378 /* If we are setting a MEM in our list of specially recognized MEMs,
1379 don't mark as killed this time. */
1381 if (expr_equiv_p (dest, gcse_mem_operand) && pre_ldst_mems != NULL)
1383 if (!find_rtx_in_ldst (dest))
1384 gcse_mems_conflict_p = 1;
1385 return;
1388 if (true_dependence (dest, GET_MODE (dest), gcse_mem_operand,
1389 rtx_addr_varies_p))
1390 gcse_mems_conflict_p = 1;
1393 /* Return nonzero if the expression in X (a memory reference) is killed
1394 in block BB before or after the insn with the CUID in UID_LIMIT.
1395 AVAIL_P is nonzero for kills after UID_LIMIT, and zero for kills
1396 before UID_LIMIT.
1398 To check the entire block, set UID_LIMIT to max_uid + 1 and
1399 AVAIL_P to 0. */
1401 static int
1402 load_killed_in_block_p (basic_block bb, int uid_limit, rtx x, int avail_p)
1404 rtx list_entry = modify_mem_list[bb->index];
1405 while (list_entry)
1407 rtx setter;
1408 /* Ignore entries in the list that do not apply. */
1409 if ((avail_p
1410 && INSN_CUID (XEXP (list_entry, 0)) < uid_limit)
1411 || (! avail_p
1412 && INSN_CUID (XEXP (list_entry, 0)) > uid_limit))
1414 list_entry = XEXP (list_entry, 1);
1415 continue;
1418 setter = XEXP (list_entry, 0);
1420 /* If SETTER is a call everything is clobbered. Note that calls
1421 to pure functions are never put on the list, so we need not
1422 worry about them. */
1423 if (CALL_P (setter))
1424 return 1;
1426 /* SETTER must be an INSN of some kind that sets memory. Call
1427 note_stores to examine each hunk of memory that is modified.
1429 The note_stores interface is pretty limited, so we have to
1430 communicate via global variables. Yuk. */
1431 gcse_mem_operand = x;
1432 gcse_mems_conflict_p = 0;
1433 note_stores (PATTERN (setter), mems_conflict_for_gcse_p, NULL);
1434 if (gcse_mems_conflict_p)
1435 return 1;
1436 list_entry = XEXP (list_entry, 1);
1438 return 0;
1441 /* Return nonzero if the operands of expression X are unchanged from
1442 the start of INSN's basic block up to but not including INSN. */
1444 static int
1445 oprs_anticipatable_p (rtx x, rtx insn)
1447 return oprs_unchanged_p (x, insn, 0);
1450 /* Return nonzero if the operands of expression X are unchanged from
1451 INSN to the end of INSN's basic block. */
1453 static int
1454 oprs_available_p (rtx x, rtx insn)
1456 return oprs_unchanged_p (x, insn, 1);
1459 /* Hash expression X.
1461 MODE is only used if X is a CONST_INT. DO_NOT_RECORD_P is a boolean
1462 indicating if a volatile operand is found or if the expression contains
1463 something we don't want to insert in the table. HASH_TABLE_SIZE is
1464 the current size of the hash table to be probed. */
1466 static unsigned int
1467 hash_expr (rtx x, enum machine_mode mode, int *do_not_record_p,
1468 int hash_table_size)
1470 unsigned int hash;
1472 *do_not_record_p = 0;
1474 hash = hash_rtx (x, mode, do_not_record_p,
1475 NULL, /*have_reg_qty=*/false);
1476 return hash % hash_table_size;
1479 /* Hash a set of register REGNO.
1481 Sets are hashed on the register that is set. This simplifies the PRE copy
1482 propagation code.
1484 ??? May need to make things more elaborate. Later, as necessary. */
1486 static unsigned int
1487 hash_set (int regno, int hash_table_size)
1489 unsigned int hash;
1491 hash = regno;
1492 return hash % hash_table_size;
1495 /* Return nonzero if exp1 is equivalent to exp2. */
1497 static int
1498 expr_equiv_p (rtx x, rtx y)
1500 return exp_equiv_p (x, y, 0, true);
1503 /* Insert expression X in INSN in the hash TABLE.
1504 If it is already present, record it as the last occurrence in INSN's
1505 basic block.
1507 MODE is the mode of the value X is being stored into.
1508 It is only used if X is a CONST_INT.
1510 ANTIC_P is nonzero if X is an anticipatable expression.
1511 AVAIL_P is nonzero if X is an available expression. */
1513 static void
1514 insert_expr_in_table (rtx x, enum machine_mode mode, rtx insn, int antic_p,
1515 int avail_p, struct hash_table *table)
1517 int found, do_not_record_p;
1518 unsigned int hash;
1519 struct expr *cur_expr, *last_expr = NULL;
1520 struct occr *antic_occr, *avail_occr;
1521 struct occr *last_occr = NULL;
1523 hash = hash_expr (x, mode, &do_not_record_p, table->size);
1525 /* Do not insert expression in table if it contains volatile operands,
1526 or if hash_expr determines the expression is something we don't want
1527 to or can't handle. */
1528 if (do_not_record_p)
1529 return;
1531 cur_expr = table->table[hash];
1532 found = 0;
1534 while (cur_expr && 0 == (found = expr_equiv_p (cur_expr->expr, x)))
1536 /* If the expression isn't found, save a pointer to the end of
1537 the list. */
1538 last_expr = cur_expr;
1539 cur_expr = cur_expr->next_same_hash;
1542 if (! found)
1544 cur_expr = gcse_alloc (sizeof (struct expr));
1545 bytes_used += sizeof (struct expr);
1546 if (table->table[hash] == NULL)
1547 /* This is the first pattern that hashed to this index. */
1548 table->table[hash] = cur_expr;
1549 else
1550 /* Add EXPR to end of this hash chain. */
1551 last_expr->next_same_hash = cur_expr;
1553 /* Set the fields of the expr element. */
1554 cur_expr->expr = x;
1555 cur_expr->bitmap_index = table->n_elems++;
1556 cur_expr->next_same_hash = NULL;
1557 cur_expr->antic_occr = NULL;
1558 cur_expr->avail_occr = NULL;
1561 /* Now record the occurrence(s). */
1562 if (antic_p)
1564 antic_occr = cur_expr->antic_occr;
1566 /* Search for another occurrence in the same basic block. */
1567 while (antic_occr && BLOCK_NUM (antic_occr->insn) != BLOCK_NUM (insn))
1569 /* If an occurrence isn't found, save a pointer to the end of
1570 the list. */
1571 last_occr = antic_occr;
1572 antic_occr = antic_occr->next;
1575 if (antic_occr)
1576 /* Found another instance of the expression in the same basic block.
1577 Prefer the currently recorded one. We want the first one in the
1578 block and the block is scanned from start to end. */
1579 ; /* nothing to do */
1580 else
1582 /* First occurrence of this expression in this basic block. */
1583 antic_occr = gcse_alloc (sizeof (struct occr));
1584 bytes_used += sizeof (struct occr);
1585 /* First occurrence of this expression in any block? */
1586 if (cur_expr->antic_occr == NULL)
1587 cur_expr->antic_occr = antic_occr;
1588 else
1589 last_occr->next = antic_occr;
1591 antic_occr->insn = insn;
1592 antic_occr->next = NULL;
1593 antic_occr->deleted_p = 0;
1597 if (avail_p)
1599 avail_occr = cur_expr->avail_occr;
1601 /* Search for another occurrence in the same basic block. */
1602 while (avail_occr && BLOCK_NUM (avail_occr->insn) != BLOCK_NUM (insn))
1604 /* If an occurrence isn't found, save a pointer to the end of
1605 the list. */
1606 last_occr = avail_occr;
1607 avail_occr = avail_occr->next;
1610 if (avail_occr)
1611 /* Found another instance of the expression in the same basic block.
1612 Prefer this occurrence to the currently recorded one. We want
1613 the last one in the block and the block is scanned from start
1614 to end. */
1615 avail_occr->insn = insn;
1616 else
1618 /* First occurrence of this expression in this basic block. */
1619 avail_occr = gcse_alloc (sizeof (struct occr));
1620 bytes_used += sizeof (struct occr);
1622 /* First occurrence of this expression in any block? */
1623 if (cur_expr->avail_occr == NULL)
1624 cur_expr->avail_occr = avail_occr;
1625 else
1626 last_occr->next = avail_occr;
1628 avail_occr->insn = insn;
1629 avail_occr->next = NULL;
1630 avail_occr->deleted_p = 0;
1635 /* Insert pattern X in INSN in the hash table.
1636 X is a SET of a reg to either another reg or a constant.
1637 If it is already present, record it as the last occurrence in INSN's
1638 basic block. */
1640 static void
1641 insert_set_in_table (rtx x, rtx insn, struct hash_table *table)
1643 int found;
1644 unsigned int hash;
1645 struct expr *cur_expr, *last_expr = NULL;
1646 struct occr *cur_occr, *last_occr = NULL;
1648 gcc_assert (GET_CODE (x) == SET && REG_P (SET_DEST (x)));
1650 hash = hash_set (REGNO (SET_DEST (x)), table->size);
1652 cur_expr = table->table[hash];
1653 found = 0;
1655 while (cur_expr && 0 == (found = expr_equiv_p (cur_expr->expr, x)))
1657 /* If the expression isn't found, save a pointer to the end of
1658 the list. */
1659 last_expr = cur_expr;
1660 cur_expr = cur_expr->next_same_hash;
1663 if (! found)
1665 cur_expr = gcse_alloc (sizeof (struct expr));
1666 bytes_used += sizeof (struct expr);
1667 if (table->table[hash] == NULL)
1668 /* This is the first pattern that hashed to this index. */
1669 table->table[hash] = cur_expr;
1670 else
1671 /* Add EXPR to end of this hash chain. */
1672 last_expr->next_same_hash = cur_expr;
1674 /* Set the fields of the expr element.
1675 We must copy X because it can be modified when copy propagation is
1676 performed on its operands. */
1677 cur_expr->expr = copy_rtx (x);
1678 cur_expr->bitmap_index = table->n_elems++;
1679 cur_expr->next_same_hash = NULL;
1680 cur_expr->antic_occr = NULL;
1681 cur_expr->avail_occr = NULL;
1684 /* Now record the occurrence. */
1685 cur_occr = cur_expr->avail_occr;
1687 /* Search for another occurrence in the same basic block. */
1688 while (cur_occr && BLOCK_NUM (cur_occr->insn) != BLOCK_NUM (insn))
1690 /* If an occurrence isn't found, save a pointer to the end of
1691 the list. */
1692 last_occr = cur_occr;
1693 cur_occr = cur_occr->next;
1696 if (cur_occr)
1697 /* Found another instance of the expression in the same basic block.
1698 Prefer this occurrence to the currently recorded one. We want the
1699 last one in the block and the block is scanned from start to end. */
1700 cur_occr->insn = insn;
1701 else
1703 /* First occurrence of this expression in this basic block. */
1704 cur_occr = gcse_alloc (sizeof (struct occr));
1705 bytes_used += sizeof (struct occr);
1707 /* First occurrence of this expression in any block? */
1708 if (cur_expr->avail_occr == NULL)
1709 cur_expr->avail_occr = cur_occr;
1710 else
1711 last_occr->next = cur_occr;
1713 cur_occr->insn = insn;
1714 cur_occr->next = NULL;
1715 cur_occr->deleted_p = 0;
1719 /* Determine whether the rtx X should be treated as a constant for
1720 the purposes of GCSE's constant propagation. */
1722 static bool
1723 gcse_constant_p (rtx x)
1725 /* Consider a COMPARE of two integers constant. */
1726 if (GET_CODE (x) == COMPARE
1727 && GET_CODE (XEXP (x, 0)) == CONST_INT
1728 && GET_CODE (XEXP (x, 1)) == CONST_INT)
1729 return true;
1731 /* Consider a COMPARE of the same registers a constant
1732 if they are not floating point registers. */
1733 if (GET_CODE(x) == COMPARE
1734 && REG_P (XEXP (x, 0)) && REG_P (XEXP (x, 1))
1735 && REGNO (XEXP (x, 0)) == REGNO (XEXP (x, 1))
1736 && ! FLOAT_MODE_P (GET_MODE (XEXP (x, 0)))
1737 && ! FLOAT_MODE_P (GET_MODE (XEXP (x, 1))))
1738 return true;
1740 return CONSTANT_P (x);
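/* For illustration only: under the checks above,
   (compare (const_int 4) (const_int 7)) is treated as a constant, as is
   (compare (reg 100) (reg 100)) when the register does not have a floating
   point mode; anything else is simply handed to CONSTANT_P.  */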
1743 /* Scan pattern PAT of INSN and add an entry to the hash TABLE (set or
1744 expression one). */
1746 static void
1747 hash_scan_set (rtx pat, rtx insn, struct hash_table *table)
1749 rtx src = SET_SRC (pat);
1750 rtx dest = SET_DEST (pat);
1751 rtx note;
1753 if (GET_CODE (src) == CALL)
1754 hash_scan_call (src, insn, table);
1756 else if (REG_P (dest))
1758 unsigned int regno = REGNO (dest);
1759 rtx tmp;
1761 /* If this is a single set and we are doing constant propagation,
1762 see if a REG_NOTE shows this equivalent to a constant. */
1763 if (table->set_p && (note = find_reg_equal_equiv_note (insn)) != 0
1764 && gcse_constant_p (XEXP (note, 0)))
1765 src = XEXP (note, 0), pat = gen_rtx_SET (VOIDmode, dest, src);
1767 /* Only record sets of pseudo-regs in the hash table. */
1768 if (! table->set_p
1769 && regno >= FIRST_PSEUDO_REGISTER
1770 /* Don't GCSE something if we can't do a reg/reg copy. */
1771 && can_copy_p (GET_MODE (dest))
1772 /* GCSE commonly inserts instructions after the insn. We can't
1773 do that easily for insns with EH_REGION notes, so disable GCSE
1774 on them for now. */
1775 && !find_reg_note (insn, REG_EH_REGION, NULL_RTX)
1776 /* Is SET_SRC something we want to gcse? */
1777 && want_to_gcse_p (src)
1778 /* Don't CSE a nop. */
1779 && ! set_noop_p (pat)
1780 /* Don't GCSE if the insn has an attached REG_EQUIV note.
1781 At this point only function parameters should have
1782 REG_EQUIV notes, and if the argument slot is used somewhere
1783 explicitly, it means the address of the parameter has been
1784 taken, so we should not extend the lifetime of the pseudo. */
1785 && ((note = find_reg_note (insn, REG_EQUIV, NULL_RTX)) == 0
1786 || ! MEM_P (XEXP (note, 0))))
1788 /* An expression is not anticipatable if its operands are
1789 modified before this insn or if this is not the only SET in
1790 this insn. */
1791 int antic_p = oprs_anticipatable_p (src, insn) && single_set (insn);
1792 /* An expression is not available if its operands are
1793 subsequently modified, including this insn. It's also not
1794 available if this is a branch, because we can't insert
1795 a set after the branch. */
1796 int avail_p = (oprs_available_p (src, insn)
1797 && ! JUMP_P (insn));
1799 insert_expr_in_table (src, GET_MODE (dest), insn, antic_p, avail_p, table);
1802 /* Record sets for constant/copy propagation. */
1803 else if (table->set_p
1804 && regno >= FIRST_PSEUDO_REGISTER
1805 && ((REG_P (src)
1806 && REGNO (src) >= FIRST_PSEUDO_REGISTER
1807 && can_copy_p (GET_MODE (dest))
1808 && REGNO (src) != regno)
1809 || gcse_constant_p (src))
1810 /* A copy is not available if its src or dest is subsequently
1811 modified. Here we want to search from INSN+1 on, but
1812 oprs_available_p searches from INSN on. */
1813 && (insn == BB_END (BLOCK_FOR_INSN (insn))
1814 || ((tmp = next_nonnote_insn (insn)) != NULL_RTX
1815 && oprs_available_p (pat, tmp))))
1816 insert_set_in_table (pat, insn, table);
1818 /* In the case of a store we want to consider the memory value as available
1819 in the REG stored in that memory. This makes it possible to remove
1820 loads made redundant by stores to the same location. */
1821 else if (flag_gcse_las && REG_P (src) && MEM_P (dest))
1823 unsigned int regno = REGNO (src);
1825 /* Do not do this for constant/copy propagation. */
1826 if (! table->set_p
1827 /* Only record sets of pseudo-regs in the hash table. */
1828 && regno >= FIRST_PSEUDO_REGISTER
1829 /* Don't GCSE something if we can't do a reg/reg copy. */
1830 && can_copy_p (GET_MODE (src))
1831 /* GCSE commonly inserts instructions after the insn. We can't
1832 do that easily for insns with EH_REGION notes, so disable GCSE
1833 on them for now. */
1834 && ! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
1835 /* Is SET_DEST something we want to gcse? */
1836 && want_to_gcse_p (dest)
1837 /* Don't CSE a nop. */
1838 && ! set_noop_p (pat)
1839 /* Don't GCSE if the insn has an attached REG_EQUIV note.
1840 At this point only function parameters should have
1841 REG_EQUIV notes, and if the argument slot is used somewhere
1842 explicitly, it means the address of the parameter has been
1843 taken, so we should not extend the lifetime of the pseudo. */
1844 && ((note = find_reg_note (insn, REG_EQUIV, NULL_RTX)) == 0
1845 || ! MEM_P (XEXP (note, 0))))
1847 /* Stores are never anticipatable. */
1848 int antic_p = 0;
1849 /* An expression is not available if its operands are
1850 subsequently modified, including this insn. It's also not
1851 available if this is a branch, because we can't insert
1852 a set after the branch. */
1853 int avail_p = oprs_available_p (dest, insn)
1854 && ! JUMP_P (insn);
1856 /* Record the memory expression (DEST) in the hash table. */
1857 insert_expr_in_table (dest, GET_MODE (dest), insn,
1858 antic_p, avail_p, table);
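/* A worked toy example (illustrative only, ordinary C variables standing
   in for pseudo registers) of the antic_p / avail_p distinction computed
   in hash_scan_set above:  */
#if 0
extern int compute (void);

void
toy_block (int b, int c)
{
  int a, d;

  a = b + c;    /* "b + c" is anticipatable here (its operands have not
                   been set earlier in the block), but it is NOT available
                   at the end of the block because b is modified below.  */
  b = compute ();
  d = b + c;    /* This "b + c" is NOT anticipatable (b was set above),
                   but it IS available at the end of the block.  */
  (void) a;
  (void) d;
}
#endif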
1863 static void
1864 hash_scan_clobber (rtx x ATTRIBUTE_UNUSED, rtx insn ATTRIBUTE_UNUSED,
1865 struct hash_table *table ATTRIBUTE_UNUSED)
1867 /* Currently nothing to do. */
1870 static void
1871 hash_scan_call (rtx x ATTRIBUTE_UNUSED, rtx insn ATTRIBUTE_UNUSED,
1872 struct hash_table *table ATTRIBUTE_UNUSED)
1874 /* Currently nothing to do. */
1877 /* Process INSN and add hash table entries as appropriate.
1879 Only available expressions that set a single pseudo-reg are recorded.
1881 Single sets in a PARALLEL could be handled, but it's an extra complication
1882 that isn't dealt with right now. The trick is handling the CLOBBERs that
1883 are also in the PARALLEL. Later.
1885 If SET_P is nonzero, this is for the assignment hash table,
1886 otherwise it is for the expression hash table.
1887 If IN_LIBCALL_BLOCK is nonzero, we are in a libcall block and should
1888 not record any expressions. */
1890 static void
1891 hash_scan_insn (rtx insn, struct hash_table *table, int in_libcall_block)
1893 rtx pat = PATTERN (insn);
1894 int i;
1896 if (in_libcall_block)
1897 return;
1899 /* Pick out the sets of INSN and for other forms of instructions record
1900 what's been modified. */
1902 if (GET_CODE (pat) == SET)
1903 hash_scan_set (pat, insn, table);
1904 else if (GET_CODE (pat) == PARALLEL)
1905 for (i = 0; i < XVECLEN (pat, 0); i++)
1907 rtx x = XVECEXP (pat, 0, i);
1909 if (GET_CODE (x) == SET)
1910 hash_scan_set (x, insn, table);
1911 else if (GET_CODE (x) == CLOBBER)
1912 hash_scan_clobber (x, insn, table);
1913 else if (GET_CODE (x) == CALL)
1914 hash_scan_call (x, insn, table);
1917 else if (GET_CODE (pat) == CLOBBER)
1918 hash_scan_clobber (pat, insn, table);
1919 else if (GET_CODE (pat) == CALL)
1920 hash_scan_call (pat, insn, table);
1923 static void
1924 dump_hash_table (FILE *file, const char *name, struct hash_table *table)
1926 int i;
1927 /* Flattened out table, so it's printed in proper order. */
1928 struct expr **flat_table;
1929 unsigned int *hash_val;
1930 struct expr *expr;
1932 flat_table = xcalloc (table->n_elems, sizeof (struct expr *));
1933 hash_val = xmalloc (table->n_elems * sizeof (unsigned int));
1935 for (i = 0; i < (int) table->size; i++)
1936 for (expr = table->table[i]; expr != NULL; expr = expr->next_same_hash)
1938 flat_table[expr->bitmap_index] = expr;
1939 hash_val[expr->bitmap_index] = i;
1942 fprintf (file, "%s hash table (%d buckets, %d entries)\n",
1943 name, table->size, table->n_elems);
1945 for (i = 0; i < (int) table->n_elems; i++)
1946 if (flat_table[i] != 0)
1948 expr = flat_table[i];
1949 fprintf (file, "Index %d (hash value %d)\n ",
1950 expr->bitmap_index, hash_val[i]);
1951 print_rtl (file, expr->expr);
1952 fprintf (file, "\n");
1955 fprintf (file, "\n");
1957 free (flat_table);
1958 free (hash_val);
1961 /* Record register first/last/block set information for REGNO in INSN.
1963 first_set records the first place in the block where the register
1964 is set and is used to compute "anticipatability".
1966 last_set records the last place in the block where the register
1967 is set and is used to compute "availability".
1969 last_bb records the block for which first_set and last_set are
1970 valid, as a quick test to invalidate them.
1972 reg_set_in_block records whether the register is set in the block
1973 and is used to compute "transparency". */
1975 static void
1976 record_last_reg_set_info (rtx insn, int regno)
1978 struct reg_avail_info *info = &reg_avail_info[regno];
1979 int cuid = INSN_CUID (insn);
1981 info->last_set = cuid;
1982 if (info->last_bb != current_bb)
1984 info->last_bb = current_bb;
1985 info->first_set = cuid;
1986 SET_BIT (reg_set_in_block[current_bb->index], regno);
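/* The reg_avail_info scheme above never clears the whole array between
   blocks: an entry's first_set/last_set are trusted only when last_bb
   matches the block currently being scanned.  A stand-alone sketch of
   that lazy-invalidation idiom (hypothetical "toy_" names):  */
#if 0
#include <stdio.h>

struct toy_avail { int last_bb; int first_set; int last_set; };
static struct toy_avail toy_info[8];    /* last_bb == 0 means "never seen".  */

static void
toy_record_set (int bb, int regno, int cuid)
{
  struct toy_avail *info = &toy_info[regno];

  info->last_set = cuid;
  if (info->last_bb != bb)
    {
      /* Entry is stale, left over from an earlier block; lazily
         reinitialize it for the current block.  */
      info->last_bb = bb;
      info->first_set = cuid;
    }
}

int
main (void)
{
  toy_record_set (1, 2, 10);
  toy_record_set (1, 2, 15);    /* Same block: first_set stays at 10.  */
  toy_record_set (3, 2, 40);    /* New block: first_set resets to 40.  */
  printf ("reg 2: bb %d, first %d, last %d\n",
          toy_info[2].last_bb, toy_info[2].first_set, toy_info[2].last_set);
  return 0;
}
#endif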
1991 /* Record all of the canonicalized MEMs of record_last_mem_set_info's insn.
1992 Note we store a pair of elements in the list, so they have to be
1993 taken off pairwise. */
1995 static void
1996 canon_list_insert (rtx dest ATTRIBUTE_UNUSED, rtx unused1 ATTRIBUTE_UNUSED,
1997 void * v_insn)
1999 rtx dest_addr, insn;
2000 int bb;
2002 while (GET_CODE (dest) == SUBREG
2003 || GET_CODE (dest) == ZERO_EXTRACT
2004 || GET_CODE (dest) == STRICT_LOW_PART)
2005 dest = XEXP (dest, 0);
2007 /* If DEST is not a MEM, then it will not conflict with a load. Note
2008 that function calls are assumed to clobber memory, but are handled
2009 elsewhere. */
2011 if (! MEM_P (dest))
2012 return;
2014 dest_addr = get_addr (XEXP (dest, 0));
2015 dest_addr = canon_rtx (dest_addr);
2016 insn = (rtx) v_insn;
2017 bb = BLOCK_NUM (insn);
2019 canon_modify_mem_list[bb] =
2020 alloc_EXPR_LIST (VOIDmode, dest_addr, canon_modify_mem_list[bb]);
2021 canon_modify_mem_list[bb] =
2022 alloc_EXPR_LIST (VOIDmode, dest, canon_modify_mem_list[bb]);
2023 bitmap_set_bit (canon_modify_mem_list_set, bb);
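/* canon_modify_mem_list ends up holding two nodes per recorded store,
   the MEM at the head of the pair and its canonicalized address second,
   so consumers must take elements off pairwise (as the MEM case of
   compute_transp does below).  A plain-C sketch of that pairwise
   push/walk discipline (hypothetical "toy_" names):  */
#if 0
#include <stdio.h>
#include <stdlib.h>

struct toy_node { const char *payload; struct toy_node *next; };

static struct toy_node *
toy_push (const char *payload, struct toy_node *list)
{
  struct toy_node *n = malloc (sizeof *n);

  n->payload = payload;
  n->next = list;
  return n;
}

int
main (void)
{
  struct toy_node *list = NULL, *p;

  /* For each store, push the address first and the MEM second, so the
     MEM is the first node of the pair, as canon_list_insert does.  */
  list = toy_push ("addr of store A", list);
  list = toy_push ("mem of store A", list);
  list = toy_push ("addr of store B", list);
  list = toy_push ("mem of store B", list);

  for (p = list; p; p = p->next->next)
    printf ("mem = \"%s\", addr = \"%s\"\n", p->payload, p->next->payload);
  return 0;
}
#endif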
2026 /* Record memory modification information for INSN. We do not actually care
2027 about the memory location(s) that are set, or even how they are set (consider
2028 a CALL_INSN). We merely need to record which insns modify memory. */
2030 static void
2031 record_last_mem_set_info (rtx insn)
2033 int bb = BLOCK_NUM (insn);
2035 /* load_killed_in_block_p will handle the case of calls clobbering
2036 everything. */
2037 modify_mem_list[bb] = alloc_INSN_LIST (insn, modify_mem_list[bb]);
2038 bitmap_set_bit (modify_mem_list_set, bb);
2040 if (CALL_P (insn))
2042 /* Note that traversals of this loop (other than for free-ing)
2043 will break after encountering a CALL_INSN. So, there's no
2044 need to insert a pair of items, as canon_list_insert does. */
2045 canon_modify_mem_list[bb] =
2046 alloc_INSN_LIST (insn, canon_modify_mem_list[bb]);
2047 bitmap_set_bit (canon_modify_mem_list_set, bb);
2049 else
2050 note_stores (PATTERN (insn), canon_list_insert, (void*) insn);
2053 /* Called from compute_hash_table via note_stores to handle one
2054 SET or CLOBBER in an insn. DATA is really the instruction in which
2055 the SET is taking place. */
2057 static void
2058 record_last_set_info (rtx dest, rtx setter ATTRIBUTE_UNUSED, void *data)
2060 rtx last_set_insn = (rtx) data;
2062 if (GET_CODE (dest) == SUBREG)
2063 dest = SUBREG_REG (dest);
2065 if (REG_P (dest))
2066 record_last_reg_set_info (last_set_insn, REGNO (dest));
2067 else if (MEM_P (dest)
2068 /* Ignore pushes, they clobber nothing. */
2069 && ! push_operand (dest, GET_MODE (dest)))
2070 record_last_mem_set_info (last_set_insn);
2073 /* Top level function to create an expression or assignment hash table.
2075 Expression entries are placed in the hash table if
2076 - they are of the form (set (pseudo-reg) src),
2077 - src is something we want to perform GCSE on,
2078 - none of the operands are subsequently modified in the block
2080 Assignment entries are placed in the hash table if
2081 - they are of the form (set (pseudo-reg) src),
2082 - src is something we want to perform const/copy propagation on,
2083 - none of the operands or target are subsequently modified in the block
2085 Currently src must be a pseudo-reg or a const_int.
2087 TABLE is the table computed. */
2089 static void
2090 compute_hash_table_work (struct hash_table *table)
2092 unsigned int i;
2094 /* While we compute the hash table we also compute a bit array of which
2095 registers are set in which blocks.
2096 ??? This isn't needed during const/copy propagation, but it's cheap to
2097 compute. Later. */
2098 sbitmap_vector_zero (reg_set_in_block, last_basic_block);
2100 /* Re-cache any INSN_LIST nodes we have allocated. */
2101 clear_modify_mem_tables ();
2102 /* Some working arrays used to track first and last set in each block. */
2103 reg_avail_info = gmalloc (max_gcse_regno * sizeof (struct reg_avail_info));
2105 for (i = 0; i < max_gcse_regno; ++i)
2106 reg_avail_info[i].last_bb = NULL;
2108 FOR_EACH_BB (current_bb)
2110 rtx insn;
2111 unsigned int regno;
2112 int in_libcall_block;
2114 /* First pass over the instructions records information used to
2115 determine when registers and memory are first and last set.
2116 ??? hard-reg reg_set_in_block computation
2117 could be moved to compute_sets since they currently don't change. */
2119 for (insn = BB_HEAD (current_bb);
2120 insn && insn != NEXT_INSN (BB_END (current_bb));
2121 insn = NEXT_INSN (insn))
2123 if (! INSN_P (insn))
2124 continue;
2126 if (CALL_P (insn))
2128 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2129 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
2130 record_last_reg_set_info (insn, regno);
2132 mark_call (insn);
2135 note_stores (PATTERN (insn), record_last_set_info, insn);
2138 /* Insert implicit sets in the hash table. */
2139 if (table->set_p
2140 && implicit_sets[current_bb->index] != NULL_RTX)
2141 hash_scan_set (implicit_sets[current_bb->index],
2142 BB_HEAD (current_bb), table);
2144 /* The next pass builds the hash table. */
2146 for (insn = BB_HEAD (current_bb), in_libcall_block = 0;
2147 insn && insn != NEXT_INSN (BB_END (current_bb));
2148 insn = NEXT_INSN (insn))
2149 if (INSN_P (insn))
2151 if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
2152 in_libcall_block = 1;
2153 else if (table->set_p && find_reg_note (insn, REG_RETVAL, NULL_RTX))
2154 in_libcall_block = 0;
2155 hash_scan_insn (insn, table, in_libcall_block);
2156 if (!table->set_p && find_reg_note (insn, REG_RETVAL, NULL_RTX))
2157 in_libcall_block = 0;
2161 free (reg_avail_info);
2162 reg_avail_info = NULL;
2165 /* Allocate space for the set/expr hash TABLE.
2166 N_INSNS is the number of instructions in the function.
2167 It is used to determine the number of buckets to use.
2168 SET_P determines whether set or expression table will
2169 be created. */
2171 static void
2172 alloc_hash_table (int n_insns, struct hash_table *table, int set_p)
2174 int n;
2176 table->size = n_insns / 4;
2177 if (table->size < 11)
2178 table->size = 11;
2180 /* Attempt to maintain efficient use of hash table.
2181 Making it an odd number is simplest for now.
2182 ??? Later take some measurements. */
2183 table->size |= 1;
2184 n = table->size * sizeof (struct expr *);
2185 table->table = gmalloc (n);
2186 table->set_p = set_p;
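/* The bucket count chosen above is roughly n_insns / 4, never less than
   11, and forced odd; e.g. 1000 insns give (1000 / 4) | 1 == 251 buckets,
   while 20 insns give 11.  A one-function sketch of the computation:  */
#if 0
static int
toy_hash_table_size (int n_insns)
{
  int size = n_insns / 4;

  if (size < 11)
    size = 11;
  return size | 1;      /* Keep the number of buckets odd.  */
}
#endif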
2189 /* Free things allocated by alloc_hash_table. */
2191 static void
2192 free_hash_table (struct hash_table *table)
2194 free (table->table);
2197 /* Compute the hash TABLE, either the set table used for copy/const
2198 propagation or the expression hash table. */
2200 static void
2201 compute_hash_table (struct hash_table *table)
2203 /* Initialize count of number of entries in hash table. */
2204 table->n_elems = 0;
2205 memset (table->table, 0, table->size * sizeof (struct expr *));
2207 compute_hash_table_work (table);
2210 /* Expression tracking support. */
2212 /* Lookup REGNO in the set TABLE. The result is a pointer to the
2213 table entry, or NULL if not found. */
2215 static struct expr *
2216 lookup_set (unsigned int regno, struct hash_table *table)
2218 unsigned int hash = hash_set (regno, table->size);
2219 struct expr *expr;
2221 expr = table->table[hash];
2223 while (expr && REGNO (SET_DEST (expr->expr)) != regno)
2224 expr = expr->next_same_hash;
2226 return expr;
2229 /* Return the next entry for REGNO in list EXPR. */
2231 static struct expr *
2232 next_set (unsigned int regno, struct expr *expr)
2235 expr = expr->next_same_hash;
2236 while (expr && REGNO (SET_DEST (expr->expr)) != regno);
2238 return expr;
2241 /* Like free_INSN_LIST_list or free_EXPR_LIST_list, except that the node
2242 types may be mixed. */
2244 static void
2245 free_insn_expr_list_list (rtx *listp)
2247 rtx list, next;
2249 for (list = *listp; list ; list = next)
2251 next = XEXP (list, 1);
2252 if (GET_CODE (list) == EXPR_LIST)
2253 free_EXPR_LIST_node (list);
2254 else
2255 free_INSN_LIST_node (list);
2258 *listp = NULL;
2261 /* Clear canon_modify_mem_list and modify_mem_list tables. */
2262 static void
2263 clear_modify_mem_tables (void)
2265 unsigned i;
2266 bitmap_iterator bi;
2268 EXECUTE_IF_SET_IN_BITMAP (modify_mem_list_set, 0, i, bi)
2270 free_INSN_LIST_list (modify_mem_list + i);
2272 bitmap_clear (modify_mem_list_set);
2274 EXECUTE_IF_SET_IN_BITMAP (canon_modify_mem_list_set, 0, i, bi)
2276 free_insn_expr_list_list (canon_modify_mem_list + i);
2278 bitmap_clear (canon_modify_mem_list_set);
2281 /* Release memory used by modify_mem_list_set and canon_modify_mem_list_set. */
2283 static void
2284 free_modify_mem_tables (void)
2286 clear_modify_mem_tables ();
2287 free (modify_mem_list);
2288 free (canon_modify_mem_list);
2289 modify_mem_list = 0;
2290 canon_modify_mem_list = 0;
2293 /* Reset tables used to keep track of what's still available [since the
2294 start of the block]. */
2296 static void
2297 reset_opr_set_tables (void)
2299 /* Maintain a bitmap of which regs have been set since beginning of
2300 the block. */
2301 CLEAR_REG_SET (reg_set_bitmap);
2303 /* Also keep a record of the last instruction to modify memory.
2304 For now this is very trivial; we only record whether any memory
2305 location has been modified. */
2306 clear_modify_mem_tables ();
2309 /* Return nonzero if the operands of X are not set before INSN in
2310 INSN's basic block. */
2312 static int
2313 oprs_not_set_p (rtx x, rtx insn)
2315 int i, j;
2316 enum rtx_code code;
2317 const char *fmt;
2319 if (x == 0)
2320 return 1;
2322 code = GET_CODE (x);
2323 switch (code)
2325 case PC:
2326 case CC0:
2327 case CONST:
2328 case CONST_INT:
2329 case CONST_DOUBLE:
2330 case CONST_VECTOR:
2331 case SYMBOL_REF:
2332 case LABEL_REF:
2333 case ADDR_VEC:
2334 case ADDR_DIFF_VEC:
2335 return 1;
2337 case MEM:
2338 if (load_killed_in_block_p (BLOCK_FOR_INSN (insn),
2339 INSN_CUID (insn), x, 0))
2340 return 0;
2341 else
2342 return oprs_not_set_p (XEXP (x, 0), insn);
2344 case REG:
2345 return ! REGNO_REG_SET_P (reg_set_bitmap, REGNO (x));
2347 default:
2348 break;
2351 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
2353 if (fmt[i] == 'e')
2355 /* If we are about to do the last recursive call
2356 needed at this level, change it into iteration.
2357 This function is called enough to be worth it. */
2358 if (i == 0)
2359 return oprs_not_set_p (XEXP (x, i), insn);
2361 if (! oprs_not_set_p (XEXP (x, i), insn))
2362 return 0;
2364 else if (fmt[i] == 'E')
2365 for (j = 0; j < XVECLEN (x, i); j++)
2366 if (! oprs_not_set_p (XVECEXP (x, i, j), insn))
2367 return 0;
2370 return 1;
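/* The "turn the last recursive call into iteration" trick used in
   oprs_not_set_p above (and in the other RTL walkers in this file) is
   ordinary manual tail-call elimination.  A stand-alone sketch of the
   shape, over a hypothetical tree type:  */
#if 0
struct toy_node { int set_p; int n_operands; struct toy_node *operand[2]; };

static int
toy_not_set_p (struct toy_node *x)
{
 repeat:
  if (x == NULL)
    return 1;
  if (x->set_p)
    return 0;

  {
    int i;

    for (i = x->n_operands - 1; i >= 0; i--)
      {
        if (i == 0)
          {
            /* Last recursive call at this level: iterate instead, so a
               long chain of first operands does not grow the C stack.  */
            x = x->operand[0];
            goto repeat;
          }
        if (! toy_not_set_p (x->operand[i]))
          return 0;
      }
  }
  return 1;
}
#endif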
2373 /* Mark things set by a CALL. */
2375 static void
2376 mark_call (rtx insn)
2378 if (! CONST_OR_PURE_CALL_P (insn))
2379 record_last_mem_set_info (insn);
2382 /* Mark things set by a SET. */
2384 static void
2385 mark_set (rtx pat, rtx insn)
2387 rtx dest = SET_DEST (pat);
2389 while (GET_CODE (dest) == SUBREG
2390 || GET_CODE (dest) == ZERO_EXTRACT
2391 || GET_CODE (dest) == STRICT_LOW_PART)
2392 dest = XEXP (dest, 0);
2394 if (REG_P (dest))
2395 SET_REGNO_REG_SET (reg_set_bitmap, REGNO (dest));
2396 else if (MEM_P (dest))
2397 record_last_mem_set_info (insn);
2399 if (GET_CODE (SET_SRC (pat)) == CALL)
2400 mark_call (insn);
2403 /* Record things set by a CLOBBER. */
2405 static void
2406 mark_clobber (rtx pat, rtx insn)
2408 rtx clob = XEXP (pat, 0);
2410 while (GET_CODE (clob) == SUBREG || GET_CODE (clob) == STRICT_LOW_PART)
2411 clob = XEXP (clob, 0);
2413 if (REG_P (clob))
2414 SET_REGNO_REG_SET (reg_set_bitmap, REGNO (clob));
2415 else
2416 record_last_mem_set_info (insn);
2419 /* Record things set by INSN.
2420 This data is used by oprs_not_set_p. */
2422 static void
2423 mark_oprs_set (rtx insn)
2425 rtx pat = PATTERN (insn);
2426 int i;
2428 if (GET_CODE (pat) == SET)
2429 mark_set (pat, insn);
2430 else if (GET_CODE (pat) == PARALLEL)
2431 for (i = 0; i < XVECLEN (pat, 0); i++)
2433 rtx x = XVECEXP (pat, 0, i);
2435 if (GET_CODE (x) == SET)
2436 mark_set (x, insn);
2437 else if (GET_CODE (x) == CLOBBER)
2438 mark_clobber (x, insn);
2439 else if (GET_CODE (x) == CALL)
2440 mark_call (insn);
2443 else if (GET_CODE (pat) == CLOBBER)
2444 mark_clobber (pat, insn);
2445 else if (GET_CODE (pat) == CALL)
2446 mark_call (insn);
2450 /* Compute copy/constant propagation working variables. */
2452 /* Local properties of assignments. */
2453 static sbitmap *cprop_pavloc;
2454 static sbitmap *cprop_absaltered;
2456 /* Global properties of assignments (computed from the local properties). */
2457 static sbitmap *cprop_avin;
2458 static sbitmap *cprop_avout;
2460 /* Allocate vars used for copy/const propagation. N_BLOCKS is the number of
2461 basic blocks. N_SETS is the number of sets. */
2463 static void
2464 alloc_cprop_mem (int n_blocks, int n_sets)
2466 cprop_pavloc = sbitmap_vector_alloc (n_blocks, n_sets);
2467 cprop_absaltered = sbitmap_vector_alloc (n_blocks, n_sets);
2469 cprop_avin = sbitmap_vector_alloc (n_blocks, n_sets);
2470 cprop_avout = sbitmap_vector_alloc (n_blocks, n_sets);
2473 /* Free vars used by copy/const propagation. */
2475 static void
2476 free_cprop_mem (void)
2478 sbitmap_vector_free (cprop_pavloc);
2479 sbitmap_vector_free (cprop_absaltered);
2480 sbitmap_vector_free (cprop_avin);
2481 sbitmap_vector_free (cprop_avout);
2484 /* For each block, compute whether X is transparent. X is either an
2485 expression or an assignment [though we don't care which, for this context
2486 an assignment is treated as an expression]. For each block where an
2487 element of X is modified, set (SET_P == 1) or reset (SET_P == 0) the INDX
2488 bit in BMAP. */
2490 static void
2491 compute_transp (rtx x, int indx, sbitmap *bmap, int set_p)
2493 int i, j;
2494 basic_block bb;
2495 enum rtx_code code;
2496 reg_set *r;
2497 const char *fmt;
2499 /* repeat is used to turn tail-recursion into iteration since GCC
2500 can't do it when there's no return value. */
2501 repeat:
2503 if (x == 0)
2504 return;
2506 code = GET_CODE (x);
2507 switch (code)
2509 case REG:
2510 if (set_p)
2512 if (REGNO (x) < FIRST_PSEUDO_REGISTER)
2514 FOR_EACH_BB (bb)
2515 if (TEST_BIT (reg_set_in_block[bb->index], REGNO (x)))
2516 SET_BIT (bmap[bb->index], indx);
2518 else
2520 for (r = reg_set_table[REGNO (x)]; r != NULL; r = r->next)
2521 SET_BIT (bmap[BLOCK_NUM (r->insn)], indx);
2524 else
2526 if (REGNO (x) < FIRST_PSEUDO_REGISTER)
2528 FOR_EACH_BB (bb)
2529 if (TEST_BIT (reg_set_in_block[bb->index], REGNO (x)))
2530 RESET_BIT (bmap[bb->index], indx);
2532 else
2534 for (r = reg_set_table[REGNO (x)]; r != NULL; r = r->next)
2535 RESET_BIT (bmap[BLOCK_NUM (r->insn)], indx);
2539 return;
2541 case MEM:
2542 FOR_EACH_BB (bb)
2544 rtx list_entry = canon_modify_mem_list[bb->index];
2546 while (list_entry)
2548 rtx dest, dest_addr;
2550 if (CALL_P (XEXP (list_entry, 0)))
2552 if (set_p)
2553 SET_BIT (bmap[bb->index], indx);
2554 else
2555 RESET_BIT (bmap[bb->index], indx);
2556 break;
2558 /* LIST_ENTRY must be an INSN of some kind that sets memory.
2559 Examine each hunk of memory that is modified. */
2561 dest = XEXP (list_entry, 0);
2562 list_entry = XEXP (list_entry, 1);
2563 dest_addr = XEXP (list_entry, 0);
2565 if (canon_true_dependence (dest, GET_MODE (dest), dest_addr,
2566 x, rtx_addr_varies_p))
2568 if (set_p)
2569 SET_BIT (bmap[bb->index], indx);
2570 else
2571 RESET_BIT (bmap[bb->index], indx);
2572 break;
2574 list_entry = XEXP (list_entry, 1);
2578 x = XEXP (x, 0);
2579 goto repeat;
2581 case PC:
2582 case CC0: /*FIXME*/
2583 case CONST:
2584 case CONST_INT:
2585 case CONST_DOUBLE:
2586 case CONST_VECTOR:
2587 case SYMBOL_REF:
2588 case LABEL_REF:
2589 case ADDR_VEC:
2590 case ADDR_DIFF_VEC:
2591 return;
2593 default:
2594 break;
2597 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
2599 if (fmt[i] == 'e')
2601 /* If we are about to do the last recursive call
2602 needed at this level, change it into iteration.
2603 This function is called enough to be worth it. */
2604 if (i == 0)
2606 x = XEXP (x, i);
2607 goto repeat;
2610 compute_transp (XEXP (x, i), indx, bmap, set_p);
2612 else if (fmt[i] == 'E')
2613 for (j = 0; j < XVECLEN (x, i); j++)
2614 compute_transp (XVECEXP (x, i, j), indx, bmap, set_p);
2618 /* Top level routine to do the dataflow analysis needed by copy/const
2619 propagation. */
2621 static void
2622 compute_cprop_data (void)
2624 compute_local_properties (cprop_absaltered, cprop_pavloc, NULL, &set_hash_table);
2625 compute_available (cprop_pavloc, cprop_absaltered,
2626 cprop_avout, cprop_avin);
2629 /* Copy/constant propagation. */
2631 /* Maximum number of register uses in an insn that we handle. */
2632 #define MAX_USES 8
2634 /* Table of uses found in an insn.
2635 Allocated statically to avoid alloc/free complexity and overhead. */
2636 static struct reg_use reg_use_table[MAX_USES];
2638 /* Index into `reg_use_table' while building it. */
2639 static int reg_use_count;
2641 /* Set up a list of register numbers used in INSN. The found uses are stored
2642 in `reg_use_table'. `reg_use_count' is initialized to zero before entry,
2643 and contains the number of uses in the table upon exit.
2645 ??? If a register appears multiple times we will record it multiple times.
2646 This doesn't hurt anything but it will slow things down. */
2648 static void
2649 find_used_regs (rtx *xptr, void *data ATTRIBUTE_UNUSED)
2651 int i, j;
2652 enum rtx_code code;
2653 const char *fmt;
2654 rtx x = *xptr;
2656 /* repeat is used to turn tail-recursion into iteration since GCC
2657 can't do it when there's no return value. */
2658 repeat:
2659 if (x == 0)
2660 return;
2662 code = GET_CODE (x);
2663 if (REG_P (x))
2665 if (reg_use_count == MAX_USES)
2666 return;
2668 reg_use_table[reg_use_count].reg_rtx = x;
2669 reg_use_count++;
2672 /* Recursively scan the operands of this expression. */
2674 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
2676 if (fmt[i] == 'e')
2678 /* If we are about to do the last recursive call
2679 needed at this level, change it into iteration.
2680 This function is called enough to be worth it. */
2681 if (i == 0)
2683 x = XEXP (x, 0);
2684 goto repeat;
2687 find_used_regs (&XEXP (x, i), data);
2689 else if (fmt[i] == 'E')
2690 for (j = 0; j < XVECLEN (x, i); j++)
2691 find_used_regs (&XVECEXP (x, i, j), data);
2695 /* Try to replace all non-SET_DEST occurrences of FROM in INSN with TO.
2696 Returns nonzero if successful. */
2698 static int
2699 try_replace_reg (rtx from, rtx to, rtx insn)
2701 rtx note = find_reg_equal_equiv_note (insn);
2702 rtx src = 0;
2703 int success = 0;
2704 rtx set = single_set (insn);
2706 validate_replace_src_group (from, to, insn);
2707 if (num_changes_pending () && apply_change_group ())
2708 success = 1;
2710 /* Try to simplify SET_SRC if we have substituted a constant. */
2711 if (success && set && CONSTANT_P (to))
2713 src = simplify_rtx (SET_SRC (set));
2715 if (src)
2716 validate_change (insn, &SET_SRC (set), src, 0);
2719 /* If there is already a NOTE, update the expression in it with our
2720 replacement. */
2721 if (note != 0)
2722 XEXP (note, 0) = simplify_replace_rtx (XEXP (note, 0), from, to);
2724 if (!success && set && reg_mentioned_p (from, SET_SRC (set)))
2726 /* If above failed and this is a single set, try to simplify the source of
2727 the set given our substitution. We could perhaps try this for multiple
2728 SETs, but it probably won't buy us anything. */
2729 src = simplify_replace_rtx (SET_SRC (set), from, to);
2731 if (!rtx_equal_p (src, SET_SRC (set))
2732 && validate_change (insn, &SET_SRC (set), src, 0))
2733 success = 1;
2735 /* If we've failed to do replacement, have a single SET, don't already
2736 have a note, and have no special SET, add a REG_EQUAL note to not
2737 lose information. */
2738 if (!success && note == 0 && set != 0
2739 && GET_CODE (SET_DEST (set)) != ZERO_EXTRACT)
2740 note = set_unique_reg_note (insn, REG_EQUAL, copy_rtx (src));
2743 /* The REG_EQUAL note may get simplified into a bare register.
2744 We don't allow that, so remove the note. This case ought not
2745 to happen, because the code above ought to have synthesized a
2746 reg-reg move, but be on the safe side. */
2747 if (note && REG_P (XEXP (note, 0)))
2748 remove_note (insn, note);
2750 return success;
2753 /* Find a set of register REGNO that is available on entry to INSN's
2754 block. Returns NULL if no such set is found. */
2756 static struct expr *
2757 find_avail_set (int regno, rtx insn)
2759 /* SET1 contains the last set found that can be returned to the caller for
2760 use in a substitution. */
2761 struct expr *set1 = 0;
2763 /* Loops are not possible here. To get a loop we would need two sets
2764 available at the start of the block containing INSN. i.e. we would
2765 need two sets like this available at the start of the block:
2767 (set (reg X) (reg Y))
2768 (set (reg Y) (reg X))
2770 This cannot happen, since the set of (reg Y) would have killed the
2771 set of (reg X) making it unavailable at the start of this block. */
2772 while (1)
2774 rtx src;
2775 struct expr *set = lookup_set (regno, &set_hash_table);
2777 /* Find a set that is available at the start of the block
2778 which contains INSN. */
2779 while (set)
2781 if (TEST_BIT (cprop_avin[BLOCK_NUM (insn)], set->bitmap_index))
2782 break;
2783 set = next_set (regno, set);
2786 /* If no available set was found we've reached the end of the
2787 (possibly empty) copy chain. */
2788 if (set == 0)
2789 break;
2791 gcc_assert (GET_CODE (set->expr) == SET);
2793 src = SET_SRC (set->expr);
2795 /* We know the set is available.
2796 Now check that SRC is ANTLOC (i.e. none of the source operands
2797 have changed since the start of the block).
2799 If the source operand changed, we may still use it for the next
2800 iteration of this loop, but we may not use it for substitutions. */
2802 if (gcse_constant_p (src) || oprs_not_set_p (src, insn))
2803 set1 = set;
2805 /* If the source of the set is anything except a register, then
2806 we have reached the end of the copy chain. */
2807 if (! REG_P (src))
2808 break;
2810 /* Follow the copy chain, i.e. start another iteration of the loop
2811 and see if we have an available copy into SRC. */
2812 regno = REGNO (src);
2815 /* SET1 holds the last set that was available and anticipatable at
2816 INSN. */
2817 return set1;
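/* find_avail_set above follows a chain of available copies; e.g. with
   "z = 3; y = z; x = y" all available on entry to the block, a use of x
   can ultimately be replaced by 3.  A stand-alone sketch of the chain
   walk over a simplified map (hypothetical encoding: entries >= 1000
   stand for constants, -1 means "no available set"; the real code also
   checks that the source operands are unchanged since the block start,
   which is omitted here):  */
#if 0
#include <stdio.h>

/* toy_src[r] is the source of the set of register r that is available
   on entry to the block: another register, a constant, or -1.  */
static int toy_src[8] = { -1, -1, -1, -1, 1003, 2, 4, -1 };

static int
toy_follow_copy_chain (int regno)
{
  int best = -1;

  while (toy_src[regno] != -1)
    {
      int src = toy_src[regno];

      best = src;       /* Last usable source seen so far.  */
      if (src >= 1000)
        break;          /* Reached a constant: end of the chain.  */
      regno = src;      /* Otherwise follow the copy chain.  */
    }
  return best;
}

int
main (void)
{
  /* reg 6 -> reg 4 -> constant 1003, so a use of reg 6 folds to 1003;
     reg 5 -> reg 2, and reg 2 has no available set, so the best we can
     do for reg 5 is the copy from reg 2.  */
  printf ("reg 6 resolves to %d\n", toy_follow_copy_chain (6));
  printf ("reg 5 resolves to %d\n", toy_follow_copy_chain (5));
  return 0;
}
#endif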
2820 /* Subroutine of cprop_insn that tries to propagate constants into
2821 JUMP_INSNS. JUMP must be a conditional jump. If SETCC is non-NULL
2822 it is the instruction that immediately precedes JUMP, and must be a
2823 single SET of a register. FROM is what we will try to replace,
2824 SRC is the constant we will try to substitute for it. Returns nonzero
2825 if a change was made. */
2827 static int
2828 cprop_jump (basic_block bb, rtx setcc, rtx jump, rtx from, rtx src)
2830 rtx new, set_src, note_src;
2831 rtx set = pc_set (jump);
2832 rtx note = find_reg_equal_equiv_note (jump);
2834 if (note)
2836 note_src = XEXP (note, 0);
2837 if (GET_CODE (note_src) == EXPR_LIST)
2838 note_src = NULL_RTX;
2840 else note_src = NULL_RTX;
2842 /* Prefer REG_EQUAL notes except those containing EXPR_LISTs. */
2843 set_src = note_src ? note_src : SET_SRC (set);
2845 /* First substitute the SETCC condition into the JUMP instruction,
2846 then substitute the given values into this expanded JUMP. */
2847 if (setcc != NULL_RTX
2848 && !modified_between_p (from, setcc, jump)
2849 && !modified_between_p (src, setcc, jump))
2851 rtx setcc_src;
2852 rtx setcc_set = single_set (setcc);
2853 rtx setcc_note = find_reg_equal_equiv_note (setcc);
2854 setcc_src = (setcc_note && GET_CODE (XEXP (setcc_note, 0)) != EXPR_LIST)
2855 ? XEXP (setcc_note, 0) : SET_SRC (setcc_set);
2856 set_src = simplify_replace_rtx (set_src, SET_DEST (setcc_set),
2857 setcc_src);
2859 else
2860 setcc = NULL_RTX;
2862 new = simplify_replace_rtx (set_src, from, src);
2864 /* If no simplification can be made, then try the next register. */
2865 if (rtx_equal_p (new, SET_SRC (set)))
2866 return 0;
2868 /* If this is now a no-op delete it, otherwise this must be a valid insn. */
2869 if (new == pc_rtx)
2870 delete_insn (jump);
2871 else
2873 /* Ensure that the value computed inside the jump insn is equivalent
2874 to the one computed by setcc. */
2875 if (setcc && modified_in_p (new, setcc))
2876 return 0;
2877 if (! validate_change (jump, &SET_SRC (set), new, 0))
2879 /* When (some) constants are not valid in a comparison, and there
2880 are two registers to be replaced by constants before the entire
2881 comparison can be folded into a constant, we need to keep
2882 intermediate information in REG_EQUAL notes. For targets with
2883 separate compare insns, such notes are added by try_replace_reg.
2884 When we have a combined compare-and-branch instruction, however,
2885 we need to attach a note to the branch itself to make this
2886 optimization work. */
2888 if (!rtx_equal_p (new, note_src))
2889 set_unique_reg_note (jump, REG_EQUAL, copy_rtx (new));
2890 return 0;
2893 /* Remove REG_EQUAL note after simplification. */
2894 if (note_src)
2895 remove_note (jump, note);
2897 /* If this has turned into an unconditional jump,
2898 then put a barrier after it so that the unreachable
2899 code will be deleted. */
2900 if (GET_CODE (SET_SRC (set)) == LABEL_REF)
2901 emit_barrier_after (jump);
2904 #ifdef HAVE_cc0
2905 /* Delete the cc0 setter. */
2906 if (setcc != NULL && CC0_P (SET_DEST (single_set (setcc))))
2907 delete_insn (setcc);
2908 #endif
2910 run_jump_opt_after_gcse = 1;
2912 global_const_prop_count++;
2913 if (gcse_file != NULL)
2915 fprintf (gcse_file,
2916 "GLOBAL CONST-PROP: Replacing reg %d in jump_insn %d with constant ",
2917 REGNO (from), INSN_UID (jump));
2918 print_rtl (gcse_file, src);
2919 fprintf (gcse_file, "\n");
2921 purge_dead_edges (bb);
2923 return 1;
2926 static bool
2927 constprop_register (rtx insn, rtx from, rtx to, int alter_jumps)
2929 rtx sset;
2931 /* Check for reg or cc0 setting instructions followed by
2932 conditional branch instructions first. */
2933 if (alter_jumps
2934 && (sset = single_set (insn)) != NULL
2935 && NEXT_INSN (insn)
2936 && any_condjump_p (NEXT_INSN (insn)) && onlyjump_p (NEXT_INSN (insn)))
2938 rtx dest = SET_DEST (sset);
2939 if ((REG_P (dest) || CC0_P (dest))
2940 && cprop_jump (BLOCK_FOR_INSN (insn), insn, NEXT_INSN (insn), from, to))
2941 return 1;
2944 /* Handle normal insns next. */
2945 if (NONJUMP_INSN_P (insn)
2946 && try_replace_reg (from, to, insn))
2947 return 1;
2949 /* Try to propagate a CONST_INT into a conditional jump.
2950 We're pretty specific about what we will handle in this
2951 code; we can extend it as necessary over time.
2953 Right now the insn in question must look like
2954 (set (pc) (if_then_else ...)) */
2955 else if (alter_jumps && any_condjump_p (insn) && onlyjump_p (insn))
2956 return cprop_jump (BLOCK_FOR_INSN (insn), NULL, insn, from, to);
2957 return 0;
2960 /* Perform constant and copy propagation on INSN.
2961 The result is nonzero if a change was made. */
2963 static int
2964 cprop_insn (rtx insn, int alter_jumps)
2966 struct reg_use *reg_used;
2967 int changed = 0;
2968 rtx note;
2970 if (!INSN_P (insn))
2971 return 0;
2973 reg_use_count = 0;
2974 note_uses (&PATTERN (insn), find_used_regs, NULL);
2976 note = find_reg_equal_equiv_note (insn);
2978 /* We may win even when propagating constants into notes. */
2979 if (note)
2980 find_used_regs (&XEXP (note, 0), NULL);
2982 for (reg_used = &reg_use_table[0]; reg_use_count > 0;
2983 reg_used++, reg_use_count--)
2985 unsigned int regno = REGNO (reg_used->reg_rtx);
2986 rtx pat, src;
2987 struct expr *set;
2989 /* Ignore registers created by GCSE.
2990 We do this because ... */
2991 if (regno >= max_gcse_regno)
2992 continue;
2994 /* If the register has already been set in this block, there's
2995 nothing we can do. */
2996 if (! oprs_not_set_p (reg_used->reg_rtx, insn))
2997 continue;
2999 /* Find an assignment that sets reg_used and is available
3000 at the start of the block. */
3001 set = find_avail_set (regno, insn);
3002 if (! set)
3003 continue;
3005 pat = set->expr;
3006 /* ??? We might be able to handle PARALLELs. Later. */
3007 gcc_assert (GET_CODE (pat) == SET);
3009 src = SET_SRC (pat);
3011 /* Constant propagation. */
3012 if (gcse_constant_p (src))
3014 if (constprop_register (insn, reg_used->reg_rtx, src, alter_jumps))
3016 changed = 1;
3017 global_const_prop_count++;
3018 if (gcse_file != NULL)
3020 fprintf (gcse_file, "GLOBAL CONST-PROP: Replacing reg %d in ", regno);
3021 fprintf (gcse_file, "insn %d with constant ", INSN_UID (insn));
3022 print_rtl (gcse_file, src);
3023 fprintf (gcse_file, "\n");
3025 if (INSN_DELETED_P (insn))
3026 return 1;
3029 else if (REG_P (src)
3030 && REGNO (src) >= FIRST_PSEUDO_REGISTER
3031 && REGNO (src) != regno)
3033 if (try_replace_reg (reg_used->reg_rtx, src, insn))
3035 changed = 1;
3036 global_copy_prop_count++;
3037 if (gcse_file != NULL)
3039 fprintf (gcse_file, "GLOBAL COPY-PROP: Replacing reg %d in insn %d",
3040 regno, INSN_UID (insn));
3041 fprintf (gcse_file, " with reg %d\n", REGNO (src));
3044 /* The original insn setting reg_used may or may not now be
3045 deletable. We leave the deletion to flow. */
3046 /* FIXME: If it turns out that the insn isn't deletable,
3047 then we may have unnecessarily extended register lifetimes
3048 and made things worse. */
3053 return changed;
3056 /* Like find_used_regs, but avoid recording uses that appear in
3057 input-output contexts such as zero_extract or pre_dec. This
3058 restricts the cases we consider to those for which local cprop
3059 can legitimately make replacements. */
3061 static void
3062 local_cprop_find_used_regs (rtx *xptr, void *data)
3064 rtx x = *xptr;
3066 if (x == 0)
3067 return;
3069 switch (GET_CODE (x))
3071 case ZERO_EXTRACT:
3072 case SIGN_EXTRACT:
3073 case STRICT_LOW_PART:
3074 return;
3076 case PRE_DEC:
3077 case PRE_INC:
3078 case POST_DEC:
3079 case POST_INC:
3080 case PRE_MODIFY:
3081 case POST_MODIFY:
3082 /* Can only legitimately appear this early in the context of
3083 stack pushes for function arguments, but handle all of the
3084 codes nonetheless. */
3085 return;
3087 case SUBREG:
3088 /* Setting a subreg of a register larger than word_mode leaves
3089 the non-written words unchanged. */
3090 if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) > BITS_PER_WORD)
3091 return;
3092 break;
3094 default:
3095 break;
3098 find_used_regs (xptr, data);
3101 /* LIBCALL_SP is a zero-terminated array of insns at the end of a libcall;
3102 their REG_EQUAL notes need updating. */
3104 static bool
3105 do_local_cprop (rtx x, rtx insn, int alter_jumps, rtx *libcall_sp)
3107 rtx newreg = NULL, newcnst = NULL;
3109 /* Rule out USE instructions and ASM statements as we don't want to
3110 change the hard registers mentioned. */
3111 if (REG_P (x)
3112 && (REGNO (x) >= FIRST_PSEUDO_REGISTER
3113 || (GET_CODE (PATTERN (insn)) != USE
3114 && asm_noperands (PATTERN (insn)) < 0)))
3116 cselib_val *val = cselib_lookup (x, GET_MODE (x), 0);
3117 struct elt_loc_list *l;
3119 if (!val)
3120 return false;
3121 for (l = val->locs; l; l = l->next)
3123 rtx this_rtx = l->loc;
3124 rtx note;
3126 /* Don't CSE non-constant values out of libcall blocks. */
3127 if (l->in_libcall && ! CONSTANT_P (this_rtx))
3128 continue;
3130 if (gcse_constant_p (this_rtx))
3131 newcnst = this_rtx;
3132 if (REG_P (this_rtx) && REGNO (this_rtx) >= FIRST_PSEUDO_REGISTER
3133 /* Don't copy propagate if the insn has an attached REG_EQUIV note.
3134 At this point only function parameters should have
3135 REG_EQUIV notes, and if the argument slot is used somewhere
3136 explicitly, it means the address of the parameter has been
3137 taken, so we should not extend the lifetime of the pseudo. */
3138 && (!(note = find_reg_note (l->setting_insn, REG_EQUIV, NULL_RTX))
3139 || ! MEM_P (XEXP (note, 0))))
3140 newreg = this_rtx;
3142 if (newcnst && constprop_register (insn, x, newcnst, alter_jumps))
3144 /* If we find a case where we can't make the retval REG_EQUAL notes
3145 match the new register, we either have to abandon this replacement
3146 or fix delete_trivially_dead_insns to preserve the setting insn,
3147 or make it delete the REG_EQUAL note, and fix up all passes that
3148 require the REG_EQUAL note there. */
3149 bool adjusted;
3151 adjusted = adjust_libcall_notes (x, newcnst, insn, libcall_sp);
3152 gcc_assert (adjusted);
3154 if (gcse_file != NULL)
3156 fprintf (gcse_file, "LOCAL CONST-PROP: Replacing reg %d in ",
3157 REGNO (x));
3158 fprintf (gcse_file, "insn %d with constant ",
3159 INSN_UID (insn));
3160 print_rtl (gcse_file, newcnst);
3161 fprintf (gcse_file, "\n");
3163 local_const_prop_count++;
3164 return true;
3166 else if (newreg && newreg != x && try_replace_reg (x, newreg, insn))
3168 adjust_libcall_notes (x, newreg, insn, libcall_sp);
3169 if (gcse_file != NULL)
3171 fprintf (gcse_file,
3172 "LOCAL COPY-PROP: Replacing reg %d in insn %d",
3173 REGNO (x), INSN_UID (insn));
3174 fprintf (gcse_file, " with reg %d\n", REGNO (newreg));
3176 local_copy_prop_count++;
3177 return true;
3180 return false;
3183 /* LIBCALL_SP is a zero-terminated array of insns at the end of a libcall;
3184 their REG_EQUAL notes need updating to reflect that OLDREG has been
3185 replaced with NEWVAL in INSN. Return true if all substitutions could
3186 be made. */
3187 static bool
3188 adjust_libcall_notes (rtx oldreg, rtx newval, rtx insn, rtx *libcall_sp)
3190 rtx end;
3192 while ((end = *libcall_sp++))
3194 rtx note = find_reg_equal_equiv_note (end);
3196 if (! note)
3197 continue;
3199 if (REG_P (newval))
3201 if (reg_set_between_p (newval, PREV_INSN (insn), end))
3205 note = find_reg_equal_equiv_note (end);
3206 if (! note)
3207 continue;
3208 if (reg_mentioned_p (newval, XEXP (note, 0)))
3209 return false;
3211 while ((end = *libcall_sp++));
3212 return true;
3215 XEXP (note, 0) = simplify_replace_rtx (XEXP (note, 0), oldreg, newval);
3216 insn = end;
3218 return true;
3221 #define MAX_NESTED_LIBCALLS 9
3223 static void
3224 local_cprop_pass (int alter_jumps)
3226 rtx insn;
3227 struct reg_use *reg_used;
3228 rtx libcall_stack[MAX_NESTED_LIBCALLS + 1], *libcall_sp;
3229 bool changed = false;
3231 cselib_init (false);
3232 libcall_sp = &libcall_stack[MAX_NESTED_LIBCALLS];
3233 *libcall_sp = 0;
3234 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3236 if (INSN_P (insn))
3238 rtx note = find_reg_note (insn, REG_LIBCALL, NULL_RTX);
3240 if (note)
3242 gcc_assert (libcall_sp != libcall_stack);
3243 *--libcall_sp = XEXP (note, 0);
3245 note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
3246 if (note)
3247 libcall_sp++;
3248 note = find_reg_equal_equiv_note (insn);
3251 reg_use_count = 0;
3252 note_uses (&PATTERN (insn), local_cprop_find_used_regs, NULL);
3253 if (note)
3254 local_cprop_find_used_regs (&XEXP (note, 0), NULL);
3256 for (reg_used = &reg_use_table[0]; reg_use_count > 0;
3257 reg_used++, reg_use_count--)
3258 if (do_local_cprop (reg_used->reg_rtx, insn, alter_jumps,
3259 libcall_sp))
3261 changed = true;
3262 break;
3264 if (INSN_DELETED_P (insn))
3265 break;
3267 while (reg_use_count);
3269 cselib_process_insn (insn);
3271 cselib_finish ();
3272 /* Global analysis may get into infinite loops for unreachable blocks. */
3273 if (changed && alter_jumps)
3275 delete_unreachable_blocks ();
3276 free_reg_set_mem ();
3277 alloc_reg_set_mem (max_reg_num ());
3278 compute_sets (get_insns ());
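/* The libcall bookkeeping above is a small, downward-growing stack: a
   REG_LIBCALL note pushes the libcall's terminating insn, a REG_RETVAL
   note pops it, so libcall_sp always lists the enclosing libcalls whose
   REG_EQUAL notes may still need adjusting.  A sketch of the same
   zero-terminated stack discipline (hypothetical string payloads):  */
#if 0
#include <stdio.h>

#define TOY_MAX_NESTED 9

int
main (void)
{
  const char *stack[TOY_MAX_NESTED + 1];
  const char **sp, **p;

  sp = &stack[TOY_MAX_NESTED];
  *sp = 0;                      /* Zero terminator, as in local_cprop_pass.  */

  *--sp = "outer libcall end";  /* REG_LIBCALL note seen: push.  */
  *--sp = "inner libcall end";  /* Nested REG_LIBCALL note: push.  */

  for (p = sp; *p; p++)         /* Walk the enclosing libcalls.  */
    printf ("enclosing: %s\n", *p);

  sp++;                         /* REG_RETVAL note seen: pop the inner one.  */
  (void) sp;
  return 0;
}
#endif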
3282 /* Forward propagate copies. This includes copies and constants. Return
3283 nonzero if a change was made. */
3285 static int
3286 cprop (int alter_jumps)
3288 int changed;
3289 basic_block bb;
3290 rtx insn;
3292 /* Note we start at block 1. */
3293 if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
3295 if (gcse_file != NULL)
3296 fprintf (gcse_file, "\n");
3297 return 0;
3300 changed = 0;
3301 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb->next_bb, EXIT_BLOCK_PTR, next_bb)
3303 /* Reset tables used to keep track of what's still valid [since the
3304 start of the block]. */
3305 reset_opr_set_tables ();
3307 for (insn = BB_HEAD (bb);
3308 insn != NULL && insn != NEXT_INSN (BB_END (bb));
3309 insn = NEXT_INSN (insn))
3310 if (INSN_P (insn))
3312 changed |= cprop_insn (insn, alter_jumps);
3314 /* Keep track of everything modified by this insn. */
3315 /* ??? Need to be careful w.r.t. mods done to INSN. Don't
3316 call mark_oprs_set if we turned the insn into a NOTE. */
3317 if (! NOTE_P (insn))
3318 mark_oprs_set (insn);
3322 if (gcse_file != NULL)
3323 fprintf (gcse_file, "\n");
3325 return changed;
3328 /* Similar to get_condition, only the resulting condition must be
3329 valid at JUMP, instead of at EARLIEST.
3331 This differs from noce_get_condition in ifcvt.c in that we prefer not to
3332 settle for the condition variable in the jump instruction being integral.
3333 We prefer to be able to record the value of a user variable, rather than
3334 the value of a temporary used in a condition. This could be solved by
3335 recording the value of *every* register scanned by canonicalize_condition,
3336 but this would require some code reorganization. */
3338 static rtx
3339 fis_get_condition (rtx jump)
3341 return get_condition (jump, NULL, false, true);
3344 /* Check the comparison COND to see if we can safely form an implicit set from
3345 it. COND is either an EQ or NE comparison. */
3347 static bool
3348 implicit_set_cond_p (rtx cond)
3350 enum machine_mode mode = GET_MODE (XEXP (cond, 0));
3351 rtx cst = XEXP (cond, 1);
3353 /* We can't perform this optimization if either operand might be or might
3354 contain a signed zero. */
3355 if (HONOR_SIGNED_ZEROS (mode))
3357 /* It is sufficient to check if CST is or contains a zero. We must
3358 handle float, complex, and vector. If any subpart is a zero, then
3359 the optimization can't be performed. */
3360 /* ??? The complex and vector checks are not implemented yet. We just
3361 always return zero for them. */
3362 if (GET_CODE (cst) == CONST_DOUBLE)
3364 REAL_VALUE_TYPE d;
3365 REAL_VALUE_FROM_CONST_DOUBLE (d, cst);
3366 if (REAL_VALUES_EQUAL (d, dconst0))
3367 return 0;
3369 else
3370 return 0;
3373 return gcse_constant_p (cst);
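/* The HONOR_SIGNED_ZEROS check above exists because a floating-point
   equality does not pin down the value: -0.0 compares equal to +0.0, so
   "if (x == 0.0)" must not be turned into the implicit set x = +0.0.  A
   minimal stand-alone demonstration (assuming IEEE arithmetic):  */
#if 0
#include <stdio.h>

int
main (void)
{
  double x = -0.0;

  if (x == 0.0)
    /* The comparison succeeds, yet 1.0 / x is -inf rather than +inf,
       so x is distinguishable from +0.0.  */
    printf ("x == 0.0, but 1.0 / x = %g\n", 1.0 / x);
  return 0;
}
#endif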
3376 /* Find the implicit sets of a function. An "implicit set" is a constraint
3377 on the value of a variable, implied by a conditional jump. For example,
3378 following "if (x == 2)", the then branch may be optimized as though the
3379 conditional performed an "explicit set", in this example, "x = 2". This
3380 function records the set patterns that are implicit at the start of each
3381 basic block. */
3383 static void
3384 find_implicit_sets (void)
3386 basic_block bb, dest;
3387 unsigned int count;
3388 rtx cond, new;
3390 count = 0;
3391 FOR_EACH_BB (bb)
3392 /* Check for more than one successor. */
3393 if (EDGE_COUNT (bb->succs) > 1)
3395 cond = fis_get_condition (BB_END (bb));
3397 if (cond
3398 && (GET_CODE (cond) == EQ || GET_CODE (cond) == NE)
3399 && REG_P (XEXP (cond, 0))
3400 && REGNO (XEXP (cond, 0)) >= FIRST_PSEUDO_REGISTER
3401 && implicit_set_cond_p (cond))
3403 dest = GET_CODE (cond) == EQ ? BRANCH_EDGE (bb)->dest
3404 : FALLTHRU_EDGE (bb)->dest;
3406 if (dest && EDGE_COUNT (dest->preds) == 1
3407 && dest != EXIT_BLOCK_PTR)
3409 new = gen_rtx_SET (VOIDmode, XEXP (cond, 0),
3410 XEXP (cond, 1));
3411 implicit_sets[dest->index] = new;
3412 if (gcse_file)
3414 fprintf(gcse_file, "Implicit set of reg %d in ",
3415 REGNO (XEXP (cond, 0)));
3416 fprintf(gcse_file, "basic block %d\n", dest->index);
3418 count++;
3423 if (gcse_file)
3424 fprintf (gcse_file, "Found %d implicit sets\n", count);
3427 /* Perform one copy/constant propagation pass.
3428 PASS is the pass count. If CPROP_JUMPS is true, perform constant
3429 propagation into conditional jumps. If BYPASS_JUMPS is true,
3430 perform conditional jump bypassing optimizations. */
3432 static int
3433 one_cprop_pass (int pass, int cprop_jumps, int bypass_jumps)
3435 int changed = 0;
3437 global_const_prop_count = local_const_prop_count = 0;
3438 global_copy_prop_count = local_copy_prop_count = 0;
3440 local_cprop_pass (cprop_jumps);
3442 /* Determine implicit sets. */
3443 implicit_sets = xcalloc (last_basic_block, sizeof (rtx));
3444 find_implicit_sets ();
3446 alloc_hash_table (max_cuid, &set_hash_table, 1);
3447 compute_hash_table (&set_hash_table);
3449 /* Free implicit_sets before peak usage. */
3450 free (implicit_sets);
3451 implicit_sets = NULL;
3453 if (gcse_file)
3454 dump_hash_table (gcse_file, "SET", &set_hash_table);
3455 if (set_hash_table.n_elems > 0)
3457 alloc_cprop_mem (last_basic_block, set_hash_table.n_elems);
3458 compute_cprop_data ();
3459 changed = cprop (cprop_jumps);
3460 if (bypass_jumps)
3461 changed |= bypass_conditional_jumps ();
3462 free_cprop_mem ();
3465 free_hash_table (&set_hash_table);
3467 if (gcse_file)
3469 fprintf (gcse_file, "CPROP of %s, pass %d: %d bytes needed, ",
3470 current_function_name (), pass, bytes_used);
3471 fprintf (gcse_file, "%d local const props, %d local copy props\n\n",
3472 local_const_prop_count, local_copy_prop_count);
3473 fprintf (gcse_file, "%d global const props, %d global copy props\n\n",
3474 global_const_prop_count, global_copy_prop_count);
3476 /* Global analysis may get into infinite loops for unreachable blocks. */
3477 if (changed && cprop_jumps)
3478 delete_unreachable_blocks ();
3480 return changed;
3483 /* Bypass conditional jumps. */
3485 /* The value of last_basic_block at the beginning of the jump_bypass
3486 pass. The use of redirect_edge_and_branch_force may introduce new
3487 basic blocks, but the data flow analysis is only valid for basic
3488 block indices less than bypass_last_basic_block. */
3490 static int bypass_last_basic_block;
3492 /* Find a set of REGNO to a constant that is available at the end of basic
3493 block BB. Returns NULL if no such set is found. Based heavily upon
3494 find_avail_set. */
3496 static struct expr *
3497 find_bypass_set (int regno, int bb)
3499 struct expr *result = 0;
3501 for (;;)
3503 rtx src;
3504 struct expr *set = lookup_set (regno, &set_hash_table);
3506 while (set)
3508 if (TEST_BIT (cprop_avout[bb], set->bitmap_index))
3509 break;
3510 set = next_set (regno, set);
3513 if (set == 0)
3514 break;
3516 gcc_assert (GET_CODE (set->expr) == SET);
3518 src = SET_SRC (set->expr);
3519 if (gcse_constant_p (src))
3520 result = set;
3522 if (! REG_P (src))
3523 break;
3525 regno = REGNO (src);
3527 return result;
3531 /* Subroutine of bypass_block that checks whether a pseudo is killed by
3532 any of the instructions inserted on an edge. Jump bypassing places
3533 condition code setters on CFG edges using insert_insn_on_edge. This
3534 function is required to check that our data flow analysis is still
3535 valid prior to commit_edge_insertions. */
3537 static bool
3538 reg_killed_on_edge (rtx reg, edge e)
3540 rtx insn;
3542 for (insn = e->insns.r; insn; insn = NEXT_INSN (insn))
3543 if (INSN_P (insn) && reg_set_p (reg, insn))
3544 return true;
3546 return false;
3549 /* Subroutine of bypass_conditional_jumps that attempts to bypass the given
3550 basic block BB which has more than one predecessor. If not NULL, SETCC
3551 is the first instruction of BB, which is immediately followed by JUMP_INSN
3552 JUMP. Otherwise, SETCC is NULL, and JUMP is the first insn of BB.
3553 Returns nonzero if a change was made.
3555 During the jump bypassing pass, we may place copies of SETCC instructions
3556 on CFG edges. The following routine must be careful to pay attention to
3557 these inserted insns when performing its transformations. */
3559 static int
3560 bypass_block (basic_block bb, rtx setcc, rtx jump)
3562 rtx insn, note;
3563 edge e, edest;
3564 int i, change;
3565 int may_be_loop_header;
3566 unsigned removed_p;
3567 edge_iterator ei;
3569 insn = (setcc != NULL) ? setcc : jump;
3571 /* Determine set of register uses in INSN. */
3572 reg_use_count = 0;
3573 note_uses (&PATTERN (insn), find_used_regs, NULL);
3574 note = find_reg_equal_equiv_note (insn);
3575 if (note)
3576 find_used_regs (&XEXP (note, 0), NULL);
3578 may_be_loop_header = false;
3579 FOR_EACH_EDGE (e, ei, bb->preds)
3580 if (e->flags & EDGE_DFS_BACK)
3582 may_be_loop_header = true;
3583 break;
3586 change = 0;
3587 for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
3589 removed_p = 0;
3591 if (e->flags & EDGE_COMPLEX)
3593 ei_next (&ei);
3594 continue;
3597 /* We can't redirect edges from new basic blocks. */
3598 if (e->src->index >= bypass_last_basic_block)
3600 ei_next (&ei);
3601 continue;
3604 /* The irreducible loops created by redirecting edges entering the
3605 loop from outside would decrease the effectiveness of some of the
3606 following optimizations, so prevent this. */
3607 if (may_be_loop_header
3608 && !(e->flags & EDGE_DFS_BACK))
3610 ei_next (&ei);
3611 continue;
3614 for (i = 0; i < reg_use_count; i++)
3616 struct reg_use *reg_used = &reg_use_table[i];
3617 unsigned int regno = REGNO (reg_used->reg_rtx);
3618 basic_block dest, old_dest;
3619 struct expr *set;
3620 rtx src, new;
3622 if (regno >= max_gcse_regno)
3623 continue;
3625 set = find_bypass_set (regno, e->src->index);
3627 if (! set)
3628 continue;
3630 /* Check the data flow is valid after edge insertions. */
3631 if (e->insns.r && reg_killed_on_edge (reg_used->reg_rtx, e))
3632 continue;
3634 src = SET_SRC (pc_set (jump));
3636 if (setcc != NULL)
3637 src = simplify_replace_rtx (src,
3638 SET_DEST (PATTERN (setcc)),
3639 SET_SRC (PATTERN (setcc)));
3641 new = simplify_replace_rtx (src, reg_used->reg_rtx,
3642 SET_SRC (set->expr));
3644 /* Jump bypassing may have already placed instructions on
3645 edges of the CFG. We can't bypass an outgoing edge that
3646 has instructions associated with it, as these insns won't
3647 get executed if the incoming edge is redirected. */
3649 if (new == pc_rtx)
3651 edest = FALLTHRU_EDGE (bb);
3652 dest = edest->insns.r ? NULL : edest->dest;
3654 else if (GET_CODE (new) == LABEL_REF)
3656 edge_iterator ei2;
3658 dest = BLOCK_FOR_INSN (XEXP (new, 0));
3659 /* Don't bypass edges containing instructions. */
3660 FOR_EACH_EDGE (edest, ei2, bb->succs)
3661 if (edest->dest == dest && edest->insns.r)
3663 dest = NULL;
3664 break;
3667 else
3668 dest = NULL;
3670 /* Avoid unification of the edge with other edges from the original
3671 branch. We would end up emitting the instruction on "both"
3672 edges. */
3674 if (dest && setcc && !CC0_P (SET_DEST (PATTERN (setcc))))
3676 edge e2;
3677 edge_iterator ei2;
3679 FOR_EACH_EDGE (e2, ei2, e->src->succs)
3680 if (e2->dest == dest)
3682 dest = NULL;
3683 break;
3687 old_dest = e->dest;
3688 if (dest != NULL
3689 && dest != old_dest
3690 && dest != EXIT_BLOCK_PTR)
3692 redirect_edge_and_branch_force (e, dest);
3694 /* Copy the register setter to the redirected edge.
3695 Don't copy CC0 setters, as CC0 is dead after jump. */
3696 if (setcc)
3698 rtx pat = PATTERN (setcc);
3699 if (!CC0_P (SET_DEST (pat)))
3700 insert_insn_on_edge (copy_insn (pat), e);
3703 if (gcse_file != NULL)
3705 fprintf (gcse_file, "JUMP-BYPASS: Proved reg %d "
3706 "in jump_insn %d equals constant ",
3707 regno, INSN_UID (jump));
3708 print_rtl (gcse_file, SET_SRC (set->expr));
3709 fprintf (gcse_file, "\nBypass edge from %d->%d to %d\n",
3710 e->src->index, old_dest->index, dest->index);
3712 change = 1;
3713 removed_p = 1;
3714 break;
3717 if (!removed_p)
3718 ei_next (&ei);
3720 return change;
3723 /* Find basic blocks with more than one predecessor that only contain a
3724 single conditional jump. If the result of the comparison is known at
3725 compile-time from any incoming edge, redirect that edge to the
3726 appropriate target. Returns nonzero if a change was made.
3728 This function is now mis-named, because we also handle indirect jumps. */
3730 static int
3731 bypass_conditional_jumps (void)
3733 basic_block bb;
3734 int changed;
3735 rtx setcc;
3736 rtx insn;
3737 rtx dest;
3739 /* Note we start at block 1. */
3740 if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
3741 return 0;
3743 bypass_last_basic_block = last_basic_block;
3744 mark_dfs_back_edges ();
3746 changed = 0;
3747 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb->next_bb,
3748 EXIT_BLOCK_PTR, next_bb)
3750 /* Check for more than one predecessor. */
3751 if (EDGE_COUNT (bb->preds) > 1)
3753 setcc = NULL_RTX;
3754 for (insn = BB_HEAD (bb);
3755 insn != NULL && insn != NEXT_INSN (BB_END (bb));
3756 insn = NEXT_INSN (insn))
3757 if (NONJUMP_INSN_P (insn))
3759 if (setcc)
3760 break;
3761 if (GET_CODE (PATTERN (insn)) != SET)
3762 break;
3764 dest = SET_DEST (PATTERN (insn));
3765 if (REG_P (dest) || CC0_P (dest))
3766 setcc = insn;
3767 else
3768 break;
3770 else if (JUMP_P (insn))
3772 if ((any_condjump_p (insn) || computed_jump_p (insn))
3773 && onlyjump_p (insn))
3774 changed |= bypass_block (bb, setcc, insn);
3775 break;
3777 else if (INSN_P (insn))
3778 break;
3782 /* If we bypassed any register setting insns, we inserted a
3783 copy on the redirected edge. These need to be committed. */
3784 if (changed)
3785 commit_edge_insertions ();
3787 return changed;
3790 /* Compute PRE+LCM working variables. */
3792 /* Local properties of expressions. */
3793 /* Nonzero for expressions that are transparent in the block. */
3794 static sbitmap *transp;
3796 /* Nonzero for expressions that are transparent at the end of the block.
3797 This is only zero for expressions killed by an abnormal critical edge
3798 created by a call. */
3799 static sbitmap *transpout;
3801 /* Nonzero for expressions that are computed (available) in the block. */
3802 static sbitmap *comp;
3804 /* Nonzero for expressions that are locally anticipatable in the block. */
3805 static sbitmap *antloc;
3807 /* Nonzero for expressions where this block is an optimal computation
3808 point. */
3809 static sbitmap *pre_optimal;
3811 /* Nonzero for expressions which are redundant in a particular block. */
3812 static sbitmap *pre_redundant;
3814 /* Nonzero for expressions which should be inserted on a specific edge. */
3815 static sbitmap *pre_insert_map;
3817 /* Nonzero for expressions which should be deleted in a specific block. */
3818 static sbitmap *pre_delete_map;
3820 /* Contains the edge_list returned by pre_edge_lcm. */
3821 static struct edge_list *edge_list;
3823 /* Redundant insns. */
3824 static sbitmap pre_redundant_insns;
3826 /* Allocate vars used for PRE analysis. */
3828 static void
3829 alloc_pre_mem (int n_blocks, int n_exprs)
3831 transp = sbitmap_vector_alloc (n_blocks, n_exprs);
3832 comp = sbitmap_vector_alloc (n_blocks, n_exprs);
3833 antloc = sbitmap_vector_alloc (n_blocks, n_exprs);
3835 pre_optimal = NULL;
3836 pre_redundant = NULL;
3837 pre_insert_map = NULL;
3838 pre_delete_map = NULL;
3839 ae_kill = sbitmap_vector_alloc (n_blocks, n_exprs);
3841 /* pre_insert and pre_delete are allocated later. */
3844 /* Free vars used for PRE analysis. */
3846 static void
3847 free_pre_mem (void)
3849 sbitmap_vector_free (transp);
3850 sbitmap_vector_free (comp);
3852 /* ANTLOC and AE_KILL are freed just after pre_lcm finishes. */
3854 if (pre_optimal)
3855 sbitmap_vector_free (pre_optimal);
3856 if (pre_redundant)
3857 sbitmap_vector_free (pre_redundant);
3858 if (pre_insert_map)
3859 sbitmap_vector_free (pre_insert_map);
3860 if (pre_delete_map)
3861 sbitmap_vector_free (pre_delete_map);
3863 transp = comp = NULL;
3864 pre_optimal = pre_redundant = pre_insert_map = pre_delete_map = NULL;
3867 /* Top level routine to do the dataflow analysis needed by PRE. */
3869 static void
3870 compute_pre_data (void)
3872 sbitmap trapping_expr;
3873 basic_block bb;
3874 unsigned int ui;
3876 compute_local_properties (transp, comp, antloc, &expr_hash_table);
3877 sbitmap_vector_zero (ae_kill, last_basic_block);
3879 /* Collect expressions which might trap. */
3880 trapping_expr = sbitmap_alloc (expr_hash_table.n_elems);
3881 sbitmap_zero (trapping_expr);
3882 for (ui = 0; ui < expr_hash_table.size; ui++)
3884 struct expr *e;
3885 for (e = expr_hash_table.table[ui]; e != NULL; e = e->next_same_hash)
3886 if (may_trap_p (e->expr))
3887 SET_BIT (trapping_expr, e->bitmap_index);
3890 /* Compute ae_kill for each basic block using:
3892 ~(TRANSP | COMP)  */
3895 FOR_EACH_BB (bb)
3897 edge e;
3898 edge_iterator ei;
3900 /* If the current block is the destination of an abnormal edge, we
3901 kill all trapping expressions because we won't be able to properly
3902 place the instruction on the edge. So make them neither
3903 anticipatable nor transparent. This is fairly conservative. */
3904 FOR_EACH_EDGE (e, ei, bb->preds)
3905 if (e->flags & EDGE_ABNORMAL)
3907 sbitmap_difference (antloc[bb->index], antloc[bb->index], trapping_expr);
3908 sbitmap_difference (transp[bb->index], transp[bb->index], trapping_expr);
3909 break;
3912 sbitmap_a_or_b (ae_kill[bb->index], transp[bb->index], comp[bb->index]);
3913 sbitmap_not (ae_kill[bb->index], ae_kill[bb->index]);
3916 edge_list = pre_edge_lcm (gcse_file, expr_hash_table.n_elems, transp, comp, antloc,
3917 ae_kill, &pre_insert_map, &pre_delete_map);
3918 sbitmap_vector_free (antloc);
3919 antloc = NULL;
3920 sbitmap_vector_free (ae_kill);
3921 ae_kill = NULL;
3922 sbitmap_free (trapping_expr);
3925 /* PRE utilities */
3927 /* Return nonzero if an occurrence of expression EXPR in OCCR_BB would reach
3928 block BB.
3930 VISITED is a pointer to a working buffer for tracking which BB's have
3931 been visited. It is NULL for the top-level call.
3933 We treat reaching expressions that go through blocks containing the same
3934 reaching expression as "not reaching". E.g. if EXPR is generated in blocks
3935 2 and 3, INSN is in block 4, and 2->3->4, we treat the expression in block
3936 2 as not reaching. The intent is to improve the probability of finding
3937 only one reaching expression and to reduce register lifetimes by picking
3938 the closest such expression. */
3940 static int
3941 pre_expr_reaches_here_p_work (basic_block occr_bb, struct expr *expr, basic_block bb, char *visited)
3943 edge pred;
3944 edge_iterator ei;
3946 FOR_EACH_EDGE (pred, ei, bb->preds)
3948 basic_block pred_bb = pred->src;
3950 if (pred->src == ENTRY_BLOCK_PTR
3951 /* Has this predecessor already been visited? */
3952 || visited[pred_bb->index])
3953 ;/* Nothing to do. */
3955 /* Does this predecessor generate this expression? */
3956 else if (TEST_BIT (comp[pred_bb->index], expr->bitmap_index))
3958 /* Is this the occurrence we're looking for?
3959 Note that there's only one generating occurrence per block
3960 so we just need to check the block number. */
3961 if (occr_bb == pred_bb)
3962 return 1;
3964 visited[pred_bb->index] = 1;
3966 /* Ignore this predecessor if it kills the expression. */
3967 else if (! TEST_BIT (transp[pred_bb->index], expr->bitmap_index))
3968 visited[pred_bb->index] = 1;
3970 /* Neither gen nor kill. */
3971 else
3973 visited[pred_bb->index] = 1;
3974 if (pre_expr_reaches_here_p_work (occr_bb, expr, pred_bb, visited))
3975 return 1;
3979 /* All paths have been checked. */
3980 return 0;
3983 /* The wrapper for pre_expr_reaches_here_p_work that ensures that any
3984 memory allocated for that function is returned. */
3986 static int
3987 pre_expr_reaches_here_p (basic_block occr_bb, struct expr *expr, basic_block bb)
3989 int rval;
3990 char *visited = xcalloc (last_basic_block, 1);
3992 rval = pre_expr_reaches_here_p_work (occr_bb, expr, bb, visited);
3994 free (visited);
3995 return rval;
3999 /* Given an expr, generate RTL which we can insert at the end of a BB,
4000 or on an edge. Set the block number of any insns generated to
4001 the value of BB. */
4003 static rtx
4004 process_insert_insn (struct expr *expr)
4006 rtx reg = expr->reaching_reg;
4007 rtx exp = copy_rtx (expr->expr);
4008 rtx pat;
4010 start_sequence ();
4012 /* If the expression is something that's an operand, like a constant,
4013 just copy it to a register. */
4014 if (general_operand (exp, GET_MODE (reg)))
4015 emit_move_insn (reg, exp);
4017 /* Otherwise, make a new insn to compute this expression and make sure the
4018 insn will be recognized (this also adds any needed CLOBBERs). Copy the
4019 expression to make sure we don't have any sharing issues. */
4020 else
4022 rtx insn = emit_insn (gen_rtx_SET (VOIDmode, reg, exp));
4024 if (insn_invalid_p (insn))
4025 gcc_unreachable ();
4029 pat = get_insns ();
4030 end_sequence ();
4032 return pat;
4035 /* Add EXPR to the end of basic block BB.
4037 This is used by both PRE and code hoisting.
4039 For PRE, we want to verify that the expr is either transparent
4040 or locally anticipatable in the target block. This check makes
4041 no sense for code hoisting. */
4043 static void
4044 insert_insn_end_bb (struct expr *expr, basic_block bb, int pre)
4046 rtx insn = BB_END (bb);
4047 rtx new_insn;
4048 rtx reg = expr->reaching_reg;
4049 int regno = REGNO (reg);
4050 rtx pat, pat_end;
4052 pat = process_insert_insn (expr);
4053 gcc_assert (pat && INSN_P (pat));
4055 pat_end = pat;
4056 while (NEXT_INSN (pat_end) != NULL_RTX)
4057 pat_end = NEXT_INSN (pat_end);
4059 /* If the last insn is a jump, insert EXPR in front [taking care to
4060 handle cc0, etc. properly]. Similarly we need to take care of trapping
4061 instructions in the presence of non-call exceptions. */
4063 if (JUMP_P (insn)
4064 || (NONJUMP_INSN_P (insn)
4065 && (EDGE_COUNT (bb->succs) > 1
4066 || EDGE_SUCC (bb, 0)->flags & EDGE_ABNORMAL)))
4068 #ifdef HAVE_cc0
4069 rtx note;
4070 #endif
4071 /* It should always be the case that we can put these instructions
4072 anywhere in the basic block when performing PRE optimizations.
4073 Check this. */
4074 gcc_assert (!NONJUMP_INSN_P (insn) || !pre
4075 || TEST_BIT (antloc[bb->index], expr->bitmap_index)
4076 || TEST_BIT (transp[bb->index], expr->bitmap_index));
4078 /* If this is a jump table, then we can't insert stuff here. Since
4079 we know the previous real insn must be the tablejump, we insert
4080 the new instruction just before the tablejump. */
4081 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
4082 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
4083 insn = prev_real_insn (insn);
4085 #ifdef HAVE_cc0
4086 /* FIXME: 'twould be nice to call prev_cc0_setter here but it aborts
4087 if cc0 isn't set. */
4088 note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
4089 if (note)
4090 insn = XEXP (note, 0);
4091 else
4093 rtx maybe_cc0_setter = prev_nonnote_insn (insn);
4094 if (maybe_cc0_setter
4095 && INSN_P (maybe_cc0_setter)
4096 && sets_cc0_p (PATTERN (maybe_cc0_setter)))
4097 insn = maybe_cc0_setter;
4099 #endif
4100 /* FIXME: What if something in cc0/jump uses value set in new insn? */
4101 new_insn = emit_insn_before_noloc (pat, insn);
4104 /* Likewise if the last insn is a call, as will happen in the presence
4105 of exception handling. */
4106 else if (CALL_P (insn)
4107 && (EDGE_COUNT (bb->succs) > 1 || EDGE_SUCC (bb, 0)->flags & EDGE_ABNORMAL))
4109 /* Keeping in mind SMALL_REGISTER_CLASSES and parameters in registers,
4110 we search backward and place the instructions before the first
4111 parameter is loaded. Do this for everyone for consistency and a
4112 presumption that we'll get better code elsewhere as well.
4114 It should always be the case that we can put these instructions
4115 anywhere in the basic block when performing PRE optimizations.
4116 Check this. */
4118 gcc_assert (!pre
4119 || TEST_BIT (antloc[bb->index], expr->bitmap_index)
4120 || TEST_BIT (transp[bb->index], expr->bitmap_index));
4122 /* Since different machines initialize their parameter registers
4123 in different orders, assume nothing. Collect the set of all
4124 parameter registers. */
4125 insn = find_first_parameter_load (insn, BB_HEAD (bb));
4127 /* If we found all the parameter loads, then we want to insert
4128 before the first parameter load.
4130 If we did not find all the parameter loads, then we might have
4131 stopped on the head of the block, which could be a CODE_LABEL.
4132 If we inserted before the CODE_LABEL, then we would be putting
4133 the insn in the wrong basic block. In that case, put the insn
4134 after the CODE_LABEL. Also, respect NOTE_INSN_BASIC_BLOCK. */
4135 while (LABEL_P (insn)
4136 || NOTE_INSN_BASIC_BLOCK_P (insn))
4137 insn = NEXT_INSN (insn);
4139 new_insn = emit_insn_before_noloc (pat, insn);
4141 else
4142 new_insn = emit_insn_after_noloc (pat, insn);
4144 while (1)
4146 if (INSN_P (pat))
4148 add_label_notes (PATTERN (pat), new_insn);
4149 note_stores (PATTERN (pat), record_set_info, pat);
4151 if (pat == pat_end)
4152 break;
4153 pat = NEXT_INSN (pat);
4156 gcse_create_count++;
4158 if (gcse_file)
4160 fprintf (gcse_file, "PRE/HOIST: end of bb %d, insn %d, ",
4161 bb->index, INSN_UID (new_insn));
4162 fprintf (gcse_file, "copying expression %d to reg %d\n",
4163 expr->bitmap_index, regno);
4167 /* Insert partially redundant expressions on edges in the CFG to make
4168 the expressions fully redundant. */
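/* For example, if x + y is computed on only one of two paths into a
   block that computes x + y again, that later computation is only
   partially redundant.  Inserting x + y on the edge coming from the
   path that does not compute it makes the later computation fully
   redundant, so it can be replaced by a copy from the reaching
   register.  */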
4170 static int
4171 pre_edge_insert (struct edge_list *edge_list, struct expr **index_map)
4173 int e, i, j, num_edges, set_size, did_insert = 0;
4174 sbitmap *inserted;
4176 /* Where PRE_INSERT_MAP is nonzero, we add the expression on that edge
4177 if it reaches any of the deleted expressions. */
4179 set_size = pre_insert_map[0]->size;
4180 num_edges = NUM_EDGES (edge_list);
4181 inserted = sbitmap_vector_alloc (num_edges, expr_hash_table.n_elems);
4182 sbitmap_vector_zero (inserted, num_edges);
4184 for (e = 0; e < num_edges; e++)
4186 int indx;
4187 basic_block bb = INDEX_EDGE_PRED_BB (edge_list, e);
4189 for (i = indx = 0; i < set_size; i++, indx += SBITMAP_ELT_BITS)
4191 SBITMAP_ELT_TYPE insert = pre_insert_map[e]->elms[i];
4193 for (j = indx; insert && j < (int) expr_hash_table.n_elems; j++, insert >>= 1)
4194 if ((insert & 1) != 0 && index_map[j]->reaching_reg != NULL_RTX)
4196 struct expr *expr = index_map[j];
4197 struct occr *occr;
4199 /* Now look at each deleted occurrence of this expression. */
4200 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
4202 if (! occr->deleted_p)
4203 continue;
4205 /* Insert this expression on this edge if it would
4206 reach the deleted occurrence in BB. */
4207 if (!TEST_BIT (inserted[e], j))
4209 rtx insn;
4210 edge eg = INDEX_EDGE (edge_list, e);
4212 /* We can't insert anything on an abnormal and
4213 critical edge, so we insert the insn at the end of
4214 the previous block. There are several alternatives
4215 detailed in Morgan's book P277 (sec 10.5) for
4216 handling this situation. This one is easiest for
4217 now. */
4219 if (eg->flags & EDGE_ABNORMAL)
4220 insert_insn_end_bb (index_map[j], bb, 0);
4221 else
4223 insn = process_insert_insn (index_map[j]);
4224 insert_insn_on_edge (insn, eg);
4227 if (gcse_file)
4229 fprintf (gcse_file, "PRE/HOIST: edge (%d,%d), ",
4230 bb->index,
4231 INDEX_EDGE_SUCC_BB (edge_list, e)->index);
4232 fprintf (gcse_file, "copy expression %d\n",
4233 expr->bitmap_index);
4236 update_ld_motion_stores (expr);
4237 SET_BIT (inserted[e], j);
4238 did_insert = 1;
4239 gcse_create_count++;
4246 sbitmap_vector_free (inserted);
4247 return did_insert;
4250 /* Copy the result of EXPR->EXPR generated by INSN to EXPR->REACHING_REG.
4251 Given "old_reg <- expr" (INSN), instead of adding after it
4252 reaching_reg <- old_reg
4253 it's better to do the following:
4254 reaching_reg <- expr
4255 old_reg <- reaching_reg
4256 because this way copy propagation can discover additional PRE
4257 opportunities. But if this fails, we try the old way.
4258 When "expr" is a store, i.e.
4259 given "MEM <- old_reg", instead of adding after it
4260 reaching_reg <- old_reg
4261 it's better to add it before as follows:
4262 reaching_reg <- old_reg
4263 MEM <- reaching_reg. */
4265 static void
4266 pre_insert_copy_insn (struct expr *expr, rtx insn)
4268 rtx reg = expr->reaching_reg;
4269 int regno = REGNO (reg);
4270 int indx = expr->bitmap_index;
4271 rtx pat = PATTERN (insn);
4272 rtx set, new_insn;
4273 rtx old_reg;
4274 int i;
4276 /* This block matches the logic in hash_scan_insn. */
4277 switch (GET_CODE (pat))
4279 case SET:
4280 set = pat;
4281 break;
4283 case PARALLEL:
4284 /* Search through the parallel looking for the set whose
4285 source was the expression that we're interested in. */
4286 set = NULL_RTX;
4287 for (i = 0; i < XVECLEN (pat, 0); i++)
4289 rtx x = XVECEXP (pat, 0, i);
4290 if (GET_CODE (x) == SET
4291 && expr_equiv_p (SET_SRC (x), expr->expr))
4293 set = x;
4294 break;
4297 break;
4299 default:
4300 gcc_unreachable ();
4303 if (REG_P (SET_DEST (set)))
4305 old_reg = SET_DEST (set);
4306 /* Check if we can modify the set destination in the original insn. */
4307 if (validate_change (insn, &SET_DEST (set), reg, 0))
4309 new_insn = gen_move_insn (old_reg, reg);
4310 new_insn = emit_insn_after (new_insn, insn);
4312 /* Keep register set table up to date. */
4313 replace_one_set (REGNO (old_reg), insn, new_insn);
4314 record_one_set (regno, insn);
4316 else
4318 new_insn = gen_move_insn (reg, old_reg);
4319 new_insn = emit_insn_after (new_insn, insn);
4321 /* Keep register set table up to date. */
4322 record_one_set (regno, new_insn);
4325 else /* This is possible only in case of a store to memory. */
4327 old_reg = SET_SRC (set);
4328 new_insn = gen_move_insn (reg, old_reg);
4330 /* Check if we can modify the set source in the original insn. */
4331 if (validate_change (insn, &SET_SRC (set), reg, 0))
4332 new_insn = emit_insn_before (new_insn, insn);
4333 else
4334 new_insn = emit_insn_after (new_insn, insn);
4336 /* Keep register set table up to date. */
4337 record_one_set (regno, new_insn);
4340 gcse_create_count++;
4342 if (gcse_file)
4343 fprintf (gcse_file,
4344 "PRE: bb %d, insn %d, copy expression %d in insn %d to reg %d\n",
4345 BLOCK_NUM (insn), INSN_UID (new_insn), indx,
4346 INSN_UID (insn), regno);
4349 /* Copy available expressions that reach the redundant expression
4350 to `reaching_reg'. */
4352 static void
4353 pre_insert_copies (void)
4355 unsigned int i, added_copy;
4356 struct expr *expr;
4357 struct occr *occr;
4358 struct occr *avail;
4360 /* For each available expression in the table, copy the result to
4361 `reaching_reg' if the expression reaches a deleted one.
4363 ??? The current algorithm is rather brute force.
4364 Need to do some profiling. */
4366 for (i = 0; i < expr_hash_table.size; i++)
4367 for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash)
4369 /* If the basic block isn't reachable, PPOUT will be TRUE. However,
4370 we don't want to insert a copy here because the expression may not
4371 really be redundant. So only insert an insn if the expression was
4372 deleted. This test also avoids further processing if the
4373 expression wasn't deleted anywhere. */
4374 if (expr->reaching_reg == NULL)
4375 continue;
4377 /* Set when we add a copy for that expression. */
4378 added_copy = 0;
4380 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
4382 if (! occr->deleted_p)
4383 continue;
4385 for (avail = expr->avail_occr; avail != NULL; avail = avail->next)
4387 rtx insn = avail->insn;
4389 /* No need to handle this one if handled already. */
4390 if (avail->copied_p)
4391 continue;
4393 /* Don't handle this one if it's a redundant one. */
4394 if (TEST_BIT (pre_redundant_insns, INSN_CUID (insn)))
4395 continue;
4397 /* Or if the expression doesn't reach the deleted one. */
4398 if (! pre_expr_reaches_here_p (BLOCK_FOR_INSN (avail->insn),
4399 expr,
4400 BLOCK_FOR_INSN (occr->insn)))
4401 continue;
4403 added_copy = 1;
4405 /* Copy the result of avail to reaching_reg. */
4406 pre_insert_copy_insn (expr, insn);
4407 avail->copied_p = 1;
4411 if (added_copy)
4412 update_ld_motion_stores (expr);
4416 /* Emit move from SRC to DEST noting the equivalence with expression computed
4417 in INSN. */
4418 static rtx
4419 gcse_emit_move_after (rtx src, rtx dest, rtx insn)
4421 rtx new;
4422 rtx set = single_set (insn), set2;
4423 rtx note;
4424 rtx eqv;
4426 /* This should never fail since we're creating a reg->reg copy
4427 we've verified to be valid. */
4429 new = emit_insn_after (gen_move_insn (dest, src), insn);
4431 /* Note the equivalence for local CSE pass. */
4432 set2 = single_set (new);
4433 if (!set2 || !rtx_equal_p (SET_DEST (set2), dest))
4434 return new;
4435 if ((note = find_reg_equal_equiv_note (insn)))
4436 eqv = XEXP (note, 0);
4437 else
4438 eqv = SET_SRC (set);
4440 set_unique_reg_note (new, REG_EQUAL, copy_insn_1 (eqv));
4442 return new;
4445 /* Delete redundant computations.
4446 Deletion is done by changing the insn to copy the `reaching_reg' of
4447 the expression into the result of the SET. It is left to later passes
4448 (cprop, cse2, flow, combine, regmove) to propagate the copy or eliminate it.
4450 Returns nonzero if a change is made. */
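/* For example, a fully redundant "x = a + b" is effectively rewritten
   as "x = reaching_reg", where reaching_reg is the pseudo that holds
   the value of a + b computed on every path into the block.  */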
4452 static int
4453 pre_delete (void)
4455 unsigned int i;
4456 int changed;
4457 struct expr *expr;
4458 struct occr *occr;
4460 changed = 0;
4461 for (i = 0; i < expr_hash_table.size; i++)
4462 for (expr = expr_hash_table.table[i];
4463 expr != NULL;
4464 expr = expr->next_same_hash)
4466 int indx = expr->bitmap_index;
4468 /* We only need to search antic_occr since we require
4469 ANTLOC != 0. */
4471 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
4473 rtx insn = occr->insn;
4474 rtx set;
4475 basic_block bb = BLOCK_FOR_INSN (insn);
4477 /* We only delete insns that have a single_set. */
4478 if (TEST_BIT (pre_delete_map[bb->index], indx)
4479 && (set = single_set (insn)) != 0)
4481 /* Create a pseudo-reg to store the result of reaching
4482 expressions into. Get the mode for the new pseudo from
4483 the mode of the original destination pseudo. */
4484 if (expr->reaching_reg == NULL)
4485 expr->reaching_reg
4486 = gen_reg_rtx (GET_MODE (SET_DEST (set)));
4488 gcse_emit_move_after (expr->reaching_reg, SET_DEST (set), insn);
4489 delete_insn (insn);
4490 occr->deleted_p = 1;
4491 SET_BIT (pre_redundant_insns, INSN_CUID (insn));
4492 changed = 1;
4493 gcse_subst_count++;
4495 if (gcse_file)
4497 fprintf (gcse_file,
4498 "PRE: redundant insn %d (expression %d) in ",
4499 INSN_UID (insn), indx);
4500 fprintf (gcse_file, "bb %d, reaching reg is %d\n",
4501 bb->index, REGNO (expr->reaching_reg));
4507 return changed;
4510 /* Perform GCSE optimizations using PRE.
4511 This is called by one_pre_gcse_pass after all the dataflow analysis
4512 has been done.
4514 This is based on the original Morel-Renvoise paper, Fred Chow's thesis, and
4515 lazy code motion from Knoop, Ruthing and Steffen as described in Advanced
4516 Compiler Design and Implementation.
4518 ??? A new pseudo reg is created to hold the reaching expression. The nice
4519 thing about the classical approach is that it would try to use an existing
4520 reg. If the register can't be adequately optimized [i.e. we introduce
4521 reload problems], one could add a pass here to propagate the new register
4522 through the block.
4524 ??? We don't handle single sets in PARALLELs because we're [currently] not
4525 able to copy the rest of the parallel when we insert copies to create full
4526 redundancies from partial redundancies. However, there's no reason why we
4527 can't handle PARALLELs in the cases where there are no partial
4528 redundancies. */
4530 static int
4531 pre_gcse (void)
4533 unsigned int i;
4534 int did_insert, changed;
4535 struct expr **index_map;
4536 struct expr *expr;
4538 /* Compute a mapping from expression number (`bitmap_index') to
4539 hash table entry. */
4541 index_map = xcalloc (expr_hash_table.n_elems, sizeof (struct expr *));
4542 for (i = 0; i < expr_hash_table.size; i++)
4543 for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash)
4544 index_map[expr->bitmap_index] = expr;
4546 /* Reset bitmap used to track which insns are redundant. */
4547 pre_redundant_insns = sbitmap_alloc (max_cuid);
4548 sbitmap_zero (pre_redundant_insns);
4550 /* Delete the redundant insns first so that
4551 - we know what register to use for the new insns and for the other
4552 ones with reaching expressions
4553 - we know which insns are redundant when we go to create copies */
4555 changed = pre_delete ();
4557 did_insert = pre_edge_insert (edge_list, index_map);
4559 /* In other places with reaching expressions, copy the expression to the
4560 specially allocated pseudo-reg that reaches the redundant expr. */
4561 pre_insert_copies ();
4562 if (did_insert)
4564 commit_edge_insertions ();
4565 changed = 1;
4568 free (index_map);
4569 sbitmap_free (pre_redundant_insns);
4570 return changed;
4573 /* Top level routine to perform one PRE GCSE pass.
4575 Return nonzero if a change was made. */
4577 static int
4578 one_pre_gcse_pass (int pass)
4580 int changed = 0;
4582 gcse_subst_count = 0;
4583 gcse_create_count = 0;
4585 alloc_hash_table (max_cuid, &expr_hash_table, 0);
4586 add_noreturn_fake_exit_edges ();
4587 if (flag_gcse_lm)
4588 compute_ld_motion_mems ();
4590 compute_hash_table (&expr_hash_table);
4591 trim_ld_motion_mems ();
4592 if (gcse_file)
4593 dump_hash_table (gcse_file, "Expression", &expr_hash_table);
4595 if (expr_hash_table.n_elems > 0)
4597 alloc_pre_mem (last_basic_block, expr_hash_table.n_elems);
4598 compute_pre_data ();
4599 changed |= pre_gcse ();
4600 free_edge_list (edge_list);
4601 free_pre_mem ();
4604 free_ldst_mems ();
4605 remove_fake_exit_edges ();
4606 free_hash_table (&expr_hash_table);
4608 if (gcse_file)
4610 fprintf (gcse_file, "\nPRE GCSE of %s, pass %d: %d bytes needed, ",
4611 current_function_name (), pass, bytes_used);
4612 fprintf (gcse_file, "%d substs, %d insns created\n",
4613 gcse_subst_count, gcse_create_count);
4616 return changed;
4619 /* If X contains any LABEL_REF's, add REG_LABEL notes for them to INSN.
4620 If notes are added to an insn which references a CODE_LABEL, the
4621 LABEL_NUSES count is incremented. We have to add REG_LABEL notes,
4622 because the following loop optimization pass requires them. */
4624 /* ??? This is very similar to the loop.c add_label_notes function. We
4625 could probably share code here. */
4627 /* ??? If there was a jump optimization pass after gcse and before loop,
4628 then we would not need to do this here, because jump would add the
4629 necessary REG_LABEL notes. */
4631 static void
4632 add_label_notes (rtx x, rtx insn)
4634 enum rtx_code code = GET_CODE (x);
4635 int i, j;
4636 const char *fmt;
4638 if (code == LABEL_REF && !LABEL_REF_NONLOCAL_P (x))
4640 /* This code used to ignore labels that referred to dispatch tables to
4641 avoid flow generating (slightly) worse code.
4643 We no longer ignore such label references (see LABEL_REF handling in
4644 mark_jump_label for additional information). */
4646 REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_LABEL, XEXP (x, 0),
4647 REG_NOTES (insn));
4648 if (LABEL_P (XEXP (x, 0)))
4649 LABEL_NUSES (XEXP (x, 0))++;
4650 return;
4653 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
4655 if (fmt[i] == 'e')
4656 add_label_notes (XEXP (x, i), insn);
4657 else if (fmt[i] == 'E')
4658 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4659 add_label_notes (XVECEXP (x, i, j), insn);
4663 /* Compute transparent outgoing information for each block.
4665 An expression is transparent to an edge unless it is killed by
4666 the edge itself. This can only happen with abnormal control flow,
4667 when the edge is traversed through a call. This happens with
4668 non-local labels and exceptions.
4670 This would not be necessary if we split the edge. While this is
4671 normally impossible for abnormal critical edges, with some effort
4672 it should be possible with exception handling, since we still have
4673 control over which handler should be invoked. But due to increased
4674 EH table sizes, this may not be worthwhile. */
4676 static void
4677 compute_transpout (void)
4679 basic_block bb;
4680 unsigned int i;
4681 struct expr *expr;
4683 sbitmap_vector_ones (transpout, last_basic_block);
4685 FOR_EACH_BB (bb)
4687 /* Note that flow inserted a nop at the end of basic blocks that
4688 end in call instructions for reasons other than abnormal
4689 control flow. */
4690 if (! CALL_P (BB_END (bb)))
4691 continue;
4693 for (i = 0; i < expr_hash_table.size; i++)
4694 for (expr = expr_hash_table.table[i]; expr ; expr = expr->next_same_hash)
4695 if (MEM_P (expr->expr))
4697 if (GET_CODE (XEXP (expr->expr, 0)) == SYMBOL_REF
4698 && CONSTANT_POOL_ADDRESS_P (XEXP (expr->expr, 0)))
4699 continue;
4701 /* ??? Optimally, we would use interprocedural alias
4702 analysis to determine if this mem is actually killed
4703 by this call. */
4704 RESET_BIT (transpout[bb->index], expr->bitmap_index);
4709 /* Code Hoisting variables and subroutines. */
4711 /* Very busy expressions. */
4712 static sbitmap *hoist_vbein;
4713 static sbitmap *hoist_vbeout;
4715 /* Hoistable expressions. */
4716 static sbitmap *hoist_exprs;
4718 /* ??? We could compute post dominators and run this algorithm in
4719 reverse to perform tail merging; doing so would probably be
4720 more effective than the tail merging code in jump.c.
4722 It's unclear if tail merging could be run in parallel with
4723 code hoisting. It would be nice. */
4725 /* Allocate vars used for code hoisting analysis. */
4727 static void
4728 alloc_code_hoist_mem (int n_blocks, int n_exprs)
4730 antloc = sbitmap_vector_alloc (n_blocks, n_exprs);
4731 transp = sbitmap_vector_alloc (n_blocks, n_exprs);
4732 comp = sbitmap_vector_alloc (n_blocks, n_exprs);
4734 hoist_vbein = sbitmap_vector_alloc (n_blocks, n_exprs);
4735 hoist_vbeout = sbitmap_vector_alloc (n_blocks, n_exprs);
4736 hoist_exprs = sbitmap_vector_alloc (n_blocks, n_exprs);
4737 transpout = sbitmap_vector_alloc (n_blocks, n_exprs);
4740 /* Free vars used for code hoisting analysis. */
4742 static void
4743 free_code_hoist_mem (void)
4745 sbitmap_vector_free (antloc);
4746 sbitmap_vector_free (transp);
4747 sbitmap_vector_free (comp);
4749 sbitmap_vector_free (hoist_vbein);
4750 sbitmap_vector_free (hoist_vbeout);
4751 sbitmap_vector_free (hoist_exprs);
4752 sbitmap_vector_free (transpout);
4754 free_dominance_info (CDI_DOMINATORS);
4757 /* Compute the very busy expressions at entry/exit from each block.
4759 An expression is very busy if all paths from a given point
4760 compute the expression. */
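/* For example, in

     if (c)
       x = a + b;
     else
       y = a + b;

   a + b is computed on every path leaving the block that ends in the
   branch, so a + b is very busy at the exit of that block and is a
   candidate for being hoisted into it.  */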
4762 static void
4763 compute_code_hoist_vbeinout (void)
4765 int changed, passes;
4766 basic_block bb;
4768 sbitmap_vector_zero (hoist_vbeout, last_basic_block);
4769 sbitmap_vector_zero (hoist_vbein, last_basic_block);
4771 passes = 0;
4772 changed = 1;
4774 while (changed)
4776 changed = 0;
4778 /* We scan the blocks in the reverse order to speed up
4779 the convergence. */
4780 FOR_EACH_BB_REVERSE (bb)
4782 changed |= sbitmap_a_or_b_and_c_cg (hoist_vbein[bb->index], antloc[bb->index],
4783 hoist_vbeout[bb->index], transp[bb->index]);
4784 if (bb->next_bb != EXIT_BLOCK_PTR)
4785 sbitmap_intersection_of_succs (hoist_vbeout[bb->index], hoist_vbein, bb->index);
4788 passes++;
4791 if (gcse_file)
4792 fprintf (gcse_file, "hoisting vbeinout computation: %d passes\n", passes);
4795 /* Top level routine to do the dataflow analysis needed by code hoisting. */
4797 static void
4798 compute_code_hoist_data (void)
4800 compute_local_properties (transp, comp, antloc, &expr_hash_table);
4801 compute_transpout ();
4802 compute_code_hoist_vbeinout ();
4803 calculate_dominance_info (CDI_DOMINATORS);
4804 if (gcse_file)
4805 fprintf (gcse_file, "\n");
4808 /* Determine if the expression identified by EXPR_INDEX would
4809 reach BB unimpaired if it was placed at the end of EXPR_BB.
4811 It's unclear exactly what Muchnick meant by "unimpaired". It seems
4812 to me that the expression must either be computed or transparent in
4813 *every* block in the path(s) from EXPR_BB to BB. Any other definition
4814 would allow the expression to be hoisted out of loops, even if
4815 the expression wasn't a loop invariant.
4817 Contrast this to reachability for PRE where an expression is
4818 considered reachable if *any* path reaches instead of *all*
4819 paths. */
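/* For example, if some block on a path from EXPR_BB to BB assigns to an
   operand of the expression (so the expression is not transparent there)
   without also recomputing it, the expression does not reach BB
   unimpaired and is not hoisted into EXPR_BB on behalf of BB.  */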
4821 static int
4822 hoist_expr_reaches_here_p (basic_block expr_bb, int expr_index, basic_block bb, char *visited)
4824 edge pred;
4825 edge_iterator ei;
4826 int visited_allocated_locally = 0;
4829 if (visited == NULL)
4831 visited_allocated_locally = 1;
4832 visited = xcalloc (last_basic_block, 1);
4835 FOR_EACH_EDGE (pred, ei, bb->preds)
4837 basic_block pred_bb = pred->src;
4839 if (pred->src == ENTRY_BLOCK_PTR)
4840 break;
4841 else if (pred_bb == expr_bb)
4842 continue;
4843 else if (visited[pred_bb->index])
4844 continue;
4846 /* Does this predecessor generate this expression? */
4847 else if (TEST_BIT (comp[pred_bb->index], expr_index))
4848 break;
4849 else if (! TEST_BIT (transp[pred_bb->index], expr_index))
4850 break;
4852 /* Not killed. */
4853 else
4855 visited[pred_bb->index] = 1;
4856 if (! hoist_expr_reaches_here_p (expr_bb, expr_index,
4857 pred_bb, visited))
4858 break;
4861 if (visited_allocated_locally)
4862 free (visited);
4864 return (pred == NULL);
4867 /* Actually perform code hoisting. */
4869 static void
4870 hoist_code (void)
4872 basic_block bb, dominated;
4873 basic_block *domby;
4874 unsigned int domby_len;
4875 unsigned int i, j;
4876 struct expr **index_map;
4877 struct expr *expr;
4879 sbitmap_vector_zero (hoist_exprs, last_basic_block);
4881 /* Compute a mapping from expression number (`bitmap_index') to
4882 hash table entry. */
4884 index_map = xcalloc (expr_hash_table.n_elems, sizeof (struct expr *));
4885 for (i = 0; i < expr_hash_table.size; i++)
4886 for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash)
4887 index_map[expr->bitmap_index] = expr;
4889 /* Walk over each basic block looking for potentially hoistable
4890 expressions; nothing gets hoisted from the entry block. */
4891 FOR_EACH_BB (bb)
4893 int found = 0;
4894 int insn_inserted_p;
4896 domby_len = get_dominated_by (CDI_DOMINATORS, bb, &domby);
4897 /* Examine each expression that is very busy at the exit of this
4898 block. These are the potentially hoistable expressions. */
4899 for (i = 0; i < hoist_vbeout[bb->index]->n_bits; i++)
4901 int hoistable = 0;
4903 if (TEST_BIT (hoist_vbeout[bb->index], i)
4904 && TEST_BIT (transpout[bb->index], i))
4906 /* We've found a potentially hoistable expression, now
4907 we look at every block BB dominates to see if it
4908 computes the expression. */
4909 for (j = 0; j < domby_len; j++)
4911 dominated = domby[j];
4912 /* Ignore self dominance. */
4913 if (bb == dominated)
4914 continue;
4915 /* We've found a dominated block, now see if it computes
4916 the busy expression and whether or not moving that
4917 expression to the "beginning" of that block is safe. */
4918 if (!TEST_BIT (antloc[dominated->index], i))
4919 continue;
4921 /* Note if the expression would reach the dominated block
4922 unimpaired if it was placed at the end of BB.
4924 Keep track of how many times this expression is hoistable
4925 from a dominated block into BB. */
4926 if (hoist_expr_reaches_here_p (bb, i, dominated, NULL))
4927 hoistable++;
4930 /* If we found more than one hoistable occurrence of this
4931 expression, then note it in the bitmap of expressions to
4932 hoist. It makes no sense to hoist things which are computed
4933 in only one BB, and doing so tends to pessimize register
4934 allocation. One could increase this value to try harder
4935 to avoid any possible code expansion due to register
4936 allocation issues; however experiments have shown that
4937 the vast majority of hoistable expressions are only movable
4938 from two successors, so raising this threshold is likely
4939 to nullify any benefit we get from code hoisting. */
4940 if (hoistable > 1)
4942 SET_BIT (hoist_exprs[bb->index], i);
4943 found = 1;
4947 /* If we found nothing to hoist, then quit now. */
4948 if (! found)
4950 free (domby);
4951 continue;
4954 /* Loop over all the hoistable expressions. */
4955 for (i = 0; i < hoist_exprs[bb->index]->n_bits; i++)
4957 /* We want to insert the expression into BB only once, so
4958 note when we've inserted it. */
4959 insn_inserted_p = 0;
4961 /* These tests should be the same as the tests above. */
4962 if (TEST_BIT (hoist_vbeout[bb->index], i))
4964 /* We've found a potentially hoistable expression, now
4965 we look at every block BB dominates to see if it
4966 computes the expression. */
4967 for (j = 0; j < domby_len; j++)
4969 dominated = domby[j];
4970 /* Ignore self dominance. */
4971 if (bb == dominated)
4972 continue;
4974 /* We've found a dominated block, now see if it computes
4975 the busy expression and whether or not moving that
4976 expression to the "beginning" of that block is safe. */
4977 if (!TEST_BIT (antloc[dominated->index], i))
4978 continue;
4980 /* The expression is computed in the dominated block and
4981 it would be safe to compute it at the start of the
4982 dominated block. Now we have to determine if the
4983 expression would reach the dominated block if it was
4984 placed at the end of BB. */
4985 if (hoist_expr_reaches_here_p (bb, i, dominated, NULL))
4987 struct expr *expr = index_map[i];
4988 struct occr *occr = expr->antic_occr;
4989 rtx insn;
4990 rtx set;
4992 /* Find the right occurrence of this expression. */
4993 while (occr && BLOCK_FOR_INSN (occr->insn) != dominated)
4994 occr = occr->next;
4996 gcc_assert (occr);
4997 insn = occr->insn;
4998 set = single_set (insn);
4999 gcc_assert (set);
5001 /* Create a pseudo-reg to store the result of reaching
5002 expressions into. Get the mode for the new pseudo
5003 from the mode of the original destination pseudo. */
5004 if (expr->reaching_reg == NULL)
5005 expr->reaching_reg
5006 = gen_reg_rtx (GET_MODE (SET_DEST (set)));
5008 gcse_emit_move_after (expr->reaching_reg, SET_DEST (set), insn);
5009 delete_insn (insn);
5010 occr->deleted_p = 1;
5011 if (!insn_inserted_p)
5013 insert_insn_end_bb (index_map[i], bb, 0);
5014 insn_inserted_p = 1;
5020 free (domby);
5023 free (index_map);
5026 /* Top level routine to perform one code hoisting (aka unification) pass
5028 Return nonzero if a change was made. */
5030 static int
5031 one_code_hoisting_pass (void)
5033 int changed = 0;
5035 alloc_hash_table (max_cuid, &expr_hash_table, 0);
5036 compute_hash_table (&expr_hash_table);
5037 if (gcse_file)
5038 dump_hash_table (gcse_file, "Code Hoisting Expressions", &expr_hash_table);
5040 if (expr_hash_table.n_elems > 0)
5042 alloc_code_hoist_mem (last_basic_block, expr_hash_table.n_elems);
5043 compute_code_hoist_data ();
5044 hoist_code ();
5045 free_code_hoist_mem ();
5048 free_hash_table (&expr_hash_table);
5050 return changed;
5053 /* Here we provide the things required to do store motion towards
5054 the exit. In order for this to be effective, gcse also needed to
5055 be taught how to move a load when it is killed only by a store to itself.
5057 int i;
5058 float a[10];
5060 void foo(float scale)
5062 for (i=0; i<10; i++)
5063 a[i] *= scale;
5066 'i' is both loaded and stored to in the loop. Normally, gcse cannot move
5067 the load out since it is live around the loop, and stored at the bottom
5068 of the loop.
5070 The 'Load Motion' referred to and implemented in this file is
5071 an enhancement to gcse which when using edge based lcm, recognizes
5072 this situation and allows gcse to move the load out of the loop.
5074 Once gcse has hoisted the load, store motion can then push this
5075 load towards the exit, and we end up with no loads or stores of 'i'
5076 in the loop. */
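/* After load motion has hoisted the load of 'i' and store motion has
   pushed the store of 'i' past the loop, the example above effectively
   becomes

     reg = i;
     for (; reg < 10; reg++)
       a[reg] *= scale;
     i = reg;

   so the loop body no longer loads or stores the memory holding 'i'.  */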
5078 /* This will search the ldst list for a matching expression. If it
5079 doesn't find one, we create one and initialize it. */
5081 static struct ls_expr *
5082 ldst_entry (rtx x)
5084 int do_not_record_p = 0;
5085 struct ls_expr * ptr;
5086 unsigned int hash;
5088 hash = hash_rtx (x, GET_MODE (x), &do_not_record_p,
5089 NULL, /*have_reg_qty=*/false);
5091 for (ptr = pre_ldst_mems; ptr != NULL; ptr = ptr->next)
5092 if (ptr->hash_index == hash && expr_equiv_p (ptr->pattern, x))
5093 return ptr;
5095 ptr = xmalloc (sizeof (struct ls_expr));
5097 ptr->next = pre_ldst_mems;
5098 ptr->expr = NULL;
5099 ptr->pattern = x;
5100 ptr->pattern_regs = NULL_RTX;
5101 ptr->loads = NULL_RTX;
5102 ptr->stores = NULL_RTX;
5103 ptr->reaching_reg = NULL_RTX;
5104 ptr->invalid = 0;
5105 ptr->index = 0;
5106 ptr->hash_index = hash;
5107 pre_ldst_mems = ptr;
5109 return ptr;
5112 /* Free up an individual ldst entry. */
5114 static void
5115 free_ldst_entry (struct ls_expr * ptr)
5117 free_INSN_LIST_list (& ptr->loads);
5118 free_INSN_LIST_list (& ptr->stores);
5120 free (ptr);
5123 /* Free up all memory associated with the ldst list. */
5125 static void
5126 free_ldst_mems (void)
5128 while (pre_ldst_mems)
5130 struct ls_expr * tmp = pre_ldst_mems;
5132 pre_ldst_mems = pre_ldst_mems->next;
5134 free_ldst_entry (tmp);
5137 pre_ldst_mems = NULL;
5140 /* Dump debugging info about the ldst list. */
5142 static void
5143 print_ldst_list (FILE * file)
5145 struct ls_expr * ptr;
5147 fprintf (file, "LDST list: \n");
5149 for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
5151 fprintf (file, " Pattern (%3d): ", ptr->index);
5153 print_rtl (file, ptr->pattern);
5155 fprintf (file, "\n Loads : ");
5157 if (ptr->loads)
5158 print_rtl (file, ptr->loads);
5159 else
5160 fprintf (file, "(nil)");
5162 fprintf (file, "\n Stores : ");
5164 if (ptr->stores)
5165 print_rtl (file, ptr->stores);
5166 else
5167 fprintf (file, "(nil)");
5169 fprintf (file, "\n\n");
5172 fprintf (file, "\n");
5175 /* Returns the entry for X if X is in the list of ldst only expressions, else NULL. */
5177 static struct ls_expr *
5178 find_rtx_in_ldst (rtx x)
5180 struct ls_expr * ptr;
5182 for (ptr = pre_ldst_mems; ptr != NULL; ptr = ptr->next)
5183 if (expr_equiv_p (ptr->pattern, x) && ! ptr->invalid)
5184 return ptr;
5186 return NULL;
5189 /* Assign each element of the list of mems a monotonically increasing value. */
5191 static int
5192 enumerate_ldsts (void)
5194 struct ls_expr * ptr;
5195 int n = 0;
5197 for (ptr = pre_ldst_mems; ptr != NULL; ptr = ptr->next)
5198 ptr->index = n++;
5200 return n;
5203 /* Return first item in the list. */
5205 static inline struct ls_expr *
5206 first_ls_expr (void)
5208 return pre_ldst_mems;
5211 /* Return the next item in the list after the specified one. */
5213 static inline struct ls_expr *
5214 next_ls_expr (struct ls_expr * ptr)
5216 return ptr->next;
5219 /* Load Motion for loads which only kill themselves. */
5221 /* Return true if x is a simple MEM operation, with no registers or
5222 side effects. These are the types of loads we consider for the
5223 ld_motion list, otherwise we let the usual aliasing take care of it. */
5225 static int
5226 simple_mem (rtx x)
5228 if (! MEM_P (x))
5229 return 0;
5231 if (MEM_VOLATILE_P (x))
5232 return 0;
5234 if (GET_MODE (x) == BLKmode)
5235 return 0;
5237 /* If we are handling exceptions, we must be careful with memory references
5238 that may trap. If we are not, the behavior is undefined, so we may just
5239 continue. */
5240 if (flag_non_call_exceptions && may_trap_p (x))
5241 return 0;
5243 if (side_effects_p (x))
5244 return 0;
5246 /* Do not consider function arguments passed on stack. */
5247 if (reg_mentioned_p (stack_pointer_rtx, x))
5248 return 0;
5250 if (flag_float_store && FLOAT_MODE_P (GET_MODE (x)))
5251 return 0;
5253 return 1;
5256 /* Make sure there isn't a buried reference in this pattern anywhere.
5257 If there is, invalidate the entry for it since we're not capable
5258 of fixing it up just yet. We have to be sure we know about ALL
5259 loads since the aliasing code will allow all entries in the
5260 ld_motion list to not-alias themselves. If we miss a load, we will get
5261 the wrong value since gcse might common it and we won't know to
5262 fix it up. */
5264 static void
5265 invalidate_any_buried_refs (rtx x)
5267 const char * fmt;
5268 int i, j;
5269 struct ls_expr * ptr;
5271 /* Invalidate it in the list. */
5272 if (MEM_P (x) && simple_mem (x))
5274 ptr = ldst_entry (x);
5275 ptr->invalid = 1;
5278 /* Recursively process the insn. */
5279 fmt = GET_RTX_FORMAT (GET_CODE (x));
5281 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
5283 if (fmt[i] == 'e')
5284 invalidate_any_buried_refs (XEXP (x, i));
5285 else if (fmt[i] == 'E')
5286 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
5287 invalidate_any_buried_refs (XVECEXP (x, i, j));
5291 /* Find all the 'simple' MEMs which are used in LOADs and STORES. Simple
5292 being defined as MEM loads and stores to symbols, with no side effects
5293 and no registers in the expression. For a MEM destination, we also
5294 check that the insn is still valid if we replace the destination with a
5295 REG, as is done in update_ld_motion_stores. If there are any uses/defs
5296 which don't match this criteria, they are invalidated and trimmed out
5297 later. */
5299 static void
5300 compute_ld_motion_mems (void)
5302 struct ls_expr * ptr;
5303 basic_block bb;
5304 rtx insn;
5306 pre_ldst_mems = NULL;
5308 FOR_EACH_BB (bb)
5310 for (insn = BB_HEAD (bb);
5311 insn && insn != NEXT_INSN (BB_END (bb));
5312 insn = NEXT_INSN (insn))
5314 if (INSN_P (insn))
5316 if (GET_CODE (PATTERN (insn)) == SET)
5318 rtx src = SET_SRC (PATTERN (insn));
5319 rtx dest = SET_DEST (PATTERN (insn));
5321 /* Check for a simple LOAD... */
5322 if (MEM_P (src) && simple_mem (src))
5324 ptr = ldst_entry (src);
5325 if (REG_P (dest))
5326 ptr->loads = alloc_INSN_LIST (insn, ptr->loads);
5327 else
5328 ptr->invalid = 1;
5330 else
5332 /* Make sure there isn't a buried load somewhere. */
5333 invalidate_any_buried_refs (src);
5336 /* Check for stores. Don't worry about aliased ones, they
5337 will block any movement we might do later. We only care
5338 about this exact pattern since those are the only
5339 circumstances in which we will ignore the aliasing info. */
5340 if (MEM_P (dest) && simple_mem (dest))
5342 ptr = ldst_entry (dest);
5344 if (! MEM_P (src)
5345 && GET_CODE (src) != ASM_OPERANDS
5346 /* Check for REG manually since want_to_gcse_p
5347 returns 0 for all REGs. */
5348 && can_assign_to_reg_p (src))
5349 ptr->stores = alloc_INSN_LIST (insn, ptr->stores);
5350 else
5351 ptr->invalid = 1;
5354 else
5355 invalidate_any_buried_refs (PATTERN (insn));
5361 /* Remove any references that have either been invalidated or are not in the
5362 expression list for pre gcse. */
5364 static void
5365 trim_ld_motion_mems (void)
5367 struct ls_expr * * last = & pre_ldst_mems;
5368 struct ls_expr * ptr = pre_ldst_mems;
5370 while (ptr != NULL)
5372 struct expr * expr;
5374 /* Delete if entry has been made invalid. */
5375 if (! ptr->invalid)
5377 /* Delete if we cannot find this mem in the expression list. */
5378 unsigned int hash = ptr->hash_index % expr_hash_table.size;
5380 for (expr = expr_hash_table.table[hash];
5381 expr != NULL;
5382 expr = expr->next_same_hash)
5383 if (expr_equiv_p (expr->expr, ptr->pattern))
5384 break;
5386 else
5387 expr = (struct expr *) 0;
5389 if (expr)
5391 /* Set the expression field if we are keeping it. */
5392 ptr->expr = expr;
5393 last = & ptr->next;
5394 ptr = ptr->next;
5396 else
5398 *last = ptr->next;
5399 free_ldst_entry (ptr);
5400 ptr = * last;
5404 /* Show the world what we've found. */
5405 if (gcse_file && pre_ldst_mems != NULL)
5406 print_ldst_list (gcse_file);
5409 /* This routine will take an expression which we are replacing with
5410 a reaching register, and update any stores that are needed if
5411 that expression is in the ld_motion list. Stores are updated by
5412 copying their SRC to the reaching register, and then storing
5413 the reaching register into the store location. This keeps the
5414 correct value in the reaching register for the loads. */
5416 static void
5417 update_ld_motion_stores (struct expr * expr)
5419 struct ls_expr * mem_ptr;
5421 if ((mem_ptr = find_rtx_in_ldst (expr->expr)))
5423 /* We can try to find just the REACHED stores, but it shouldn't
5424 matter to set the reaching reg everywhere... some might be
5425 dead and should be eliminated later. */
5427 /* We replace (set mem expr) with (set reg expr) (set mem reg)
5428 where reg is the reaching reg used in the load. We checked in
5429 compute_ld_motion_mems that we can replace (set mem expr) with
5430 (set reg expr) in that insn. */
5431 rtx list = mem_ptr->stores;
5433 for ( ; list != NULL_RTX; list = XEXP (list, 1))
5435 rtx insn = XEXP (list, 0);
5436 rtx pat = PATTERN (insn);
5437 rtx src = SET_SRC (pat);
5438 rtx reg = expr->reaching_reg;
5439 rtx copy, new;
5441 /* If we've already copied it, continue. */
5442 if (expr->reaching_reg == src)
5443 continue;
5445 if (gcse_file)
5447 fprintf (gcse_file, "PRE: store updated with reaching reg ");
5448 print_rtl (gcse_file, expr->reaching_reg);
5449 fprintf (gcse_file, ":\n ");
5450 print_inline_rtx (gcse_file, insn, 8);
5451 fprintf (gcse_file, "\n");
5454 copy = gen_move_insn (reg, copy_rtx (SET_SRC (pat)));
5455 new = emit_insn_before (copy, insn);
5456 record_one_set (REGNO (reg), new);
5457 SET_SRC (pat) = reg;
5459 /* un-recognize this pattern since it's probably different now. */
5460 INSN_CODE (insn) = -1;
5461 gcse_create_count++;
5466 /* Store motion code. */
5468 #define ANTIC_STORE_LIST(x) ((x)->loads)
5469 #define AVAIL_STORE_LIST(x) ((x)->stores)
5470 #define LAST_AVAIL_CHECK_FAILURE(x) ((x)->reaching_reg)
5472 /* This is used to communicate the target bitvector we want to use in the
5473 reg_set_info routine when called via the note_stores mechanism. */
5474 static int * regvec;
5476 /* And current insn, for the same routine. */
5477 static rtx compute_store_table_current_insn;
5479 /* Used in computing the reverse edge graph bit vectors. */
5480 static sbitmap * st_antloc;
5482 /* Global holding the number of store expressions we are dealing with. */
5483 static int num_stores;
5485 /* Check to see whether we need to mark a register set. Called from
5486 note_stores. */
5488 static void
5489 reg_set_info (rtx dest, rtx setter ATTRIBUTE_UNUSED,
5490 void *data)
5492 sbitmap bb_reg = data;
5494 if (GET_CODE (dest) == SUBREG)
5495 dest = SUBREG_REG (dest);
5497 if (REG_P (dest))
5499 regvec[REGNO (dest)] = INSN_UID (compute_store_table_current_insn);
5500 if (bb_reg)
5501 SET_BIT (bb_reg, REGNO (dest));
5505 /* Clear any mark that says that this insn sets dest. Called from
5506 note_stores. */
5508 static void
5509 reg_clear_last_set (rtx dest, rtx setter ATTRIBUTE_UNUSED,
5510 void *data)
5512 int *dead_vec = data;
5514 if (GET_CODE (dest) == SUBREG)
5515 dest = SUBREG_REG (dest);
5517 if (REG_P (dest) &&
5518 dead_vec[REGNO (dest)] == INSN_UID (compute_store_table_current_insn))
5519 dead_vec[REGNO (dest)] = 0;
5522 /* Return zero if some of the registers in list X are killed
5523 due to a set of one of the registers in REGS_SET. */
5525 static bool
5526 store_ops_ok (rtx x, int *regs_set)
5528 rtx reg;
5530 for (; x; x = XEXP (x, 1))
5532 reg = XEXP (x, 0);
5533 if (regs_set[REGNO(reg)])
5534 return false;
5537 return true;
5540 /* Returns a list of registers mentioned in X. */
5541 static rtx
5542 extract_mentioned_regs (rtx x)
5544 return extract_mentioned_regs_helper (x, NULL_RTX);
5547 /* Helper for extract_mentioned_regs; ACCUM is used to accumulate used
5548 registers. */
5549 static rtx
5550 extract_mentioned_regs_helper (rtx x, rtx accum)
5552 int i;
5553 enum rtx_code code;
5554 const char * fmt;
5556 /* Repeat is used to turn tail-recursion into iteration. */
5557 repeat:
5559 if (x == 0)
5560 return accum;
5562 code = GET_CODE (x);
5563 switch (code)
5565 case REG:
5566 return alloc_EXPR_LIST (0, x, accum);
5568 case MEM:
5569 x = XEXP (x, 0);
5570 goto repeat;
5572 case PRE_DEC:
5573 case PRE_INC:
5574 case POST_DEC:
5575 case POST_INC:
5576 /* We do not run this function with arguments having side effects. */
5577 gcc_unreachable ();
5579 case PC:
5580 case CC0: /*FIXME*/
5581 case CONST:
5582 case CONST_INT:
5583 case CONST_DOUBLE:
5584 case CONST_VECTOR:
5585 case SYMBOL_REF:
5586 case LABEL_REF:
5587 case ADDR_VEC:
5588 case ADDR_DIFF_VEC:
5589 return accum;
5591 default:
5592 break;
5595 i = GET_RTX_LENGTH (code) - 1;
5596 fmt = GET_RTX_FORMAT (code);
5598 for (; i >= 0; i--)
5600 if (fmt[i] == 'e')
5602 rtx tem = XEXP (x, i);
5604 /* If we are about to do the last recursive call
5605 needed at this level, change it into iteration. */
5606 if (i == 0)
5608 x = tem;
5609 goto repeat;
5612 accum = extract_mentioned_regs_helper (tem, accum);
5614 else if (fmt[i] == 'E')
5616 int j;
5618 for (j = 0; j < XVECLEN (x, i); j++)
5619 accum = extract_mentioned_regs_helper (XVECEXP (x, i, j), accum);
5623 return accum;
5626 /* Determine whether INSN is a MEM store pattern that we will consider moving.
5627 REGS_SET_BEFORE is a bitmap of registers set before (and including) the
5628 current insn, REGS_SET_AFTER is a bitmap of registers set after (and
5629 including) the insn in this basic block. We must be passing through BB from
5630 head to end, as we are using this fact to speed things up.
5632 The results are stored this way:
5634 -- the first anticipatable expression is added into ANTIC_STORE_LIST
5635 -- if the processed expression is not anticipatable, NULL_RTX is added
5636 there instead, so that we can use it as indicator that no further
5637 expression of this type may be anticipatable
5638 -- if the expression is available, it is added as head of AVAIL_STORE_LIST;
5639 consequently, all of them but this head are dead and may be deleted.
5640 -- if the expression is not available, the insn that causes it to fail to be
5641 available is stored in reaching_reg.
5643 Things are complicated a bit by the fact that there may already be stores
5644 to the same MEM from other blocks; also, the caller must take care of the
5645 necessary cleanup of the temporary markers after the end of the basic block.  */
5648 static void
5649 find_moveable_store (rtx insn, int *regs_set_before, int *regs_set_after)
5651 struct ls_expr * ptr;
5652 rtx dest, set, tmp;
5653 int check_anticipatable, check_available;
5654 basic_block bb = BLOCK_FOR_INSN (insn);
5656 set = single_set (insn);
5657 if (!set)
5658 return;
5660 dest = SET_DEST (set);
5662 if (! MEM_P (dest) || MEM_VOLATILE_P (dest)
5663 || GET_MODE (dest) == BLKmode)
5664 return;
5666 if (side_effects_p (dest))
5667 return;
5669 /* If we are handling exceptions, we must be careful with memory references
5670 that may trap. If we are not, the behavior is undefined, so we may just
5671 continue. */
5672 if (flag_non_call_exceptions && may_trap_p (dest))
5673 return;
5675 /* Even if the destination cannot trap, the source may. In this case we'd
5676 need to handle updating the REG_EH_REGION note. */
5677 if (find_reg_note (insn, REG_EH_REGION, NULL_RTX))
5678 return;
5680 ptr = ldst_entry (dest);
5681 if (!ptr->pattern_regs)
5682 ptr->pattern_regs = extract_mentioned_regs (dest);
5684 /* Do not check for anticipatability if we either found one anticipatable
5685 store already, or tested for one and found out that it was killed. */
5686 check_anticipatable = 0;
5687 if (!ANTIC_STORE_LIST (ptr))
5688 check_anticipatable = 1;
5689 else
5691 tmp = XEXP (ANTIC_STORE_LIST (ptr), 0);
5692 if (tmp != NULL_RTX
5693 && BLOCK_FOR_INSN (tmp) != bb)
5694 check_anticipatable = 1;
5696 if (check_anticipatable)
5698 if (store_killed_before (dest, ptr->pattern_regs, insn, bb, regs_set_before))
5699 tmp = NULL_RTX;
5700 else
5701 tmp = insn;
5702 ANTIC_STORE_LIST (ptr) = alloc_INSN_LIST (tmp,
5703 ANTIC_STORE_LIST (ptr));
5706 /* It is not necessary to check whether the store is available if we did
5707 it successfully before; if we failed before, do not bother to check
5708 until we reach the insn that caused us to fail. */
5709 check_available = 0;
5710 if (!AVAIL_STORE_LIST (ptr))
5711 check_available = 1;
5712 else
5714 tmp = XEXP (AVAIL_STORE_LIST (ptr), 0);
5715 if (BLOCK_FOR_INSN (tmp) != bb)
5716 check_available = 1;
5718 if (check_available)
5720 /* Check that we have already reached the insn at which the check
5721 failed last time. */
5722 if (LAST_AVAIL_CHECK_FAILURE (ptr))
5724 for (tmp = BB_END (bb);
5725 tmp != insn && tmp != LAST_AVAIL_CHECK_FAILURE (ptr);
5726 tmp = PREV_INSN (tmp))
5727 continue;
5728 if (tmp == insn)
5729 check_available = 0;
5731 else
5732 check_available = store_killed_after (dest, ptr->pattern_regs, insn,
5733 bb, regs_set_after,
5734 &LAST_AVAIL_CHECK_FAILURE (ptr));
5736 if (!check_available)
5737 AVAIL_STORE_LIST (ptr) = alloc_INSN_LIST (insn, AVAIL_STORE_LIST (ptr));
5740 /* Find available and anticipatable stores. */
5742 static int
5743 compute_store_table (void)
5745 int ret;
5746 basic_block bb;
5747 unsigned regno;
5748 rtx insn, pat, tmp;
5749 int *last_set_in, *already_set;
5750 struct ls_expr * ptr, **prev_next_ptr_ptr;
5752 max_gcse_regno = max_reg_num ();
5754 reg_set_in_block = sbitmap_vector_alloc (last_basic_block,
5755 max_gcse_regno);
5756 sbitmap_vector_zero (reg_set_in_block, last_basic_block);
5757 pre_ldst_mems = 0;
5758 last_set_in = xcalloc (max_gcse_regno, sizeof (int));
5759 already_set = xmalloc (sizeof (int) * max_gcse_regno);
5761 /* Find all the stores we care about. */
5762 FOR_EACH_BB (bb)
5764 /* First compute the registers set in this block. */
5765 regvec = last_set_in;
5767 for (insn = BB_HEAD (bb);
5768 insn != NEXT_INSN (BB_END (bb));
5769 insn = NEXT_INSN (insn))
5771 if (! INSN_P (insn))
5772 continue;
5774 if (CALL_P (insn))
5776 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
5777 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
5779 last_set_in[regno] = INSN_UID (insn);
5780 SET_BIT (reg_set_in_block[bb->index], regno);
5784 pat = PATTERN (insn);
5785 compute_store_table_current_insn = insn;
5786 note_stores (pat, reg_set_info, reg_set_in_block[bb->index]);
5789 /* Now find the stores. */
5790 memset (already_set, 0, sizeof (int) * max_gcse_regno);
5791 regvec = already_set;
5792 for (insn = BB_HEAD (bb);
5793 insn != NEXT_INSN (BB_END (bb));
5794 insn = NEXT_INSN (insn))
5796 if (! INSN_P (insn))
5797 continue;
5799 if (CALL_P (insn))
5801 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
5802 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
5803 already_set[regno] = 1;
5806 pat = PATTERN (insn);
5807 note_stores (pat, reg_set_info, NULL);
5809 /* Now that we've marked regs, look for stores. */
5810 find_moveable_store (insn, already_set, last_set_in);
5812 /* Unmark regs that are no longer set. */
5813 compute_store_table_current_insn = insn;
5814 note_stores (pat, reg_clear_last_set, last_set_in);
5815 if (CALL_P (insn))
5817 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
5818 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno)
5819 && last_set_in[regno] == INSN_UID (insn))
5820 last_set_in[regno] = 0;
5824 #ifdef ENABLE_CHECKING
5825 /* last_set_in should now be all-zero. */
5826 for (regno = 0; regno < max_gcse_regno; regno++)
5827 gcc_assert (!last_set_in[regno]);
5828 #endif
5830 /* Clear temporary marks. */
5831 for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
5833 LAST_AVAIL_CHECK_FAILURE(ptr) = NULL_RTX;
5834 if (ANTIC_STORE_LIST (ptr)
5835 && (tmp = XEXP (ANTIC_STORE_LIST (ptr), 0)) == NULL_RTX)
5836 ANTIC_STORE_LIST (ptr) = XEXP (ANTIC_STORE_LIST (ptr), 1);
5840 /* Remove the stores that are not available anywhere, as there will
5841 be no opportunity to optimize them. */
5842 for (ptr = pre_ldst_mems, prev_next_ptr_ptr = &pre_ldst_mems;
5843 ptr != NULL;
5844 ptr = *prev_next_ptr_ptr)
5846 if (!AVAIL_STORE_LIST (ptr))
5848 *prev_next_ptr_ptr = ptr->next;
5849 free_ldst_entry (ptr);
5851 else
5852 prev_next_ptr_ptr = &ptr->next;
5855 ret = enumerate_ldsts ();
5857 if (gcse_file)
5859 fprintf (gcse_file, "ST_avail and ST_antic (shown under loads..)\n");
5860 print_ldst_list (gcse_file);
5863 free (last_set_in);
5864 free (already_set);
5865 return ret;
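/* Roughly, the two walks above maintain, for every register R:

        already_set[R]   nonzero once some insn at or before the current
                         one sets R (reset for each block);
        last_set_in[R]   nonzero while R is still set by the current insn
                         or by a later insn in the block.

   find_moveable_store receives these arrays as REGS_SET_BEFORE and
   REGS_SET_AFTER, so store_ops_ok can decide cheaply whether the
   registers appearing in a store address might change before or after
   the insn, without rescanning the block.  */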
5868 /* Check to see if the load X is aliased with STORE_PATTERN.
5869 AFTER is true if we are checking the case when STORE_PATTERN occurs
5870 after X. */
5872 static bool
5873 load_kills_store (rtx x, rtx store_pattern, int after)
5875 if (after)
5876 return anti_dependence (x, store_pattern);
5877 else
5878 return true_dependence (store_pattern, GET_MODE (store_pattern), x,
5879 rtx_addr_varies_p);
5882 /* Go through the entire insn X, looking for any loads which might alias
5883 STORE_PATTERN. Return true if found.
5884 AFTER is true if we are checking the case when STORE_PATTERN occurs
5885 after the insn X. */
5887 static bool
5888 find_loads (rtx x, rtx store_pattern, int after)
5890 const char * fmt;
5891 int i, j;
5892 int ret = false;
5894 if (!x)
5895 return false;
5897 if (GET_CODE (x) == SET)
5898 x = SET_SRC (x);
5900 if (MEM_P (x))
5902 if (load_kills_store (x, store_pattern, after))
5903 return true;
5906 /* Recursively process the insn. */
5907 fmt = GET_RTX_FORMAT (GET_CODE (x));
5909 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0 && !ret; i--)
5911 if (fmt[i] == 'e')
5912 ret |= find_loads (XEXP (x, i), store_pattern, after);
5913 else if (fmt[i] == 'E')
5914 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
5915 ret |= find_loads (XVECEXP (x, i, j), store_pattern, after);
5917 return ret;
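/* For example (a made-up insn, not from a real dump): given the store
   pattern (mem:SI (reg A)) and an insn

        (set (reg X) (plus:SI (mem:SI (reg B)) (const_int 1)))

   the walk above descends into the SET_SRC, reaches (mem:SI (reg B)) and
   asks load_kills_store whether that load may alias the store pattern.
   Because every 'e' (rtx) and 'E' (rtx vector) slot of each code's
   format string is followed, loads buried inside PARALLELs or nested
   arithmetic are found as well.  */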
5920 /* Check if INSN kills the store pattern X (is aliased with it).
5921 AFTER is true if we are checking the case when store X occurs
5922 after the insn. Return true if it does. */
5924 static bool
5925 store_killed_in_insn (rtx x, rtx x_regs, rtx insn, int after)
5927 rtx reg, base, note;
5929 if (!INSN_P (insn))
5930 return false;
5932 if (CALL_P (insn))
5934 /* A normal or pure call might read from pattern,
5935 but a const call will not. */
5936 if (! CONST_OR_PURE_CALL_P (insn) || pure_call_p (insn))
5937 return true;
5939 /* But even a const call reads its parameters. Check whether the
5940 base of any of the registers used in the mem is the stack pointer. */
5941 for (reg = x_regs; reg; reg = XEXP (reg, 1))
5943 base = find_base_term (XEXP (reg, 0));
5944 if (!base
5945 || (GET_CODE (base) == ADDRESS
5946 && GET_MODE (base) == Pmode
5947 && XEXP (base, 0) == stack_pointer_rtx))
5948 return true;
5951 return false;
5954 if (GET_CODE (PATTERN (insn)) == SET)
5956 rtx pat = PATTERN (insn);
5957 rtx dest = SET_DEST (pat);
5959 if (GET_CODE (dest) == ZERO_EXTRACT)
5960 dest = XEXP (dest, 0);
5962 /* Check for memory stores to aliased objects. */
5963 if (MEM_P (dest)
5964 && !expr_equiv_p (dest, x))
5966 if (after)
5968 if (output_dependence (dest, x))
5969 return true;
5971 else
5973 if (output_dependence (x, dest))
5974 return true;
5977 if (find_loads (SET_SRC (pat), x, after))
5978 return true;
5980 else if (find_loads (PATTERN (insn), x, after))
5981 return true;
5983 /* If this insn has a REG_EQUAL or REG_EQUIV note referencing a memory
5984 location aliased with X, then this insn kills X. */
5985 note = find_reg_equal_equiv_note (insn);
5986 if (! note)
5987 return false;
5988 note = XEXP (note, 0);
5990 /* However, if the note represents a must alias rather than a may
5991 alias relationship, then it does not kill X. */
5992 if (expr_equiv_p (note, x))
5993 return false;
5995 /* See if there are any aliased loads in the note. */
5996 return find_loads (note, x, after);
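/* The stack pointer test above matters for fragments like the following
   (purely illustrative):

        struct s local;
        local.f = 1;          <- store into a stack slot
        n = cheap (local);    <- const function, argument passed on the stack

   even though a const function reads no global memory, the call still
   reads its argument from the stack, so a store whose address is based
   on the stack pointer has to be treated as killed by the call.  */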
5999 /* Returns true if the expression X is loaded or clobbered on or after INSN
6000 within basic block BB. REGS_SET_AFTER is a bitmap of registers set in
6001 or after the insn. X_REGS is the list of registers mentioned in X. If the
6002 store is killed, return in FAIL_INSN the last insn in which the kill occurs. */
6004 static bool
6005 store_killed_after (rtx x, rtx x_regs, rtx insn, basic_block bb,
6006 int *regs_set_after, rtx *fail_insn)
6008 rtx last = BB_END (bb), act;
6010 if (!store_ops_ok (x_regs, regs_set_after))
6012 /* We do not know where it will happen. */
6013 if (fail_insn)
6014 *fail_insn = NULL_RTX;
6015 return true;
6018 /* Scan from the end, so that fail_insn is determined correctly. */
6019 for (act = last; act != PREV_INSN (insn); act = PREV_INSN (act))
6020 if (store_killed_in_insn (x, x_regs, act, false))
6022 if (fail_insn)
6023 *fail_insn = act;
6024 return true;
6027 return false;
6030 /* Returns true if the expression X is loaded or clobbered on or before INSN
6031 within basic block BB. X_REGS is the list of registers mentioned in X.
6032 REGS_SET_BEFORE is a bitmap of registers set before or in this insn. */
6033 static bool
6034 store_killed_before (rtx x, rtx x_regs, rtx insn, basic_block bb,
6035 int *regs_set_before)
6037 rtx first = BB_HEAD (bb);
6039 if (!store_ops_ok (x_regs, regs_set_before))
6040 return true;
6042 for ( ; insn != PREV_INSN (first); insn = PREV_INSN (insn))
6043 if (store_killed_in_insn (x, x_regs, insn, true))
6044 return true;
6046 return false;
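/* A small illustration of the scan directions (hypothetical block):

        i1:  *p = x        <- first store
        i2:  y = *q        <- may alias *p, kills the store
        i3:  *p = x        <- second store
        i4:  r = s + t

   store_killed_after for i1 scans i4, i3, i2 and stops at i2, the latest
   insn that kills the store, and records it in FAIL_INSN.  When
   find_moveable_store later examines i3, it scans back from BB_END and
   reaches i3 before i2, so it knows without rechecking that the store is
   available from i3 on.  store_killed_before simply scans backwards from
   INSN to the head of the block.  */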
6049 /* Fill in available, anticipatable, transparent and kill vectors in
6050 STORE_DATA, based on lists of available and anticipatable stores. */
6051 static void
6052 build_store_vectors (void)
6054 basic_block bb;
6055 int *regs_set_in_block;
6056 rtx insn, st;
6057 struct ls_expr * ptr;
6058 unsigned regno;
6060 /* Build the gen_vector. This is any store in the table which is not killed
6061 by aliasing later in its block. */
6062 ae_gen = sbitmap_vector_alloc (last_basic_block, num_stores);
6063 sbitmap_vector_zero (ae_gen, last_basic_block);
6065 st_antloc = sbitmap_vector_alloc (last_basic_block, num_stores);
6066 sbitmap_vector_zero (st_antloc, last_basic_block);
6068 for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
6070 for (st = AVAIL_STORE_LIST (ptr); st != NULL; st = XEXP (st, 1))
6072 insn = XEXP (st, 0);
6073 bb = BLOCK_FOR_INSN (insn);
6075 /* If we've already seen an available expression in this block,
6076 we can delete this one (it occurs earlier in the block). We'll
6077 copy the SRC expression to an unused register in case there
6078 are any side effects. */
6079 if (TEST_BIT (ae_gen[bb->index], ptr->index))
6081 rtx r = gen_reg_rtx (GET_MODE (ptr->pattern));
6082 if (gcse_file)
6083 fprintf (gcse_file, "Removing redundant store:\n");
6084 replace_store_insn (r, XEXP (st, 0), bb, ptr);
6085 continue;
6087 SET_BIT (ae_gen[bb->index], ptr->index);
6090 for (st = ANTIC_STORE_LIST (ptr); st != NULL; st = XEXP (st, 1))
6092 insn = XEXP (st, 0);
6093 bb = BLOCK_FOR_INSN (insn);
6094 SET_BIT (st_antloc[bb->index], ptr->index);
6098 ae_kill = sbitmap_vector_alloc (last_basic_block, num_stores);
6099 sbitmap_vector_zero (ae_kill, last_basic_block);
6101 transp = sbitmap_vector_alloc (last_basic_block, num_stores);
6102 sbitmap_vector_zero (transp, last_basic_block);
6103 regs_set_in_block = xmalloc (sizeof (int) * max_gcse_regno);
6105 FOR_EACH_BB (bb)
6107 for (regno = 0; regno < max_gcse_regno; regno++)
6108 regs_set_in_block[regno] = TEST_BIT (reg_set_in_block[bb->index], regno);
6110 for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
6112 if (store_killed_after (ptr->pattern, ptr->pattern_regs, BB_HEAD (bb),
6113 bb, regs_set_in_block, NULL))
6115 /* It should not be necessary to consider the expression
6116 killed if it is both anticipatable and available. */
6117 if (!TEST_BIT (st_antloc[bb->index], ptr->index)
6118 || !TEST_BIT (ae_gen[bb->index], ptr->index))
6119 SET_BIT (ae_kill[bb->index], ptr->index);
6121 else
6122 SET_BIT (transp[bb->index], ptr->index);
6126 free (regs_set_in_block);
6128 if (gcse_file)
6130 dump_sbitmap_vector (gcse_file, "st_antloc", "", st_antloc, last_basic_block);
6131 dump_sbitmap_vector (gcse_file, "st_kill", "", ae_kill, last_basic_block);
6132 dump_sbitmap_vector (gcse_file, "Transpt", "", transp, last_basic_block);
6133 dump_sbitmap_vector (gcse_file, "st_avloc", "", ae_gen, last_basic_block);
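/* A small, made-up example of what the vectors built above encode.  For
   a diamond-shaped CFG

                 B1
                /  \
              B2    B3        (only B2 contains  *p = x)
                \  /
                 B4

   the store contributes a bit in st_antloc[B2] and in ae_gen[B2] (dumped
   as "st_avloc"); it is transparent in B1, B3 and B4 as long as nothing
   there reads or clobbers *p or modifies the registers in its address,
   and it lands in ae_kill for any block where such a conflict exists.
   pre_edge_rev_lcm then solves the lazy-code-motion problem over these
   vectors, in reverse, to decide on which edges the store should be
   re-inserted.  */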
6137 /* Insert an instruction at the beginning of a basic block, and update
6138 the BB_HEAD if needed. */
6140 static void
6141 insert_insn_start_bb (rtx insn, basic_block bb)
6143 /* Insert at start of successor block. */
6144 rtx prev = PREV_INSN (BB_HEAD (bb));
6145 rtx before = BB_HEAD (bb);
6146 while (before != 0)
6148 if (! LABEL_P (before)
6149 && (! NOTE_P (before)
6150 || NOTE_LINE_NUMBER (before) != NOTE_INSN_BASIC_BLOCK))
6151 break;
6152 prev = before;
6153 if (prev == BB_END (bb))
6154 break;
6155 before = NEXT_INSN (before);
6158 insn = emit_insn_after_noloc (insn, prev);
6160 if (gcse_file)
6162 fprintf (gcse_file, "STORE_MOTION insert store at start of BB %d:\n",
6163 bb->index);
6164 print_inline_rtx (gcse_file, insn, 6);
6165 fprintf (gcse_file, "\n");
6169 /* This routine will insert a store on an edge. EXPR is the ldst entry for
6170 the memory reference, and E is the edge to insert it on. Returns nonzero
6171 if an edge insertion was performed. */
6173 static int
6174 insert_store (struct ls_expr * expr, edge e)
6176 rtx reg, insn;
6177 basic_block bb;
6178 edge tmp;
6179 edge_iterator ei;
6181 /* We did all the deletes before this insert, so if we didn't delete a
6182 store, then we haven't set the reaching reg yet either. */
6183 if (expr->reaching_reg == NULL_RTX)
6184 return 0;
6186 if (e->flags & EDGE_FAKE)
6187 return 0;
6189 reg = expr->reaching_reg;
6190 insn = gen_move_insn (copy_rtx (expr->pattern), reg);
6192 /* If we are inserting this expression on ALL predecessor edges of a BB,
6193 insert it at the start of the BB instead, and reset the insert bits on
6194 those edges so we don't try to insert it on each of them individually. */
6195 bb = e->dest;
6196 FOR_EACH_EDGE (tmp, ei, e->dest->preds)
6197 if (!(tmp->flags & EDGE_FAKE))
6199 int index = EDGE_INDEX (edge_list, tmp->src, tmp->dest);
6201 gcc_assert (index != EDGE_INDEX_NO_EDGE);
6202 if (! TEST_BIT (pre_insert_map[index], expr->index))
6203 break;
6206 /* If tmp is NULL, an insertion is wanted on every (non-fake) edge: clear
6207 the insertion bits for those edges and insert at the start of the BB. */
6208 if (!tmp && bb != EXIT_BLOCK_PTR)
6210 FOR_EACH_EDGE (tmp, ei, e->dest->preds)
6212 int index = EDGE_INDEX (edge_list, tmp->src, tmp->dest);
6213 RESET_BIT (pre_insert_map[index], expr->index);
6215 insert_insn_start_bb (insn, bb);
6216 return 0;
6219 /* We can't put stores at the front of blocks reached by abnormal
6220 edges, since that may introduce a store where none existed before. */
6221 gcc_assert (!(e->flags & EDGE_ABNORMAL));
6223 insert_insn_on_edge (insn, e);
6225 if (gcse_file)
6227 fprintf (gcse_file, "STORE_MOTION insert insn on edge (%d, %d):\n",
6228 e->src->index, e->dest->index);
6229 print_inline_rtx (gcse_file, insn, 6);
6230 fprintf (gcse_file, "\n");
6233 return 1;
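/* For instance, if LCM requests the insertion of a store on both
   incoming edges of a join block,

        e1 ---\
               >--- BB
        e2 ---/

   inserting it once at the start of BB has the same effect as inserting
   it on e1 and e2 separately and avoids splitting either edge.  That is
   what the code above does when it finds the expression's bit set in
   pre_insert_map for every non-fake predecessor edge: it clears those
   bits and calls insert_insn_start_bb instead of insert_insn_on_edge.  */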
6236 /* Remove any REG_EQUAL or REG_EQUIV notes that refer to the memory
6237 location of SMEXPR, from insns reachable from the store in basic block BB.
6239 This could be rather expensive. */
6241 static void
6242 remove_reachable_equiv_notes (basic_block bb, struct ls_expr *smexpr)
6244 edge_iterator *stack, ei;
6245 int sp;
6246 edge act;
6247 sbitmap visited = sbitmap_alloc (last_basic_block);
6248 rtx last, insn, note;
6249 rtx mem = smexpr->pattern;
6251 stack = xmalloc (sizeof (edge_iterator) * n_basic_blocks);
6252 sp = 0;
6253 ei = ei_start (bb->succs);
6255 sbitmap_zero (visited);
6257 act = (EDGE_COUNT (ei_container (ei)) > 0 ? EDGE_I (ei_container (ei), 0) : NULL);
6258 while (1)
6260 if (!act)
6262 if (!sp)
6264 free (stack);
6265 sbitmap_free (visited);
6266 return;
6268 act = ei_edge (stack[--sp]);
6270 bb = act->dest;
6272 if (bb == EXIT_BLOCK_PTR
6273 || TEST_BIT (visited, bb->index))
6275 if (!ei_end_p (ei))
6276 ei_next (&ei);
6277 act = (! ei_end_p (ei)) ? ei_edge (ei) : NULL;
6278 continue;
6280 SET_BIT (visited, bb->index);
6282 if (TEST_BIT (st_antloc[bb->index], smexpr->index))
6284 for (last = ANTIC_STORE_LIST (smexpr);
6285 BLOCK_FOR_INSN (XEXP (last, 0)) != bb;
6286 last = XEXP (last, 1))
6287 continue;
6288 last = XEXP (last, 0);
6290 else
6291 last = NEXT_INSN (BB_END (bb));
6293 for (insn = BB_HEAD (bb); insn != last; insn = NEXT_INSN (insn))
6294 if (INSN_P (insn))
6296 note = find_reg_equal_equiv_note (insn);
6297 if (!note || !expr_equiv_p (XEXP (note, 0), mem))
6298 continue;
6300 if (gcse_file)
6301 fprintf (gcse_file, "STORE_MOTION drop REG_EQUAL note at insn %d:\n",
6302 INSN_UID (insn));
6303 remove_note (insn, note);
6306 if (!ei_end_p (ei))
6307 ei_next (&ei);
6308 act = (! ei_end_p (ei)) ? ei_edge (ei) : NULL;
6310 if (EDGE_COUNT (bb->succs) > 0)
6312 if (act)
6313 stack[sp++] = ei;
6314 ei = ei_start (bb->succs);
6315 act = (EDGE_COUNT (ei_container (ei)) > 0 ? EDGE_I (ei_container (ei), 0) : NULL);
6320 /* This routine will replace a store with a SET to a specified register. */
6322 static void
6323 replace_store_insn (rtx reg, rtx del, basic_block bb, struct ls_expr *smexpr)
6325 rtx insn, mem, note, set, ptr, pair;
6327 mem = smexpr->pattern;
6328 insn = gen_move_insn (reg, SET_SRC (single_set (del)));
6329 insn = emit_insn_after (insn, del);
6331 if (gcse_file)
6333 fprintf (gcse_file,
6334 "STORE_MOTION delete insn in BB %d:\n ", bb->index);
6335 print_inline_rtx (gcse_file, del, 6);
6336 fprintf (gcse_file, "\nSTORE MOTION replaced with insn:\n ");
6337 print_inline_rtx (gcse_file, insn, 6);
6338 fprintf (gcse_file, "\n");
6341 for (ptr = ANTIC_STORE_LIST (smexpr); ptr; ptr = XEXP (ptr, 1))
6342 if (XEXP (ptr, 0) == del)
6344 XEXP (ptr, 0) = insn;
6345 break;
6348 /* Move the notes from the deleted insn to its replacement, and patch
6349 up the LIBCALL notes. */
6350 REG_NOTES (insn) = REG_NOTES (del);
6352 note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
6353 if (note)
6355 pair = XEXP (note, 0);
6356 note = find_reg_note (pair, REG_LIBCALL, NULL_RTX);
6357 XEXP (note, 0) = insn;
6359 note = find_reg_note (insn, REG_LIBCALL, NULL_RTX);
6360 if (note)
6362 pair = XEXP (note, 0);
6363 note = find_reg_note (pair, REG_RETVAL, NULL_RTX);
6364 XEXP (note, 0) = insn;
6367 delete_insn (del);
6369 /* Now we must handle REG_EQUAL notes whose contents are equal to the mem;
6370 they are no longer accurate once they are reached by this definition,
6371 so drop them. */
6372 for (; insn != NEXT_INSN (BB_END (bb)); insn = NEXT_INSN (insn))
6373 if (INSN_P (insn))
6375 set = single_set (insn);
6376 if (!set)
6377 continue;
6378 if (expr_equiv_p (SET_DEST (set), mem))
6379 return;
6380 note = find_reg_equal_equiv_note (insn);
6381 if (!note || !expr_equiv_p (XEXP (note, 0), mem))
6382 continue;
6384 if (gcse_file)
6385 fprintf (gcse_file, "STORE_MOTION drop REG_EQUAL note at insn %d:\n",
6386 INSN_UID (insn));
6387 remove_note (insn, note);
6389 remove_reachable_equiv_notes (bb, smexpr);
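/* A sketch of the transformation (hypothetical source): the store

        *p = a + b;

   is rewritten here into

        r = a + b;        <- r is SMEXPR's reaching_reg

   while the actual store  *p = r  is emitted later by insert_store on
   the edges LCM selected.  Between the rewritten insn and the point where
   the store is re-inserted, a REG_EQUAL or REG_EQUIV note claiming that
   some register equals the contents of *p is no longer reliable, which is
   why such notes are dropped in the rest of this block and in the blocks
   reachable from it, up to the next store to *p.  */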
6393 /* Delete a store, but copy the value that would have been stored into
6394 the reaching_reg for later storing. */
6396 static void
6397 delete_store (struct ls_expr * expr, basic_block bb)
6399 rtx reg, i, del;
6401 if (expr->reaching_reg == NULL_RTX)
6402 expr->reaching_reg = gen_reg_rtx (GET_MODE (expr->pattern));
6404 reg = expr->reaching_reg;
6406 for (i = AVAIL_STORE_LIST (expr); i; i = XEXP (i, 1))
6408 del = XEXP (i, 0);
6409 if (BLOCK_FOR_INSN (del) == bb)
6411 /* We know there is only one since we deleted redundant
6412 ones during the available computation. */
6413 replace_store_insn (reg, del, bb, expr);
6414 break;
6419 /* Free memory used by store motion. */
6421 static void
6422 free_store_memory (void)
6424 free_ldst_mems ();
6426 if (ae_gen)
6427 sbitmap_vector_free (ae_gen);
6428 if (ae_kill)
6429 sbitmap_vector_free (ae_kill);
6430 if (transp)
6431 sbitmap_vector_free (transp);
6432 if (st_antloc)
6433 sbitmap_vector_free (st_antloc);
6434 if (pre_insert_map)
6435 sbitmap_vector_free (pre_insert_map);
6436 if (pre_delete_map)
6437 sbitmap_vector_free (pre_delete_map);
6438 if (reg_set_in_block)
6439 sbitmap_vector_free (reg_set_in_block);
6441 ae_gen = ae_kill = transp = st_antloc = NULL;
6442 pre_insert_map = pre_delete_map = reg_set_in_block = NULL;
6445 /* Perform store motion. Much like gcse, except we move expressions the
6446 other way by looking at the flowgraph in reverse. */
6448 static void
6449 store_motion (void)
6451 basic_block bb;
6452 int x;
6453 struct ls_expr * ptr;
6454 int update_flow = 0;
6456 if (gcse_file)
6458 fprintf (gcse_file, "before store motion\n");
6459 print_rtl (gcse_file, get_insns ());
6462 init_alias_analysis ();
6464 /* Find all the available and anticipatable stores. */
6465 num_stores = compute_store_table ();
6466 if (num_stores == 0)
6468 sbitmap_vector_free (reg_set_in_block);
6469 end_alias_analysis ();
6470 return;
6473 /* Now compute kill & transp vectors. */
6474 build_store_vectors ();
6475 add_noreturn_fake_exit_edges ();
6476 connect_infinite_loops_to_exit ();
6478 edge_list = pre_edge_rev_lcm (gcse_file, num_stores, transp, ae_gen,
6479 st_antloc, ae_kill, &pre_insert_map,
6480 &pre_delete_map);
6482 /* Now we want to insert the new stores which are going to be needed. */
6483 for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
6485 /* If any of the edges on which this store would be inserted is abnormal,
6486 we can't move the store. */
6487 for (x = NUM_EDGES (edge_list) - 1; x >= 0; x--)
6488 if (TEST_BIT (pre_insert_map[x], ptr->index)
6489 && (INDEX_EDGE (edge_list, x)->flags & EDGE_ABNORMAL))
6490 break;
6492 if (x >= 0)
6494 if (gcse_file != NULL)
6495 fprintf (gcse_file,
6496 "Can't replace store %d: abnormal edge from %d to %d\n",
6497 ptr->index, INDEX_EDGE (edge_list, x)->src->index,
6498 INDEX_EDGE (edge_list, x)->dest->index);
6499 continue;
6502 /* Delete the now-redundant stores for this expression, then insert the new ones on the edges selected by LCM. */
6504 FOR_EACH_BB (bb)
6505 if (TEST_BIT (pre_delete_map[bb->index], ptr->index))
6506 delete_store (ptr, bb);
6508 for (x = 0; x < NUM_EDGES (edge_list); x++)
6509 if (TEST_BIT (pre_insert_map[x], ptr->index))
6510 update_flow |= insert_store (ptr, INDEX_EDGE (edge_list, x));
6513 if (update_flow)
6514 commit_edge_insertions ();
6516 free_store_memory ();
6517 free_edge_list (edge_list);
6518 remove_fake_exit_edges ();
6519 end_alias_analysis ();
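/* The classic effect of this pass is sinking a store out of a loop.
   Schematically (a hypothetical example, not a real test case):

        before:                     after:
          do                          do
            *p = x + i;                 r = x + i;
          while (++i < n);            while (++i < n);
                                      *p = r;

   delete_store rewrites each store in the loop body into a copy to the
   reaching register, and insert_store / commit_edge_insertions
   materialize a single store on the loop exit edge chosen by
   pre_edge_rev_lcm.  */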
6523 /* Entry point for jump bypassing optimization pass. */
6525 int
6526 bypass_jumps (FILE *file)
6528 int changed;
6530 /* We do not construct an accurate cfg in functions which call
6531 setjmp, so just punt to be safe. */
6532 if (current_function_calls_setjmp)
6533 return 0;
6535 /* For calling dump_foo fns from gdb. */
6536 debug_stderr = stderr;
6537 gcse_file = file;
6539 /* Identify the basic block information for this function, including
6540 successors and predecessors. */
6541 max_gcse_regno = max_reg_num ();
6543 if (file)
6544 dump_flow_info (file);
6546 /* Return if there's nothing to do, or it is too expensive. */
6547 if (n_basic_blocks <= 1 || is_too_expensive (_ ("jump bypassing disabled")))
6548 return 0;
6550 gcc_obstack_init (&gcse_obstack);
6551 bytes_used = 0;
6553 /* We need alias analysis. */
6554 init_alias_analysis ();
6556 /* Record where pseudo-registers are set. This data is kept accurate
6557 during each pass. ??? We could also record hard-reg information here
6558 [since it's unchanging], however it is currently done during hash table
6559 computation.
6561 It may be tempting to compute MEM set information here too, but MEM sets
6562 will be subject to code motion one day and thus we need to compute
6563 information about memory sets when we build the hash tables. */
6565 alloc_reg_set_mem (max_gcse_regno);
6566 compute_sets (get_insns ());
6568 max_gcse_regno = max_reg_num ();
6569 alloc_gcse_mem (get_insns ());
6570 changed = one_cprop_pass (MAX_GCSE_PASSES + 2, 1, 1);
6571 free_gcse_mem ();
6573 if (file)
6575 fprintf (file, "BYPASS of %s: %d basic blocks, ",
6576 current_function_name (), n_basic_blocks);
6577 fprintf (file, "%d bytes\n\n", bytes_used);
6580 obstack_free (&gcse_obstack, NULL);
6581 free_reg_set_mem ();
6583 /* We are finished with alias. */
6584 end_alias_analysis ();
6585 allocate_reg_info (max_reg_num (), FALSE, FALSE);
6587 return changed;
6590 /* Return true if the graph is too expensive to optimize. PASS is the
6591 optimization about to be performed. */
6593 static bool
6594 is_too_expensive (const char *pass)
6596 /* Trying to perform global optimizations on flow graphs which have
6597 a high connectivity will take a long time and is unlikely to be
6598 particularly useful.
6600 In normal circumstances a cfg should have about twice as many
6601 edges as blocks. But we do not want to punish small functions
6602 which have a couple of switch statements. Rather than simply
6603 thresholding the number of blocks, use something that degrades
6604 more gracefully. */
6605 if (n_edges > 20000 + n_basic_blocks * 4)
6607 if (warn_disabled_optimization)
6608 warning ("%s: %d basic blocks and %d edges/basic block",
6609 pass, n_basic_blocks, n_edges / n_basic_blocks);
6611 return true;
6614 /* If allocating memory for the cprop bitmap would take up too much
6615 storage, it's better just to disable the optimization. */
6616 if ((n_basic_blocks
6617 * SBITMAP_SET_SIZE (max_reg_num ())
6618 * sizeof (SBITMAP_ELT_TYPE)) > MAX_GCSE_MEMORY)
6620 if (warn_disabled_optimization)
6621 warning ("%s: %d basic blocks and %d registers",
6622 pass, n_basic_blocks, max_reg_num ());
6624 return true;
6627 return false;
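/* As a rough illustration of the thresholds above: a function with 1000
   basic blocks is only rejected by the first test once it has more than
   20000 + 4 * 1000 = 24000 edges, i.e. an average of 24 edges per block,
   far beyond the roughly two edges per block of a typical CFG, so small
   functions containing a few large switch statements still pass.  */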
6630 #include "gt-gcse.h"