/* Global common subexpression elimination/Partial redundancy elimination
   and global constant/copy propagation for GNU compiler.
   Copyright (C) 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
/* TODO
   - reordering of memory allocation and freeing to be more space efficient
   - do rough calc of how many regs are needed in each block, and a rough
     calc of how many regs are available in each class and use that to
     throttle back the code in cases where RTX_COST is minimal.
   - a store to the same address as a load does not kill the load if the
     source of the store is also the destination of the load.  Handling this
     allows more load motion, particularly out of loops.
   - ability to realloc sbitmap vectors would allow one initial computation
     of reg_set_in_block with only subsequent additions, rather than
     recomputing it for each pass

*/
/* References searched while implementing this.

   Compilers Principles, Techniques and Tools
   Aho, Sethi, Ullman
   Addison-Wesley, 1988

   Global Optimization by Suppression of Partial Redundancies
   E. Morel, C. Renvoise
   Communications of the ACM, Vol. 22, Num. 2, Feb. 1979

   A Portable Machine-Independent Global Optimizer - Design and Measurements
   Frederick Chow
   Stanford Ph.D. thesis, Dec. 1983

   A Fast Algorithm for Code Movement Optimization
   D.M. Dhamdhere
   SIGPLAN Notices, Vol. 23, Num. 10, Oct. 1988

   A Solution to a Problem with Morel and Renvoise's
   Global Optimization by Suppression of Partial Redundancies
   K-H Drechsler, M.P. Stadel
   ACM TOPLAS, Vol. 10, Num. 4, Oct. 1988

   Practical Adaptation of the Global Optimization
   Algorithm of Morel and Renvoise
   D.M. Dhamdhere
   ACM TOPLAS, Vol. 13, Num. 2, Apr. 1991

   Efficiently Computing Static Single Assignment Form and the Control
   Dependence Graph
   R. Cytron, J. Ferrante, B.K. Rosen, M.N. Wegman, and F.K. Zadeck
   ACM TOPLAS, Vol. 13, Num. 4, Oct. 1991

   Lazy Code Motion
   J. Knoop, O. Ruthing, B. Steffen
   ACM SIGPLAN Notices Vol. 27, Num. 7, Jul. 1992, '92 Conference on PLDI

   What's In a Region?  Or Computing Control Dependence Regions in Near-Linear
   Time for Reducible Flow Control
   Thomas Ball
   ACM Letters on Programming Languages and Systems,
   Vol. 2, Num. 1-4, Mar-Dec 1993

   An Efficient Representation for Sparse Sets
   Preston Briggs, Linda Torczon
   ACM Letters on Programming Languages and Systems,
   Vol. 2, Num. 1-4, Mar-Dec 1993

   A Variation of Knoop, Ruthing, and Steffen's Lazy Code Motion
   K-H Drechsler, M.P. Stadel
   ACM SIGPLAN Notices, Vol. 28, Num. 5, May 1993

   Partial Dead Code Elimination
   J. Knoop, O. Ruthing, B. Steffen
   ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994

   Effective Partial Redundancy Elimination
   P. Briggs, K.D. Cooper
   ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994

   The Program Structure Tree: Computing Control Regions in Linear Time
   R. Johnson, D. Pearson, K. Pingali
   ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994

   Optimal Code Motion: Theory and Practice
   J. Knoop, O. Ruthing, B. Steffen
   ACM TOPLAS, Vol. 16, Num. 4, Jul. 1994

   The power of assignment motion
   J. Knoop, O. Ruthing, B. Steffen
   ACM SIGPLAN Notices Vol. 30, Num. 6, Jun. 1995, '95 Conference on PLDI

   Global code motion / global value numbering
   C. Click
   ACM SIGPLAN Notices Vol. 30, Num. 6, Jun. 1995, '95 Conference on PLDI

   Value Driven Redundancy Elimination
   L.T. Simpson
   Rice University Ph.D. thesis, Apr. 1996

   Value Numbering
   L.T. Simpson
   Massively Scalar Compiler Project, Rice University, Sep. 1996

   High Performance Compilers for Parallel Computing
   Michael Wolfe
   Addison-Wesley, 1996

   Advanced Compiler Design and Implementation
   Steven Muchnick
   Morgan Kaufmann, 1997

   Building an Optimizing Compiler
   Robert Morgan
   Digital Press, 1998

   People wishing to speed up the code here should read:
     Elimination Algorithms for Data Flow Analysis
     B.G. Ryder, M.C. Paull
     ACM Computing Surveys, Vol. 18, Num. 3, Sep. 1986

     How to Analyze Large Programs Efficiently and Informatively
     D.M. Dhamdhere, B.K. Rosen, F.K. Zadeck
     ACM SIGPLAN Notices Vol. 27, Num. 7, Jul. 1992, '92 Conference on PLDI

   People wishing to do something different can find various possibilities
   in the above papers and elsewhere.
*/
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "toplev.h"

#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "flags.h"
#include "real.h"
#include "insn-config.h"
#include "recog.h"
#include "basic-block.h"
#include "output.h"
#include "function.h"
#include "expr.h"
#include "except.h"
#include "ggc.h"
#include "params.h"
#include "cselib.h"
#include "intl.h"
#include "obstack.h"
#include "timevar.h"
/* Propagate flow information through back edges and thus enable PRE's
   moving loop invariant calculations out of loops.

   Originally this tended to create worse overall code, but several
   improvements during the development of PRE seem to have made following
   back edges generally a win.

   Note much of the loop invariant code motion done here would normally
   be done by loop.c, which has more heuristics for when to move invariants
   out of loops.  At some point we might need to move some of those
   heuristics into gcse.c.  */
/* We support GCSE via Partial Redundancy Elimination.  PRE optimizations
   are a superset of those done by GCSE.

   We perform the following steps:

   1) Compute basic block information.

   2) Compute table of places where registers are set.

   3) Perform copy/constant propagation.

   4) Perform global cse using lazy code motion if not optimizing
      for size, or code hoisting if we are.

   5) Perform another pass of copy/constant propagation.

   Two passes of copy/constant propagation are done because the first one
   enables more GCSE and the second one helps to clean up the copies that
   GCSE creates.  This is needed more for PRE than for Classic because Classic
   GCSE will try to use an existing register containing the common
   subexpression rather than create a new one.  This is harder to do for PRE
   because of the code motion (which Classic GCSE doesn't do).

   Expressions we are interested in GCSE-ing are of the form
   (set (pseudo-reg) (expression)).
   Function want_to_gcse_p says what these are.

   PRE handles moving invariant expressions out of loops (by treating them as
   partially redundant).

   Eventually it would be nice to replace cse.c/gcse.c with SSA (static single
   assignment) based GVN (global value numbering).  L. T. Simpson's paper
   (Rice University) on value numbering is a useful reference for this.

   **********************

   We used to support multiple passes but there are diminishing returns in
   doing so.  The first pass usually makes 90% of the changes that are doable.
   A second pass can make a few more changes made possible by the first pass.
   Experiments show any further passes don't make enough changes to justify
   the expense.

   A study of spec92 using an unlimited number of passes:
   [1 pass] = 1208 substitutions, [2] = 577, [3] = 202, [4] = 192, [5] = 83,
   [6] = 34, [7] = 17, [8] = 9, [9] = 4, [10] = 4, [11] = 2,
   [12] = 2, [13] = 1, [15] = 1, [16] = 2, [41] = 1

   It was found doing copy propagation between each pass enables further
   substitutions.

   PRE is quite expensive in complicated functions because the DFA can take
   a while to converge.  Hence we only perform one pass.  The parameter
   max-gcse-passes can be modified if one wants to experiment.

   **********************

   The steps for PRE are:

   1) Build the hash table of expressions we wish to GCSE (expr_hash_table).

   2) Perform the data flow analysis for PRE.

   3) Delete the redundant instructions.

   4) Insert the required copies [if any] that make the partially
      redundant instructions fully redundant.

   5) For other reaching expressions, insert an instruction to copy the value
      to a newly created pseudo that will reach the redundant instruction.

   The deletion is done first so that when we do insertions we
   know which pseudo reg to use.
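
   As an illustration of steps 3-5 (an invented example, not taken from
   the references above), consider a computation that reaches a block
   along only one of two paths:

	B1: x = a + b;		B2: (no computation)
	      \			  /
	       B3: y = a + b;

   The computation in B3 is partially redundant.  Step 3 deletes it,
   replacing it with a copy from a new pseudo `t'; step 4 inserts the
   computation on the path through B2; and step 5 copies the already
   computed result into `t' in B1.  Afterwards every path computes
   a + b exactly once:

	B1: x = a + b;		B2: t = a + b;
	    t = x;		  /
	      \			 /
	       B3: y = t;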

   Various papers have argued that PRE DFA is expensive (O(n^2)) and others
   argue it is not.  The number of iterations for the algorithm to converge
   is typically 2-4 so I don't view it as that expensive (relatively speaking).

   PRE GCSE depends heavily on the second CSE pass to clean up the copies
   we create.  To make an expression reach the place where it's redundant,
   the result of the expression is copied to a new register, and the redundant
   expression is deleted by replacing it with this new register.  Classic GCSE
   doesn't have this problem as much as it computes the reaching defs of
   each register in each block and thus can try to use an existing register.

   **********************

   A fair bit of simplicity is created by creating small functions for simple
   tasks, even when the function is only called in one place.  This may
   measurably slow things down [or may not] by creating more function call
   overhead than is necessary.  The source is laid out so that it's trivial
   to make the affected functions inline so that one can measure what speed
   up, if any, can be achieved, and maybe later when things settle things can
   be rearranged.

   Help stamp out big monolithic functions!  */
/* GCSE global vars.  */

/* -dG dump file.  */
static FILE *gcse_file;

/* Note whether or not we should run jump optimization after gcse.  We
   want to do this for two cases.

    * If we changed any jumps via cprop.

    * If we added any labels via edge splitting.  */
static int run_jump_opt_after_gcse;

/* Bitmaps are normally not included in debugging dumps.
   However it's useful to be able to print them from GDB.
   We could create special functions for this, but it's simpler to
   just allow passing stderr to the dump_foo fns.  Since stderr can
   be a macro, we store a copy here.  */
static FILE *debug_stderr;

/* An obstack for our working variables.  */
static struct obstack gcse_obstack;

struct reg_use {rtx reg_rtx; };
/* Hash table of expressions.  */

struct expr
{
  /* The expression (SET_SRC for expressions, PATTERN for assignments).  */
  rtx expr;
  /* Index in the available expression bitmaps.  */
  int bitmap_index;
  /* Next entry with the same hash.  */
  struct expr *next_same_hash;
  /* List of anticipatable occurrences in basic blocks in the function.
     An "anticipatable occurrence" is one that is the first occurrence in the
     basic block, the operands are not modified in the basic block prior
     to the occurrence and the output is not used between the start of
     the block and the occurrence.  */
  struct occr *antic_occr;
  /* List of available occurrences in basic blocks in the function.
     An "available occurrence" is one that is the last occurrence in the
     basic block and the operands are not modified by following statements in
     the basic block [including this insn].  */
  struct occr *avail_occr;
  /* Non-null if the computation is PRE redundant.
     The value is the newly created pseudo-reg to record a copy of the
     expression in all the places that reach the redundant copy.  */
  rtx reaching_reg;
};
/* Occurrence of an expression.
   There is one per basic block.  If a pattern appears more than once the
   last appearance is used [or first for anticipatable expressions].  */

struct occr
{
  /* Next occurrence of this expression.  */
  struct occr *next;
  /* The insn that computes the expression.  */
  rtx insn;
  /* Nonzero if this [anticipatable] occurrence has been deleted.  */
  char deleted_p;
  /* Nonzero if this [available] occurrence has been copied to
     reaching_reg.  */
  /* ??? This is mutually exclusive with deleted_p, so they could share
     the same byte.  */
  char copied_p;
};
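
/* An illustrative example (invented, not from the original comments):
   in a block containing

	insn 1:  (set (reg 100) (plus (reg 1) (reg 2)))
	insn 2:  (set (reg 1) (const_int 0))
	insn 3:  (set (reg 101) (plus (reg 1) (reg 2)))

   insn 1 is recorded as the anticipatable occurrence of
   (plus (reg 1) (reg 2)) [its operands are unchanged from the block
   start] and insn 3 as the available occurrence [its operands are
   unchanged through the block end].  Because reg 1 is set in between,
   no single insn here is both.  */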
/* Expression and copy propagation hash tables.
   Each hash table is an array of buckets.
   ??? It is known that if it were an array of entries, structure elements
   `next_same_hash' and `bitmap_index' wouldn't be necessary.  However, it is
   not clear whether in the final analysis a sufficient amount of memory would
   be saved as the size of the available expression bitmaps would be larger
   [one could build a mapping table without holes afterwards though].
   Someday I'll perform the computation and figure it out.  */

struct hash_table
{
  /* The table itself.
     This is an array of `expr_hash_table_size' elements.  */
  struct expr **table;

  /* Size of the hash table, in elements.  */
  unsigned int size;

  /* Number of hash table elements.  */
  unsigned int n_elems;

  /* Whether this is the expression hash table or the copy propagation
     (SET) hash table.  */
  int set_p;
};

/* Expression hash table.  */
static struct hash_table expr_hash_table;

/* Copy propagation hash table.  */
static struct hash_table set_hash_table;
/* Mapping of uids to cuids.
   Only real insns get cuids.  */
static int *uid_cuid;

/* Highest UID in UID_CUID.  */
static int max_uid;

/* Get the cuid of an insn.  */
#ifdef ENABLE_CHECKING
#define INSN_CUID(INSN) (INSN_UID (INSN) > max_uid ? (abort (), 0) : uid_cuid[INSN_UID (INSN)])
#else
#define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])
#endif

/* Number of cuids.  */
static int max_cuid;

/* Mapping of cuids to insns.  */
static rtx *cuid_insn;

/* Get insn from cuid.  */
#define CUID_INSN(CUID) (cuid_insn[CUID])
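
/* For example (illustrative): if the only real insns of a function have
   UIDs 5, 9 and 14, their CUIDs are 0, 1 and 2; notes and barriers in
   between are mapped to the CUID of the next real insn.  Thus for real
   insns A and B in the same function,
   INSN_CUID (A) < INSN_CUID (B) tests whether A comes before B.  */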
/* Maximum register number in function prior to doing gcse + 1.
   Registers created during this pass have regno >= max_gcse_regno.
   This is named with "gcse" so as not to collide with a global of the
   same name.  */
static unsigned int max_gcse_regno;
/* Table of registers that are modified.

   For each register, each element is a list of places where the pseudo-reg
   is set.

   For simplicity, GCSE is done on sets of pseudo-regs only.  PRE GCSE only
   requires knowledge of which blocks kill which regs [and thus could use
   a bitmap instead of the lists `reg_set_table' uses].

   `reg_set_table' could be turned into an array of bitmaps (num-bbs x
   num-regs) [however perhaps it may be useful to keep the data as is].  One
   advantage of recording things this way is that `reg_set_table' is fairly
   sparse with respect to pseudo regs but for hard regs could be fairly dense
   [relatively speaking].  And recording sets of pseudo-regs in lists speeds
   up functions like compute_transp since in the case of pseudo-regs we only
   need to iterate over the number of times a pseudo-reg is set, not over the
   number of basic blocks [clearly there is a bit of a slow down in the cases
   where a pseudo is set more than once in a block, however it is believed
   that the net effect is to speed things up].  This isn't done for hard-regs
   because recording call-clobbered hard-regs in `reg_set_table' at each
   function call can consume a fair bit of memory, and iterating over
   hard-regs stored this way in compute_transp will be more expensive.  */

typedef struct reg_set
{
  /* The next setting of this register.  */
  struct reg_set *next;
  /* The insn where it was set.  */
  rtx insn;
} reg_set;

static reg_set **reg_set_table;

/* Size of `reg_set_table'.
   The table starts out at max_gcse_regno + slop, and is enlarged as
   necessary.  */
static int reg_set_table_size;

/* Amount to grow `reg_set_table' by when it's full.  */
#define REG_SET_TABLE_SLOP 100
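
/* A sketch of how the table is typically consumed (for illustration
   only; BLOCKS_WITH_SET is a hypothetical sbitmap, not something
   defined in this file):

	reg_set *r;
	for (r = reg_set_table[regno]; r != NULL; r = r->next)
	  SET_BIT (blocks_with_set, BLOCK_NUM (r->insn));

   i.e. a query costs time proportional to the number of sets of the
   pseudo-reg, not to the number of basic blocks.  */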
/* This is a list of expressions which are MEMs and will be used by load
   or store motion.
   Load motion tracks MEMs which aren't killed by anything except
   themselves, i.e., loads and stores to a single location.
   We can then allow movement of these MEM refs with a little special
   allowance (all stores copy the same value to the reaching reg used
   for the loads).  This means all values used to store into memory must
   have no side effects so we can re-issue the setter value.
   Store Motion uses this structure as an expression table to track stores
   which look interesting, and might be moveable towards the exit block.  */

struct ls_expr
{
  struct expr * expr;           /* Gcse expression reference for LM.  */
  rtx pattern;                  /* Pattern of this mem.  */
  rtx pattern_regs;             /* List of registers mentioned by the mem.  */
  rtx loads;                    /* INSN list of loads seen.  */
  rtx stores;                   /* INSN list of stores seen.  */
  struct ls_expr * next;        /* Next in the list.  */
  int invalid;                  /* Invalid for some reason.  */
  int index;                    /* If it maps to a bitmap index.  */
  unsigned int hash_index;      /* Index when in a hash table.  */
  rtx reaching_reg;             /* Register to use when re-writing.  */
};
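
/* For example (an invented candidate, not from the original comment):
   a loop that loads and stores a single location,

	(set (reg 100) (mem (symbol_ref "x")))
	...
	(set (mem (symbol_ref "x")) (reg 101))

   gives rise to one ls_expr whose pattern is (mem (symbol_ref "x")),
   whose loads and stores lists record the two insns, and whose invalid
   flag is set if some other reference might alias the location.  */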
/* Array of implicit set patterns indexed by basic block index.  */
static rtx *implicit_sets;

/* Head of the list of load/store memory refs.  */
static struct ls_expr * pre_ldst_mems = NULL;

/* Bitmap containing one bit for each register in the program.
   Used when performing GCSE to track which registers have been set since
   the start of the basic block.  */
static regset reg_set_bitmap;

/* For each block, a bitmap of registers set in the block.
   This is used by compute_transp.
   It is computed during hash table computation and not by compute_sets
   as it includes registers added since the last pass (or between cprop and
   gcse) and it's currently not easy to realloc sbitmap vectors.  */
static sbitmap *reg_set_in_block;

/* Array, indexed by basic block number for a list of insns which modify
   memory within that block.  */
static rtx * modify_mem_list;
bitmap modify_mem_list_set;

/* This array parallels modify_mem_list, but is kept canonicalized.  */
static rtx * canon_modify_mem_list;
bitmap canon_modify_mem_list_set;

/* Various variables for statistics gathering.  */

/* Memory used in a pass.
   This isn't intended to be absolutely precise.  Its intent is only
   to keep an eye on memory usage.  */
static int bytes_used;

/* GCSE substitutions made.  */
static int gcse_subst_count;
/* Number of copy instructions created.  */
static int gcse_create_count;
/* Number of local constants propagated.  */
static int local_const_prop_count;
/* Number of local copies propagated.  */
static int local_copy_prop_count;
/* Number of global constants propagated.  */
static int global_const_prop_count;
/* Number of global copies propagated.  */
static int global_copy_prop_count;

/* For available exprs.  */
static sbitmap *ae_kill, *ae_gen;
/* Objects of this type are passed around by the null-pointer check
   removal routines.  */
struct null_pointer_info
{
  /* The basic block being processed.  */
  basic_block current_block;
  /* The first register to be handled in this pass.  */
  unsigned int min_reg;
  /* One greater than the last register to be handled in this pass.  */
  unsigned int max_reg;
  sbitmap *nonnull_local;
  sbitmap *nonnull_killed;
};
static void compute_can_copy (void);
static void *gmalloc (size_t) ATTRIBUTE_MALLOC;
static void *gcalloc (size_t, size_t) ATTRIBUTE_MALLOC;
static void *grealloc (void *, size_t);
static void *gcse_alloc (unsigned long);
static void alloc_gcse_mem (rtx);
static void free_gcse_mem (void);
static void alloc_reg_set_mem (int);
static void free_reg_set_mem (void);
static void record_one_set (int, rtx);
static void replace_one_set (int, rtx, rtx);
static void record_set_info (rtx, rtx, void *);
static void compute_sets (rtx);
static void hash_scan_insn (rtx, struct hash_table *, int);
static void hash_scan_set (rtx, rtx, struct hash_table *);
static void hash_scan_clobber (rtx, rtx, struct hash_table *);
static void hash_scan_call (rtx, rtx, struct hash_table *);
static int want_to_gcse_p (rtx);
static bool can_assign_to_reg_p (rtx);
static bool gcse_constant_p (rtx);
static int oprs_unchanged_p (rtx, rtx, int);
static int oprs_anticipatable_p (rtx, rtx);
static int oprs_available_p (rtx, rtx);
static void insert_expr_in_table (rtx, enum machine_mode, rtx, int, int,
				  struct hash_table *);
static void insert_set_in_table (rtx, rtx, struct hash_table *);
static unsigned int hash_expr (rtx, enum machine_mode, int *, int);
static unsigned int hash_expr_1 (rtx, enum machine_mode, int *);
static unsigned int hash_string_1 (const char *);
static unsigned int hash_set (int, int);
static int expr_equiv_p (rtx, rtx);
static void record_last_reg_set_info (rtx, int);
static void record_last_mem_set_info (rtx);
static void record_last_set_info (rtx, rtx, void *);
static void compute_hash_table (struct hash_table *);
static void alloc_hash_table (int, struct hash_table *, int);
static void free_hash_table (struct hash_table *);
static void compute_hash_table_work (struct hash_table *);
static void dump_hash_table (FILE *, const char *, struct hash_table *);
static struct expr *lookup_expr (rtx, struct hash_table *);
static struct expr *lookup_set (unsigned int, struct hash_table *);
static struct expr *next_set (unsigned int, struct expr *);
static void reset_opr_set_tables (void);
static int oprs_not_set_p (rtx, rtx);
static void mark_call (rtx);
static void mark_set (rtx, rtx);
static void mark_clobber (rtx, rtx);
static void mark_oprs_set (rtx);
static void alloc_cprop_mem (int, int);
static void free_cprop_mem (void);
static void compute_transp (rtx, int, sbitmap *, int);
static void compute_transpout (void);
static void compute_local_properties (sbitmap *, sbitmap *, sbitmap *,
				      struct hash_table *);
static void compute_cprop_data (void);
static void find_used_regs (rtx *, void *);
static int try_replace_reg (rtx, rtx, rtx);
static struct expr *find_avail_set (int, rtx);
static int cprop_jump (basic_block, rtx, rtx, rtx, rtx);
static void mems_conflict_for_gcse_p (rtx, rtx, void *);
static int load_killed_in_block_p (basic_block, int, rtx, int);
static void canon_list_insert (rtx, rtx, void *);
static int cprop_insn (rtx, int);
static int cprop (int);
static void find_implicit_sets (void);
static int one_cprop_pass (int, int, int);
static bool constprop_register (rtx, rtx, rtx, int);
static struct expr *find_bypass_set (int, int);
static bool reg_killed_on_edge (rtx, edge);
static int bypass_block (basic_block, rtx, rtx);
static int bypass_conditional_jumps (void);
static void alloc_pre_mem (int, int);
static void free_pre_mem (void);
static void compute_pre_data (void);
static int pre_expr_reaches_here_p (basic_block, struct expr *,
				    basic_block);
static void insert_insn_end_bb (struct expr *, basic_block, int);
static void pre_insert_copy_insn (struct expr *, rtx);
static void pre_insert_copies (void);
static int pre_delete (void);
static int pre_gcse (void);
static int one_pre_gcse_pass (int);
static void add_label_notes (rtx, rtx);
static void alloc_code_hoist_mem (int, int);
static void free_code_hoist_mem (void);
static void compute_code_hoist_vbeinout (void);
static void compute_code_hoist_data (void);
static int hoist_expr_reaches_here_p (basic_block, int, basic_block, char *);
static void hoist_code (void);
static int one_code_hoisting_pass (void);
static rtx process_insert_insn (struct expr *);
static int pre_edge_insert (struct edge_list *, struct expr **);
static int pre_expr_reaches_here_p_work (basic_block, struct expr *,
					 basic_block, char *);
static struct ls_expr * ldst_entry (rtx);
static void free_ldst_entry (struct ls_expr *);
static void free_ldst_mems (void);
static void print_ldst_list (FILE *);
static struct ls_expr * find_rtx_in_ldst (rtx);
static int enumerate_ldsts (void);
static inline struct ls_expr * first_ls_expr (void);
static inline struct ls_expr * next_ls_expr (struct ls_expr *);
static int simple_mem (rtx);
static void invalidate_any_buried_refs (rtx);
static void compute_ld_motion_mems (void);
static void trim_ld_motion_mems (void);
static void update_ld_motion_stores (struct expr *);
static void reg_set_info (rtx, rtx, void *);
static void reg_clear_last_set (rtx, rtx, void *);
static bool store_ops_ok (rtx, int *);
static rtx extract_mentioned_regs (rtx);
static rtx extract_mentioned_regs_helper (rtx, rtx);
static void find_moveable_store (rtx, int *, int *);
static int compute_store_table (void);
static bool load_kills_store (rtx, rtx, int);
static bool find_loads (rtx, rtx, int);
static bool store_killed_in_insn (rtx, rtx, rtx, int);
static bool store_killed_after (rtx, rtx, rtx, basic_block, int *, rtx *);
static bool store_killed_before (rtx, rtx, rtx, basic_block, int *);
static void build_store_vectors (void);
static void insert_insn_start_bb (rtx, basic_block);
static int insert_store (struct ls_expr *, edge);
static void remove_reachable_equiv_notes (basic_block, struct ls_expr *);
static void replace_store_insn (rtx, rtx, basic_block, struct ls_expr *);
static void delete_store (struct ls_expr *, basic_block);
static void free_store_memory (void);
static void store_motion (void);
static void free_insn_expr_list_list (rtx *);
static void clear_modify_mem_tables (void);
static void free_modify_mem_tables (void);
static rtx gcse_emit_move_after (rtx, rtx, rtx);
static void local_cprop_find_used_regs (rtx *, void *);
static bool do_local_cprop (rtx, rtx, int, rtx*);
static bool adjust_libcall_notes (rtx, rtx, rtx, rtx*);
static void local_cprop_pass (int);
static bool is_too_expensive (const char *);
/* Entry point for global common subexpression elimination.
   F is the first instruction in the function.  */

int
gcse_main (rtx f, FILE *file)
{
  int changed, pass;
  /* Bytes used at start of pass.  */
  int initial_bytes_used;
  /* Maximum number of bytes used by a pass.  */
  int max_pass_bytes;
  /* Point to release obstack data from for each pass.  */
  char *gcse_obstack_bottom;

  /* We do not construct an accurate cfg in functions which call
     setjmp, so just punt to be safe.  */
  if (current_function_calls_setjmp)
    return 0;

  /* Assume that we do not need to run jump optimizations after gcse.  */
  run_jump_opt_after_gcse = 0;

  /* For calling dump_foo fns from gdb.  */
  debug_stderr = stderr;
  gcse_file = file;

  /* Identify the basic block information for this function, including
     successors and predecessors.  */
  max_gcse_regno = max_reg_num ();

  if (file)
    dump_flow_info (file);

  /* Return if there's nothing to do, or it is too expensive.  */
  if (n_basic_blocks <= 1 || is_too_expensive (_("GCSE disabled")))
    return 0;

  gcc_obstack_init (&gcse_obstack);
  bytes_used = 0;

  /* We need alias.  */
  init_alias_analysis ();
  /* Record where pseudo-registers are set.  This data is kept accurate
     during each pass.  ??? We could also record hard-reg information here
     [since it's unchanging], however it is currently done during hash table
     computation.

     It may be tempting to compute MEM set information here too, but MEM sets
     will be subject to code motion one day and thus we need to compute
     information about memory sets when we build the hash tables.  */

  alloc_reg_set_mem (max_gcse_regno);
  compute_sets (f);

  pass = 0;
  initial_bytes_used = bytes_used;
  max_pass_bytes = 0;
  gcse_obstack_bottom = gcse_alloc (1);
  changed = 1;
  while (changed && pass < MAX_GCSE_PASSES)
    {
      changed = 0;
      if (file)
	fprintf (file, "GCSE pass %d\n\n", pass + 1);

      /* Initialize bytes_used to the space for the pred/succ lists,
	 and the reg_set_table data.  */
      bytes_used = initial_bytes_used;

      /* Each pass may create new registers, so recalculate each time.  */
      max_gcse_regno = max_reg_num ();

      alloc_gcse_mem (f);

      /* Don't allow constant propagation to modify jumps
	 during this pass.  */
      timevar_push (TV_CPROP1);
      changed = one_cprop_pass (pass + 1, 0, 0);
      timevar_pop (TV_CPROP1);

      if (optimize_size)
	/* Do nothing.  */ ;
      else
	{
	  timevar_push (TV_PRE);
	  changed |= one_pre_gcse_pass (pass + 1);
	  /* We may have just created new basic blocks.  Release and
	     recompute various things which are sized on the number of
	     basic blocks.  */
	  if (changed)
	    {
	      free_modify_mem_tables ();
	      modify_mem_list = gcalloc (last_basic_block, sizeof (rtx));
	      canon_modify_mem_list = gcalloc (last_basic_block, sizeof (rtx));
	    }
	  free_reg_set_mem ();
	  alloc_reg_set_mem (max_reg_num ());
	  compute_sets (f);
	  run_jump_opt_after_gcse = 1;
	  timevar_pop (TV_PRE);
	}

      if (max_pass_bytes < bytes_used)
	max_pass_bytes = bytes_used;

      /* Free up memory, then reallocate for code hoisting.  We can
	 not re-use the existing allocated memory because the tables
	 will not have info for the insns or registers created by
	 partial redundancy elimination.  */
      free_gcse_mem ();

      /* It does not make sense to run code hoisting unless we are optimizing
	 for code size -- it rarely makes programs faster, and can make
	 them bigger if we did partial redundancy elimination (when optimizing
	 for space, we don't run the partial redundancy algorithms).  */
      if (optimize_size)
	{
	  timevar_push (TV_HOIST);
	  max_gcse_regno = max_reg_num ();
	  alloc_gcse_mem (f);
	  changed |= one_code_hoisting_pass ();
	  free_gcse_mem ();

	  if (max_pass_bytes < bytes_used)
	    max_pass_bytes = bytes_used;
	  timevar_pop (TV_HOIST);
	}

      if (file)
	{
	  fprintf (file, "\n");
	  fflush (file);
	}

      obstack_free (&gcse_obstack, gcse_obstack_bottom);
      pass++;
    }

  /* Do one last pass of copy propagation, including cprop into
     conditional jumps.  */

  max_gcse_regno = max_reg_num ();
  alloc_gcse_mem (f);
  /* This time, go ahead and allow cprop to alter jumps.  */
  timevar_push (TV_CPROP2);
  one_cprop_pass (pass + 1, 1, 0);
  timevar_pop (TV_CPROP2);
  free_gcse_mem ();

  if (file)
    {
      fprintf (file, "GCSE of %s: %d basic blocks, ",
	       current_function_name (), n_basic_blocks);
      fprintf (file, "%d pass%s, %d bytes\n\n",
	       pass, pass > 1 ? "es" : "", max_pass_bytes);
    }

  obstack_free (&gcse_obstack, NULL);
  free_reg_set_mem ();

  /* We are finished with alias.  */
  end_alias_analysis ();
  allocate_reg_info (max_reg_num (), FALSE, FALSE);

  if (!optimize_size && flag_gcse_sm)
    {
      timevar_push (TV_LSM);
      store_motion ();
      timevar_pop (TV_LSM);
    }

  return run_jump_opt_after_gcse;
}
/* Misc. utilities.  */

/* Nonzero for each mode that supports (set (reg) (reg)).
   This is trivially true for integer and floating point values.
   It may or may not be true for condition codes.  */
static char can_copy[(int) NUM_MACHINE_MODES];

/* Compute which modes support reg/reg copy operations.  */

static void
compute_can_copy (void)
{
  int i;
#ifndef AVOID_CCMODE_COPIES
  rtx reg, insn;
#endif
  memset (can_copy, 0, NUM_MACHINE_MODES);

  start_sequence ();
  for (i = 0; i < NUM_MACHINE_MODES; i++)
    if (GET_MODE_CLASS (i) == MODE_CC)
      {
#ifdef AVOID_CCMODE_COPIES
	can_copy[i] = 0;
#else
	reg = gen_rtx_REG ((enum machine_mode) i, LAST_VIRTUAL_REGISTER + 1);
	insn = emit_insn (gen_rtx_SET (VOIDmode, reg, reg));
	if (recog (PATTERN (insn), insn, NULL) >= 0)
	  can_copy[i] = 1;
#endif
      }
    else
      can_copy[i] = 1;

  end_sequence ();
}

/* Returns whether the mode supports reg/reg copy operations.  */

bool
can_copy_p (enum machine_mode mode)
{
  static bool can_copy_init_p = false;

  if (! can_copy_init_p)
    {
      compute_can_copy ();
      can_copy_init_p = true;
    }

  return can_copy[mode] != 0;
}
/* Cover function to xmalloc to record bytes allocated.  */

static void *
gmalloc (size_t size)
{
  bytes_used += size;
  return xmalloc (size);
}

/* Cover function to xcalloc to record bytes allocated.  */

static void *
gcalloc (size_t nelem, size_t elsize)
{
  bytes_used += nelem * elsize;
  return xcalloc (nelem, elsize);
}

/* Cover function to xrealloc.
   We don't record the additional size since we don't know it.
   It won't affect memory usage stats much anyway.  */

static void *
grealloc (void *ptr, size_t size)
{
  return xrealloc (ptr, size);
}

/* Cover function to obstack_alloc.  */

static void *
gcse_alloc (unsigned long size)
{
  bytes_used += size;
  return obstack_alloc (&gcse_obstack, size);
}
/* Allocate memory for the cuid mapping array,
   and reg/memory set tracking tables.

   This is called at the start of each pass.  */

static void
alloc_gcse_mem (rtx f)
{
  int i;
  rtx insn;

  /* Find the largest UID and create a mapping from UIDs to CUIDs.
     CUIDs are like UIDs except they increase monotonically, have no gaps,
     and only apply to real insns.  */

  max_uid = get_max_uid ();
  uid_cuid = gcalloc (max_uid + 1, sizeof (int));
  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    {
      if (INSN_P (insn))
	uid_cuid[INSN_UID (insn)] = i++;
      else
	uid_cuid[INSN_UID (insn)] = i;
    }

  /* Create a table mapping cuids to insns.  */

  max_cuid = i;
  cuid_insn = gcalloc (max_cuid + 1, sizeof (rtx));
  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      CUID_INSN (i++) = insn;

  /* Allocate vars to track sets of regs.  */
  reg_set_bitmap = BITMAP_XMALLOC ();

  /* Allocate vars to track sets of regs, memory per block.  */
  reg_set_in_block = sbitmap_vector_alloc (last_basic_block, max_gcse_regno);
  /* Allocate array to keep a list of insns which modify memory in each
     basic block.  */
  modify_mem_list = gcalloc (last_basic_block, sizeof (rtx));
  canon_modify_mem_list = gcalloc (last_basic_block, sizeof (rtx));
  modify_mem_list_set = BITMAP_XMALLOC ();
  canon_modify_mem_list_set = BITMAP_XMALLOC ();
}

/* Free memory allocated by alloc_gcse_mem.  */

static void
free_gcse_mem (void)
{
  free (uid_cuid);
  free (cuid_insn);

  BITMAP_XFREE (reg_set_bitmap);

  sbitmap_vector_free (reg_set_in_block);
  free_modify_mem_tables ();
  BITMAP_XFREE (modify_mem_list_set);
  BITMAP_XFREE (canon_modify_mem_list_set);
}
/* Compute the local properties of each recorded expression.

   Local properties are those that are defined by the block, irrespective of
   other blocks.

   An expression is transparent in a block if its operands are not modified
   in the block.

   An expression is computed (locally available) in a block if it is computed
   at least once and the expression would contain the same value if the
   computation was moved to the end of the block.

   An expression is locally anticipatable in a block if it is computed at
   least once and the expression would contain the same value if the
   computation was moved to the beginning of the block.

   We call this routine for cprop, pre and code hoisting.  They all compute
   basically the same information and thus can easily share this code.

   TRANSP, COMP, and ANTLOC are destination sbitmaps for recording local
   properties.  If NULL, then it is not necessary to compute or record that
   particular property.

   TABLE controls which hash table to look at.  If it is the SET hash table,
   TRANSP is additionally computed as ~TRANSP, since this is really cprop's
   ABSALTERED.  */
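
/* An illustrative example (invented): if a block contains only

	insn 1:  (set (reg 100) (plus (reg 1) (reg 2)))

   then for the expression (plus (reg 1) (reg 2)) this block gets
   TRANSP = 1 (neither reg 1 nor reg 2 is set here), ANTLOC = 1 (the
   operands are unchanged from the block start up to insn 1), and
   COMP = 1 (they are unchanged from insn 1 to the block end).  Adding a
   set of reg 1 after insn 1 would clear TRANSP and COMP but leave
   ANTLOC set.  */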
static void
compute_local_properties (sbitmap *transp, sbitmap *comp, sbitmap *antloc,
			  struct hash_table *table)
{
  unsigned int i;

  /* Initialize any bitmaps that were passed in.  */
  if (transp)
    {
      if (table->set_p)
	sbitmap_vector_zero (transp, last_basic_block);
      else
	sbitmap_vector_ones (transp, last_basic_block);
    }

  if (comp)
    sbitmap_vector_zero (comp, last_basic_block);
  if (antloc)
    sbitmap_vector_zero (antloc, last_basic_block);

  for (i = 0; i < table->size; i++)
    {
      struct expr *expr;

      for (expr = table->table[i]; expr != NULL; expr = expr->next_same_hash)
	{
	  int indx = expr->bitmap_index;
	  struct occr *occr;

	  /* The expression is transparent in this block if it is not killed.
	     We start by assuming all are transparent [none are killed], and
	     then reset the bits for those that are.  */
	  if (transp)
	    compute_transp (expr->expr, indx, transp, table->set_p);

	  /* The occurrences recorded in antic_occr are exactly those that
	     we want to set to nonzero in ANTLOC.  */
	  if (antloc)
	    for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
	      {
		SET_BIT (antloc[BLOCK_NUM (occr->insn)], indx);

		/* While we're scanning the table, this is a good place to
		   initialize this.  */
		occr->deleted_p = 0;
	      }

	  /* The occurrences recorded in avail_occr are exactly those that
	     we want to set to nonzero in COMP.  */
	  if (comp)
	    for (occr = expr->avail_occr; occr != NULL; occr = occr->next)
	      {
		SET_BIT (comp[BLOCK_NUM (occr->insn)], indx);

		/* While we're scanning the table, this is a good place to
		   initialize this.  */
		occr->copied_p = 0;
	      }

	  /* While we're scanning the table, this is a good place to
	     initialize this.  */
	  expr->reaching_reg = 0;
	}
    }
}
/* Register set information.

   `reg_set_table' records where each register is set or otherwise
   modified.  */

static struct obstack reg_set_obstack;

static void
alloc_reg_set_mem (int n_regs)
{
  reg_set_table_size = n_regs + REG_SET_TABLE_SLOP;
  reg_set_table = gcalloc (reg_set_table_size, sizeof (struct reg_set *));

  gcc_obstack_init (&reg_set_obstack);
}

static void
free_reg_set_mem (void)
{
  free (reg_set_table);
  obstack_free (&reg_set_obstack, NULL);
}

/* An OLD_INSN that used to set REGNO was replaced by NEW_INSN.
   Update the corresponding `reg_set_table' entry accordingly.
   We assume that NEW_INSN is not already recorded in reg_set_table[regno].  */

static void
replace_one_set (int regno, rtx old_insn, rtx new_insn)
{
  struct reg_set *reg_info;
  if (regno >= reg_set_table_size)
    return;
  for (reg_info = reg_set_table[regno]; reg_info; reg_info = reg_info->next)
    if (reg_info->insn == old_insn)
      {
	reg_info->insn = new_insn;
	break;
      }
}

/* Record REGNO in the reg_set table.  */

static void
record_one_set (int regno, rtx insn)
{
  /* Allocate a new reg_set element and link it onto the list.  */
  struct reg_set *new_reg_info;

  /* If the table isn't big enough, enlarge it.  */
  if (regno >= reg_set_table_size)
    {
      int new_size = regno + REG_SET_TABLE_SLOP;

      reg_set_table = grealloc (reg_set_table,
				new_size * sizeof (struct reg_set *));
      memset (reg_set_table + reg_set_table_size, 0,
	      (new_size - reg_set_table_size) * sizeof (struct reg_set *));
      reg_set_table_size = new_size;
    }

  new_reg_info = obstack_alloc (&reg_set_obstack, sizeof (struct reg_set));
  bytes_used += sizeof (struct reg_set);
  new_reg_info->insn = insn;
  new_reg_info->next = reg_set_table[regno];
  reg_set_table[regno] = new_reg_info;
}

/* Called from compute_sets via note_stores to handle one SET or CLOBBER in
   an insn.  The DATA is really the instruction in which the SET is
   occurring.  */

static void
record_set_info (rtx dest, rtx setter ATTRIBUTE_UNUSED, void *data)
{
  rtx record_set_insn = (rtx) data;

  if (REG_P (dest) && REGNO (dest) >= FIRST_PSEUDO_REGISTER)
    record_one_set (REGNO (dest), record_set_insn);
}

/* Scan the function and record each set of each pseudo-register.

   This is called once, at the start of the gcse pass.  See the comments for
   `reg_set_table' for further documentation.  */

static void
compute_sets (rtx f)
{
  rtx insn;

  for (insn = f; insn != 0; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      note_stores (PATTERN (insn), record_set_info, insn);
}
/* Hash table support.  */

struct reg_avail_info
{
  basic_block last_bb;
  int first_set;
  int last_set;
};

static struct reg_avail_info *reg_avail_info;
static basic_block current_bb;

/* See whether X, the source of a set, is something we want to consider for
   GCSE.  */

static int
want_to_gcse_p (rtx x)
{
  switch (GET_CODE (x))
    {
    case REG:
    case SUBREG:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CALL:
      return 0;

    default:
      return can_assign_to_reg_p (x);
    }
}

/* Used internally by can_assign_to_reg_p.  */

static GTY(()) rtx test_insn;

/* Return true if we can assign X to a pseudo register.  */

static bool
can_assign_to_reg_p (rtx x)
{
  int num_clobbers = 0;
  int icode;

  /* If this is a valid operand, we are OK.  If it's VOIDmode, we aren't.  */
  if (general_operand (x, GET_MODE (x)))
    return 1;
  else if (GET_MODE (x) == VOIDmode)
    return 0;

  /* Otherwise, check if we can make a valid insn from it.  First initialize
     our test insn if we haven't already.  */
  if (test_insn == 0)
    {
      test_insn
	= make_insn_raw (gen_rtx_SET (VOIDmode,
				      gen_rtx_REG (word_mode,
						   FIRST_PSEUDO_REGISTER * 2),
				      const0_rtx));
      NEXT_INSN (test_insn) = PREV_INSN (test_insn) = 0;
    }

  /* Now make an insn like the one we would make when GCSE'ing and see if
     valid.  */
  PUT_MODE (SET_DEST (PATTERN (test_insn)), GET_MODE (x));
  SET_SRC (PATTERN (test_insn)) = x;
  return ((icode = recog (PATTERN (test_insn), test_insn, &num_clobbers)) >= 0
	  && (num_clobbers == 0 || ! added_clobbers_hard_reg_p (icode)));
}
/* Return nonzero if the operands of expression X are unchanged from the
   start of INSN's basic block up to but not including INSN (if AVAIL_P == 0),
   or from INSN to the end of INSN's basic block (if AVAIL_P != 0).  */

static int
oprs_unchanged_p (rtx x, rtx insn, int avail_p)
{
  int i, j;
  enum rtx_code code;
  const char *fmt;

  if (x == 0)
    return 1;

  code = GET_CODE (x);
  switch (code)
    {
    case REG:
      {
	struct reg_avail_info *info = &reg_avail_info[REGNO (x)];

	if (info->last_bb != current_bb)
	  return 1;
	if (avail_p)
	  return info->last_set < INSN_CUID (insn);
	else
	  return info->first_set >= INSN_CUID (insn);
      }

    case MEM:
      if (load_killed_in_block_p (current_bb, INSN_CUID (insn),
				  x, avail_p))
	return 0;
      else
	return oprs_unchanged_p (XEXP (x, 0), insn, avail_p);

    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
    case PRE_MODIFY:
    case POST_MODIFY:
      return 0;

    case PC:
    case CC0: /*FIXME*/
    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return 1;

    default:
      break;
    }

  for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  /* If we are about to do the last recursive call needed at this
	     level, change it into iteration.  This function is called enough
	     to be worth it.  */
	  if (i == 0)
	    return oprs_unchanged_p (XEXP (x, i), insn, avail_p);

	  else if (! oprs_unchanged_p (XEXP (x, i), insn, avail_p))
	    return 0;
	}
      else if (fmt[i] == 'E')
	for (j = 0; j < XVECLEN (x, i); j++)
	  if (! oprs_unchanged_p (XVECEXP (x, i, j), insn, avail_p))
	    return 0;
    }

  return 1;
}
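
/* For example (illustrative): given a block containing

	insn A (CUID 10):  (set (reg 1) ...)
	insn B (CUID 11):  (set (reg 100) (plus (reg 1) (reg 2)))

   reg 1 has first_set == last_set == 10 in this block, so for INSN == B
   and X == (reg 1), oprs_unchanged_p returns 0 when AVAIL_P == 0
   (10 >= 11 fails) but 1 when AVAIL_P != 0 (10 < 11 holds): the PLUS is
   available at the end of the block, yet not anticipatable at its
   start.  */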
/* Used for communication between mems_conflict_for_gcse_p and
   load_killed_in_block_p.  Nonzero if mems_conflict_for_gcse_p finds a
   conflict between two memory references.  */
static int gcse_mems_conflict_p;

/* Used for communication between mems_conflict_for_gcse_p and
   load_killed_in_block_p.  The memory reference for a load instruction;
   mems_conflict_for_gcse_p will see if a memory store conflicts with
   this memory load.  */
static rtx gcse_mem_operand;

/* DEST is the output of an instruction.  If it is a memory reference, and
   possibly conflicts with the load found in gcse_mem_operand, then set
   gcse_mems_conflict_p to a nonzero value.  */

static void
mems_conflict_for_gcse_p (rtx dest, rtx setter ATTRIBUTE_UNUSED,
			  void *data ATTRIBUTE_UNUSED)
{
  while (GET_CODE (dest) == SUBREG
	 || GET_CODE (dest) == ZERO_EXTRACT
	 || GET_CODE (dest) == SIGN_EXTRACT
	 || GET_CODE (dest) == STRICT_LOW_PART)
    dest = XEXP (dest, 0);

  /* If DEST is not a MEM, then it will not conflict with the load.  Note
     that function calls are assumed to clobber memory, but are handled
     elsewhere.  */
  if (! MEM_P (dest))
    return;

  /* If we are setting a MEM in our list of specially recognized MEMs,
     don't mark as killed this time.  */

  if (expr_equiv_p (dest, gcse_mem_operand) && pre_ldst_mems != NULL)
    {
      if (!find_rtx_in_ldst (dest))
	gcse_mems_conflict_p = 1;
      return;
    }

  if (true_dependence (dest, GET_MODE (dest), gcse_mem_operand,
		       rtx_addr_varies_p))
    gcse_mems_conflict_p = 1;
}

/* Return nonzero if the expression in X (a memory reference) is killed
   in block BB before or after the insn with the CUID in UID_LIMIT.
   AVAIL_P is nonzero for kills after UID_LIMIT, and zero for kills
   before UID_LIMIT.

   To check the entire block, set UID_LIMIT to max_uid + 1 and
   AVAIL_P to 0.  */
static int
load_killed_in_block_p (basic_block bb, int uid_limit, rtx x, int avail_p)
{
  rtx list_entry = modify_mem_list[bb->index];
  while (list_entry)
    {
      rtx setter;
      /* Ignore entries in the list that do not apply.  */
      if ((avail_p
	   && INSN_CUID (XEXP (list_entry, 0)) < uid_limit)
	  || (! avail_p
	      && INSN_CUID (XEXP (list_entry, 0)) > uid_limit))
	{
	  list_entry = XEXP (list_entry, 1);
	  continue;
	}

      setter = XEXP (list_entry, 0);

      /* If SETTER is a call everything is clobbered.  Note that calls
	 to pure functions are never put on the list, so we need not
	 worry about them.  */
      if (CALL_P (setter))
	return 1;

      /* SETTER must be an INSN of some kind that sets memory.  Call
	 note_stores to examine each hunk of memory that is modified.

	 The note_stores interface is pretty limited, so we have to
	 communicate via global variables.  Yuk.  */
      gcse_mem_operand = x;
      gcse_mems_conflict_p = 0;
      note_stores (PATTERN (setter), mems_conflict_for_gcse_p, NULL);
      if (gcse_mems_conflict_p)
	return 1;
      list_entry = XEXP (list_entry, 1);
    }
  return 0;
}

/* Return nonzero if the operands of expression X are unchanged from
   the start of INSN's basic block up to but not including INSN.  */

static int
oprs_anticipatable_p (rtx x, rtx insn)
{
  return oprs_unchanged_p (x, insn, 0);
}

/* Return nonzero if the operands of expression X are unchanged from
   INSN to the end of INSN's basic block.  */

static int
oprs_available_p (rtx x, rtx insn)
{
  return oprs_unchanged_p (x, insn, 1);
}
/* Hash expression X.

   MODE is only used if X is a CONST_INT.  DO_NOT_RECORD_P is a boolean
   indicating if a volatile operand is found or if the expression contains
   something we don't want to insert in the table.  HASH_TABLE_SIZE is
   the current size of the hash table to be probed.

   ??? One might want to merge this with canon_hash.  Later.  */

static unsigned int
hash_expr (rtx x, enum machine_mode mode, int *do_not_record_p,
	   int hash_table_size)
{
  unsigned int hash;

  *do_not_record_p = 0;

  hash = hash_expr_1 (x, mode, do_not_record_p);
  return hash % hash_table_size;
}

/* Hash a string.  Just add its bytes up.  */

static inline unsigned
hash_string_1 (const char *ps)
{
  unsigned hash = 0;
  const unsigned char *p = (const unsigned char *) ps;

  if (p)
    while (*p)
      hash += *p++;

  return hash;
}
/* Subroutine of hash_expr to do the actual work.  */

static unsigned int
hash_expr_1 (rtx x, enum machine_mode mode, int *do_not_record_p)
{
  int i, j;
  unsigned hash = 0;
  enum rtx_code code;
  const char *fmt;

  if (x == 0)
    return hash;

  /* Used to turn recursion into iteration.  We can't rely on GCC's
     tail-recursion elimination since we need to keep accumulating values
     in HASH.  */
 repeat:

  code = GET_CODE (x);
  switch (code)
    {
    case REG:
      hash += ((unsigned int) REG << 7) + REGNO (x);
      return hash;

    case CONST_INT:
      hash += (((unsigned int) CONST_INT << 7) + (unsigned int) mode
	       + (unsigned int) INTVAL (x));
      return hash;

    case CONST_DOUBLE:
      /* This is like the general case, except that it only counts
	 the integers representing the constant.  */
      hash += (unsigned int) code + (unsigned int) GET_MODE (x);
      if (GET_MODE (x) != VOIDmode)
	for (i = 2; i < GET_RTX_LENGTH (CONST_DOUBLE); i++)
	  hash += (unsigned int) XWINT (x, i);
      else
	hash += ((unsigned int) CONST_DOUBLE_LOW (x)
		 + (unsigned int) CONST_DOUBLE_HIGH (x));
      return hash;

    case CONST_VECTOR:
      {
	int units;
	rtx elt;

	units = CONST_VECTOR_NUNITS (x);

	for (i = 0; i < units; ++i)
	  {
	    elt = CONST_VECTOR_ELT (x, i);
	    hash += hash_expr_1 (elt, GET_MODE (elt), do_not_record_p);
	  }

	return hash;
      }

    /* Assume there is only one rtx object for any given label.  */
    case LABEL_REF:
      /* We don't hash on the address of the CODE_LABEL to avoid bootstrap
	 differences and differences between each stage's debugging dumps.  */
      hash += (((unsigned int) LABEL_REF << 7)
	       + CODE_LABEL_NUMBER (XEXP (x, 0)));
      return hash;

    case SYMBOL_REF:
      {
	/* Don't hash on the symbol's address to avoid bootstrap differences.
	   Different hash values may cause expressions to be recorded in
	   different orders and thus different registers to be used in the
	   final assembler.  This also avoids differences in the dump files
	   between various stages.  */
	unsigned int h = 0;
	const unsigned char *p = (const unsigned char *) XSTR (x, 0);

	while (*p)
	  h += (h << 7) + *p++; /* ??? revisit */

	hash += ((unsigned int) SYMBOL_REF << 7) + h;
	return hash;
      }

    case MEM:
      if (MEM_VOLATILE_P (x))
	{
	  *do_not_record_p = 1;
	  return 0;
	}

      hash += (unsigned int) MEM;
      /* We used to hash on the alias set, but that is not good, since
	 the alias set may differ between -fprofile-arcs and
	 -fbranch-probabilities compilations, causing the profiles to
	 fail to match.  */
      x = XEXP (x, 0);
      goto repeat;
    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
    case PC:
    case CC0:
    case CALL:
    case UNSPEC_VOLATILE:
      *do_not_record_p = 1;
      return 0;

    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
	{
	  *do_not_record_p = 1;
	  return 0;
	}
      else
	{
	  /* We don't want to take the filename and line into account.  */
	  hash += (unsigned) code + (unsigned) GET_MODE (x)
	    + hash_string_1 (ASM_OPERANDS_TEMPLATE (x))
	    + hash_string_1 (ASM_OPERANDS_OUTPUT_CONSTRAINT (x))
	    + (unsigned) ASM_OPERANDS_OUTPUT_IDX (x);

	  if (ASM_OPERANDS_INPUT_LENGTH (x))
	    {
	      for (i = 1; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
		{
		  hash += (hash_expr_1 (ASM_OPERANDS_INPUT (x, i),
					GET_MODE (ASM_OPERANDS_INPUT (x, i)),
					do_not_record_p)
			   + hash_string_1 (ASM_OPERANDS_INPUT_CONSTRAINT
					    (x, i)));
		}

	      hash += hash_string_1 (ASM_OPERANDS_INPUT_CONSTRAINT (x, 0));
	      x = ASM_OPERANDS_INPUT (x, 0);
	      mode = GET_MODE (x);
	      goto repeat;
	    }

	  return hash;
	}

    default:
      break;
    }

  hash += (unsigned) code + (unsigned) GET_MODE (x);
  for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  /* If we are about to do the last recursive call
	     needed at this level, change it into iteration.
	     This function is called enough to be worth it.  */
	  if (i == 0)
	    {
	      x = XEXP (x, i);
	      goto repeat;
	    }

	  hash += hash_expr_1 (XEXP (x, i), 0, do_not_record_p);
	  if (*do_not_record_p)
	    return 0;
	}

      else if (fmt[i] == 'E')
	for (j = 0; j < XVECLEN (x, i); j++)
	  {
	    hash += hash_expr_1 (XVECEXP (x, i, j), 0, do_not_record_p);
	    if (*do_not_record_p)
	      return 0;
	  }

      else if (fmt[i] == 's')
	hash += hash_string_1 (XSTR (x, i));
      else if (fmt[i] == 'i')
	hash += (unsigned int) XINT (x, i);
      else
	abort ();
    }

  return hash;
}
/* Hash a set of register REGNO.

   Sets are hashed on the register that is set.  This simplifies the PRE copy
   propagation code.

   ??? May need to make things more elaborate.  Later, as necessary.  */

static unsigned int
hash_set (int regno, int hash_table_size)
{
  unsigned int hash;

  hash = regno;
  return hash % hash_table_size;
}
/* Return nonzero if exp1 is equivalent to exp2.
   ??? Borrowed from cse.c.  Might want to remerge with cse.c.  Later.  */

static int
expr_equiv_p (rtx x, rtx y)
{
  int i, j;
  enum rtx_code code;
  const char *fmt;

  if (x == y)
    return 1;

  if (x == 0 || y == 0)
    return 0;

  code = GET_CODE (x);
  if (code != GET_CODE (y))
    return 0;

  /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.  */
  if (GET_MODE (x) != GET_MODE (y))
    return 0;

  switch (code)
    {
    case PC:
    case CC0:
    case CONST_INT:
      return 0;

    case LABEL_REF:
      return XEXP (x, 0) == XEXP (y, 0);

    case SYMBOL_REF:
      return XSTR (x, 0) == XSTR (y, 0);

    case REG:
      return REGNO (x) == REGNO (y);

    case MEM:
      /* Can't merge two expressions in different alias sets, since we can
	 decide that the expression is transparent in a block when it isn't,
	 due to it being set with the different alias set.  */
      if (MEM_ALIAS_SET (x) != MEM_ALIAS_SET (y))
	return 0;

      /* A volatile mem should not be considered equivalent to any other.  */
      if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
	return 0;
      break;

    /* For commutative operations, check both orders.  */
    case PLUS:
    case MULT:
    case AND:
    case IOR:
    case XOR:
    case NE:
    case EQ:
      return ((expr_equiv_p (XEXP (x, 0), XEXP (y, 0))
	       && expr_equiv_p (XEXP (x, 1), XEXP (y, 1)))
	      || (expr_equiv_p (XEXP (x, 0), XEXP (y, 1))
		  && expr_equiv_p (XEXP (x, 1), XEXP (y, 0))));

    case ASM_OPERANDS:
      /* We don't use the generic code below because we want to
	 disregard filename and line numbers.  */

      /* A volatile asm isn't equivalent to any other.  */
      if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
	return 0;

      if (GET_MODE (x) != GET_MODE (y)
	  || strcmp (ASM_OPERANDS_TEMPLATE (x), ASM_OPERANDS_TEMPLATE (y))
	  || strcmp (ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
		     ASM_OPERANDS_OUTPUT_CONSTRAINT (y))
	  || ASM_OPERANDS_OUTPUT_IDX (x) != ASM_OPERANDS_OUTPUT_IDX (y)
	  || ASM_OPERANDS_INPUT_LENGTH (x) != ASM_OPERANDS_INPUT_LENGTH (y))
	return 0;

      if (ASM_OPERANDS_INPUT_LENGTH (x))
	{
	  for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
	    if (! expr_equiv_p (ASM_OPERANDS_INPUT (x, i),
				ASM_OPERANDS_INPUT (y, i))
		|| strcmp (ASM_OPERANDS_INPUT_CONSTRAINT (x, i),
			   ASM_OPERANDS_INPUT_CONSTRAINT (y, i)))
	      return 0;
	}

      return 1;

    default:
      break;
    }

  /* Compare the elements.  If any pair of corresponding elements
     fail to match, return 0 for the whole thing.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      switch (fmt[i])
	{
	case 'e':
	  if (! expr_equiv_p (XEXP (x, i), XEXP (y, i)))
	    return 0;
	  break;

	case 'E':
	  if (XVECLEN (x, i) != XVECLEN (y, i))
	    return 0;
	  for (j = 0; j < XVECLEN (x, i); j++)
	    if (! expr_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j)))
	      return 0;
	  break;

	case 's':
	  if (strcmp (XSTR (x, i), XSTR (y, i)))
	    return 0;
	  break;

	case 'i':
	  if (XINT (x, i) != XINT (y, i))
	    return 0;
	  break;

	case 'w':
	  if (XWINT (x, i) != XWINT (y, i))
	    return 0;
	  break;

	case '0':
	  break;

	default:
	  abort ();
	}
    }

  return 1;
}
1839 /* Insert expression X in INSN in the hash TABLE.
1840 If it is already present, record it as the last occurrence in INSN's
1841 basic block.
1843 MODE is the mode of the value X is being stored into.
1844 It is only used if X is a CONST_INT.
1846 ANTIC_P is nonzero if X is an anticipatable expression.
1847 AVAIL_P is nonzero if X is an available expression. */
1849 static void
1850 insert_expr_in_table (rtx x, enum machine_mode mode, rtx insn, int antic_p,
1851 int avail_p, struct hash_table *table)
1853 int found, do_not_record_p;
1854 unsigned int hash;
1855 struct expr *cur_expr, *last_expr = NULL;
1856 struct occr *antic_occr, *avail_occr;
1857 struct occr *last_occr = NULL;
1859 hash = hash_expr (x, mode, &do_not_record_p, table->size);
1861 /* Do not insert the expression in the table if it contains volatile
1862 operands, or if hash_expr determines that the expression is something
1863 we don't want to or can't handle. */
1864 if (do_not_record_p)
1865 return;
1867 cur_expr = table->table[hash];
1868 found = 0;
1870 while (cur_expr && 0 == (found = expr_equiv_p (cur_expr->expr, x)))
1872 /* If the expression isn't found, save a pointer to the end of
1873 the list. */
1874 last_expr = cur_expr;
1875 cur_expr = cur_expr->next_same_hash;
1878 if (! found)
1880 cur_expr = gcse_alloc (sizeof (struct expr));
1881 bytes_used += sizeof (struct expr);
1882 if (table->table[hash] == NULL)
1883 /* This is the first pattern that hashed to this index. */
1884 table->table[hash] = cur_expr;
1885 else
1886 /* Add EXPR to end of this hash chain. */
1887 last_expr->next_same_hash = cur_expr;
1889 /* Set the fields of the expr element. */
1890 cur_expr->expr = x;
1891 cur_expr->bitmap_index = table->n_elems++;
1892 cur_expr->next_same_hash = NULL;
1893 cur_expr->antic_occr = NULL;
1894 cur_expr->avail_occr = NULL;
1897 /* Now record the occurrence(s). */
1898 if (antic_p)
1900 antic_occr = cur_expr->antic_occr;
1902 /* Search for another occurrence in the same basic block. */
1903 while (antic_occr && BLOCK_NUM (antic_occr->insn) != BLOCK_NUM (insn))
1905 /* If an occurrence isn't found, save a pointer to the end of
1906 the list. */
1907 last_occr = antic_occr;
1908 antic_occr = antic_occr->next;
1911 if (antic_occr)
1912 /* Found another instance of the expression in the same basic block.
1913 Prefer the currently recorded one. We want the first one in the
1914 block and the block is scanned from start to end. */
1915 ; /* nothing to do */
1916 else
1918 /* First occurrence of this expression in this basic block. */
1919 antic_occr = gcse_alloc (sizeof (struct occr));
1920 bytes_used += sizeof (struct occr);
1921 /* First occurrence of this expression in any block? */
1922 if (cur_expr->antic_occr == NULL)
1923 cur_expr->antic_occr = antic_occr;
1924 else
1925 last_occr->next = antic_occr;
1927 antic_occr->insn = insn;
1928 antic_occr->next = NULL;
1929 antic_occr->deleted_p = 0;
1933 if (avail_p)
1935 avail_occr = cur_expr->avail_occr;
1937 /* Search for another occurrence in the same basic block. */
1938 while (avail_occr && BLOCK_NUM (avail_occr->insn) != BLOCK_NUM (insn))
1940 /* If an occurrence isn't found, save a pointer to the end of
1941 the list. */
1942 last_occr = avail_occr;
1943 avail_occr = avail_occr->next;
1946 if (avail_occr)
1947 /* Found another instance of the expression in the same basic block.
1948 Prefer this occurrence to the currently recorded one. We want
1949 the last one in the block and the block is scanned from start
1950 to end. */
1951 avail_occr->insn = insn;
1952 else
1954 /* First occurrence of this expression in this basic block. */
1955 avail_occr = gcse_alloc (sizeof (struct occr));
1956 bytes_used += sizeof (struct occr);
1958 /* First occurrence of this expression in any block? */
1959 if (cur_expr->avail_occr == NULL)
1960 cur_expr->avail_occr = avail_occr;
1961 else
1962 last_occr->next = avail_occr;
1964 avail_occr->insn = insn;
1965 avail_occr->next = NULL;
1966 avail_occr->deleted_p = 0;
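/* An illustration of the occurrence bookkeeping above, with hypothetical
   insns and a block scanned from start to end:

     insn 10:  (set (reg 100) (plus (reg 101) (reg 102)))
     insn 14:  (set (reg 103) (plus (reg 101) (reg 102)))

   If both occurrences of the PLUS are anticipatable and available, the
   expression's antic_occr list records insn 10 (the first in the block)
   and its avail_occr list records insn 14 (the last in the block).  */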
1971 /* Insert pattern X in INSN in the hash table.
1972 X is a SET of a reg to either another reg or a constant.
1973 If it is already present, record it as the last occurrence in INSN's
1974 basic block. */
1976 static void
1977 insert_set_in_table (rtx x, rtx insn, struct hash_table *table)
1979 int found;
1980 unsigned int hash;
1981 struct expr *cur_expr, *last_expr = NULL;
1982 struct occr *cur_occr, *last_occr = NULL;
1984 if (GET_CODE (x) != SET
1985 || ! REG_P (SET_DEST (x)))
1986 abort ();
1988 hash = hash_set (REGNO (SET_DEST (x)), table->size);
1990 cur_expr = table->table[hash];
1991 found = 0;
1993 while (cur_expr && 0 == (found = expr_equiv_p (cur_expr->expr, x)))
1995 /* If the expression isn't found, save a pointer to the end of
1996 the list. */
1997 last_expr = cur_expr;
1998 cur_expr = cur_expr->next_same_hash;
2001 if (! found)
2003 cur_expr = gcse_alloc (sizeof (struct expr));
2004 bytes_used += sizeof (struct expr);
2005 if (table->table[hash] == NULL)
2006 /* This is the first pattern that hashed to this index. */
2007 table->table[hash] = cur_expr;
2008 else
2009 /* Add EXPR to end of this hash chain. */
2010 last_expr->next_same_hash = cur_expr;
2012 /* Set the fields of the expr element.
2013 We must copy X because it can be modified when copy propagation is
2014 performed on its operands. */
2015 cur_expr->expr = copy_rtx (x);
2016 cur_expr->bitmap_index = table->n_elems++;
2017 cur_expr->next_same_hash = NULL;
2018 cur_expr->antic_occr = NULL;
2019 cur_expr->avail_occr = NULL;
2022 /* Now record the occurrence. */
2023 cur_occr = cur_expr->avail_occr;
2025 /* Search for another occurrence in the same basic block. */
2026 while (cur_occr && BLOCK_NUM (cur_occr->insn) != BLOCK_NUM (insn))
2028 /* If an occurrence isn't found, save a pointer to the end of
2029 the list. */
2030 last_occr = cur_occr;
2031 cur_occr = cur_occr->next;
2034 if (cur_occr)
2035 /* Found another instance of the expression in the same basic block.
2036 Prefer this occurrence to the currently recorded one. We want the
2037 last one in the block and the block is scanned from start to end. */
2038 cur_occr->insn = insn;
2039 else
2041 /* First occurrence of this expression in this basic block. */
2042 cur_occr = gcse_alloc (sizeof (struct occr));
2043 bytes_used += sizeof (struct occr);
2045 /* First occurrence of this expression in any block? */
2046 if (cur_expr->avail_occr == NULL)
2047 cur_expr->avail_occr = cur_occr;
2048 else
2049 last_occr->next = cur_occr;
2051 cur_occr->insn = insn;
2052 cur_occr->next = NULL;
2053 cur_occr->deleted_p = 0;
2057 /* Determine whether the rtx X should be treated as a constant for
2058 the purposes of GCSE's constant propagation. */
2060 static bool
2061 gcse_constant_p (rtx x)
2063 /* Consider a COMPARE of two integers constant. */
2064 if (GET_CODE (x) == COMPARE
2065 && GET_CODE (XEXP (x, 0)) == CONST_INT
2066 && GET_CODE (XEXP (x, 1)) == CONST_INT)
2067 return true;
2069 /* A COMPARE of the same register with itself is constant provided it is
2070 not a floating point register, since a NaN does not compare equal to itself. */
2071 if (GET_CODE (x) == COMPARE
2072 && REG_P (XEXP (x, 0)) && REG_P (XEXP (x, 1))
2073 && REGNO (XEXP (x, 0)) == REGNO (XEXP (x, 1))
2074 && ! FLOAT_MODE_P (GET_MODE (XEXP (x, 0)))
2075 && ! FLOAT_MODE_P (GET_MODE (XEXP (x, 1))))
2076 return true;
2078 return CONSTANT_P (x);
2081 /* Scan pattern PAT of INSN and add an entry to the hash TABLE (set or
2082 expression one). */
2084 static void
2085 hash_scan_set (rtx pat, rtx insn, struct hash_table *table)
2087 rtx src = SET_SRC (pat);
2088 rtx dest = SET_DEST (pat);
2089 rtx note;
2091 if (GET_CODE (src) == CALL)
2092 hash_scan_call (src, insn, table);
2094 else if (REG_P (dest))
2096 unsigned int regno = REGNO (dest);
2097 rtx tmp;
2099 /* If this is a single set and we are doing constant propagation,
2100 see if a REG_NOTE shows this equivalent to a constant. */
2101 if (table->set_p && (note = find_reg_equal_equiv_note (insn)) != 0
2102 && gcse_constant_p (XEXP (note, 0)))
2103 src = XEXP (note, 0), pat = gen_rtx_SET (VOIDmode, dest, src);
2105 /* Only record sets of pseudo-regs in the hash table. */
2106 if (! table->set_p
2107 && regno >= FIRST_PSEUDO_REGISTER
2108 /* Don't GCSE something if we can't do a reg/reg copy. */
2109 && can_copy_p (GET_MODE (dest))
2110 /* GCSE commonly inserts instructions after the insn. We can't
2111 do that easily for EH_REGION notes so disable GCSE on these
2112 for now. */
2113 && !find_reg_note (insn, REG_EH_REGION, NULL_RTX)
2114 /* Is SET_SRC something we want to gcse? */
2115 && want_to_gcse_p (src)
2116 /* Don't CSE a nop. */
2117 && ! set_noop_p (pat)
2118 /* Don't GCSE if it has an attached REG_EQUIV note.
2119 At this point only function parameters should have
2120 REG_EQUIV notes, and if the argument slot is used somewhere
2121 explicitly, it means the address of the parameter has been taken,
2122 so we should not extend the lifetime of the pseudo. */
2123 && ((note = find_reg_note (insn, REG_EQUIV, NULL_RTX)) == 0
2124 || ! MEM_P (XEXP (note, 0))))
2126 /* An expression is not anticipatable if its operands are
2127 modified before this insn or if this is not the only SET in
2128 this insn. */
2129 int antic_p = oprs_anticipatable_p (src, insn) && single_set (insn);
2130 /* An expression is not available if its operands are
2131 subsequently modified, including this insn. It's also not
2132 available if this is a branch, because we can't insert
2133 a set after the branch. */
2134 int avail_p = (oprs_available_p (src, insn)
2135 && ! JUMP_P (insn));
2137 insert_expr_in_table (src, GET_MODE (dest), insn, antic_p, avail_p, table);
2140 /* Record sets for constant/copy propagation. */
2141 else if (table->set_p
2142 && regno >= FIRST_PSEUDO_REGISTER
2143 && ((REG_P (src)
2144 && REGNO (src) >= FIRST_PSEUDO_REGISTER
2145 && can_copy_p (GET_MODE (dest))
2146 && REGNO (src) != regno)
2147 || gcse_constant_p (src))
2148 /* A copy is not available if its src or dest is subsequently
2149 modified. Here we want to search from INSN+1 on, but
2150 oprs_available_p searches from INSN on. */
2151 && (insn == BB_END (BLOCK_FOR_INSN (insn))
2152 || ((tmp = next_nonnote_insn (insn)) != NULL_RTX
2153 && oprs_available_p (pat, tmp))))
2154 insert_set_in_table (pat, insn, table);
2156 /* In the case of a store we want to consider the memory value as available
2157 in the REG stored in that memory. This makes it possible to remove
2158 redundant loads that follow stores to the same location. */
2159 else if (flag_gcse_las && REG_P (src) && MEM_P (dest))
2161 unsigned int regno = REGNO (src);
2163 /* Do not do this for constant/copy propagation. */
2164 if (! table->set_p
2165 /* Only record sets of pseudo-regs in the hash table. */
2166 && regno >= FIRST_PSEUDO_REGISTER
2167 /* Don't GCSE something if we can't do a reg/reg copy. */
2168 && can_copy_p (GET_MODE (src))
2169 /* GCSE commonly inserts instructions after the insn. We can't
2170 do that easily for EH_REGION notes so disable GCSE on these
2171 for now. */
2172 && ! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
2173 /* Is SET_DEST something we want to gcse? */
2174 && want_to_gcse_p (dest)
2175 /* Don't CSE a nop. */
2176 && ! set_noop_p (pat)
2177 /* Don't GCSE if it has an attached REG_EQUIV note.
2178 At this point only function parameters should have
2179 REG_EQUIV notes, and if the argument slot is used somewhere
2180 explicitly, it means the address of the parameter has been taken,
2181 so we should not extend the lifetime of the pseudo. */
2182 && ((note = find_reg_note (insn, REG_EQUIV, NULL_RTX)) == 0
2183 || ! MEM_P (XEXP (note, 0))))
2185 /* Stores are never anticipatable. */
2186 int antic_p = 0;
2187 /* An expression is not available if its operands are
2188 subsequently modified, including this insn. It's also not
2189 available if this is a branch, because we can't insert
2190 a set after the branch. */
2191 int avail_p = oprs_available_p (dest, insn)
2192 && ! JUMP_P (insn);
2194 /* Record the memory expression (DEST) in the hash table. */
2195 insert_expr_in_table (dest, GET_MODE (dest), insn,
2196 antic_p, avail_p, table);
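/* To make the two kinds of entry concrete (register numbers are
   hypothetical): for the expression table,
   (set (reg 100) (plus (reg 101) (reg 102))) records just the PLUS,
   hashed by hash_expr; for the set table, (set (reg 100) (reg 101)) or
   (set (reg 100) (const_int 7)) records the whole SET, hashed by
   hash_set on register 100.  */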
2201 static void
2202 hash_scan_clobber (rtx x ATTRIBUTE_UNUSED, rtx insn ATTRIBUTE_UNUSED,
2203 struct hash_table *table ATTRIBUTE_UNUSED)
2205 /* Currently nothing to do. */
2208 static void
2209 hash_scan_call (rtx x ATTRIBUTE_UNUSED, rtx insn ATTRIBUTE_UNUSED,
2210 struct hash_table *table ATTRIBUTE_UNUSED)
2212 /* Currently nothing to do. */
2215 /* Process INSN and add hash table entries as appropriate.
2217 Only available expressions that set a single pseudo-reg are recorded.
2219 Single sets in a PARALLEL could be handled, but it's an extra complication
2220 that isn't dealt with right now. The trick is handling the CLOBBERs that
2221 are also in the PARALLEL. Later.
2223 If SET_P is nonzero, this is for the assignment hash table,
2224 otherwise it is for the expression hash table.
2225 If IN_LIBCALL_BLOCK is nonzero, we are in a libcall block and should
2226 not record any expressions. */
2228 static void
2229 hash_scan_insn (rtx insn, struct hash_table *table, int in_libcall_block)
2231 rtx pat = PATTERN (insn);
2232 int i;
2234 if (in_libcall_block)
2235 return;
2237 /* Pick out the sets of INSN and for other forms of instructions record
2238 what's been modified. */
2240 if (GET_CODE (pat) == SET)
2241 hash_scan_set (pat, insn, table);
2242 else if (GET_CODE (pat) == PARALLEL)
2243 for (i = 0; i < XVECLEN (pat, 0); i++)
2245 rtx x = XVECEXP (pat, 0, i);
2247 if (GET_CODE (x) == SET)
2248 hash_scan_set (x, insn, table);
2249 else if (GET_CODE (x) == CLOBBER)
2250 hash_scan_clobber (x, insn, table);
2251 else if (GET_CODE (x) == CALL)
2252 hash_scan_call (x, insn, table);
2255 else if (GET_CODE (pat) == CLOBBER)
2256 hash_scan_clobber (pat, insn, table);
2257 else if (GET_CODE (pat) == CALL)
2258 hash_scan_call (pat, insn, table);
2261 static void
2262 dump_hash_table (FILE *file, const char *name, struct hash_table *table)
2264 int i;
2265 /* Flattened out table, so it's printed in proper order. */
2266 struct expr **flat_table;
2267 unsigned int *hash_val;
2268 struct expr *expr;
2270 flat_table = xcalloc (table->n_elems, sizeof (struct expr *));
2271 hash_val = xmalloc (table->n_elems * sizeof (unsigned int));
2273 for (i = 0; i < (int) table->size; i++)
2274 for (expr = table->table[i]; expr != NULL; expr = expr->next_same_hash)
2276 flat_table[expr->bitmap_index] = expr;
2277 hash_val[expr->bitmap_index] = i;
2280 fprintf (file, "%s hash table (%d buckets, %d entries)\n",
2281 name, table->size, table->n_elems);
2283 for (i = 0; i < (int) table->n_elems; i++)
2284 if (flat_table[i] != 0)
2286 expr = flat_table[i];
2287 fprintf (file, "Index %d (hash value %d)\n ",
2288 expr->bitmap_index, hash_val[i]);
2289 print_rtl (file, expr->expr);
2290 fprintf (file, "\n");
2293 fprintf (file, "\n");
2295 free (flat_table);
2296 free (hash_val);
2299 /* Record register first/last/block set information for REGNO in INSN.
2301 first_set records the first place in the block where the register
2302 is set and is used to compute "anticipatability".
2304 last_set records the last place in the block where the register
2305 is set and is used to compute "availability".
2307 last_bb records the block for which first_set and last_set are
2308 valid, as a quick test to invalidate them.
2310 reg_set_in_block records whether the register is set in the block
2311 and is used to compute "transparency". */
2313 static void
2314 record_last_reg_set_info (rtx insn, int regno)
2316 struct reg_avail_info *info = &reg_avail_info[regno];
2317 int cuid = INSN_CUID (insn);
2319 info->last_set = cuid;
2320 if (info->last_bb != current_bb)
2322 info->last_bb = current_bb;
2323 info->first_set = cuid;
2324 SET_BIT (reg_set_in_block[current_bb->index], regno);
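/* For example (hypothetical cuids): if register 65 is set by the insns
   with cuids 10 and 42 in the current block, then after scanning the
   block first_set == 10 and last_set == 42.  Roughly, an occurrence of
   an expression using reg 65 is anticipatable only if it appears before
   cuid 10, and is available at the block end only if it appears after
   cuid 42.  */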
2329 /* Record all of the canonicalized MEMs of record_last_mem_set_info's insn.
2330 Note we store a pair of elements in the list, so they have to be
2331 taken off pairwise. */
2333 static void
2334 canon_list_insert (rtx dest ATTRIBUTE_UNUSED, rtx unused1 ATTRIBUTE_UNUSED,
2335 void * v_insn)
2337 rtx dest_addr, insn;
2338 int bb;
2340 while (GET_CODE (dest) == SUBREG
2341 || GET_CODE (dest) == ZERO_EXTRACT
2342 || GET_CODE (dest) == SIGN_EXTRACT
2343 || GET_CODE (dest) == STRICT_LOW_PART)
2344 dest = XEXP (dest, 0);
2346 /* If DEST is not a MEM, then it will not conflict with a load. Note
2347 that function calls are assumed to clobber memory, but are handled
2348 elsewhere. */
2350 if (! MEM_P (dest))
2351 return;
2353 dest_addr = get_addr (XEXP (dest, 0));
2354 dest_addr = canon_rtx (dest_addr);
2355 insn = (rtx) v_insn;
2356 bb = BLOCK_NUM (insn);
2358 canon_modify_mem_list[bb] =
2359 alloc_EXPR_LIST (VOIDmode, dest_addr, canon_modify_mem_list[bb]);
2360 canon_modify_mem_list[bb] =
2361 alloc_EXPR_LIST (VOIDmode, dest, canon_modify_mem_list[bb]);
2362 bitmap_set_bit (canon_modify_mem_list_set, bb);
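/* After two stores in block BB the list therefore looks like, newest
   first:

     dest2 -> dest_addr2 -> dest1 -> dest_addr1 -> nil

   i.e. each store contributes a (MEM, canonical address) pair, which
   compute_transp below consumes two elements at a time.  */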
2365 /* Record memory modification information for INSN. We do not actually care
2366 about the memory location(s) that are set, or even how they are set (consider
2367 a CALL_INSN). We merely need to record which insns modify memory. */
2369 static void
2370 record_last_mem_set_info (rtx insn)
2372 int bb = BLOCK_NUM (insn);
2374 /* load_killed_in_block_p will handle the case of calls clobbering
2375 everything. */
2376 modify_mem_list[bb] = alloc_INSN_LIST (insn, modify_mem_list[bb]);
2377 bitmap_set_bit (modify_mem_list_set, bb);
2379 if (CALL_P (insn))
2381 /* Note that traversals of this loop (other than for freeing)
2382 will break after encountering a CALL_INSN. So, there's no
2383 need to insert a pair of items, as canon_list_insert does. */
2384 canon_modify_mem_list[bb] =
2385 alloc_INSN_LIST (insn, canon_modify_mem_list[bb]);
2386 bitmap_set_bit (canon_modify_mem_list_set, bb);
2388 else
2389 note_stores (PATTERN (insn), canon_list_insert, (void*) insn);
2392 /* Called from compute_hash_table via note_stores to handle one
2393 SET or CLOBBER in an insn. DATA is really the instruction in which
2394 the SET is taking place. */
2396 static void
2397 record_last_set_info (rtx dest, rtx setter ATTRIBUTE_UNUSED, void *data)
2399 rtx last_set_insn = (rtx) data;
2401 if (GET_CODE (dest) == SUBREG)
2402 dest = SUBREG_REG (dest);
2404 if (REG_P (dest))
2405 record_last_reg_set_info (last_set_insn, REGNO (dest));
2406 else if (MEM_P (dest)
2407 /* Ignore pushes, they clobber nothing. */
2408 && ! push_operand (dest, GET_MODE (dest)))
2409 record_last_mem_set_info (last_set_insn);
2412 /* Top level function to create an expression or assignment hash table.
2414 Expression entries are placed in the hash table if
2415 - they are of the form (set (pseudo-reg) src),
2416 - src is something we want to perform GCSE on,
2417 - none of the operands are subsequently modified in the block
2419 Assignment entries are placed in the hash table if
2420 - they are of the form (set (pseudo-reg) src),
2421 - src is something we want to perform const/copy propagation on,
2422 - none of the operands or target are subsequently modified in the block
2424 Currently src must be a pseudo-reg or a const_int.
2426 TABLE is the table computed. */
2428 static void
2429 compute_hash_table_work (struct hash_table *table)
2431 unsigned int i;
2433 /* While we compute the hash table we also compute a bit array of which
2434 registers are set in which blocks.
2435 ??? This isn't needed during const/copy propagation, but it's cheap to
2436 compute. Later. */
2437 sbitmap_vector_zero (reg_set_in_block, last_basic_block);
2439 /* Re-cache any INSN_LIST nodes we have allocated. */
2440 clear_modify_mem_tables ();
2441 /* Some working arrays used to track first and last set in each block. */
2442 reg_avail_info = gmalloc (max_gcse_regno * sizeof (struct reg_avail_info));
2444 for (i = 0; i < max_gcse_regno; ++i)
2445 reg_avail_info[i].last_bb = NULL;
2447 FOR_EACH_BB (current_bb)
2449 rtx insn;
2450 unsigned int regno;
2451 int in_libcall_block;
2453 /* First pass over the instructions records information used to
2454 determine when registers and memory are first and last set.
2455 ??? hard-reg reg_set_in_block computation
2456 could be moved to compute_sets since they currently don't change. */
2458 for (insn = BB_HEAD (current_bb);
2459 insn && insn != NEXT_INSN (BB_END (current_bb));
2460 insn = NEXT_INSN (insn))
2462 if (! INSN_P (insn))
2463 continue;
2465 if (CALL_P (insn))
2467 bool clobbers_all = false;
2468 #ifdef NON_SAVING_SETJMP
2469 if (NON_SAVING_SETJMP
2470 && find_reg_note (insn, REG_SETJMP, NULL_RTX))
2471 clobbers_all = true;
2472 #endif
2474 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2475 if (clobbers_all
2476 || TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
2477 record_last_reg_set_info (insn, regno);
2479 mark_call (insn);
2482 note_stores (PATTERN (insn), record_last_set_info, insn);
2485 /* Insert implicit sets in the hash table. */
2486 if (table->set_p
2487 && implicit_sets[current_bb->index] != NULL_RTX)
2488 hash_scan_set (implicit_sets[current_bb->index],
2489 BB_HEAD (current_bb), table);
2491 /* The next pass builds the hash table. */
2493 for (insn = BB_HEAD (current_bb), in_libcall_block = 0;
2494 insn && insn != NEXT_INSN (BB_END (current_bb));
2495 insn = NEXT_INSN (insn))
2496 if (INSN_P (insn))
2498 if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
2499 in_libcall_block = 1;
2500 else if (table->set_p && find_reg_note (insn, REG_RETVAL, NULL_RTX))
2501 in_libcall_block = 0;
2502 hash_scan_insn (insn, table, in_libcall_block);
2503 if (!table->set_p && find_reg_note (insn, REG_RETVAL, NULL_RTX))
2504 in_libcall_block = 0;
2508 free (reg_avail_info);
2509 reg_avail_info = NULL;
2512 /* Allocate space for the set/expr hash TABLE.
2513 N_INSNS is the number of instructions in the function.
2514 It is used to determine the number of buckets to use.
2515 SET_P determines whether set or expression table will
2516 be created. */
2518 static void
2519 alloc_hash_table (int n_insns, struct hash_table *table, int set_p)
2521 int n;
2523 table->size = n_insns / 4;
2524 if (table->size < 11)
2525 table->size = 11;
2527 /* Attempt to maintain efficient use of hash table.
2528 Making it an odd number is simplest for now.
2529 ??? Later take some measurements. */
2530 table->size |= 1;
2531 n = table->size * sizeof (struct expr *);
2532 table->table = gmalloc (n);
2533 table->set_p = set_p;
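/* A worked example of the sizing above: for a function of 1000 insns,
   table->size = 1000 / 4 = 250, and 250 | 1 = 251 buckets, so only a
   handful of entries hash to each bucket on average.  The minimum of 11
   keeps very small functions from degenerating into a single long
   chain.  */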
2536 /* Free things allocated by alloc_hash_table. */
2538 static void
2539 free_hash_table (struct hash_table *table)
2541 free (table->table);
2544 /* Compute the hash TABLE for doing copy/const propagation or
2545 expression hash table. */
2547 static void
2548 compute_hash_table (struct hash_table *table)
2550 /* Initialize count of number of entries in hash table. */
2551 table->n_elems = 0;
2552 memset (table->table, 0, table->size * sizeof (struct expr *));
2554 compute_hash_table_work (table);
2557 /* Expression tracking support. */
2559 /* Lookup pattern PAT in the expression TABLE.
2560 The result is a pointer to the table entry, or NULL if not found. */
2562 static struct expr *
2563 lookup_expr (rtx pat, struct hash_table *table)
2565 int do_not_record_p;
2566 unsigned int hash = hash_expr (pat, GET_MODE (pat), &do_not_record_p,
2567 table->size);
2568 struct expr *expr;
2570 if (do_not_record_p)
2571 return NULL;
2573 expr = table->table[hash];
2575 while (expr && ! expr_equiv_p (expr->expr, pat))
2576 expr = expr->next_same_hash;
2578 return expr;
2581 /* Lookup REGNO in the set TABLE. The result is a pointer to the
2582 table entry, or NULL if not found. */
2584 static struct expr *
2585 lookup_set (unsigned int regno, struct hash_table *table)
2587 unsigned int hash = hash_set (regno, table->size);
2588 struct expr *expr;
2590 expr = table->table[hash];
2592 while (expr && REGNO (SET_DEST (expr->expr)) != regno)
2593 expr = expr->next_same_hash;
2595 return expr;
2598 /* Return the next entry for REGNO in list EXPR. */
2600 static struct expr *
2601 next_set (unsigned int regno, struct expr *expr)
2603 do
2604 expr = expr->next_same_hash;
2605 while (expr && REGNO (SET_DEST (expr->expr)) != regno);
2607 return expr;
2610 /* Like free_INSN_LIST_list or free_EXPR_LIST_list, except that the node
2611 types may be mixed. */
2613 static void
2614 free_insn_expr_list_list (rtx *listp)
2616 rtx list, next;
2618 for (list = *listp; list ; list = next)
2620 next = XEXP (list, 1);
2621 if (GET_CODE (list) == EXPR_LIST)
2622 free_EXPR_LIST_node (list);
2623 else
2624 free_INSN_LIST_node (list);
2627 *listp = NULL;
2630 /* Clear canon_modify_mem_list and modify_mem_list tables. */
2631 static void
2632 clear_modify_mem_tables (void)
2634 int i;
2636 EXECUTE_IF_SET_IN_BITMAP
2637 (modify_mem_list_set, 0, i, free_INSN_LIST_list (modify_mem_list + i));
2638 bitmap_clear (modify_mem_list_set);
2640 EXECUTE_IF_SET_IN_BITMAP
2641 (canon_modify_mem_list_set, 0, i,
2642 free_insn_expr_list_list (canon_modify_mem_list + i));
2643 bitmap_clear (canon_modify_mem_list_set);
2646 /* Release memory used by modify_mem_list_set and canon_modify_mem_list_set. */
2648 static void
2649 free_modify_mem_tables (void)
2651 clear_modify_mem_tables ();
2652 free (modify_mem_list);
2653 free (canon_modify_mem_list);
2654 modify_mem_list = 0;
2655 canon_modify_mem_list = 0;
2658 /* Reset tables used to keep track of what's still available [since the
2659 start of the block]. */
2661 static void
2662 reset_opr_set_tables (void)
2664 /* Maintain a bitmap of which regs have been set since beginning of
2665 the block. */
2666 CLEAR_REG_SET (reg_set_bitmap);
2668 /* Also keep a record of the last instruction to modify memory.
2669 For now this is very trivial, we only record whether any memory
2670 location has been modified. */
2671 clear_modify_mem_tables ();
2674 /* Return nonzero if the operands of X are not set before INSN in
2675 INSN's basic block. */
2677 static int
2678 oprs_not_set_p (rtx x, rtx insn)
2680 int i, j;
2681 enum rtx_code code;
2682 const char *fmt;
2684 if (x == 0)
2685 return 1;
2687 code = GET_CODE (x);
2688 switch (code)
2690 case PC:
2691 case CC0:
2692 case CONST:
2693 case CONST_INT:
2694 case CONST_DOUBLE:
2695 case CONST_VECTOR:
2696 case SYMBOL_REF:
2697 case LABEL_REF:
2698 case ADDR_VEC:
2699 case ADDR_DIFF_VEC:
2700 return 1;
2702 case MEM:
2703 if (load_killed_in_block_p (BLOCK_FOR_INSN (insn),
2704 INSN_CUID (insn), x, 0))
2705 return 0;
2706 else
2707 return oprs_not_set_p (XEXP (x, 0), insn);
2709 case REG:
2710 return ! REGNO_REG_SET_P (reg_set_bitmap, REGNO (x));
2712 default:
2713 break;
2716 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
2718 if (fmt[i] == 'e')
2720 /* If we are about to do the last recursive call
2721 needed at this level, change it into iteration.
2722 This function is called enough to be worth it. */
2723 if (i == 0)
2724 return oprs_not_set_p (XEXP (x, i), insn);
2726 if (! oprs_not_set_p (XEXP (x, i), insn))
2727 return 0;
2729 else if (fmt[i] == 'E')
2730 for (j = 0; j < XVECLEN (x, i); j++)
2731 if (! oprs_not_set_p (XVECEXP (x, i, j), insn))
2732 return 0;
2735 return 1;
2738 /* Mark things set by a CALL. */
2740 static void
2741 mark_call (rtx insn)
2743 if (! CONST_OR_PURE_CALL_P (insn))
2744 record_last_mem_set_info (insn);
2747 /* Mark things set by a SET. */
2749 static void
2750 mark_set (rtx pat, rtx insn)
2752 rtx dest = SET_DEST (pat);
2754 while (GET_CODE (dest) == SUBREG
2755 || GET_CODE (dest) == ZERO_EXTRACT
2756 || GET_CODE (dest) == SIGN_EXTRACT
2757 || GET_CODE (dest) == STRICT_LOW_PART)
2758 dest = XEXP (dest, 0);
2760 if (REG_P (dest))
2761 SET_REGNO_REG_SET (reg_set_bitmap, REGNO (dest));
2762 else if (MEM_P (dest))
2763 record_last_mem_set_info (insn);
2765 if (GET_CODE (SET_SRC (pat)) == CALL)
2766 mark_call (insn);
2769 /* Record things set by a CLOBBER. */
2771 static void
2772 mark_clobber (rtx pat, rtx insn)
2774 rtx clob = XEXP (pat, 0);
2776 while (GET_CODE (clob) == SUBREG || GET_CODE (clob) == STRICT_LOW_PART)
2777 clob = XEXP (clob, 0);
2779 if (REG_P (clob))
2780 SET_REGNO_REG_SET (reg_set_bitmap, REGNO (clob));
2781 else
2782 record_last_mem_set_info (insn);
2785 /* Record things set by INSN.
2786 This data is used by oprs_not_set_p. */
2788 static void
2789 mark_oprs_set (rtx insn)
2791 rtx pat = PATTERN (insn);
2792 int i;
2794 if (GET_CODE (pat) == SET)
2795 mark_set (pat, insn);
2796 else if (GET_CODE (pat) == PARALLEL)
2797 for (i = 0; i < XVECLEN (pat, 0); i++)
2799 rtx x = XVECEXP (pat, 0, i);
2801 if (GET_CODE (x) == SET)
2802 mark_set (x, insn);
2803 else if (GET_CODE (x) == CLOBBER)
2804 mark_clobber (x, insn);
2805 else if (GET_CODE (x) == CALL)
2806 mark_call (insn);
2809 else if (GET_CODE (pat) == CLOBBER)
2810 mark_clobber (pat, insn);
2811 else if (GET_CODE (pat) == CALL)
2812 mark_call (insn);
2816 /* Compute copy/constant propagation working variables. */
2818 /* Local properties of assignments. */
2819 static sbitmap *cprop_pavloc;
2820 static sbitmap *cprop_absaltered;
2822 /* Global properties of assignments (computed from the local properties). */
2823 static sbitmap *cprop_avin;
2824 static sbitmap *cprop_avout;
2826 /* Allocate vars used for copy/const propagation. N_BLOCKS is the number of
2827 basic blocks. N_SETS is the number of sets. */
2829 static void
2830 alloc_cprop_mem (int n_blocks, int n_sets)
2832 cprop_pavloc = sbitmap_vector_alloc (n_blocks, n_sets);
2833 cprop_absaltered = sbitmap_vector_alloc (n_blocks, n_sets);
2835 cprop_avin = sbitmap_vector_alloc (n_blocks, n_sets);
2836 cprop_avout = sbitmap_vector_alloc (n_blocks, n_sets);
2839 /* Free vars used by copy/const propagation. */
2841 static void
2842 free_cprop_mem (void)
2844 sbitmap_vector_free (cprop_pavloc);
2845 sbitmap_vector_free (cprop_absaltered);
2846 sbitmap_vector_free (cprop_avin);
2847 sbitmap_vector_free (cprop_avout);
2850 /* For each block, compute whether X is transparent. X is either an
2851 expression or an assignment [though we don't care which, for this context
2852 an assignment is treated as an expression]. For each block where an
2853 element of X is modified, set (SET_P == 1) or reset (SET_P == 0) the INDX
2854 bit in BMAP. */
2856 static void
2857 compute_transp (rtx x, int indx, sbitmap *bmap, int set_p)
2859 int i, j;
2860 basic_block bb;
2861 enum rtx_code code;
2862 reg_set *r;
2863 const char *fmt;
2865 /* repeat is used to turn tail-recursion into iteration since GCC
2866 can't do it when there's no return value. */
2867 repeat:
2869 if (x == 0)
2870 return;
2872 code = GET_CODE (x);
2873 switch (code)
2875 case REG:
2876 if (set_p)
2878 if (REGNO (x) < FIRST_PSEUDO_REGISTER)
2880 FOR_EACH_BB (bb)
2881 if (TEST_BIT (reg_set_in_block[bb->index], REGNO (x)))
2882 SET_BIT (bmap[bb->index], indx);
2884 else
2886 for (r = reg_set_table[REGNO (x)]; r != NULL; r = r->next)
2887 SET_BIT (bmap[BLOCK_NUM (r->insn)], indx);
2890 else
2892 if (REGNO (x) < FIRST_PSEUDO_REGISTER)
2894 FOR_EACH_BB (bb)
2895 if (TEST_BIT (reg_set_in_block[bb->index], REGNO (x)))
2896 RESET_BIT (bmap[bb->index], indx);
2898 else
2900 for (r = reg_set_table[REGNO (x)]; r != NULL; r = r->next)
2901 RESET_BIT (bmap[BLOCK_NUM (r->insn)], indx);
2905 return;
2907 case MEM:
2908 FOR_EACH_BB (bb)
2910 rtx list_entry = canon_modify_mem_list[bb->index];
2912 while (list_entry)
2914 rtx dest, dest_addr;
2916 if (CALL_P (XEXP (list_entry, 0)))
2918 if (set_p)
2919 SET_BIT (bmap[bb->index], indx);
2920 else
2921 RESET_BIT (bmap[bb->index], indx);
2922 break;
2924 /* LIST_ENTRY must be an EXPR_LIST pair built by canon_list_insert,
2925 giving a modified MEM followed by its canonicalized address.
2926 Examine each hunk of memory that is modified. */
2927 dest = XEXP (list_entry, 0);
2928 list_entry = XEXP (list_entry, 1);
2929 dest_addr = XEXP (list_entry, 0);
2931 if (canon_true_dependence (dest, GET_MODE (dest), dest_addr,
2932 x, rtx_addr_varies_p))
2934 if (set_p)
2935 SET_BIT (bmap[bb->index], indx);
2936 else
2937 RESET_BIT (bmap[bb->index], indx);
2938 break;
2940 list_entry = XEXP (list_entry, 1);
2944 x = XEXP (x, 0);
2945 goto repeat;
2947 case PC:
2948 case CC0: /*FIXME*/
2949 case CONST:
2950 case CONST_INT:
2951 case CONST_DOUBLE:
2952 case CONST_VECTOR:
2953 case SYMBOL_REF:
2954 case LABEL_REF:
2955 case ADDR_VEC:
2956 case ADDR_DIFF_VEC:
2957 return;
2959 default:
2960 break;
2963 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
2965 if (fmt[i] == 'e')
2967 /* If we are about to do the last recursive call
2968 needed at this level, change it into iteration.
2969 This function is called enough to be worth it. */
2970 if (i == 0)
2972 x = XEXP (x, i);
2973 goto repeat;
2976 compute_transp (XEXP (x, i), indx, bmap, set_p);
2978 else if (fmt[i] == 'E')
2979 for (j = 0; j < XVECLEN (x, i); j++)
2980 compute_transp (XVECEXP (x, i, j), indx, bmap, set_p);
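/* An illustration (hypothetical numbers): if X mentions (reg 65) and
   reg 65 is set in blocks 2 and 7, then with SET_P == 0 the INDX bit is
   cleared in bmap[2] and bmap[7], marking X as not transparent there;
   with SET_P == 1 the same bits are set instead, which is the form used
   when computing the "absolutely altered" property for cprop.  */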
2984 /* Top level routine to do the dataflow analysis needed by copy/const
2985 propagation. */
2987 static void
2988 compute_cprop_data (void)
2990 compute_local_properties (cprop_absaltered, cprop_pavloc, NULL, &set_hash_table);
2991 compute_available (cprop_pavloc, cprop_absaltered,
2992 cprop_avout, cprop_avin);
2995 /* Copy/constant propagation. */
2997 /* Maximum number of register uses in an insn that we handle. */
2998 #define MAX_USES 8
3000 /* Table of uses found in an insn.
3001 Allocated statically to avoid alloc/free complexity and overhead. */
3002 static struct reg_use reg_use_table[MAX_USES];
3004 /* Index into `reg_use_table' while building it. */
3005 static int reg_use_count;
3007 /* Set up a list of register numbers used in INSN. The found uses are stored
3008 in `reg_use_table'. `reg_use_count' is initialized to zero before entry,
3009 and contains the number of uses in the table upon exit.
3011 ??? If a register appears multiple times we will record it multiple times.
3012 This doesn't hurt anything but it will slow things down. */
3014 static void
3015 find_used_regs (rtx *xptr, void *data ATTRIBUTE_UNUSED)
3017 int i, j;
3018 enum rtx_code code;
3019 const char *fmt;
3020 rtx x = *xptr;
3022 /* repeat is used to turn tail-recursion into iteration since GCC
3023 can't do it when there's no return value. */
3024 repeat:
3025 if (x == 0)
3026 return;
3028 code = GET_CODE (x);
3029 if (REG_P (x))
3031 if (reg_use_count == MAX_USES)
3032 return;
3034 reg_use_table[reg_use_count].reg_rtx = x;
3035 reg_use_count++;
3038 /* Recursively scan the operands of this expression. */
3040 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
3042 if (fmt[i] == 'e')
3044 /* If we are about to do the last recursive call
3045 needed at this level, change it into iteration.
3046 This function is called enough to be worth it. */
3047 if (i == 0)
3049 x = XEXP (x, 0);
3050 goto repeat;
3053 find_used_regs (&XEXP (x, i), data);
3055 else if (fmt[i] == 'E')
3056 for (j = 0; j < XVECLEN (x, i); j++)
3057 find_used_regs (&XVECEXP (x, i, j), data);
3061 /* Try to replace all non-SET_DEST occurrences of FROM in INSN with TO.
3062 Returns nonzero if successful. */
3064 static int
3065 try_replace_reg (rtx from, rtx to, rtx insn)
3067 rtx note = find_reg_equal_equiv_note (insn);
3068 rtx src = 0;
3069 int success = 0;
3070 rtx set = single_set (insn);
3072 validate_replace_src_group (from, to, insn);
3073 if (num_changes_pending () && apply_change_group ())
3074 success = 1;
3076 /* Try to simplify SET_SRC if we have substituted a constant. */
3077 if (success && set && CONSTANT_P (to))
3079 src = simplify_rtx (SET_SRC (set));
3081 if (src)
3082 validate_change (insn, &SET_SRC (set), src, 0);
3085 /* If there is already a NOTE, update the expression in it with our
3086 replacement. */
3087 if (note != 0)
3088 XEXP (note, 0) = simplify_replace_rtx (XEXP (note, 0), from, to);
3090 if (!success && set && reg_mentioned_p (from, SET_SRC (set)))
3092 /* If above failed and this is a single set, try to simplify the source of
3093 the set given our substitution. We could perhaps try this for multiple
3094 SETs, but it probably won't buy us anything. */
3095 src = simplify_replace_rtx (SET_SRC (set), from, to);
3097 if (!rtx_equal_p (src, SET_SRC (set))
3098 && validate_change (insn, &SET_SRC (set), src, 0))
3099 success = 1;
3101 /* If we've failed to do replacement, have a single SET, don't already
3102 have a note, and have no special SET, add a REG_EQUAL note to not
3103 lose information. */
3104 if (!success && note == 0 && set != 0
3105 && GET_CODE (XEXP (set, 0)) != ZERO_EXTRACT
3106 && GET_CODE (XEXP (set, 0)) != SIGN_EXTRACT)
3107 note = set_unique_reg_note (insn, REG_EQUAL, copy_rtx (src));
3110 /* REG_EQUAL may get simplified into register.
3111 We don't allow that. Remove that note. This code ought
3112 not to happen, because previous code ought to synthesize
3113 reg-reg move, but be on the safe side. */
3114 if (note && REG_P (XEXP (note, 0)))
3115 remove_note (insn, note);
3117 return success;
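/* A hypothetical example of the note-keeping above: substituting
   (const_int 70000) for (reg 100) in
   (set (reg 101) (plus (reg 100) (reg 102))) can fail on a target whose
   add-immediate patterns only accept small constants.  In that case no
   insn change is made, but a REG_EQUAL note recording
   (plus (const_int 70000) (reg 102)) is attached so the known value is
   not lost.  */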
3120 /* Find a set of register REGNO that is available on entry to INSN's
3121 block. Returns NULL if no such set is found. */
3123 static struct expr *
3124 find_avail_set (int regno, rtx insn)
3126 /* SET1 contains the last set found that can be returned to the caller for
3127 use in a substitution. */
3128 struct expr *set1 = 0;
3130 /* Loops are not possible here. To get a loop we would need two sets
3131 available at the start of the block containing INSN, i.e. two sets
3132 like this available at the start of the block:
3134 (set (reg X) (reg Y))
3135 (set (reg Y) (reg X))
3137 This cannot happen since the set of (reg Y) would have killed the
3138 set of (reg X), making it unavailable at the start of this block. */
3139 while (1)
3141 rtx src;
3142 struct expr *set = lookup_set (regno, &set_hash_table);
3144 /* Find a set that is available at the start of the block
3145 which contains INSN. */
3146 while (set)
3148 if (TEST_BIT (cprop_avin[BLOCK_NUM (insn)], set->bitmap_index))
3149 break;
3150 set = next_set (regno, set);
3153 /* If no available set was found we've reached the end of the
3154 (possibly empty) copy chain. */
3155 if (set == 0)
3156 break;
3158 if (GET_CODE (set->expr) != SET)
3159 abort ();
3161 src = SET_SRC (set->expr);
3163 /* We know the set is available.
3164 Now check that SRC is ANTLOC (i.e. none of the source operands
3165 have changed since the start of the block).
3167 If the source operand changed, we may still use it for the next
3168 iteration of this loop, but we may not use it for substitutions. */
3170 if (gcse_constant_p (src) || oprs_not_set_p (src, insn))
3171 set1 = set;
3173 /* If the source of the set is anything except a register, then
3174 we have reached the end of the copy chain. */
3175 if (! REG_P (src))
3176 break;
3178 /* Follow the copy chain, i.e. start another iteration of the loop
3179 and see if we have an available copy into SRC. */
3180 regno = REGNO (src);
3183 /* SET1 holds the last set that was available and anticipatable at
3184 INSN. */
3185 return set1;
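/* An illustration of chain following (hypothetical registers): given

     (set (reg 102) (reg 101))
     (set (reg 103) (reg 102))

   and a use of reg 103, the first iteration finds the set of reg 103
   and then continues with reg 102.  If the copy from reg 101 is also
   available and its source is unchanged, SET1 ends up pointing at it,
   so the use of reg 103 can be replaced by reg 101 directly.  */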
3188 /* Subroutine of cprop_insn that tries to propagate constants into
3189 JUMP_INSNS. JUMP must be a conditional jump. If SETCC is non-NULL
3190 it is the instruction that immediately precedes JUMP, and must be a
3191 single SET of a register. FROM is what we will try to replace,
3192 SRC is the constant we will try to substitute for it. Returns nonzero
3193 if a change was made. */
3195 static int
3196 cprop_jump (basic_block bb, rtx setcc, rtx jump, rtx from, rtx src)
3198 rtx new, set_src, note_src;
3199 rtx set = pc_set (jump);
3200 rtx note = find_reg_equal_equiv_note (jump);
3202 if (note)
3204 note_src = XEXP (note, 0);
3205 if (GET_CODE (note_src) == EXPR_LIST)
3206 note_src = NULL_RTX;
3208 else note_src = NULL_RTX;
3210 /* Prefer REG_EQUAL notes except those containing EXPR_LISTs. */
3211 set_src = note_src ? note_src : SET_SRC (set);
3213 /* First substitute the SETCC condition into the JUMP instruction,
3214 then substitute that given values into this expanded JUMP. */
3215 if (setcc != NULL_RTX
3216 && !modified_between_p (from, setcc, jump)
3217 && !modified_between_p (src, setcc, jump))
3219 rtx setcc_src;
3220 rtx setcc_set = single_set (setcc);
3221 rtx setcc_note = find_reg_equal_equiv_note (setcc);
3222 setcc_src = (setcc_note && GET_CODE (XEXP (setcc_note, 0)) != EXPR_LIST)
3223 ? XEXP (setcc_note, 0) : SET_SRC (setcc_set);
3224 set_src = simplify_replace_rtx (set_src, SET_DEST (setcc_set),
3225 setcc_src);
3227 else
3228 setcc = NULL_RTX;
3230 new = simplify_replace_rtx (set_src, from, src);
3232 /* If no simplification can be made, then try the next register. */
3233 if (rtx_equal_p (new, SET_SRC (set)))
3234 return 0;
3236 /* If this is now a no-op delete it, otherwise this must be a valid insn. */
3237 if (new == pc_rtx)
3238 delete_insn (jump);
3239 else
3241 /* Ensure the value computed inside the jump insn to be equivalent
3242 to one computed by setcc. */
3243 if (setcc && modified_in_p (new, setcc))
3244 return 0;
3245 if (! validate_change (jump, &SET_SRC (set), new, 0))
3247 /* When (some) constants are not valid in a comparison, and there
3248 are two registers to be replaced by constants before the entire
3249 comparison can be folded into a constant, we need to keep
3250 intermediate information in REG_EQUAL notes. For targets with
3251 separate compare insns, such notes are added by try_replace_reg.
3252 When we have a combined compare-and-branch instruction, however,
3253 we need to attach a note to the branch itself to make this
3254 optimization work. */
3256 if (!rtx_equal_p (new, note_src))
3257 set_unique_reg_note (jump, REG_EQUAL, copy_rtx (new));
3258 return 0;
3261 /* Remove REG_EQUAL note after simplification. */
3262 if (note_src)
3263 remove_note (jump, note);
3265 /* If this has turned into an unconditional jump,
3266 then put a barrier after it so that the unreachable
3267 code will be deleted. */
3268 if (GET_CODE (SET_SRC (set)) == LABEL_REF)
3269 emit_barrier_after (jump);
3272 #ifdef HAVE_cc0
3273 /* Delete the cc0 setter. */
3274 if (setcc != NULL && CC0_P (SET_DEST (single_set (setcc))))
3275 delete_insn (setcc);
3276 #endif
3278 run_jump_opt_after_gcse = 1;
3280 global_const_prop_count++;
3281 if (gcse_file != NULL)
3283 fprintf (gcse_file,
3284 "GLOBAL CONST-PROP: Replacing reg %d in jump_insn %d with constant ",
3285 REGNO (from), INSN_UID (jump));
3286 print_rtl (gcse_file, src);
3287 fprintf (gcse_file, "\n");
3289 purge_dead_edges (bb);
3291 return 1;
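/* An example of the transformation above (hypothetical insns):

     setcc:  (set (reg 100) (eq (reg 101) (const_int 0)))
     jump:   (set (pc) (if_then_else (ne (reg 100) (const_int 0))
                                     (label_ref 23) (pc)))

   Propagating (const_int 0) for (reg 101) lets the expanded condition
   fold to a constant true, NEW becomes (label_ref 23), and the jump is
   rewritten as unconditional; had NEW folded to (pc) instead, the jump
   would simply have been deleted.  */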
3294 static bool
3295 constprop_register (rtx insn, rtx from, rtx to, int alter_jumps)
3297 rtx sset;
3299 /* Check for reg or cc0 setting instructions followed by
3300 conditional branch instructions first. */
3301 if (alter_jumps
3302 && (sset = single_set (insn)) != NULL
3303 && NEXT_INSN (insn)
3304 && any_condjump_p (NEXT_INSN (insn)) && onlyjump_p (NEXT_INSN (insn)))
3306 rtx dest = SET_DEST (sset);
3307 if ((REG_P (dest) || CC0_P (dest))
3308 && cprop_jump (BLOCK_FOR_INSN (insn), insn, NEXT_INSN (insn), from, to))
3309 return 1;
3312 /* Handle normal insns next. */
3313 if (NONJUMP_INSN_P (insn)
3314 && try_replace_reg (from, to, insn))
3315 return 1;
3317 /* Try to propagate a CONST_INT into a conditional jump.
3318 We're pretty specific about what we will handle in this
3319 code, we can extend this as necessary over time.
3321 Right now the insn in question must look like
3322 (set (pc) (if_then_else ...)) */
3323 else if (alter_jumps && any_condjump_p (insn) && onlyjump_p (insn))
3324 return cprop_jump (BLOCK_FOR_INSN (insn), NULL, insn, from, to);
3325 return 0;
3328 /* Perform constant and copy propagation on INSN.
3329 The result is nonzero if a change was made. */
3331 static int
3332 cprop_insn (rtx insn, int alter_jumps)
3334 struct reg_use *reg_used;
3335 int changed = 0;
3336 rtx note;
3338 if (!INSN_P (insn))
3339 return 0;
3341 reg_use_count = 0;
3342 note_uses (&PATTERN (insn), find_used_regs, NULL);
3344 note = find_reg_equal_equiv_note (insn);
3346 /* We may win even when propagating constants into notes. */
3347 if (note)
3348 find_used_regs (&XEXP (note, 0), NULL);
3350 for (reg_used = &reg_use_table[0]; reg_use_count > 0;
3351 reg_used++, reg_use_count--)
3353 unsigned int regno = REGNO (reg_used->reg_rtx);
3354 rtx pat, src;
3355 struct expr *set;
3357 /* Ignore registers created by GCSE; they have no entries in the
3358 dataflow tables, which were sized using max_gcse_regno before this pass. */
3359 if (regno >= max_gcse_regno)
3360 continue;
3362 /* If the register has already been set in this block, there's
3363 nothing we can do. */
3364 if (! oprs_not_set_p (reg_used->reg_rtx, insn))
3365 continue;
3367 /* Find an assignment that sets reg_used and is available
3368 at the start of the block. */
3369 set = find_avail_set (regno, insn);
3370 if (! set)
3371 continue;
3373 pat = set->expr;
3374 /* ??? We might be able to handle PARALLELs. Later. */
3375 if (GET_CODE (pat) != SET)
3376 abort ();
3378 src = SET_SRC (pat);
3380 /* Constant propagation. */
3381 if (gcse_constant_p (src))
3383 if (constprop_register (insn, reg_used->reg_rtx, src, alter_jumps))
3385 changed = 1;
3386 global_const_prop_count++;
3387 if (gcse_file != NULL)
3389 fprintf (gcse_file, "GLOBAL CONST-PROP: Replacing reg %d in ", regno);
3390 fprintf (gcse_file, "insn %d with constant ", INSN_UID (insn));
3391 print_rtl (gcse_file, src);
3392 fprintf (gcse_file, "\n");
3394 if (INSN_DELETED_P (insn))
3395 return 1;
3398 else if (REG_P (src)
3399 && REGNO (src) >= FIRST_PSEUDO_REGISTER
3400 && REGNO (src) != regno)
3402 if (try_replace_reg (reg_used->reg_rtx, src, insn))
3404 changed = 1;
3405 global_copy_prop_count++;
3406 if (gcse_file != NULL)
3408 fprintf (gcse_file, "GLOBAL COPY-PROP: Replacing reg %d in insn %d",
3409 regno, INSN_UID (insn));
3410 fprintf (gcse_file, " with reg %d\n", REGNO (src));
3413 /* The original insn setting reg_used may or may not now be
3414 deletable. We leave the deletion to flow. */
3415 /* FIXME: If it turns out that the insn isn't deletable,
3416 then we may have unnecessarily extended register lifetimes
3417 and made things worse. */
3422 return changed;
3425 /* Like find_used_regs, but avoid recording uses that appear in
3426 input-output contexts such as zero_extract or pre_dec. This
3427 restricts the cases we consider to those for which local cprop
3428 can legitimately make replacements. */
3430 static void
3431 local_cprop_find_used_regs (rtx *xptr, void *data)
3433 rtx x = *xptr;
3435 if (x == 0)
3436 return;
3438 switch (GET_CODE (x))
3440 case ZERO_EXTRACT:
3441 case SIGN_EXTRACT:
3442 case STRICT_LOW_PART:
3443 return;
3445 case PRE_DEC:
3446 case PRE_INC:
3447 case POST_DEC:
3448 case POST_INC:
3449 case PRE_MODIFY:
3450 case POST_MODIFY:
3451 /* Can only legitimately appear this early in the context of
3452 stack pushes for function arguments, but handle all of the
3453 codes nonetheless. */
3454 return;
3456 case SUBREG:
3457 /* Setting a subreg of a register larger than word_mode leaves
3458 the non-written words unchanged. */
3459 if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) > BITS_PER_WORD)
3460 return;
3461 break;
3463 default:
3464 break;
3467 find_used_regs (xptr, data);
3470 /* LIBCALL_SP is a zero-terminated array of insns at the end of a libcall;
3471 their REG_EQUAL notes need updating. */
3473 static bool
3474 do_local_cprop (rtx x, rtx insn, int alter_jumps, rtx *libcall_sp)
3476 rtx newreg = NULL, newcnst = NULL;
3478 /* Rule out USE instructions and ASM statements as we don't want to
3479 change the hard registers mentioned. */
3480 if (REG_P (x)
3481 && (REGNO (x) >= FIRST_PSEUDO_REGISTER
3482 || (GET_CODE (PATTERN (insn)) != USE
3483 && asm_noperands (PATTERN (insn)) < 0)))
3485 cselib_val *val = cselib_lookup (x, GET_MODE (x), 0);
3486 struct elt_loc_list *l;
3488 if (!val)
3489 return false;
3490 for (l = val->locs; l; l = l->next)
3492 rtx this_rtx = l->loc;
3493 rtx note;
3495 if (l->in_libcall)
3496 continue;
3498 if (gcse_constant_p (this_rtx))
3499 newcnst = this_rtx;
3500 if (REG_P (this_rtx) && REGNO (this_rtx) >= FIRST_PSEUDO_REGISTER
3501 /* Don't copy propagate if it has an attached REG_EQUIV note.
3502 At this point only function parameters should have
3503 REG_EQUIV notes, and if the argument slot is used somewhere
3504 explicitly, it means the address of the parameter has been taken,
3505 so we should not extend the lifetime of the pseudo. */
3506 && (!(note = find_reg_note (l->setting_insn, REG_EQUIV, NULL_RTX))
3507 || ! MEM_P (XEXP (note, 0))))
3508 newreg = this_rtx;
3510 if (newcnst && constprop_register (insn, x, newcnst, alter_jumps))
3512 /* If we find a case where we can't make the retval REG_EQUAL notes
3513 match the new register, we either have to abandon this replacement
3514 or fix delete_trivially_dead_insns to preserve the setting insn,
3515 or make it delete the REG_EQUAL note, and fix up all passes that
3516 require the REG_EQUAL note there. */
3517 if (!adjust_libcall_notes (x, newcnst, insn, libcall_sp))
3518 abort ();
3519 if (gcse_file != NULL)
3521 fprintf (gcse_file, "LOCAL CONST-PROP: Replacing reg %d in ",
3522 REGNO (x));
3523 fprintf (gcse_file, "insn %d with constant ",
3524 INSN_UID (insn));
3525 print_rtl (gcse_file, newcnst);
3526 fprintf (gcse_file, "\n");
3528 local_const_prop_count++;
3529 return true;
3531 else if (newreg && newreg != x && try_replace_reg (x, newreg, insn))
3533 adjust_libcall_notes (x, newreg, insn, libcall_sp);
3534 if (gcse_file != NULL)
3536 fprintf (gcse_file,
3537 "LOCAL COPY-PROP: Replacing reg %d in insn %d",
3538 REGNO (x), INSN_UID (insn));
3539 fprintf (gcse_file, " with reg %d\n", REGNO (newreg));
3541 local_copy_prop_count++;
3542 return true;
3545 return false;
3548 /* LIBCALL_SP is a zero-terminated array of insns at the end of a libcall;
3549 their REG_EQUAL notes need updating to reflect that OLDREG has been
3550 replaced with NEWVAL in INSN. Return true if all substitutions could
3551 be made. */
3552 static bool
3553 adjust_libcall_notes (rtx oldreg, rtx newval, rtx insn, rtx *libcall_sp)
3555 rtx end;
3557 while ((end = *libcall_sp++))
3559 rtx note = find_reg_equal_equiv_note (end);
3561 if (! note)
3562 continue;
3564 if (REG_P (newval))
3566 if (reg_set_between_p (newval, PREV_INSN (insn), end))
3568 do
3570 note = find_reg_equal_equiv_note (end);
3571 if (! note)
3572 continue;
3573 if (reg_mentioned_p (newval, XEXP (note, 0)))
3574 return false;
3576 while ((end = *libcall_sp++));
3577 return true;
3580 XEXP (note, 0) = replace_rtx (XEXP (note, 0), oldreg, newval);
3581 insn = end;
3583 return true;
3586 #define MAX_NESTED_LIBCALLS 9
3588 static void
3589 local_cprop_pass (int alter_jumps)
3591 rtx insn;
3592 struct reg_use *reg_used;
3593 rtx libcall_stack[MAX_NESTED_LIBCALLS + 1], *libcall_sp;
3594 bool changed = false;
3596 cselib_init (false);
3597 libcall_sp = &libcall_stack[MAX_NESTED_LIBCALLS];
3598 *libcall_sp = 0;
3599 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3601 if (INSN_P (insn))
3603 rtx note = find_reg_note (insn, REG_LIBCALL, NULL_RTX);
3605 if (note)
3607 if (libcall_sp == libcall_stack)
3608 abort ();
3609 *--libcall_sp = XEXP (note, 0);
3611 note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
3612 if (note)
3613 libcall_sp++;
3614 note = find_reg_equal_equiv_note (insn);
3617 reg_use_count = 0;
3618 note_uses (&PATTERN (insn), local_cprop_find_used_regs, NULL);
3619 if (note)
3620 local_cprop_find_used_regs (&XEXP (note, 0), NULL);
3622 for (reg_used = &reg_use_table[0]; reg_use_count > 0;
3623 reg_used++, reg_use_count--)
3624 if (do_local_cprop (reg_used->reg_rtx, insn, alter_jumps,
3625 libcall_sp))
3627 changed = true;
3628 break;
3630 if (INSN_DELETED_P (insn))
3631 break;
3633 while (reg_use_count);
3635 cselib_process_insn (insn);
3637 cselib_finish ();
3638 /* Global analysis may get into infinite loops for unreachable blocks. */
3639 if (changed && alter_jumps)
3641 delete_unreachable_blocks ();
3642 free_reg_set_mem ();
3643 alloc_reg_set_mem (max_reg_num ());
3644 compute_sets (get_insns ());
3648 /* Forward propagate copies. This includes copies and constants. Return
3649 nonzero if a change was made. */
3651 static int
3652 cprop (int alter_jumps)
3654 int changed;
3655 basic_block bb;
3656 rtx insn;
3658 /* Note we start at block 1. */
3659 if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
3661 if (gcse_file != NULL)
3662 fprintf (gcse_file, "\n");
3663 return 0;
3666 changed = 0;
3667 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb->next_bb, EXIT_BLOCK_PTR, next_bb)
3669 /* Reset tables used to keep track of what's still valid [since the
3670 start of the block]. */
3671 reset_opr_set_tables ();
3673 for (insn = BB_HEAD (bb);
3674 insn != NULL && insn != NEXT_INSN (BB_END (bb));
3675 insn = NEXT_INSN (insn))
3676 if (INSN_P (insn))
3678 changed |= cprop_insn (insn, alter_jumps);
3680 /* Keep track of everything modified by this insn. */
3681 /* ??? Need to be careful w.r.t. mods done to INSN. Don't
3682 call mark_oprs_set if we turned the insn into a NOTE. */
3683 if (! NOTE_P (insn))
3684 mark_oprs_set (insn);
3688 if (gcse_file != NULL)
3689 fprintf (gcse_file, "\n");
3691 return changed;
3694 /* Similar to get_condition, only the resulting condition must be
3695 valid at JUMP, instead of at EARLIEST.
3697 This differs from noce_get_condition in ifcvt.c in that we prefer not to
3698 settle for the condition variable in the jump instruction being integral.
3699 We prefer to be able to record the value of a user variable, rather than
3700 the value of a temporary used in a condition. This could be solved by
3701 recording the value of *every* register scanned by canonicalize_condition,
3702 but this would require some code reorganization. */
3704 static rtx
3705 fis_get_condition (rtx jump)
3707 return get_condition (jump, NULL, false, true);
3710 /* Check the comparison COND to see if we can safely form an implicit set from
3711 it. COND is either an EQ or NE comparison. */
3713 static bool
3714 implicit_set_cond_p (rtx cond)
3716 enum machine_mode mode = GET_MODE (XEXP (cond, 0));
3717 rtx cst = XEXP (cond, 1);
3719 /* We can't perform this optimization if either operand might be or might
3720 contain a signed zero. */
3721 if (HONOR_SIGNED_ZEROS (mode))
3723 /* It is sufficient to check if CST is or contains a zero. We must
3724 handle float, complex, and vector. If any subpart is a zero, then
3725 the optimization can't be performed. */
3726 /* ??? The complex and vector checks are not implemented yet. We just
3727 always return zero for them. */
3728 if (GET_CODE (cst) == CONST_DOUBLE)
3730 REAL_VALUE_TYPE d;
3731 REAL_VALUE_FROM_CONST_DOUBLE (d, cst);
3732 if (REAL_VALUES_EQUAL (d, dconst0))
3733 return 0;
3735 else
3736 return 0;
3739 return gcse_constant_p (cst);
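/* The signed zero restriction above matters because +0.0 and -0.0
   compare equal: after "if (x == 0.0)" the value of x may still be
   -0.0, so recording an implicit set of x to +0.0 would silently lose
   the sign of zero.  */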
3742 /* Find the implicit sets of a function. An "implicit set" is a constraint
3743 on the value of a variable, implied by a conditional jump. For example,
3744 following "if (x == 2)", the then branch may be optimized as though the
3745 conditional performed an "explicit set", in this example, "x = 2". This
3746 function records the set patterns that are implicit at the start of each
3747 basic block. */
3749 static void
3750 find_implicit_sets (void)
3752 basic_block bb, dest;
3753 unsigned int count;
3754 rtx cond, new;
3756 count = 0;
3757 FOR_EACH_BB (bb)
3758 /* Check for more than one successor. */
3759 if (bb->succ && bb->succ->succ_next)
3761 cond = fis_get_condition (BB_END (bb));
3763 if (cond
3764 && (GET_CODE (cond) == EQ || GET_CODE (cond) == NE)
3765 && REG_P (XEXP (cond, 0))
3766 && REGNO (XEXP (cond, 0)) >= FIRST_PSEUDO_REGISTER
3767 && implicit_set_cond_p (cond))
3769 dest = GET_CODE (cond) == EQ ? BRANCH_EDGE (bb)->dest
3770 : FALLTHRU_EDGE (bb)->dest;
3772 if (dest && ! dest->pred->pred_next
3773 && dest != EXIT_BLOCK_PTR)
3775 new = gen_rtx_SET (VOIDmode, XEXP (cond, 0),
3776 XEXP (cond, 1));
3777 implicit_sets[dest->index] = new;
3778 if (gcse_file)
3780 fprintf (gcse_file, "Implicit set of reg %d in ",
3781 REGNO (XEXP (cond, 0)));
3782 fprintf (gcse_file, "basic block %d\n", dest->index);
3784 count++;
3789 if (gcse_file)
3790 fprintf (gcse_file, "Found %d implicit sets\n", count);
3793 /* Perform one copy/constant propagation pass.
3794 PASS is the pass count. If CPROP_JUMPS is true, perform constant
3795 propagation into conditional jumps. If BYPASS_JUMPS is true,
3796 perform conditional jump bypassing optimizations. */
3798 static int
3799 one_cprop_pass (int pass, int cprop_jumps, int bypass_jumps)
3801 int changed = 0;
3803 global_const_prop_count = local_const_prop_count = 0;
3804 global_copy_prop_count = local_copy_prop_count = 0;
3806 local_cprop_pass (cprop_jumps);
3808 /* Determine implicit sets. */
3809 implicit_sets = xcalloc (last_basic_block, sizeof (rtx));
3810 find_implicit_sets ();
3812 alloc_hash_table (max_cuid, &set_hash_table, 1);
3813 compute_hash_table (&set_hash_table);
3815 /* Free implicit_sets before peak usage. */
3816 free (implicit_sets);
3817 implicit_sets = NULL;
3819 if (gcse_file)
3820 dump_hash_table (gcse_file, "SET", &set_hash_table);
3821 if (set_hash_table.n_elems > 0)
3823 alloc_cprop_mem (last_basic_block, set_hash_table.n_elems);
3824 compute_cprop_data ();
3825 changed = cprop (cprop_jumps);
3826 if (bypass_jumps)
3827 changed |= bypass_conditional_jumps ();
3828 free_cprop_mem ();
3831 free_hash_table (&set_hash_table);
3833 if (gcse_file)
3835 fprintf (gcse_file, "CPROP of %s, pass %d: %d bytes needed, ",
3836 current_function_name (), pass, bytes_used);
3837 fprintf (gcse_file, "%d local const props, %d local copy props\n\n",
3838 local_const_prop_count, local_copy_prop_count);
3839 fprintf (gcse_file, "%d global const props, %d global copy props\n\n",
3840 global_const_prop_count, global_copy_prop_count);
3842 /* Global analysis may get into infinite loops for unreachable blocks. */
3843 if (changed && cprop_jumps)
3844 delete_unreachable_blocks ();
3846 return changed;
3849 /* Bypass conditional jumps. */
3851 /* The value of last_basic_block at the beginning of the jump_bypass
3852 pass. The use of redirect_edge_and_branch_force may introduce new
3853 basic blocks, but the data flow analysis is only valid for basic
3854 block indices less than bypass_last_basic_block. */
3856 static int bypass_last_basic_block;
3858 /* Find a set of REGNO to a constant that is available at the end of basic
3859 block BB. Returns NULL if no such set is found. Based heavily upon
3860 find_avail_set. */
3862 static struct expr *
3863 find_bypass_set (int regno, int bb)
3865 struct expr *result = 0;
3867 for (;;)
3869 rtx src;
3870 struct expr *set = lookup_set (regno, &set_hash_table);
3872 while (set)
3874 if (TEST_BIT (cprop_avout[bb], set->bitmap_index))
3875 break;
3876 set = next_set (regno, set);
3879 if (set == 0)
3880 break;
3882 if (GET_CODE (set->expr) != SET)
3883 abort ();
3885 src = SET_SRC (set->expr);
3886 if (gcse_constant_p (src))
3887 result = set;
3889 if (! REG_P (src))
3890 break;
3892 regno = REGNO (src);
3894 return result;
3898 /* Subroutine of bypass_block that checks whether a pseudo is killed by
3899 any of the instructions inserted on an edge. Jump bypassing places
3900 condition code setters on CFG edges using insert_insn_on_edge. This
3901 function is required to check that our data flow analysis is still
3902 valid prior to commit_edge_insertions. */
3904 static bool
3905 reg_killed_on_edge (rtx reg, edge e)
3907 rtx insn;
3909 for (insn = e->insns.r; insn; insn = NEXT_INSN (insn))
3910 if (INSN_P (insn) && reg_set_p (reg, insn))
3911 return true;
3913 return false;
3916 /* Subroutine of bypass_conditional_jumps that attempts to bypass the given
3917 basic block BB which has more than one predecessor. If not NULL, SETCC
3918 is the first instruction of BB, which is immediately followed by JUMP_INSN
3919 JUMP. Otherwise, SETCC is NULL, and JUMP is the first insn of BB.
3920 Returns nonzero if a change was made.
3922 During the jump bypassing pass, we may place copies of SETCC instructions
3923 on CFG edges. The following routine must be careful to pay attention to
3924 these inserted insns when performing its transformations. */
3926 static int
3927 bypass_block (basic_block bb, rtx setcc, rtx jump)
3929 rtx insn, note;
3930 edge e, enext, edest;
3931 int i, change;
3932 int may_be_loop_header;
3934 insn = (setcc != NULL) ? setcc : jump;
3936 /* Determine set of register uses in INSN. */
3937 reg_use_count = 0;
3938 note_uses (&PATTERN (insn), find_used_regs, NULL);
3939 note = find_reg_equal_equiv_note (insn);
3940 if (note)
3941 find_used_regs (&XEXP (note, 0), NULL);
3943 may_be_loop_header = false;
3944 for (e = bb->pred; e; e = e->pred_next)
3945 if (e->flags & EDGE_DFS_BACK)
3947 may_be_loop_header = true;
3948 break;
3951 change = 0;
3952 for (e = bb->pred; e; e = enext)
3954 enext = e->pred_next;
3955 if (e->flags & EDGE_COMPLEX)
3956 continue;
3958 /* We can't redirect edges from new basic blocks. */
3959 if (e->src->index >= bypass_last_basic_block)
3960 continue;
3962 /* The irreducible loops created by redirecting of edges entering the
3963 loop from outside would decrease effectiveness of some of the following
3964 optimizations, so prevent this. */
3965 if (may_be_loop_header
3966 && !(e->flags & EDGE_DFS_BACK))
3967 continue;
3969 for (i = 0; i < reg_use_count; i++)
3971 struct reg_use *reg_used = &reg_use_table[i];
3972 unsigned int regno = REGNO (reg_used->reg_rtx);
3973 basic_block dest, old_dest;
3974 struct expr *set;
3975 rtx src, new;
3977 if (regno >= max_gcse_regno)
3978 continue;
3980 set = find_bypass_set (regno, e->src->index);
3982 if (! set)
3983 continue;
3985 /* Check the data flow is valid after edge insertions. */
3986 if (e->insns.r && reg_killed_on_edge (reg_used->reg_rtx, e))
3987 continue;
3989 src = SET_SRC (pc_set (jump));
3991 if (setcc != NULL)
3992 src = simplify_replace_rtx (src,
3993 SET_DEST (PATTERN (setcc)),
3994 SET_SRC (PATTERN (setcc)));
3996 new = simplify_replace_rtx (src, reg_used->reg_rtx,
3997 SET_SRC (set->expr));
3999 /* Jump bypassing may have already placed instructions on
4000 edges of the CFG. We can't bypass an outgoing edge that
4001 has instructions associated with it, as these insns won't
4002 get executed if the incoming edge is redirected. */
4004 if (new == pc_rtx)
4006 edest = FALLTHRU_EDGE (bb);
4007 dest = edest->insns.r ? NULL : edest->dest;
4009 else if (GET_CODE (new) == LABEL_REF)
4011 dest = BLOCK_FOR_INSN (XEXP (new, 0));
4012 /* Don't bypass edges containing instructions. */
4013 for (edest = bb->succ; edest; edest = edest->succ_next)
4014 if (edest->dest == dest && edest->insns.r)
4016 dest = NULL;
4017 break;
4020 else
4021 dest = NULL;
4023 /* Avoid unification of the edge with other edges from original
4024 branch. We would end up emitting the instruction on "both"
4025 edges. */
4027 if (dest && setcc && !CC0_P (SET_DEST (PATTERN (setcc))))
4029 edge e2;
4030 for (e2 = e->src->succ; e2; e2 = e2->succ_next)
4031 if (e2->dest == dest)
4033 dest = NULL;
4034 break;
4038 old_dest = e->dest;
4039 if (dest != NULL
4040 && dest != old_dest
4041 && dest != EXIT_BLOCK_PTR)
4043 redirect_edge_and_branch_force (e, dest);
4045 /* Copy the register setter to the redirected edge.
4046 Don't copy CC0 setters, as CC0 is dead after jump. */
4047 if (setcc)
4049 rtx pat = PATTERN (setcc);
4050 if (!CC0_P (SET_DEST (pat)))
4051 insert_insn_on_edge (copy_insn (pat), e);
4054 if (gcse_file != NULL)
4056 fprintf (gcse_file, "JUMP-BYPASS: Proved reg %d "
4057 "in jump_insn %d equals constant ",
4058 regno, INSN_UID (jump));
4059 print_rtl (gcse_file, SET_SRC (set->expr));
4060 fprintf (gcse_file, "\nBypass edge from %d->%d to %d\n",
4061 e->src->index, old_dest->index, dest->index);
4063 change = 1;
4064 break;
4068 return change;
4071 /* Find basic blocks with more than one predecessor that only contain a
4072 single conditional jump. If the result of the comparison is known at
4073 compile-time from any incoming edge, redirect that edge to the
4074 appropriate target. Returns nonzero if a change was made.
4076 This function is now mis-named, because we also handle indirect jumps. */
4078 static int
4079 bypass_conditional_jumps (void)
4081 basic_block bb;
4082 int changed;
4083 rtx setcc;
4084 rtx insn;
4085 rtx dest;
4087 /* Note we start at block 1. */
4088 if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
4089 return 0;
4091 bypass_last_basic_block = last_basic_block;
4092 mark_dfs_back_edges ();
4094 changed = 0;
4095 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb->next_bb,
4096 EXIT_BLOCK_PTR, next_bb)
4098 /* Check for more than one predecessor. */
4099 if (bb->pred && bb->pred->pred_next)
4101 setcc = NULL_RTX;
4102 for (insn = BB_HEAD (bb);
4103 insn != NULL && insn != NEXT_INSN (BB_END (bb));
4104 insn = NEXT_INSN (insn))
4105 if (NONJUMP_INSN_P (insn))
4107 if (setcc)
4108 break;
4109 if (GET_CODE (PATTERN (insn)) != SET)
4110 break;
4112 dest = SET_DEST (PATTERN (insn));
4113 if (REG_P (dest) || CC0_P (dest))
4114 setcc = insn;
4115 else
4116 break;
4118 else if (JUMP_P (insn))
4120 if ((any_condjump_p (insn) || computed_jump_p (insn))
4121 && onlyjump_p (insn))
4122 changed |= bypass_block (bb, setcc, insn);
4123 break;
4125 else if (INSN_P (insn))
4126 break;
4130 /* If we bypassed any register setting insns, we inserted a
4131 copy on the redirected edge. These need to be committed. */
4132 if (changed)
4133 commit_edge_insertions();
4135 return changed;
4138 /* Compute PRE+LCM working variables. */
4140 /* Local properties of expressions. */
4141 /* Nonzero for expressions that are transparent in the block. */
4142 static sbitmap *transp;
4144 /* Nonzero for expressions that are transparent at the end of the block.
4145 This is only zero for expressions killed by abnormal critical edge
4146 created by a calls. */
4147 static sbitmap *transpout;
4149 /* Nonzero for expressions that are computed (available) in the block. */
4150 static sbitmap *comp;
4152 /* Nonzero for expressions that are locally anticipatable in the block. */
4153 static sbitmap *antloc;
4155 /* Nonzero for expressions where this block is an optimal computation
4156 point. */
4157 static sbitmap *pre_optimal;
4159 /* Nonzero for expressions which are redundant in a particular block. */
4160 static sbitmap *pre_redundant;
4162 /* Nonzero for expressions which should be inserted on a specific edge. */
4163 static sbitmap *pre_insert_map;
4165 /* Nonzero for expressions which should be deleted in a specific block. */
4166 static sbitmap *pre_delete_map;
4168 /* Contains the edge_list returned by pre_edge_lcm. */
4169 static struct edge_list *edge_list;
4171 /* Redundant insns. */
4172 static sbitmap pre_redundant_insns;
4174 /* Allocate vars used for PRE analysis. */
4176 static void
4177 alloc_pre_mem (int n_blocks, int n_exprs)
4179 transp = sbitmap_vector_alloc (n_blocks, n_exprs);
4180 comp = sbitmap_vector_alloc (n_blocks, n_exprs);
4181 antloc = sbitmap_vector_alloc (n_blocks, n_exprs);
4183 pre_optimal = NULL;
4184 pre_redundant = NULL;
4185 pre_insert_map = NULL;
4186 pre_delete_map = NULL;
4187 ae_kill = sbitmap_vector_alloc (n_blocks, n_exprs);
4189 /* pre_insert and pre_delete are allocated later. */
4192 /* Free vars used for PRE analysis. */
4194 static void
4195 free_pre_mem (void)
4197 sbitmap_vector_free (transp);
4198 sbitmap_vector_free (comp);
4200 /* ANTLOC and AE_KILL are freed just after pre_lcm finishes. */
4202 if (pre_optimal)
4203 sbitmap_vector_free (pre_optimal);
4204 if (pre_redundant)
4205 sbitmap_vector_free (pre_redundant);
4206 if (pre_insert_map)
4207 sbitmap_vector_free (pre_insert_map);
4208 if (pre_delete_map)
4209 sbitmap_vector_free (pre_delete_map);
4211 transp = comp = NULL;
4212 pre_optimal = pre_redundant = pre_insert_map = pre_delete_map = NULL;
4215 /* Top level routine to do the dataflow analysis needed by PRE. */
4217 static void
4218 compute_pre_data (void)
4220 sbitmap trapping_expr;
4221 basic_block bb;
4222 unsigned int ui;
4224 compute_local_properties (transp, comp, antloc, &expr_hash_table);
4225 sbitmap_vector_zero (ae_kill, last_basic_block);
4227 /* Collect expressions which might trap. */
4228 trapping_expr = sbitmap_alloc (expr_hash_table.n_elems);
4229 sbitmap_zero (trapping_expr);
4230 for (ui = 0; ui < expr_hash_table.size; ui++)
4232 struct expr *e;
4233 for (e = expr_hash_table.table[ui]; e != NULL; e = e->next_same_hash)
4234 if (may_trap_p (e->expr))
4235 SET_BIT (trapping_expr, e->bitmap_index);
4238 /* Compute ae_kill for each basic block using:
4240 ~(TRANSP | COMP)
4243 FOR_EACH_BB (bb)
4245 edge e;
4247 /* If the current block is the destination of an abnormal edge, we
4248 kill all trapping expressions because we won't be able to properly
4249 place the instruction on the edge. So make them neither
4250 anticipatable nor transparent. This is fairly conservative. */
4251 for (e = bb->pred; e ; e = e->pred_next)
4252 if (e->flags & EDGE_ABNORMAL)
4254 sbitmap_difference (antloc[bb->index], antloc[bb->index], trapping_expr);
4255 sbitmap_difference (transp[bb->index], transp[bb->index], trapping_expr);
4256 break;
4259 sbitmap_a_or_b (ae_kill[bb->index], transp[bb->index], comp[bb->index]);
4260 sbitmap_not (ae_kill[bb->index], ae_kill[bb->index]);
4263 edge_list = pre_edge_lcm (gcse_file, expr_hash_table.n_elems, transp, comp, antloc,
4264 ae_kill, &pre_insert_map, &pre_delete_map);
4265 sbitmap_vector_free (antloc);
4266 antloc = NULL;
4267 sbitmap_vector_free (ae_kill);
4268 ae_kill = NULL;
4269 sbitmap_free (trapping_expr);
4272 /* PRE utilities */
4274 /* Return nonzero if an occurrence of expression EXPR in OCCR_BB would reach
4275 block BB.
4277 VISITED is a pointer to a working buffer for tracking which BB's have
4278 been visited. It is NULL for the top-level call.
4280 We treat reaching expressions that go through blocks containing the same
4281 reaching expression as "not reaching". E.g. if EXPR is generated in blocks
4282 2 and 3, INSN is in block 4, and 2->3->4, we treat the expression in block
4283 2 as not reaching. The intent is to improve the probability of finding
4284 only one reaching expression and to reduce register lifetimes by picking
4285 the closest such expression. */
4287 static int
4288 pre_expr_reaches_here_p_work (basic_block occr_bb, struct expr *expr, basic_block bb, char *visited)
4290 edge pred;
4292 for (pred = bb->pred; pred != NULL; pred = pred->pred_next)
4294 basic_block pred_bb = pred->src;
4296 if (pred->src == ENTRY_BLOCK_PTR
4297 /* Has predecessor has already been visited? */
4298 || visited[pred_bb->index])
4299 ;/* Nothing to do. */
4301 /* Does this predecessor generate this expression? */
4302 else if (TEST_BIT (comp[pred_bb->index], expr->bitmap_index))
4304 /* Is this the occurrence we're looking for?
4305 Note that there's only one generating occurrence per block
4306 so we just need to check the block number. */
4307 if (occr_bb == pred_bb)
4308 return 1;
4310 visited[pred_bb->index] = 1;
4312 /* Ignore this predecessor if it kills the expression. */
4313 else if (! TEST_BIT (transp[pred_bb->index], expr->bitmap_index))
4314 visited[pred_bb->index] = 1;
4316 /* Neither gen nor kill. */
4317 else
4319 visited[pred_bb->index] = 1;
4320 if (pre_expr_reaches_here_p_work (occr_bb, expr, pred_bb, visited))
4321 return 1;
4325 /* All paths have been checked. */
4326 return 0;
4329 /* The wrapper for pre_expr_reaches_here_work that ensures that any
4330 memory allocated for that function is returned. */
4332 static int
4333 pre_expr_reaches_here_p (basic_block occr_bb, struct expr *expr, basic_block bb)
4335 int rval;
4336 char *visited = xcalloc (last_basic_block, 1);
4338 rval = pre_expr_reaches_here_p_work (occr_bb, expr, bb, visited);
4340 free (visited);
4341 return rval;
4345 /* Given an expr, generate RTL which we can insert at the end of a BB,
4346 or on an edge. Set the block number of any insns generated to
4347 the value of BB. */
4349 static rtx
4350 process_insert_insn (struct expr *expr)
4352 rtx reg = expr->reaching_reg;
4353 rtx exp = copy_rtx (expr->expr);
4354 rtx pat;
4356 start_sequence ();
4358 /* If the expression is something that's an operand, like a constant,
4359 just copy it to a register. */
4360 if (general_operand (exp, GET_MODE (reg)))
4361 emit_move_insn (reg, exp);
4363 /* Otherwise, make a new insn to compute this expression and make sure the
4364 insn will be recognized (this also adds any needed CLOBBERs). Copy the
4365 expression to make sure we don't have any sharing issues. */
4366 else if (insn_invalid_p (emit_insn (gen_rtx_SET (VOIDmode, reg, exp))))
4367 abort ();
4369 pat = get_insns ();
4370 end_sequence ();
4372 return pat;
4375 /* Add EXPR to the end of basic block BB.
4377 This is used by both the PRE and code hoisting.
4379 For PRE, we want to verify that the expr is either transparent
4380 or locally anticipatable in the target block. This check makes
4381 no sense for code hoisting. */
4383 static void
4384 insert_insn_end_bb (struct expr *expr, basic_block bb, int pre)
4386 rtx insn = BB_END (bb);
4387 rtx new_insn;
4388 rtx reg = expr->reaching_reg;
4389 int regno = REGNO (reg);
4390 rtx pat, pat_end;
4392 pat = process_insert_insn (expr);
4393 if (pat == NULL_RTX || ! INSN_P (pat))
4394 abort ();
4396 pat_end = pat;
4397 while (NEXT_INSN (pat_end) != NULL_RTX)
4398 pat_end = NEXT_INSN (pat_end);
4400 /* If the last insn is a jump, insert EXPR in front [taking care to
4401 handle cc0, etc. properly]. Similarly we need to care trapping
4402 instructions in presence of non-call exceptions. */
4404 if (JUMP_P (insn)
4405 || (NONJUMP_INSN_P (insn)
4406 && (bb->succ->succ_next || (bb->succ->flags & EDGE_ABNORMAL))))
4408 #ifdef HAVE_cc0
4409 rtx note;
4410 #endif
4411 /* It should always be the case that we can put these instructions
4412 anywhere in the basic block with performing PRE optimizations.
4413 Check this. */
4414 if (NONJUMP_INSN_P (insn) && pre
4415 && !TEST_BIT (antloc[bb->index], expr->bitmap_index)
4416 && !TEST_BIT (transp[bb->index], expr->bitmap_index))
4417 abort ();
4419 /* If this is a jump table, then we can't insert stuff here. Since
4420 we know the previous real insn must be the tablejump, we insert
4421 the new instruction just before the tablejump. */
4422 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
4423 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
4424 insn = prev_real_insn (insn);
4426 #ifdef HAVE_cc0
4427 /* FIXME: 'twould be nice to call prev_cc0_setter here but it aborts
4428 if cc0 isn't set. */
4429 note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
4430 if (note)
4431 insn = XEXP (note, 0);
4432 else
4434 rtx maybe_cc0_setter = prev_nonnote_insn (insn);
4435 if (maybe_cc0_setter
4436 && INSN_P (maybe_cc0_setter)
4437 && sets_cc0_p (PATTERN (maybe_cc0_setter)))
4438 insn = maybe_cc0_setter;
4440 #endif
4441 /* FIXME: What if something in cc0/jump uses value set in new insn? */
4442 new_insn = emit_insn_before (pat, insn);
4445 /* Likewise if the last insn is a call, as will happen in the presence
4446 of exception handling. */
4447 else if (CALL_P (insn)
4448 && (bb->succ->succ_next || (bb->succ->flags & EDGE_ABNORMAL)))
4450 /* Keeping in mind SMALL_REGISTER_CLASSES and parameters in registers,
4451 we search backward and place the instructions before the first
4452 parameter is loaded. Do this for everyone for consistency and a
4453 presumption that we'll get better code elsewhere as well.
4455 It should always be the case that we can put these instructions
4456 anywhere in the basic block with performing PRE optimizations.
4457 Check this. */
4459 if (pre
4460 && !TEST_BIT (antloc[bb->index], expr->bitmap_index)
4461 && !TEST_BIT (transp[bb->index], expr->bitmap_index))
4462 abort ();
4464 /* Since different machines initialize their parameter registers
4465 in different orders, assume nothing. Collect the set of all
4466 parameter registers. */
4467 insn = find_first_parameter_load (insn, BB_HEAD (bb));
4469 /* If we found all the parameter loads, then we want to insert
4470 before the first parameter load.
4472 If we did not find all the parameter loads, then we might have
4473 stopped on the head of the block, which could be a CODE_LABEL.
4474 If we inserted before the CODE_LABEL, then we would be putting
4475 the insn in the wrong basic block. In that case, put the insn
4476 after the CODE_LABEL. Also, respect NOTE_INSN_BASIC_BLOCK. */
4477 while (LABEL_P (insn)
4478 || NOTE_INSN_BASIC_BLOCK_P (insn))
4479 insn = NEXT_INSN (insn);
4481 new_insn = emit_insn_before (pat, insn);
4483 else
4484 new_insn = emit_insn_after (pat, insn);
4486 while (1)
4488 if (INSN_P (pat))
4490 add_label_notes (PATTERN (pat), new_insn);
4491 note_stores (PATTERN (pat), record_set_info, pat);
4493 if (pat == pat_end)
4494 break;
4495 pat = NEXT_INSN (pat);
4498 gcse_create_count++;
4500 if (gcse_file)
4502 fprintf (gcse_file, "PRE/HOIST: end of bb %d, insn %d, ",
4503 bb->index, INSN_UID (new_insn));
4504 fprintf (gcse_file, "copying expression %d to reg %d\n",
4505 expr->bitmap_index, regno);
4509 /* Insert partially redundant expressions on edges in the CFG to make
4510 the expressions fully redundant. */
4512 static int
4513 pre_edge_insert (struct edge_list *edge_list, struct expr **index_map)
4515 int e, i, j, num_edges, set_size, did_insert = 0;
4516 sbitmap *inserted;
4518 /* Where PRE_INSERT_MAP is nonzero, we add the expression on that edge
4519 if it reaches any of the deleted expressions. */
4521 set_size = pre_insert_map[0]->size;
4522 num_edges = NUM_EDGES (edge_list);
4523 inserted = sbitmap_vector_alloc (num_edges, expr_hash_table.n_elems);
4524 sbitmap_vector_zero (inserted, num_edges);
4526 for (e = 0; e < num_edges; e++)
4528 int indx;
4529 basic_block bb = INDEX_EDGE_PRED_BB (edge_list, e);
4531 for (i = indx = 0; i < set_size; i++, indx += SBITMAP_ELT_BITS)
4533 SBITMAP_ELT_TYPE insert = pre_insert_map[e]->elms[i];
4535 for (j = indx; insert && j < (int) expr_hash_table.n_elems; j++, insert >>= 1)
4536 if ((insert & 1) != 0 && index_map[j]->reaching_reg != NULL_RTX)
4538 struct expr *expr = index_map[j];
4539 struct occr *occr;
4541 /* Now look at each deleted occurrence of this expression. */
4542 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
4544 if (! occr->deleted_p)
4545 continue;
4547 /* Insert this expression on this edge if if it would
4548 reach the deleted occurrence in BB. */
4549 if (!TEST_BIT (inserted[e], j))
4551 rtx insn;
4552 edge eg = INDEX_EDGE (edge_list, e);
4554 /* We can't insert anything on an abnormal and
4555 critical edge, so we insert the insn at the end of
4556 the previous block. There are several alternatives
4557 detailed in Morgans book P277 (sec 10.5) for
4558 handling this situation. This one is easiest for
4559 now. */
4561 if ((eg->flags & EDGE_ABNORMAL) == EDGE_ABNORMAL)
4562 insert_insn_end_bb (index_map[j], bb, 0);
4563 else
4565 insn = process_insert_insn (index_map[j]);
4566 insert_insn_on_edge (insn, eg);
4569 if (gcse_file)
4571 fprintf (gcse_file, "PRE/HOIST: edge (%d,%d), ",
4572 bb->index,
4573 INDEX_EDGE_SUCC_BB (edge_list, e)->index);
4574 fprintf (gcse_file, "copy expression %d\n",
4575 expr->bitmap_index);
4578 update_ld_motion_stores (expr);
4579 SET_BIT (inserted[e], j);
4580 did_insert = 1;
4581 gcse_create_count++;
4588 sbitmap_vector_free (inserted);
4589 return did_insert;
4592 /* Copy the result of EXPR->EXPR generated by INSN to EXPR->REACHING_REG.
4593 Given "old_reg <- expr" (INSN), instead of adding after it
4594 reaching_reg <- old_reg
4595 it's better to do the following:
4596 reaching_reg <- expr
4597 old_reg <- reaching_reg
4598 because this way copy propagation can discover additional PRE
4599 opportunities. But if this fails, we try the old way.
4600 When "expr" is a store, i.e.
4601 given "MEM <- old_reg", instead of adding after it
4602 reaching_reg <- old_reg
4603 it's better to add it before as follows:
4604 reaching_reg <- old_reg
4605 MEM <- reaching_reg. */
4607 static void
4608 pre_insert_copy_insn (struct expr *expr, rtx insn)
4610 rtx reg = expr->reaching_reg;
4611 int regno = REGNO (reg);
4612 int indx = expr->bitmap_index;
4613 rtx pat = PATTERN (insn);
4614 rtx set, new_insn;
4615 rtx old_reg;
4616 int i;
4618 /* This block matches the logic in hash_scan_insn. */
4619 if (GET_CODE (pat) == SET)
4620 set = pat;
4621 else if (GET_CODE (pat) == PARALLEL)
4623 /* Search through the parallel looking for the set whose
4624 source was the expression that we're interested in. */
4625 set = NULL_RTX;
4626 for (i = 0; i < XVECLEN (pat, 0); i++)
4628 rtx x = XVECEXP (pat, 0, i);
4629 if (GET_CODE (x) == SET
4630 && expr_equiv_p (SET_SRC (x), expr->expr))
4632 set = x;
4633 break;
4637 else
4638 abort ();
4640 if (REG_P (SET_DEST (set)))
4642 old_reg = SET_DEST (set);
4643 /* Check if we can modify the set destination in the original insn. */
4644 if (validate_change (insn, &SET_DEST (set), reg, 0))
4646 new_insn = gen_move_insn (old_reg, reg);
4647 new_insn = emit_insn_after (new_insn, insn);
4649 /* Keep register set table up to date. */
4650 replace_one_set (REGNO (old_reg), insn, new_insn);
4651 record_one_set (regno, insn);
4653 else
4655 new_insn = gen_move_insn (reg, old_reg);
4656 new_insn = emit_insn_after (new_insn, insn);
4658 /* Keep register set table up to date. */
4659 record_one_set (regno, new_insn);
4662 else /* This is possible only in case of a store to memory. */
4664 old_reg = SET_SRC (set);
4665 new_insn = gen_move_insn (reg, old_reg);
4667 /* Check if we can modify the set source in the original insn. */
4668 if (validate_change (insn, &SET_SRC (set), reg, 0))
4669 new_insn = emit_insn_before (new_insn, insn);
4670 else
4671 new_insn = emit_insn_after (new_insn, insn);
4673 /* Keep register set table up to date. */
4674 record_one_set (regno, new_insn);
4677 gcse_create_count++;
4679 if (gcse_file)
4680 fprintf (gcse_file,
4681 "PRE: bb %d, insn %d, copy expression %d in insn %d to reg %d\n",
4682 BLOCK_NUM (insn), INSN_UID (new_insn), indx,
4683 INSN_UID (insn), regno);
4686 /* Copy available expressions that reach the redundant expression
4687 to `reaching_reg'. */
4689 static void
4690 pre_insert_copies (void)
4692 unsigned int i, added_copy;
4693 struct expr *expr;
4694 struct occr *occr;
4695 struct occr *avail;
4697 /* For each available expression in the table, copy the result to
4698 `reaching_reg' if the expression reaches a deleted one.
4700 ??? The current algorithm is rather brute force.
4701 Need to do some profiling. */
4703 for (i = 0; i < expr_hash_table.size; i++)
4704 for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash)
4706 /* If the basic block isn't reachable, PPOUT will be TRUE. However,
4707 we don't want to insert a copy here because the expression may not
4708 really be redundant. So only insert an insn if the expression was
4709 deleted. This test also avoids further processing if the
4710 expression wasn't deleted anywhere. */
4711 if (expr->reaching_reg == NULL)
4712 continue;
4714 /* Set when we add a copy for that expression. */
4715 added_copy = 0;
4717 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
4719 if (! occr->deleted_p)
4720 continue;
4722 for (avail = expr->avail_occr; avail != NULL; avail = avail->next)
4724 rtx insn = avail->insn;
4726 /* No need to handle this one if handled already. */
4727 if (avail->copied_p)
4728 continue;
4730 /* Don't handle this one if it's a redundant one. */
4731 if (TEST_BIT (pre_redundant_insns, INSN_CUID (insn)))
4732 continue;
4734 /* Or if the expression doesn't reach the deleted one. */
4735 if (! pre_expr_reaches_here_p (BLOCK_FOR_INSN (avail->insn),
4736 expr,
4737 BLOCK_FOR_INSN (occr->insn)))
4738 continue;
4740 added_copy = 1;
4742 /* Copy the result of avail to reaching_reg. */
4743 pre_insert_copy_insn (expr, insn);
4744 avail->copied_p = 1;
4748 if (added_copy)
4749 update_ld_motion_stores (expr);
4753 /* Emit move from SRC to DEST noting the equivalence with expression computed
4754 in INSN. */
4755 static rtx
4756 gcse_emit_move_after (rtx src, rtx dest, rtx insn)
4758 rtx new;
4759 rtx set = single_set (insn), set2;
4760 rtx note;
4761 rtx eqv;
4763 /* This should never fail since we're creating a reg->reg copy
4764 we've verified to be valid. */
4766 new = emit_insn_after (gen_move_insn (dest, src), insn);
4768 /* Note the equivalence for local CSE pass. */
4769 set2 = single_set (new);
4770 if (!set2 || !rtx_equal_p (SET_DEST (set2), dest))
4771 return new;
4772 if ((note = find_reg_equal_equiv_note (insn)))
4773 eqv = XEXP (note, 0);
4774 else
4775 eqv = SET_SRC (set);
4777 set_unique_reg_note (new, REG_EQUAL, copy_insn_1 (eqv));
4779 return new;
4782 /* Delete redundant computations.
4783 Deletion is done by changing the insn to copy the `reaching_reg' of
4784 the expression into the result of the SET. It is left to later passes
4785 (cprop, cse2, flow, combine, regmove) to propagate the copy or eliminate it.
4787 Returns nonzero if a change is made. */
4789 static int
4790 pre_delete (void)
4792 unsigned int i;
4793 int changed;
4794 struct expr *expr;
4795 struct occr *occr;
4797 changed = 0;
4798 for (i = 0; i < expr_hash_table.size; i++)
4799 for (expr = expr_hash_table.table[i];
4800 expr != NULL;
4801 expr = expr->next_same_hash)
4803 int indx = expr->bitmap_index;
4805 /* We only need to search antic_occr since we require
4806 ANTLOC != 0. */
4808 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
4810 rtx insn = occr->insn;
4811 rtx set;
4812 basic_block bb = BLOCK_FOR_INSN (insn);
4814 /* We only delete insns that have a single_set. */
4815 if (TEST_BIT (pre_delete_map[bb->index], indx)
4816 && (set = single_set (insn)) != 0)
4818 /* Create a pseudo-reg to store the result of reaching
4819 expressions into. Get the mode for the new pseudo from
4820 the mode of the original destination pseudo. */
4821 if (expr->reaching_reg == NULL)
4822 expr->reaching_reg
4823 = gen_reg_rtx (GET_MODE (SET_DEST (set)));
4825 gcse_emit_move_after (expr->reaching_reg, SET_DEST (set), insn);
4826 delete_insn (insn);
4827 occr->deleted_p = 1;
4828 SET_BIT (pre_redundant_insns, INSN_CUID (insn));
4829 changed = 1;
4830 gcse_subst_count++;
4832 if (gcse_file)
4834 fprintf (gcse_file,
4835 "PRE: redundant insn %d (expression %d) in ",
4836 INSN_UID (insn), indx);
4837 fprintf (gcse_file, "bb %d, reaching reg is %d\n",
4838 bb->index, REGNO (expr->reaching_reg));
4844 return changed;
4847 /* Perform GCSE optimizations using PRE.
4848 This is called by one_pre_gcse_pass after all the dataflow analysis
4849 has been done.
4851 This is based on the original Morel-Renvoise paper Fred Chow's thesis, and
4852 lazy code motion from Knoop, Ruthing and Steffen as described in Advanced
4853 Compiler Design and Implementation.
4855 ??? A new pseudo reg is created to hold the reaching expression. The nice
4856 thing about the classical approach is that it would try to use an existing
4857 reg. If the register can't be adequately optimized [i.e. we introduce
4858 reload problems], one could add a pass here to propagate the new register
4859 through the block.
4861 ??? We don't handle single sets in PARALLELs because we're [currently] not
4862 able to copy the rest of the parallel when we insert copies to create full
4863 redundancies from partial redundancies. However, there's no reason why we
4864 can't handle PARALLELs in the cases where there are no partial
4865 redundancies. */
4867 static int
4868 pre_gcse (void)
4870 unsigned int i;
4871 int did_insert, changed;
4872 struct expr **index_map;
4873 struct expr *expr;
4875 /* Compute a mapping from expression number (`bitmap_index') to
4876 hash table entry. */
4878 index_map = xcalloc (expr_hash_table.n_elems, sizeof (struct expr *));
4879 for (i = 0; i < expr_hash_table.size; i++)
4880 for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash)
4881 index_map[expr->bitmap_index] = expr;
4883 /* Reset bitmap used to track which insns are redundant. */
4884 pre_redundant_insns = sbitmap_alloc (max_cuid);
4885 sbitmap_zero (pre_redundant_insns);
4887 /* Delete the redundant insns first so that
4888 - we know what register to use for the new insns and for the other
4889 ones with reaching expressions
4890 - we know which insns are redundant when we go to create copies */
4892 changed = pre_delete ();
4894 did_insert = pre_edge_insert (edge_list, index_map);
4896 /* In other places with reaching expressions, copy the expression to the
4897 specially allocated pseudo-reg that reaches the redundant expr. */
4898 pre_insert_copies ();
4899 if (did_insert)
4901 commit_edge_insertions ();
4902 changed = 1;
4905 free (index_map);
4906 sbitmap_free (pre_redundant_insns);
4907 return changed;
4910 /* Top level routine to perform one PRE GCSE pass.
4912 Return nonzero if a change was made. */
4914 static int
4915 one_pre_gcse_pass (int pass)
4917 int changed = 0;
4919 gcse_subst_count = 0;
4920 gcse_create_count = 0;
4922 alloc_hash_table (max_cuid, &expr_hash_table, 0);
4923 add_noreturn_fake_exit_edges ();
4924 if (flag_gcse_lm)
4925 compute_ld_motion_mems ();
4927 compute_hash_table (&expr_hash_table);
4928 trim_ld_motion_mems ();
4929 if (gcse_file)
4930 dump_hash_table (gcse_file, "Expression", &expr_hash_table);
4932 if (expr_hash_table.n_elems > 0)
4934 alloc_pre_mem (last_basic_block, expr_hash_table.n_elems);
4935 compute_pre_data ();
4936 changed |= pre_gcse ();
4937 free_edge_list (edge_list);
4938 free_pre_mem ();
4941 free_ldst_mems ();
4942 remove_fake_exit_edges ();
4943 free_hash_table (&expr_hash_table);
4945 if (gcse_file)
4947 fprintf (gcse_file, "\nPRE GCSE of %s, pass %d: %d bytes needed, ",
4948 current_function_name (), pass, bytes_used);
4949 fprintf (gcse_file, "%d substs, %d insns created\n",
4950 gcse_subst_count, gcse_create_count);
4953 return changed;
4956 /* If X contains any LABEL_REF's, add REG_LABEL notes for them to INSN.
4957 If notes are added to an insn which references a CODE_LABEL, the
4958 LABEL_NUSES count is incremented. We have to add REG_LABEL notes,
4959 because the following loop optimization pass requires them. */
4961 /* ??? This is very similar to the loop.c add_label_notes function. We
4962 could probably share code here. */
4964 /* ??? If there was a jump optimization pass after gcse and before loop,
4965 then we would not need to do this here, because jump would add the
4966 necessary REG_LABEL notes. */
4968 static void
4969 add_label_notes (rtx x, rtx insn)
4971 enum rtx_code code = GET_CODE (x);
4972 int i, j;
4973 const char *fmt;
4975 if (code == LABEL_REF && !LABEL_REF_NONLOCAL_P (x))
4977 /* This code used to ignore labels that referred to dispatch tables to
4978 avoid flow generating (slightly) worse code.
4980 We no longer ignore such label references (see LABEL_REF handling in
4981 mark_jump_label for additional information). */
4983 REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_LABEL, XEXP (x, 0),
4984 REG_NOTES (insn));
4985 if (LABEL_P (XEXP (x, 0)))
4986 LABEL_NUSES (XEXP (x, 0))++;
4987 return;
4990 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
4992 if (fmt[i] == 'e')
4993 add_label_notes (XEXP (x, i), insn);
4994 else if (fmt[i] == 'E')
4995 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4996 add_label_notes (XVECEXP (x, i, j), insn);
5000 /* Compute transparent outgoing information for each block.
5002 An expression is transparent to an edge unless it is killed by
5003 the edge itself. This can only happen with abnormal control flow,
5004 when the edge is traversed through a call. This happens with
5005 non-local labels and exceptions.
5007 This would not be necessary if we split the edge. While this is
5008 normally impossible for abnormal critical edges, with some effort
5009 it should be possible with exception handling, since we still have
5010 control over which handler should be invoked. But due to increased
5011 EH table sizes, this may not be worthwhile. */
5013 static void
5014 compute_transpout (void)
5016 basic_block bb;
5017 unsigned int i;
5018 struct expr *expr;
5020 sbitmap_vector_ones (transpout, last_basic_block);
5022 FOR_EACH_BB (bb)
5024 /* Note that flow inserted a nop a the end of basic blocks that
5025 end in call instructions for reasons other than abnormal
5026 control flow. */
5027 if (! CALL_P (BB_END (bb)))
5028 continue;
5030 for (i = 0; i < expr_hash_table.size; i++)
5031 for (expr = expr_hash_table.table[i]; expr ; expr = expr->next_same_hash)
5032 if (MEM_P (expr->expr))
5034 if (GET_CODE (XEXP (expr->expr, 0)) == SYMBOL_REF
5035 && CONSTANT_POOL_ADDRESS_P (XEXP (expr->expr, 0)))
5036 continue;
5038 /* ??? Optimally, we would use interprocedural alias
5039 analysis to determine if this mem is actually killed
5040 by this call. */
5041 RESET_BIT (transpout[bb->index], expr->bitmap_index);
5046 /* Code Hoisting variables and subroutines. */
5048 /* Very busy expressions. */
5049 static sbitmap *hoist_vbein;
5050 static sbitmap *hoist_vbeout;
5052 /* Hoistable expressions. */
5053 static sbitmap *hoist_exprs;
5055 /* ??? We could compute post dominators and run this algorithm in
5056 reverse to perform tail merging, doing so would probably be
5057 more effective than the tail merging code in jump.c.
5059 It's unclear if tail merging could be run in parallel with
5060 code hoisting. It would be nice. */
5062 /* Allocate vars used for code hoisting analysis. */
5064 static void
5065 alloc_code_hoist_mem (int n_blocks, int n_exprs)
5067 antloc = sbitmap_vector_alloc (n_blocks, n_exprs);
5068 transp = sbitmap_vector_alloc (n_blocks, n_exprs);
5069 comp = sbitmap_vector_alloc (n_blocks, n_exprs);
5071 hoist_vbein = sbitmap_vector_alloc (n_blocks, n_exprs);
5072 hoist_vbeout = sbitmap_vector_alloc (n_blocks, n_exprs);
5073 hoist_exprs = sbitmap_vector_alloc (n_blocks, n_exprs);
5074 transpout = sbitmap_vector_alloc (n_blocks, n_exprs);
5077 /* Free vars used for code hoisting analysis. */
5079 static void
5080 free_code_hoist_mem (void)
5082 sbitmap_vector_free (antloc);
5083 sbitmap_vector_free (transp);
5084 sbitmap_vector_free (comp);
5086 sbitmap_vector_free (hoist_vbein);
5087 sbitmap_vector_free (hoist_vbeout);
5088 sbitmap_vector_free (hoist_exprs);
5089 sbitmap_vector_free (transpout);
5091 free_dominance_info (CDI_DOMINATORS);
5094 /* Compute the very busy expressions at entry/exit from each block.
5096 An expression is very busy if all paths from a given point
5097 compute the expression. */
5099 static void
5100 compute_code_hoist_vbeinout (void)
5102 int changed, passes;
5103 basic_block bb;
5105 sbitmap_vector_zero (hoist_vbeout, last_basic_block);
5106 sbitmap_vector_zero (hoist_vbein, last_basic_block);
5108 passes = 0;
5109 changed = 1;
5111 while (changed)
5113 changed = 0;
5115 /* We scan the blocks in the reverse order to speed up
5116 the convergence. */
5117 FOR_EACH_BB_REVERSE (bb)
5119 changed |= sbitmap_a_or_b_and_c_cg (hoist_vbein[bb->index], antloc[bb->index],
5120 hoist_vbeout[bb->index], transp[bb->index]);
5121 if (bb->next_bb != EXIT_BLOCK_PTR)
5122 sbitmap_intersection_of_succs (hoist_vbeout[bb->index], hoist_vbein, bb->index);
5125 passes++;
5128 if (gcse_file)
5129 fprintf (gcse_file, "hoisting vbeinout computation: %d passes\n", passes);
5132 /* Top level routine to do the dataflow analysis needed by code hoisting. */
5134 static void
5135 compute_code_hoist_data (void)
5137 compute_local_properties (transp, comp, antloc, &expr_hash_table);
5138 compute_transpout ();
5139 compute_code_hoist_vbeinout ();
5140 calculate_dominance_info (CDI_DOMINATORS);
5141 if (gcse_file)
5142 fprintf (gcse_file, "\n");
5145 /* Determine if the expression identified by EXPR_INDEX would
5146 reach BB unimpared if it was placed at the end of EXPR_BB.
5148 It's unclear exactly what Muchnick meant by "unimpared". It seems
5149 to me that the expression must either be computed or transparent in
5150 *every* block in the path(s) from EXPR_BB to BB. Any other definition
5151 would allow the expression to be hoisted out of loops, even if
5152 the expression wasn't a loop invariant.
5154 Contrast this to reachability for PRE where an expression is
5155 considered reachable if *any* path reaches instead of *all*
5156 paths. */
5158 static int
5159 hoist_expr_reaches_here_p (basic_block expr_bb, int expr_index, basic_block bb, char *visited)
5161 edge pred;
5162 int visited_allocated_locally = 0;
5165 if (visited == NULL)
5167 visited_allocated_locally = 1;
5168 visited = xcalloc (last_basic_block, 1);
5171 for (pred = bb->pred; pred != NULL; pred = pred->pred_next)
5173 basic_block pred_bb = pred->src;
5175 if (pred->src == ENTRY_BLOCK_PTR)
5176 break;
5177 else if (pred_bb == expr_bb)
5178 continue;
5179 else if (visited[pred_bb->index])
5180 continue;
5182 /* Does this predecessor generate this expression? */
5183 else if (TEST_BIT (comp[pred_bb->index], expr_index))
5184 break;
5185 else if (! TEST_BIT (transp[pred_bb->index], expr_index))
5186 break;
5188 /* Not killed. */
5189 else
5191 visited[pred_bb->index] = 1;
5192 if (! hoist_expr_reaches_here_p (expr_bb, expr_index,
5193 pred_bb, visited))
5194 break;
5197 if (visited_allocated_locally)
5198 free (visited);
5200 return (pred == NULL);
5203 /* Actually perform code hoisting. */
5205 static void
5206 hoist_code (void)
5208 basic_block bb, dominated;
5209 basic_block *domby;
5210 unsigned int domby_len;
5211 unsigned int i,j;
5212 struct expr **index_map;
5213 struct expr *expr;
5215 sbitmap_vector_zero (hoist_exprs, last_basic_block);
5217 /* Compute a mapping from expression number (`bitmap_index') to
5218 hash table entry. */
5220 index_map = xcalloc (expr_hash_table.n_elems, sizeof (struct expr *));
5221 for (i = 0; i < expr_hash_table.size; i++)
5222 for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash)
5223 index_map[expr->bitmap_index] = expr;
5225 /* Walk over each basic block looking for potentially hoistable
5226 expressions, nothing gets hoisted from the entry block. */
5227 FOR_EACH_BB (bb)
5229 int found = 0;
5230 int insn_inserted_p;
5232 domby_len = get_dominated_by (CDI_DOMINATORS, bb, &domby);
5233 /* Examine each expression that is very busy at the exit of this
5234 block. These are the potentially hoistable expressions. */
5235 for (i = 0; i < hoist_vbeout[bb->index]->n_bits; i++)
5237 int hoistable = 0;
5239 if (TEST_BIT (hoist_vbeout[bb->index], i)
5240 && TEST_BIT (transpout[bb->index], i))
5242 /* We've found a potentially hoistable expression, now
5243 we look at every block BB dominates to see if it
5244 computes the expression. */
5245 for (j = 0; j < domby_len; j++)
5247 dominated = domby[j];
5248 /* Ignore self dominance. */
5249 if (bb == dominated)
5250 continue;
5251 /* We've found a dominated block, now see if it computes
5252 the busy expression and whether or not moving that
5253 expression to the "beginning" of that block is safe. */
5254 if (!TEST_BIT (antloc[dominated->index], i))
5255 continue;
5257 /* Note if the expression would reach the dominated block
5258 unimpared if it was placed at the end of BB.
5260 Keep track of how many times this expression is hoistable
5261 from a dominated block into BB. */
5262 if (hoist_expr_reaches_here_p (bb, i, dominated, NULL))
5263 hoistable++;
5266 /* If we found more than one hoistable occurrence of this
5267 expression, then note it in the bitmap of expressions to
5268 hoist. It makes no sense to hoist things which are computed
5269 in only one BB, and doing so tends to pessimize register
5270 allocation. One could increase this value to try harder
5271 to avoid any possible code expansion due to register
5272 allocation issues; however experiments have shown that
5273 the vast majority of hoistable expressions are only movable
5274 from two successors, so raising this threshold is likely
5275 to nullify any benefit we get from code hoisting. */
5276 if (hoistable > 1)
5278 SET_BIT (hoist_exprs[bb->index], i);
5279 found = 1;
5283 /* If we found nothing to hoist, then quit now. */
5284 if (! found)
5286 free (domby);
5287 continue;
5290 /* Loop over all the hoistable expressions. */
5291 for (i = 0; i < hoist_exprs[bb->index]->n_bits; i++)
5293 /* We want to insert the expression into BB only once, so
5294 note when we've inserted it. */
5295 insn_inserted_p = 0;
5297 /* These tests should be the same as the tests above. */
5298 if (TEST_BIT (hoist_vbeout[bb->index], i))
5300 /* We've found a potentially hoistable expression, now
5301 we look at every block BB dominates to see if it
5302 computes the expression. */
5303 for (j = 0; j < domby_len; j++)
5305 dominated = domby[j];
5306 /* Ignore self dominance. */
5307 if (bb == dominated)
5308 continue;
5310 /* We've found a dominated block, now see if it computes
5311 the busy expression and whether or not moving that
5312 expression to the "beginning" of that block is safe. */
5313 if (!TEST_BIT (antloc[dominated->index], i))
5314 continue;
5316 /* The expression is computed in the dominated block and
5317 it would be safe to compute it at the start of the
5318 dominated block. Now we have to determine if the
5319 expression would reach the dominated block if it was
5320 placed at the end of BB. */
5321 if (hoist_expr_reaches_here_p (bb, i, dominated, NULL))
5323 struct expr *expr = index_map[i];
5324 struct occr *occr = expr->antic_occr;
5325 rtx insn;
5326 rtx set;
5328 /* Find the right occurrence of this expression. */
5329 while (BLOCK_FOR_INSN (occr->insn) != dominated && occr)
5330 occr = occr->next;
5332 /* Should never happen. */
5333 if (!occr)
5334 abort ();
5336 insn = occr->insn;
5338 set = single_set (insn);
5339 if (! set)
5340 abort ();
5342 /* Create a pseudo-reg to store the result of reaching
5343 expressions into. Get the mode for the new pseudo
5344 from the mode of the original destination pseudo. */
5345 if (expr->reaching_reg == NULL)
5346 expr->reaching_reg
5347 = gen_reg_rtx (GET_MODE (SET_DEST (set)));
5349 gcse_emit_move_after (expr->reaching_reg, SET_DEST (set), insn);
5350 delete_insn (insn);
5351 occr->deleted_p = 1;
5352 if (!insn_inserted_p)
5354 insert_insn_end_bb (index_map[i], bb, 0);
5355 insn_inserted_p = 1;
5361 free (domby);
5364 free (index_map);
5367 /* Top level routine to perform one code hoisting (aka unification) pass
5369 Return nonzero if a change was made. */
5371 static int
5372 one_code_hoisting_pass (void)
5374 int changed = 0;
5376 alloc_hash_table (max_cuid, &expr_hash_table, 0);
5377 compute_hash_table (&expr_hash_table);
5378 if (gcse_file)
5379 dump_hash_table (gcse_file, "Code Hosting Expressions", &expr_hash_table);
5381 if (expr_hash_table.n_elems > 0)
5383 alloc_code_hoist_mem (last_basic_block, expr_hash_table.n_elems);
5384 compute_code_hoist_data ();
5385 hoist_code ();
5386 free_code_hoist_mem ();
5389 free_hash_table (&expr_hash_table);
5391 return changed;
5394 /* Here we provide the things required to do store motion towards
5395 the exit. In order for this to be effective, gcse also needed to
5396 be taught how to move a load when it is kill only by a store to itself.
5398 int i;
5399 float a[10];
5401 void foo(float scale)
5403 for (i=0; i<10; i++)
5404 a[i] *= scale;
5407 'i' is both loaded and stored to in the loop. Normally, gcse cannot move
5408 the load out since its live around the loop, and stored at the bottom
5409 of the loop.
5411 The 'Load Motion' referred to and implemented in this file is
5412 an enhancement to gcse which when using edge based lcm, recognizes
5413 this situation and allows gcse to move the load out of the loop.
5415 Once gcse has hoisted the load, store motion can then push this
5416 load towards the exit, and we end up with no loads or stores of 'i'
5417 in the loop. */
5419 /* This will search the ldst list for a matching expression. If it
5420 doesn't find one, we create one and initialize it. */
5422 static struct ls_expr *
5423 ldst_entry (rtx x)
5425 int do_not_record_p = 0;
5426 struct ls_expr * ptr;
5427 unsigned int hash;
5429 hash = hash_expr_1 (x, GET_MODE (x), & do_not_record_p);
5431 for (ptr = pre_ldst_mems; ptr != NULL; ptr = ptr->next)
5432 if (ptr->hash_index == hash && expr_equiv_p (ptr->pattern, x))
5433 return ptr;
5435 ptr = xmalloc (sizeof (struct ls_expr));
5437 ptr->next = pre_ldst_mems;
5438 ptr->expr = NULL;
5439 ptr->pattern = x;
5440 ptr->pattern_regs = NULL_RTX;
5441 ptr->loads = NULL_RTX;
5442 ptr->stores = NULL_RTX;
5443 ptr->reaching_reg = NULL_RTX;
5444 ptr->invalid = 0;
5445 ptr->index = 0;
5446 ptr->hash_index = hash;
5447 pre_ldst_mems = ptr;
5449 return ptr;
5452 /* Free up an individual ldst entry. */
5454 static void
5455 free_ldst_entry (struct ls_expr * ptr)
5457 free_INSN_LIST_list (& ptr->loads);
5458 free_INSN_LIST_list (& ptr->stores);
5460 free (ptr);
5463 /* Free up all memory associated with the ldst list. */
5465 static void
5466 free_ldst_mems (void)
5468 while (pre_ldst_mems)
5470 struct ls_expr * tmp = pre_ldst_mems;
5472 pre_ldst_mems = pre_ldst_mems->next;
5474 free_ldst_entry (tmp);
5477 pre_ldst_mems = NULL;
5480 /* Dump debugging info about the ldst list. */
5482 static void
5483 print_ldst_list (FILE * file)
5485 struct ls_expr * ptr;
5487 fprintf (file, "LDST list: \n");
5489 for (ptr = first_ls_expr(); ptr != NULL; ptr = next_ls_expr (ptr))
5491 fprintf (file, " Pattern (%3d): ", ptr->index);
5493 print_rtl (file, ptr->pattern);
5495 fprintf (file, "\n Loads : ");
5497 if (ptr->loads)
5498 print_rtl (file, ptr->loads);
5499 else
5500 fprintf (file, "(nil)");
5502 fprintf (file, "\n Stores : ");
5504 if (ptr->stores)
5505 print_rtl (file, ptr->stores);
5506 else
5507 fprintf (file, "(nil)");
5509 fprintf (file, "\n\n");
5512 fprintf (file, "\n");
5515 /* Returns 1 if X is in the list of ldst only expressions. */
5517 static struct ls_expr *
5518 find_rtx_in_ldst (rtx x)
5520 struct ls_expr * ptr;
5522 for (ptr = pre_ldst_mems; ptr != NULL; ptr = ptr->next)
5523 if (expr_equiv_p (ptr->pattern, x) && ! ptr->invalid)
5524 return ptr;
5526 return NULL;
5529 /* Assign each element of the list of mems a monotonically increasing value. */
5531 static int
5532 enumerate_ldsts (void)
5534 struct ls_expr * ptr;
5535 int n = 0;
5537 for (ptr = pre_ldst_mems; ptr != NULL; ptr = ptr->next)
5538 ptr->index = n++;
5540 return n;
5543 /* Return first item in the list. */
5545 static inline struct ls_expr *
5546 first_ls_expr (void)
5548 return pre_ldst_mems;
5551 /* Return the next item in the list after the specified one. */
5553 static inline struct ls_expr *
5554 next_ls_expr (struct ls_expr * ptr)
5556 return ptr->next;
5559 /* Load Motion for loads which only kill themselves. */
5561 /* Return true if x is a simple MEM operation, with no registers or
5562 side effects. These are the types of loads we consider for the
5563 ld_motion list, otherwise we let the usual aliasing take care of it. */
5565 static int
5566 simple_mem (rtx x)
5568 if (! MEM_P (x))
5569 return 0;
5571 if (MEM_VOLATILE_P (x))
5572 return 0;
5574 if (GET_MODE (x) == BLKmode)
5575 return 0;
5577 /* If we are handling exceptions, we must be careful with memory references
5578 that may trap. If we are not, the behavior is undefined, so we may just
5579 continue. */
5580 if (flag_non_call_exceptions && may_trap_p (x))
5581 return 0;
5583 if (side_effects_p (x))
5584 return 0;
5586 /* Do not consider function arguments passed on stack. */
5587 if (reg_mentioned_p (stack_pointer_rtx, x))
5588 return 0;
5590 if (flag_float_store && FLOAT_MODE_P (GET_MODE (x)))
5591 return 0;
5593 return 1;
5596 /* Make sure there isn't a buried reference in this pattern anywhere.
5597 If there is, invalidate the entry for it since we're not capable
5598 of fixing it up just yet.. We have to be sure we know about ALL
5599 loads since the aliasing code will allow all entries in the
5600 ld_motion list to not-alias itself. If we miss a load, we will get
5601 the wrong value since gcse might common it and we won't know to
5602 fix it up. */
5604 static void
5605 invalidate_any_buried_refs (rtx x)
5607 const char * fmt;
5608 int i, j;
5609 struct ls_expr * ptr;
5611 /* Invalidate it in the list. */
5612 if (MEM_P (x) && simple_mem (x))
5614 ptr = ldst_entry (x);
5615 ptr->invalid = 1;
5618 /* Recursively process the insn. */
5619 fmt = GET_RTX_FORMAT (GET_CODE (x));
5621 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
5623 if (fmt[i] == 'e')
5624 invalidate_any_buried_refs (XEXP (x, i));
5625 else if (fmt[i] == 'E')
5626 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
5627 invalidate_any_buried_refs (XVECEXP (x, i, j));
5631 /* Find all the 'simple' MEMs which are used in LOADs and STORES. Simple
5632 being defined as MEM loads and stores to symbols, with no side effects
5633 and no registers in the expression. For a MEM destination, we also
5634 check that the insn is still valid if we replace the destination with a
5635 REG, as is done in update_ld_motion_stores. If there are any uses/defs
5636 which don't match this criteria, they are invalidated and trimmed out
5637 later. */
5639 static void
5640 compute_ld_motion_mems (void)
5642 struct ls_expr * ptr;
5643 basic_block bb;
5644 rtx insn;
5646 pre_ldst_mems = NULL;
5648 FOR_EACH_BB (bb)
5650 for (insn = BB_HEAD (bb);
5651 insn && insn != NEXT_INSN (BB_END (bb));
5652 insn = NEXT_INSN (insn))
5654 if (INSN_P (insn))
5656 if (GET_CODE (PATTERN (insn)) == SET)
5658 rtx src = SET_SRC (PATTERN (insn));
5659 rtx dest = SET_DEST (PATTERN (insn));
5661 /* Check for a simple LOAD... */
5662 if (MEM_P (src) && simple_mem (src))
5664 ptr = ldst_entry (src);
5665 if (REG_P (dest))
5666 ptr->loads = alloc_INSN_LIST (insn, ptr->loads);
5667 else
5668 ptr->invalid = 1;
5670 else
5672 /* Make sure there isn't a buried load somewhere. */
5673 invalidate_any_buried_refs (src);
5676 /* Check for stores. Don't worry about aliased ones, they
5677 will block any movement we might do later. We only care
5678 about this exact pattern since those are the only
5679 circumstance that we will ignore the aliasing info. */
5680 if (MEM_P (dest) && simple_mem (dest))
5682 ptr = ldst_entry (dest);
5684 if (! MEM_P (src)
5685 && GET_CODE (src) != ASM_OPERANDS
5686 /* Check for REG manually since want_to_gcse_p
5687 returns 0 for all REGs. */
5688 && can_assign_to_reg_p (src))
5689 ptr->stores = alloc_INSN_LIST (insn, ptr->stores);
5690 else
5691 ptr->invalid = 1;
5694 else
5695 invalidate_any_buried_refs (PATTERN (insn));
5701 /* Remove any references that have been either invalidated or are not in the
5702 expression list for pre gcse. */
5704 static void
5705 trim_ld_motion_mems (void)
5707 struct ls_expr * * last = & pre_ldst_mems;
5708 struct ls_expr * ptr = pre_ldst_mems;
5710 while (ptr != NULL)
5712 struct expr * expr;
5714 /* Delete if entry has been made invalid. */
5715 if (! ptr->invalid)
5717 /* Delete if we cannot find this mem in the expression list. */
5718 unsigned int hash = ptr->hash_index % expr_hash_table.size;
5720 for (expr = expr_hash_table.table[hash];
5721 expr != NULL;
5722 expr = expr->next_same_hash)
5723 if (expr_equiv_p (expr->expr, ptr->pattern))
5724 break;
5726 else
5727 expr = (struct expr *) 0;
5729 if (expr)
5731 /* Set the expression field if we are keeping it. */
5732 ptr->expr = expr;
5733 last = & ptr->next;
5734 ptr = ptr->next;
5736 else
5738 *last = ptr->next;
5739 free_ldst_entry (ptr);
5740 ptr = * last;
5744 /* Show the world what we've found. */
5745 if (gcse_file && pre_ldst_mems != NULL)
5746 print_ldst_list (gcse_file);
5749 /* This routine will take an expression which we are replacing with
5750 a reaching register, and update any stores that are needed if
5751 that expression is in the ld_motion list. Stores are updated by
5752 copying their SRC to the reaching register, and then storing
5753 the reaching register into the store location. These keeps the
5754 correct value in the reaching register for the loads. */
5756 static void
5757 update_ld_motion_stores (struct expr * expr)
5759 struct ls_expr * mem_ptr;
5761 if ((mem_ptr = find_rtx_in_ldst (expr->expr)))
5763 /* We can try to find just the REACHED stores, but is shouldn't
5764 matter to set the reaching reg everywhere... some might be
5765 dead and should be eliminated later. */
5767 /* We replace (set mem expr) with (set reg expr) (set mem reg)
5768 where reg is the reaching reg used in the load. We checked in
5769 compute_ld_motion_mems that we can replace (set mem expr) with
5770 (set reg expr) in that insn. */
5771 rtx list = mem_ptr->stores;
5773 for ( ; list != NULL_RTX; list = XEXP (list, 1))
5775 rtx insn = XEXP (list, 0);
5776 rtx pat = PATTERN (insn);
5777 rtx src = SET_SRC (pat);
5778 rtx reg = expr->reaching_reg;
5779 rtx copy, new;
5781 /* If we've already copied it, continue. */
5782 if (expr->reaching_reg == src)
5783 continue;
5785 if (gcse_file)
5787 fprintf (gcse_file, "PRE: store updated with reaching reg ");
5788 print_rtl (gcse_file, expr->reaching_reg);
5789 fprintf (gcse_file, ":\n ");
5790 print_inline_rtx (gcse_file, insn, 8);
5791 fprintf (gcse_file, "\n");
5794 copy = gen_move_insn ( reg, copy_rtx (SET_SRC (pat)));
5795 new = emit_insn_before (copy, insn);
5796 record_one_set (REGNO (reg), new);
5797 SET_SRC (pat) = reg;
5799 /* un-recognize this pattern since it's probably different now. */
5800 INSN_CODE (insn) = -1;
5801 gcse_create_count++;
5806 /* Store motion code. */
5808 #define ANTIC_STORE_LIST(x) ((x)->loads)
5809 #define AVAIL_STORE_LIST(x) ((x)->stores)
5810 #define LAST_AVAIL_CHECK_FAILURE(x) ((x)->reaching_reg)
5812 /* This is used to communicate the target bitvector we want to use in the
5813 reg_set_info routine when called via the note_stores mechanism. */
5814 static int * regvec;
5816 /* And current insn, for the same routine. */
5817 static rtx compute_store_table_current_insn;
5819 /* Used in computing the reverse edge graph bit vectors. */
5820 static sbitmap * st_antloc;
5822 /* Global holding the number of store expressions we are dealing with. */
5823 static int num_stores;
5825 /* Checks to set if we need to mark a register set. Called from
5826 note_stores. */
5828 static void
5829 reg_set_info (rtx dest, rtx setter ATTRIBUTE_UNUSED,
5830 void *data)
5832 sbitmap bb_reg = data;
5834 if (GET_CODE (dest) == SUBREG)
5835 dest = SUBREG_REG (dest);
5837 if (REG_P (dest))
5839 regvec[REGNO (dest)] = INSN_UID (compute_store_table_current_insn);
5840 if (bb_reg)
5841 SET_BIT (bb_reg, REGNO (dest));
5845 /* Clear any mark that says that this insn sets dest. Called from
5846 note_stores. */
5848 static void
5849 reg_clear_last_set (rtx dest, rtx setter ATTRIBUTE_UNUSED,
5850 void *data)
5852 int *dead_vec = data;
5854 if (GET_CODE (dest) == SUBREG)
5855 dest = SUBREG_REG (dest);
5857 if (REG_P (dest) &&
5858 dead_vec[REGNO (dest)] == INSN_UID (compute_store_table_current_insn))
5859 dead_vec[REGNO (dest)] = 0;
5862 /* Return zero if some of the registers in list X are killed
5863 due to set of registers in bitmap REGS_SET. */
5865 static bool
5866 store_ops_ok (rtx x, int *regs_set)
5868 rtx reg;
5870 for (; x; x = XEXP (x, 1))
5872 reg = XEXP (x, 0);
5873 if (regs_set[REGNO(reg)])
5874 return false;
5877 return true;
5880 /* Returns a list of registers mentioned in X. */
5881 static rtx
5882 extract_mentioned_regs (rtx x)
5884 return extract_mentioned_regs_helper (x, NULL_RTX);
5887 /* Helper for extract_mentioned_regs; ACCUM is used to accumulate used
5888 registers. */
5889 static rtx
5890 extract_mentioned_regs_helper (rtx x, rtx accum)
5892 int i;
5893 enum rtx_code code;
5894 const char * fmt;
5896 /* Repeat is used to turn tail-recursion into iteration. */
5897 repeat:
5899 if (x == 0)
5900 return accum;
5902 code = GET_CODE (x);
5903 switch (code)
5905 case REG:
5906 return alloc_EXPR_LIST (0, x, accum);
5908 case MEM:
5909 x = XEXP (x, 0);
5910 goto repeat;
5912 case PRE_DEC:
5913 case PRE_INC:
5914 case POST_DEC:
5915 case POST_INC:
5916 /* We do not run this function with arguments having side effects. */
5917 abort ();
5919 case PC:
5920 case CC0: /*FIXME*/
5921 case CONST:
5922 case CONST_INT:
5923 case CONST_DOUBLE:
5924 case CONST_VECTOR:
5925 case SYMBOL_REF:
5926 case LABEL_REF:
5927 case ADDR_VEC:
5928 case ADDR_DIFF_VEC:
5929 return accum;
5931 default:
5932 break;
5935 i = GET_RTX_LENGTH (code) - 1;
5936 fmt = GET_RTX_FORMAT (code);
5938 for (; i >= 0; i--)
5940 if (fmt[i] == 'e')
5942 rtx tem = XEXP (x, i);
5944 /* If we are about to do the last recursive call
5945 needed at this level, change it into iteration. */
5946 if (i == 0)
5948 x = tem;
5949 goto repeat;
5952 accum = extract_mentioned_regs_helper (tem, accum);
5954 else if (fmt[i] == 'E')
5956 int j;
5958 for (j = 0; j < XVECLEN (x, i); j++)
5959 accum = extract_mentioned_regs_helper (XVECEXP (x, i, j), accum);
5963 return accum;
5966 /* Determine whether INSN is MEM store pattern that we will consider moving.
5967 REGS_SET_BEFORE is bitmap of registers set before (and including) the
5968 current insn, REGS_SET_AFTER is bitmap of registers set after (and
5969 including) the insn in this basic block. We must be passing through BB from
5970 head to end, as we are using this fact to speed things up.
5972 The results are stored this way:
5974 -- the first anticipatable expression is added into ANTIC_STORE_LIST
5975 -- if the processed expression is not anticipatable, NULL_RTX is added
5976 there instead, so that we can use it as indicator that no further
5977 expression of this type may be anticipatable
5978 -- if the expression is available, it is added as head of AVAIL_STORE_LIST;
5979 consequently, all of them but this head are dead and may be deleted.
5980 -- if the expression is not available, the insn due to that it fails to be
5981 available is stored in reaching_reg.
5983 The things are complicated a bit by fact that there already may be stores
5984 to the same MEM from other blocks; also caller must take care of the
5985 necessary cleanup of the temporary markers after end of the basic block.
5988 static void
5989 find_moveable_store (rtx insn, int *regs_set_before, int *regs_set_after)
5991 struct ls_expr * ptr;
5992 rtx dest, set, tmp;
5993 int check_anticipatable, check_available;
5994 basic_block bb = BLOCK_FOR_INSN (insn);
5996 set = single_set (insn);
5997 if (!set)
5998 return;
6000 dest = SET_DEST (set);
6002 if (! MEM_P (dest) || MEM_VOLATILE_P (dest)
6003 || GET_MODE (dest) == BLKmode)
6004 return;
6006 if (side_effects_p (dest))
6007 return;
6009 /* If we are handling exceptions, we must be careful with memory references
6010 that may trap. If we are not, the behavior is undefined, so we may just
6011 continue. */
6012 if (flag_non_call_exceptions && may_trap_p (dest))
6013 return;
6015 /* Even if the destination cannot trap, the source may. In this case we'd
6016 need to handle updating the REG_EH_REGION note. */
6017 if (find_reg_note (insn, REG_EH_REGION, NULL_RTX))
6018 return;
6020 ptr = ldst_entry (dest);
6021 if (!ptr->pattern_regs)
6022 ptr->pattern_regs = extract_mentioned_regs (dest);
6024 /* Do not check for anticipatability if we either found one anticipatable
6025 store already, or tested for one and found out that it was killed. */
6026 check_anticipatable = 0;
6027 if (!ANTIC_STORE_LIST (ptr))
6028 check_anticipatable = 1;
6029 else
6031 tmp = XEXP (ANTIC_STORE_LIST (ptr), 0);
6032 if (tmp != NULL_RTX
6033 && BLOCK_FOR_INSN (tmp) != bb)
6034 check_anticipatable = 1;
6036 if (check_anticipatable)
6038 if (store_killed_before (dest, ptr->pattern_regs, insn, bb, regs_set_before))
6039 tmp = NULL_RTX;
6040 else
6041 tmp = insn;
6042 ANTIC_STORE_LIST (ptr) = alloc_INSN_LIST (tmp,
6043 ANTIC_STORE_LIST (ptr));
6046 /* It is not necessary to check whether store is available if we did
6047 it successfully before; if we failed before, do not bother to check
6048 until we reach the insn that caused us to fail. */
6049 check_available = 0;
6050 if (!AVAIL_STORE_LIST (ptr))
6051 check_available = 1;
6052 else
6054 tmp = XEXP (AVAIL_STORE_LIST (ptr), 0);
6055 if (BLOCK_FOR_INSN (tmp) != bb)
6056 check_available = 1;
6058 if (check_available)
6060 /* Check that we have already reached the insn at that the check
6061 failed last time. */
6062 if (LAST_AVAIL_CHECK_FAILURE (ptr))
6064 for (tmp = BB_END (bb);
6065 tmp != insn && tmp != LAST_AVAIL_CHECK_FAILURE (ptr);
6066 tmp = PREV_INSN (tmp))
6067 continue;
6068 if (tmp == insn)
6069 check_available = 0;
6071 else
6072 check_available = store_killed_after (dest, ptr->pattern_regs, insn,
6073 bb, regs_set_after,
6074 &LAST_AVAIL_CHECK_FAILURE (ptr));
6076 if (!check_available)
6077 AVAIL_STORE_LIST (ptr) = alloc_INSN_LIST (insn, AVAIL_STORE_LIST (ptr));
6080 /* Find available and anticipatable stores. */
6082 static int
6083 compute_store_table (void)
6085 int ret;
6086 basic_block bb;
6087 unsigned regno;
6088 rtx insn, pat, tmp;
6089 int *last_set_in, *already_set;
6090 struct ls_expr * ptr, **prev_next_ptr_ptr;
6092 max_gcse_regno = max_reg_num ();
6094 reg_set_in_block = sbitmap_vector_alloc (last_basic_block,
6095 max_gcse_regno);
6096 sbitmap_vector_zero (reg_set_in_block, last_basic_block);
6097 pre_ldst_mems = 0;
6098 last_set_in = xcalloc (max_gcse_regno, sizeof (int));
6099 already_set = xmalloc (sizeof (int) * max_gcse_regno);
6101 /* Find all the stores we care about. */
6102 FOR_EACH_BB (bb)
6104 /* First compute the registers set in this block. */
6105 regvec = last_set_in;
6107 for (insn = BB_HEAD (bb);
6108 insn != NEXT_INSN (BB_END (bb));
6109 insn = NEXT_INSN (insn))
6111 if (! INSN_P (insn))
6112 continue;
6114 if (CALL_P (insn))
6116 bool clobbers_all = false;
6117 #ifdef NON_SAVING_SETJMP
6118 if (NON_SAVING_SETJMP
6119 && find_reg_note (insn, REG_SETJMP, NULL_RTX))
6120 clobbers_all = true;
6121 #endif
6123 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
6124 if (clobbers_all
6125 || TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
6127 last_set_in[regno] = INSN_UID (insn);
6128 SET_BIT (reg_set_in_block[bb->index], regno);
6132 pat = PATTERN (insn);
6133 compute_store_table_current_insn = insn;
6134 note_stores (pat, reg_set_info, reg_set_in_block[bb->index]);
6137 /* Now find the stores. */
6138 memset (already_set, 0, sizeof (int) * max_gcse_regno);
6139 regvec = already_set;
6140 for (insn = BB_HEAD (bb);
6141 insn != NEXT_INSN (BB_END (bb));
6142 insn = NEXT_INSN (insn))
6144 if (! INSN_P (insn))
6145 continue;
6147 if (CALL_P (insn))
6149 bool clobbers_all = false;
6150 #ifdef NON_SAVING_SETJMP
6151 if (NON_SAVING_SETJMP
6152 && find_reg_note (insn, REG_SETJMP, NULL_RTX))
6153 clobbers_all = true;
6154 #endif
6156 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
6157 if (clobbers_all
6158 || TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
6159 already_set[regno] = 1;
6162 pat = PATTERN (insn);
6163 note_stores (pat, reg_set_info, NULL);
6165 /* Now that we've marked regs, look for stores. */
6166 find_moveable_store (insn, already_set, last_set_in);
6168 /* Unmark regs that are no longer set. */
6169 compute_store_table_current_insn = insn;
6170 note_stores (pat, reg_clear_last_set, last_set_in);
6171 if (CALL_P (insn))
6173 bool clobbers_all = false;
6174 #ifdef NON_SAVING_SETJMP
6175 if (NON_SAVING_SETJMP
6176 && find_reg_note (insn, REG_SETJMP, NULL_RTX))
6177 clobbers_all = true;
6178 #endif
6180 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
6181 if ((clobbers_all
6182 || TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
6183 && last_set_in[regno] == INSN_UID (insn))
6184 last_set_in[regno] = 0;
6188 #ifdef ENABLE_CHECKING
6189 /* last_set_in should now be all-zero. */
6190 for (regno = 0; regno < max_gcse_regno; regno++)
6191 if (last_set_in[regno] != 0)
6192 abort ();
6193 #endif
6195 /* Clear temporary marks. */
6196 for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
6198 LAST_AVAIL_CHECK_FAILURE(ptr) = NULL_RTX;
6199 if (ANTIC_STORE_LIST (ptr)
6200 && (tmp = XEXP (ANTIC_STORE_LIST (ptr), 0)) == NULL_RTX)
6201 ANTIC_STORE_LIST (ptr) = XEXP (ANTIC_STORE_LIST (ptr), 1);
6205 /* Remove the stores that are not available anywhere, as there will
6206 be no opportunity to optimize them. */
6207 for (ptr = pre_ldst_mems, prev_next_ptr_ptr = &pre_ldst_mems;
6208 ptr != NULL;
6209 ptr = *prev_next_ptr_ptr)
6211 if (!AVAIL_STORE_LIST (ptr))
6213 *prev_next_ptr_ptr = ptr->next;
6214 free_ldst_entry (ptr);
6216 else
6217 prev_next_ptr_ptr = &ptr->next;
6220 ret = enumerate_ldsts ();
6222 if (gcse_file)
6224 fprintf (gcse_file, "ST_avail and ST_antic (shown under loads..)\n");
6225 print_ldst_list (gcse_file);
6228 free (last_set_in);
6229 free (already_set);
6230 return ret;
6233 /* Check to see if the load X is aliased with STORE_PATTERN.
6234 AFTER is true if we are checking the case when STORE_PATTERN occurs
6235 after the X. */
6237 static bool
6238 load_kills_store (rtx x, rtx store_pattern, int after)
6240 if (after)
6241 return anti_dependence (x, store_pattern);
6242 else
6243 return true_dependence (store_pattern, GET_MODE (store_pattern), x,
6244 rtx_addr_varies_p);
6247 /* Go through the entire insn X, looking for any loads which might alias
6248 STORE_PATTERN. Return true if found.
6249 AFTER is true if we are checking the case when STORE_PATTERN occurs
6250 after the insn X. */
6252 static bool
6253 find_loads (rtx x, rtx store_pattern, int after)
6255 const char * fmt;
6256 int i, j;
6257 int ret = false;
6259 if (!x)
6260 return false;
6262 if (GET_CODE (x) == SET)
6263 x = SET_SRC (x);
6265 if (MEM_P (x))
6267 if (load_kills_store (x, store_pattern, after))
6268 return true;
6271 /* Recursively process the insn. */
6272 fmt = GET_RTX_FORMAT (GET_CODE (x));
6274 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0 && !ret; i--)
6276 if (fmt[i] == 'e')
6277 ret |= find_loads (XEXP (x, i), store_pattern, after);
6278 else if (fmt[i] == 'E')
6279 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
6280 ret |= find_loads (XVECEXP (x, i, j), store_pattern, after);
6282 return ret;
6285 /* Check if INSN kills the store pattern X (is aliased with it).
6286 AFTER is true if we are checking the case when store X occurs
6287 after the insn. Return true if it it does. */
6289 static bool
6290 store_killed_in_insn (rtx x, rtx x_regs, rtx insn, int after)
6292 rtx reg, base, note;
6294 if (!INSN_P (insn))
6295 return false;
6297 if (CALL_P (insn))
6299 /* A normal or pure call might read from pattern,
6300 but a const call will not. */
6301 if (! CONST_OR_PURE_CALL_P (insn) || pure_call_p (insn))
6302 return true;
6304 /* But even a const call reads its parameters. Check whether the
6305 base of some of registers used in mem is stack pointer. */
6306 for (reg = x_regs; reg; reg = XEXP (reg, 1))
6308 base = find_base_term (XEXP (reg, 0));
6309 if (!base
6310 || (GET_CODE (base) == ADDRESS
6311 && GET_MODE (base) == Pmode
6312 && XEXP (base, 0) == stack_pointer_rtx))
6313 return true;
6316 return false;
6319 if (GET_CODE (PATTERN (insn)) == SET)
6321 rtx pat = PATTERN (insn);
6322 rtx dest = SET_DEST (pat);
6324 if (GET_CODE (dest) == SIGN_EXTRACT
6325 || GET_CODE (dest) == ZERO_EXTRACT)
6326 dest = XEXP (dest, 0);
6328 /* Check for memory stores to aliased objects. */
6329 if (MEM_P (dest)
6330 && !expr_equiv_p (dest, x))
6332 if (after)
6334 if (output_dependence (dest, x))
6335 return true;
6337 else
6339 if (output_dependence (x, dest))
6340 return true;
6343 if (find_loads (SET_SRC (pat), x, after))
6344 return true;
6346 else if (find_loads (PATTERN (insn), x, after))
6347 return true;
6349 /* If this insn has a REG_EQUAL or REG_EQUIV note referencing a memory
6350 location aliased with X, then this insn kills X. */
6351 note = find_reg_equal_equiv_note (insn);
6352 if (! note)
6353 return false;
6354 note = XEXP (note, 0);
6356 /* However, if the note represents a must alias rather than a may
6357 alias relationship, then it does not kill X. */
6358 if (expr_equiv_p (note, x))
6359 return false;
6361 /* See if there are any aliased loads in the note. */
6362 return find_loads (note, x, after);
6365 /* Returns true if the expression X is loaded or clobbered on or after INSN
6366 within basic block BB. REGS_SET_AFTER is bitmap of registers set in
6367 or after the insn. X_REGS is list of registers mentioned in X. If the store
6368 is killed, return the last insn in that it occurs in FAIL_INSN. */
6370 static bool
6371 store_killed_after (rtx x, rtx x_regs, rtx insn, basic_block bb,
6372 int *regs_set_after, rtx *fail_insn)
6374 rtx last = BB_END (bb), act;
6376 if (!store_ops_ok (x_regs, regs_set_after))
6378 /* We do not know where it will happen. */
6379 if (fail_insn)
6380 *fail_insn = NULL_RTX;
6381 return true;
6384 /* Scan from the end, so that fail_insn is determined correctly. */
6385 for (act = last; act != PREV_INSN (insn); act = PREV_INSN (act))
6386 if (store_killed_in_insn (x, x_regs, act, false))
6388 if (fail_insn)
6389 *fail_insn = act;
6390 return true;
6393 return false;
6396 /* Returns true if the expression X is loaded or clobbered on or before INSN
6397 within basic block BB. X_REGS is list of registers mentioned in X.
6398 REGS_SET_BEFORE is bitmap of registers set before or in this insn. */
6399 static bool
6400 store_killed_before (rtx x, rtx x_regs, rtx insn, basic_block bb,
6401 int *regs_set_before)
6403 rtx first = BB_HEAD (bb);
6405 if (!store_ops_ok (x_regs, regs_set_before))
6406 return true;
6408 for ( ; insn != PREV_INSN (first); insn = PREV_INSN (insn))
6409 if (store_killed_in_insn (x, x_regs, insn, true))
6410 return true;
6412 return false;
6415 /* Fill in available, anticipatable, transparent and kill vectors in
6416 STORE_DATA, based on lists of available and anticipatable stores. */
6417 static void
6418 build_store_vectors (void)
6420 basic_block bb;
6421 int *regs_set_in_block;
6422 rtx insn, st;
6423 struct ls_expr * ptr;
6424 unsigned regno;
6426 /* Build the gen_vector. This is any store in the table which is not killed
6427 by aliasing later in its block. */
6428 ae_gen = sbitmap_vector_alloc (last_basic_block, num_stores);
6429 sbitmap_vector_zero (ae_gen, last_basic_block);
6431 st_antloc = sbitmap_vector_alloc (last_basic_block, num_stores);
6432 sbitmap_vector_zero (st_antloc, last_basic_block);
6434 for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
6436 for (st = AVAIL_STORE_LIST (ptr); st != NULL; st = XEXP (st, 1))
6438 insn = XEXP (st, 0);
6439 bb = BLOCK_FOR_INSN (insn);
6441 /* If we've already seen an available expression in this block,
6442 we can delete this one (It occurs earlier in the block). We'll
6443 copy the SRC expression to an unused register in case there
6444 are any side effects. */
6445 if (TEST_BIT (ae_gen[bb->index], ptr->index))
6447 rtx r = gen_reg_rtx (GET_MODE (ptr->pattern));
6448 if (gcse_file)
6449 fprintf (gcse_file, "Removing redundant store:\n");
6450 replace_store_insn (r, XEXP (st, 0), bb, ptr);
6451 continue;
6453 SET_BIT (ae_gen[bb->index], ptr->index);
6456 for (st = ANTIC_STORE_LIST (ptr); st != NULL; st = XEXP (st, 1))
6458 insn = XEXP (st, 0);
6459 bb = BLOCK_FOR_INSN (insn);
6460 SET_BIT (st_antloc[bb->index], ptr->index);
6464 ae_kill = sbitmap_vector_alloc (last_basic_block, num_stores);
6465 sbitmap_vector_zero (ae_kill, last_basic_block);
6467 transp = sbitmap_vector_alloc (last_basic_block, num_stores);
6468 sbitmap_vector_zero (transp, last_basic_block);
6469 regs_set_in_block = xmalloc (sizeof (int) * max_gcse_regno);
6471 FOR_EACH_BB (bb)
6473 for (regno = 0; regno < max_gcse_regno; regno++)
6474 regs_set_in_block[regno] = TEST_BIT (reg_set_in_block[bb->index], regno);
6476 for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
6478 if (store_killed_after (ptr->pattern, ptr->pattern_regs, BB_HEAD (bb),
6479 bb, regs_set_in_block, NULL))
6481 /* It should not be necessary to consider the expression
6482 killed if it is both anticipatable and available. */
6483 if (!TEST_BIT (st_antloc[bb->index], ptr->index)
6484 || !TEST_BIT (ae_gen[bb->index], ptr->index))
6485 SET_BIT (ae_kill[bb->index], ptr->index);
6487 else
6488 SET_BIT (transp[bb->index], ptr->index);
6492 free (regs_set_in_block);
6494 if (gcse_file)
6496 dump_sbitmap_vector (gcse_file, "st_antloc", "", st_antloc, last_basic_block);
6497 dump_sbitmap_vector (gcse_file, "st_kill", "", ae_kill, last_basic_block);
6498 dump_sbitmap_vector (gcse_file, "Transpt", "", transp, last_basic_block);
6499 dump_sbitmap_vector (gcse_file, "st_avloc", "", ae_gen, last_basic_block);
6503 /* Insert an instruction at the beginning of a basic block, and update
6504 the BB_HEAD if needed. */
6506 static void
6507 insert_insn_start_bb (rtx insn, basic_block bb)
6509 /* Insert at start of successor block. */
6510 rtx prev = PREV_INSN (BB_HEAD (bb));
6511 rtx before = BB_HEAD (bb);
6512 while (before != 0)
6514 if (! LABEL_P (before)
6515 && (! NOTE_P (before)
6516 || NOTE_LINE_NUMBER (before) != NOTE_INSN_BASIC_BLOCK))
6517 break;
6518 prev = before;
6519 if (prev == BB_END (bb))
6520 break;
6521 before = NEXT_INSN (before);
6524 insn = emit_insn_after (insn, prev);
6526 if (gcse_file)
6528 fprintf (gcse_file, "STORE_MOTION insert store at start of BB %d:\n",
6529 bb->index);
6530 print_inline_rtx (gcse_file, insn, 6);
6531 fprintf (gcse_file, "\n");
6535 /* This routine will insert a store on an edge. EXPR is the ldst entry for
6536 the memory reference, and E is the edge to insert it on. Returns nonzero
6537 if an edge insertion was performed. */
6539 static int
6540 insert_store (struct ls_expr * expr, edge e)
6542 rtx reg, insn;
6543 basic_block bb;
6544 edge tmp;
6546 /* We did all the deleted before this insert, so if we didn't delete a
6547 store, then we haven't set the reaching reg yet either. */
6548 if (expr->reaching_reg == NULL_RTX)
6549 return 0;
6551 if (e->flags & EDGE_FAKE)
6552 return 0;
6554 reg = expr->reaching_reg;
6555 insn = gen_move_insn (copy_rtx (expr->pattern), reg);
6557 /* If we are inserting this expression on ALL predecessor edges of a BB,
6558 insert it at the start of the BB, and reset the insert bits on the other
6559 edges so we don't try to insert it on the other edges. */
6560 bb = e->dest;
6561 for (tmp = e->dest->pred; tmp ; tmp = tmp->pred_next)
6562 if (!(tmp->flags & EDGE_FAKE))
6564 int index = EDGE_INDEX (edge_list, tmp->src, tmp->dest);
6565 if (index == EDGE_INDEX_NO_EDGE)
6566 abort ();
6567 if (! TEST_BIT (pre_insert_map[index], expr->index))
6568 break;
6571 /* If tmp is NULL, we found an insertion on every edge, blank the
6572 insertion vector for these edges, and insert at the start of the BB. */
6573 if (!tmp && bb != EXIT_BLOCK_PTR)
6575 for (tmp = e->dest->pred; tmp ; tmp = tmp->pred_next)
6577 int index = EDGE_INDEX (edge_list, tmp->src, tmp->dest);
6578 RESET_BIT (pre_insert_map[index], expr->index);
6580 insert_insn_start_bb (insn, bb);
6581 return 0;
6584 /* We can't insert on this edge, so we'll insert at the head of the
6585 successors block. See Morgan, sec 10.5. */
6586 if ((e->flags & EDGE_ABNORMAL) == EDGE_ABNORMAL)
6588 insert_insn_start_bb (insn, bb);
6589 return 0;
6592 insert_insn_on_edge (insn, e);
6594 if (gcse_file)
6596 fprintf (gcse_file, "STORE_MOTION insert insn on edge (%d, %d):\n",
6597 e->src->index, e->dest->index);
6598 print_inline_rtx (gcse_file, insn, 6);
6599 fprintf (gcse_file, "\n");
6602 return 1;
6605 /* Remove any REG_EQUAL or REG_EQUIV notes containing a reference to the
6606 memory location in SMEXPR set in basic block BB.
6608 This could be rather expensive. */
6610 static void
6611 remove_reachable_equiv_notes (basic_block bb, struct ls_expr *smexpr)
6613 edge *stack = xmalloc (sizeof (edge) * n_basic_blocks), act;
6614 sbitmap visited = sbitmap_alloc (last_basic_block);
6615 int stack_top = 0;
6616 rtx last, insn, note;
6617 rtx mem = smexpr->pattern;
6619 sbitmap_zero (visited);
6620 act = bb->succ;
6622 while (1)
6624 if (!act)
6626 if (!stack_top)
6628 free (stack);
6629 sbitmap_free (visited);
6630 return;
6632 act = stack[--stack_top];
6634 bb = act->dest;
6636 if (bb == EXIT_BLOCK_PTR
6637 || TEST_BIT (visited, bb->index))
6639 act = act->succ_next;
6640 continue;
6642 SET_BIT (visited, bb->index);
6644 if (TEST_BIT (st_antloc[bb->index], smexpr->index))
6646 for (last = ANTIC_STORE_LIST (smexpr);
6647 BLOCK_FOR_INSN (XEXP (last, 0)) != bb;
6648 last = XEXP (last, 1))
6649 continue;
6650 last = XEXP (last, 0);
6652 else
6653 last = NEXT_INSN (BB_END (bb));
6655 for (insn = BB_HEAD (bb); insn != last; insn = NEXT_INSN (insn))
6656 if (INSN_P (insn))
6658 note = find_reg_equal_equiv_note (insn);
6659 if (!note || !expr_equiv_p (XEXP (note, 0), mem))
6660 continue;
6662 if (gcse_file)
6663 fprintf (gcse_file, "STORE_MOTION drop REG_EQUAL note at insn %d:\n",
6664 INSN_UID (insn));
6665 remove_note (insn, note);
6667 act = act->succ_next;
6668 if (bb->succ)
6670 if (act)
6671 stack[stack_top++] = act;
6672 act = bb->succ;
6677 /* This routine will replace a store with a SET to a specified register. */
6679 static void
6680 replace_store_insn (rtx reg, rtx del, basic_block bb, struct ls_expr *smexpr)
6682 rtx insn, mem, note, set, ptr;
6684 mem = smexpr->pattern;
6685 insn = gen_move_insn (reg, SET_SRC (single_set (del)));
6686 insn = emit_insn_after (insn, del);
6688 if (gcse_file)
6690 fprintf (gcse_file,
6691 "STORE_MOTION delete insn in BB %d:\n ", bb->index);
6692 print_inline_rtx (gcse_file, del, 6);
6693 fprintf (gcse_file, "\nSTORE MOTION replaced with insn:\n ");
6694 print_inline_rtx (gcse_file, insn, 6);
6695 fprintf (gcse_file, "\n");
6698 for (ptr = ANTIC_STORE_LIST (smexpr); ptr; ptr = XEXP (ptr, 1))
6699 if (XEXP (ptr, 0) == del)
6701 XEXP (ptr, 0) = insn;
6702 break;
6704 delete_insn (del);
6706 /* Now we must handle REG_EQUAL notes whose contents is equal to the mem;
6707 they are no longer accurate provided that they are reached by this
6708 definition, so drop them. */
6709 for (; insn != NEXT_INSN (BB_END (bb)); insn = NEXT_INSN (insn))
6710 if (INSN_P (insn))
6712 set = single_set (insn);
6713 if (!set)
6714 continue;
6715 if (expr_equiv_p (SET_DEST (set), mem))
6716 return;
6717 note = find_reg_equal_equiv_note (insn);
6718 if (!note || !expr_equiv_p (XEXP (note, 0), mem))
6719 continue;
6721 if (gcse_file)
6722 fprintf (gcse_file, "STORE_MOTION drop REG_EQUAL note at insn %d:\n",
6723 INSN_UID (insn));
6724 remove_note (insn, note);
6726 remove_reachable_equiv_notes (bb, smexpr);
6730 /* Delete a store, but copy the value that would have been stored into
6731 the reaching_reg for later storing. */
6733 static void
6734 delete_store (struct ls_expr * expr, basic_block bb)
6736 rtx reg, i, del;
6738 if (expr->reaching_reg == NULL_RTX)
6739 expr->reaching_reg = gen_reg_rtx (GET_MODE (expr->pattern));
6741 reg = expr->reaching_reg;
6743 for (i = AVAIL_STORE_LIST (expr); i; i = XEXP (i, 1))
6745 del = XEXP (i, 0);
6746 if (BLOCK_FOR_INSN (del) == bb)
6748 /* We know there is only one since we deleted redundant
6749 ones during the available computation. */
6750 replace_store_insn (reg, del, bb, expr);
6751 break;
6756 /* Free memory used by store motion. */
6758 static void
6759 free_store_memory (void)
6761 free_ldst_mems ();
6763 if (ae_gen)
6764 sbitmap_vector_free (ae_gen);
6765 if (ae_kill)
6766 sbitmap_vector_free (ae_kill);
6767 if (transp)
6768 sbitmap_vector_free (transp);
6769 if (st_antloc)
6770 sbitmap_vector_free (st_antloc);
6771 if (pre_insert_map)
6772 sbitmap_vector_free (pre_insert_map);
6773 if (pre_delete_map)
6774 sbitmap_vector_free (pre_delete_map);
6775 if (reg_set_in_block)
6776 sbitmap_vector_free (reg_set_in_block);
6778 ae_gen = ae_kill = transp = st_antloc = NULL;
6779 pre_insert_map = pre_delete_map = reg_set_in_block = NULL;
6782 /* Perform store motion. Much like gcse, except we move expressions the
6783 other way by looking at the flowgraph in reverse. */
6785 static void
6786 store_motion (void)
6788 basic_block bb;
6789 int x;
6790 struct ls_expr * ptr;
6791 int update_flow = 0;
6793 if (gcse_file)
6795 fprintf (gcse_file, "before store motion\n");
6796 print_rtl (gcse_file, get_insns ());
6799 init_alias_analysis ();
6801 /* Find all the available and anticipatable stores. */
6802 num_stores = compute_store_table ();
6803 if (num_stores == 0)
6805 sbitmap_vector_free (reg_set_in_block);
6806 end_alias_analysis ();
6807 return;
6810 /* Now compute kill & transp vectors. */
6811 build_store_vectors ();
6812 add_noreturn_fake_exit_edges ();
6813 connect_infinite_loops_to_exit ();
6815 edge_list = pre_edge_rev_lcm (gcse_file, num_stores, transp, ae_gen,
6816 st_antloc, ae_kill, &pre_insert_map,
6817 &pre_delete_map);
6819 /* Now we want to insert the new stores which are going to be needed. */
6820 for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
6822 FOR_EACH_BB (bb)
6823 if (TEST_BIT (pre_delete_map[bb->index], ptr->index))
6824 delete_store (ptr, bb);
6826 for (x = 0; x < NUM_EDGES (edge_list); x++)
6827 if (TEST_BIT (pre_insert_map[x], ptr->index))
6828 update_flow |= insert_store (ptr, INDEX_EDGE (edge_list, x));
6831 if (update_flow)
6832 commit_edge_insertions ();
6834 free_store_memory ();
6835 free_edge_list (edge_list);
6836 remove_fake_exit_edges ();
6837 end_alias_analysis ();
6841 /* Entry point for jump bypassing optimization pass. */
6844 bypass_jumps (FILE *file)
6846 int changed;
6848 /* We do not construct an accurate cfg in functions which call
6849 setjmp, so just punt to be safe. */
6850 if (current_function_calls_setjmp)
6851 return 0;
6853 /* For calling dump_foo fns from gdb. */
6854 debug_stderr = stderr;
6855 gcse_file = file;
6857 /* Identify the basic block information for this function, including
6858 successors and predecessors. */
6859 max_gcse_regno = max_reg_num ();
6861 if (file)
6862 dump_flow_info (file);
6864 /* Return if there's nothing to do, or it is too expensive. */
6865 if (n_basic_blocks <= 1 || is_too_expensive (_ ("jump bypassing disabled")))
6866 return 0;
6868 gcc_obstack_init (&gcse_obstack);
6869 bytes_used = 0;
6871 /* We need alias. */
6872 init_alias_analysis ();
6874 /* Record where pseudo-registers are set. This data is kept accurate
6875 during each pass. ??? We could also record hard-reg information here
6876 [since it's unchanging], however it is currently done during hash table
6877 computation.
6879 It may be tempting to compute MEM set information here too, but MEM sets
6880 will be subject to code motion one day and thus we need to compute
6881 information about memory sets when we build the hash tables. */
6883 alloc_reg_set_mem (max_gcse_regno);
6884 compute_sets (get_insns ());
6886 max_gcse_regno = max_reg_num ();
6887 alloc_gcse_mem (get_insns ());
6888 changed = one_cprop_pass (MAX_GCSE_PASSES + 2, 1, 1);
6889 free_gcse_mem ();
6891 if (file)
6893 fprintf (file, "BYPASS of %s: %d basic blocks, ",
6894 current_function_name (), n_basic_blocks);
6895 fprintf (file, "%d bytes\n\n", bytes_used);
6898 obstack_free (&gcse_obstack, NULL);
6899 free_reg_set_mem ();
6901 /* We are finished with alias. */
6902 end_alias_analysis ();
6903 allocate_reg_info (max_reg_num (), FALSE, FALSE);
6905 return changed;
6908 /* Return true if the graph is too expensive to optimize. PASS is the
6909 optimization about to be performed. */
6911 static bool
6912 is_too_expensive (const char *pass)
6914 /* Trying to perform global optimizations on flow graphs which have
6915 a high connectivity will take a long time and is unlikely to be
6916 particularly useful.
6918 In normal circumstances a cfg should have about twice as many
6919 edges as blocks. But we do not want to punish small functions
6920 which have a couple switch statements. Rather than simply
6921 threshold the number of blocks, uses something with a more
6922 graceful degradation. */
6923 if (n_edges > 20000 + n_basic_blocks * 4)
6925 if (warn_disabled_optimization)
6926 warning ("%s: %d basic blocks and %d edges/basic block",
6927 pass, n_basic_blocks, n_edges / n_basic_blocks);
6929 return true;
6932 /* If allocating memory for the cprop bitmap would take up too much
6933 storage it's better just to disable the optimization. */
6934 if ((n_basic_blocks
6935 * SBITMAP_SET_SIZE (max_reg_num ())
6936 * sizeof (SBITMAP_ELT_TYPE)) > MAX_GCSE_MEMORY)
6938 if (warn_disabled_optimization)
6939 warning ("%s: %d basic blocks and %d registers",
6940 pass, n_basic_blocks, max_reg_num ());
6942 return true;
6945 return false;
6948 /* The following code implements gcse after reload, the purpose of this
6949 pass is to cleanup redundant loads generated by reload and other
6950 optimizations that come after gcse. It searches for simple inter-block
6951 redundancies and tries to eliminate them by adding moves and loads
6952 in cold places. */
6954 /* The following structure holds the information about the occurrences of
6955 the redundant instructions. */
6956 struct unoccr
6958 struct unoccr *next;
6959 edge pred;
6960 rtx insn;
6963 static bool reg_used_on_edge (rtx, edge);
6964 static rtx reg_set_between_after_reload_p (rtx, rtx, rtx);
6965 static rtx reg_used_between_after_reload_p (rtx, rtx, rtx);
6966 static rtx get_avail_load_store_reg (rtx);
6967 static bool is_jump_table_basic_block (basic_block);
6968 static bool bb_has_well_behaved_predecessors (basic_block);
6969 static struct occr* get_bb_avail_insn (basic_block, struct occr *);
6970 static void hash_scan_set_after_reload (rtx, rtx, struct hash_table *);
6971 static void compute_hash_table_after_reload (struct hash_table *);
6972 static void eliminate_partially_redundant_loads (basic_block,
6973 rtx,
6974 struct expr *);
6975 static void gcse_after_reload (void);
6976 static struct occr* get_bb_avail_insn (basic_block, struct occr *);
6977 void gcse_after_reload_main (rtx, FILE *);
6980 /* Check if register REG is used in any insn waiting to be inserted on E.
6981 Assumes no such insn can be a CALL_INSN; if so call reg_used_between_p
6982 with PREV(insn),NEXT(insn) instead of calling
6983 reg_overlap_mentioned_p. */
6985 static bool
6986 reg_used_on_edge (rtx reg, edge e)
6988 rtx insn;
6990 for (insn = e->insns.r; insn; insn = NEXT_INSN (insn))
6991 if (INSN_P (insn) && reg_overlap_mentioned_p (reg, PATTERN (insn)))
6992 return true;
6994 return false;
6997 /* Return the insn that sets register REG or clobbers it in between
6998 FROM_INSN and TO_INSN (exclusive of those two).
6999 Just like reg_set_between but for hard registers and not pseudos. */
7001 static rtx
7002 reg_set_between_after_reload_p (rtx reg, rtx from_insn, rtx to_insn)
7004 rtx insn;
7005 int regno;
7007 if (! REG_P (reg))
7008 abort ();
7009 regno = REGNO (reg);
7011 /* We are called after register allocation. */
7012 if (regno >= FIRST_PSEUDO_REGISTER)
7013 abort ();
7015 if (from_insn == to_insn)
7016 return NULL_RTX;
7018 for (insn = NEXT_INSN (from_insn);
7019 insn != to_insn;
7020 insn = NEXT_INSN (insn))
7022 if (INSN_P (insn))
7024 if (FIND_REG_INC_NOTE (insn, reg)
7025 || (CALL_P (insn)
7026 && call_used_regs[regno])
7027 || find_reg_fusage (insn, CLOBBER, reg))
7028 return insn;
7030 if (set_of (reg, insn) != NULL_RTX)
7031 return insn;
7033 return NULL_RTX;
7036 /* Return the insn that uses register REG in between FROM_INSN and TO_INSN
7037 (exclusive of those two). Similar to reg_used_between but for hard
7038 registers and not pseudos. */
7040 static rtx
7041 reg_used_between_after_reload_p (rtx reg, rtx from_insn, rtx to_insn)
7043 rtx insn;
7044 int regno;
7046 if (! REG_P (reg))
7047 return to_insn;
7048 regno = REGNO (reg);
7050 /* We are called after register allocation. */
7051 if (regno >= FIRST_PSEUDO_REGISTER)
7052 abort ();
7053 if (from_insn == to_insn)
7054 return NULL_RTX;
7056 for (insn = NEXT_INSN (from_insn);
7057 insn != to_insn;
7058 insn = NEXT_INSN (insn))
7059 if (INSN_P (insn)
7060 && (reg_overlap_mentioned_p (reg, PATTERN (insn))
7061 || (CALL_P (insn)
7062 && call_used_regs[regno])
7063 || find_reg_fusage (insn, USE, reg)
7064 || find_reg_fusage (insn, CLOBBER, reg)))
7065 return insn;
7066 return NULL_RTX;
7069 /* Return the loaded/stored register of a load/store instruction. */
7071 static rtx
7072 get_avail_load_store_reg (rtx insn)
7074 if (REG_P (SET_DEST (PATTERN (insn)))) /* A load. */
7075 return SET_DEST(PATTERN(insn));
7076 if (REG_P (SET_SRC (PATTERN (insn)))) /* A store. */
7077 return SET_SRC (PATTERN (insn));
7078 abort ();
7081 /* Don't handle ABNORMAL edges or jump tables. */
7083 static bool
7084 is_jump_table_basic_block (basic_block bb)
7086 rtx insn = BB_END (bb);
7088 if (JUMP_TABLE_DATA_P (insn))
7089 return true;
7090 return false;
7093 /* Return nonzero if the predecessors of BB are "well behaved". */
7095 static bool
7096 bb_has_well_behaved_predecessors (basic_block bb)
7098 edge pred;
7100 if (! bb->pred)
7101 return false;
7102 for (pred = bb->pred; pred != NULL; pred = pred->pred_next)
7103 if (((pred->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (pred))
7104 || is_jump_table_basic_block (pred->src))
7105 return false;
7106 return true;
7110 /* Search for the occurrences of expression in BB. */
7112 static struct occr*
7113 get_bb_avail_insn (basic_block bb, struct occr *occr)
7115 for (; occr != NULL; occr = occr->next)
7116 if (BLOCK_FOR_INSN (occr->insn)->index == bb->index)
7117 return occr;
7118 return NULL;
7121 /* Perform partial GCSE pass after reload, try to eliminate redundant loads
7122 created by the reload pass. We try to look for a full or partial
7123 redundant loads fed by one or more loads/stores in predecessor BBs,
7124 and try adding loads to make them fully redundant. We also check if
7125 it's worth adding loads to be able to delete the redundant load.
7127 Algorithm:
7128 1. Build available expressions hash table:
7129 For each load/store instruction, if the loaded/stored memory didn't
7130 change until the end of the basic block add this memory expression to
7131 the hash table.
7132 2. Perform Redundancy elimination:
7133 For each load instruction do the following:
7134 perform partial redundancy elimination, check if it's worth adding
7135 loads to make the load fully redundant. If so add loads and
7136 register copies and delete the load.
7138 Future enhancement:
7139 if loaded register is used/defined between load and some store,
7140 look for some other free register between load and all its stores,
7141 and replace load with a copy from this register to the loaded
7142 register. */
7145 /* This handles the case where several stores feed a partially redundant
7146 load. It checks if the redundancy elimination is possible and if it's
7147 worth it. */
7149 static void
7150 eliminate_partially_redundant_loads (basic_block bb, rtx insn,
7151 struct expr *expr)
7153 edge pred;
7154 rtx avail_insn = NULL_RTX;
7155 rtx avail_reg;
7156 rtx dest, pat;
7157 struct occr *a_occr;
7158 struct unoccr *occr, *avail_occrs = NULL;
7159 struct unoccr *unoccr, *unavail_occrs = NULL;
7160 int npred_ok = 0;
7161 gcov_type ok_count = 0; /* Redundant load execution count. */
7162 gcov_type critical_count = 0; /* Execution count of critical edges. */
7164 /* The execution count of the loads to be added to make the
7165 load fully redundant. */
7166 gcov_type not_ok_count = 0;
7167 basic_block pred_bb;
7169 pat = PATTERN (insn);
7170 dest = SET_DEST (pat);
7171 /* Check that the loaded register is not used, set, or killed from the
7172 beginning of the block. */
7173 if (reg_used_between_after_reload_p (dest,
7174 PREV_INSN (BB_HEAD (bb)), insn)
7175 || reg_set_between_after_reload_p (dest,
7176 PREV_INSN (BB_HEAD (bb)), insn))
7177 return;
7179 /* Check potential for replacing load with copy for predecessors. */
7180 for (pred = bb->pred; pred; pred = pred->pred_next)
7182 rtx next_pred_bb_end;
7184 avail_insn = NULL_RTX;
7185 pred_bb = pred->src;
7186 next_pred_bb_end = NEXT_INSN (BB_END (pred_bb));
7187 for (a_occr = get_bb_avail_insn (pred_bb, expr->avail_occr); a_occr;
7188 a_occr = get_bb_avail_insn (pred_bb, a_occr->next))
7190 /* Check if the loaded register is not used. */
7191 avail_insn = a_occr->insn;
7192 if (! (avail_reg = get_avail_load_store_reg (avail_insn)))
7193 abort ();
7194 /* Make sure we can generate a move from register avail_reg to
7195 dest. */
7196 extract_insn (gen_move_insn (copy_rtx (dest),
7197 copy_rtx (avail_reg)));
7198 if (! constrain_operands (1)
7199 || reg_killed_on_edge (avail_reg, pred)
7200 || reg_used_on_edge (dest, pred))
7202 avail_insn = NULL;
7203 continue;
7205 if (! reg_set_between_after_reload_p (avail_reg, avail_insn,
7206 next_pred_bb_end))
7207 /* AVAIL_INSN remains non-null. */
7208 break;
7209 else
7210 avail_insn = NULL;
7212 if (avail_insn != NULL_RTX)
7214 npred_ok++;
7215 ok_count += pred->count;
7216 if (EDGE_CRITICAL_P (pred))
7217 critical_count += pred->count;
7218 occr = gmalloc (sizeof (struct unoccr));
7219 occr->insn = avail_insn;
7220 occr->pred = pred;
7221 occr->next = avail_occrs;
7222 avail_occrs = occr;
7224 else
7226 not_ok_count += pred->count;
7227 if (EDGE_CRITICAL_P (pred))
7228 critical_count += pred->count;
7229 unoccr = gmalloc (sizeof (struct unoccr));
7230 unoccr->insn = NULL_RTX;
7231 unoccr->pred = pred;
7232 unoccr->next = unavail_occrs;
7233 unavail_occrs = unoccr;
7237 if (npred_ok == 0 /* No load can be replaced by copy. */
7238 || (optimize_size && npred_ok > 1)) /* Prevent exploding the code. */
7239 goto cleanup;
7241 /* Check if it's worth applying the partial redundancy elimination. */
7242 if (ok_count < GCSE_AFTER_RELOAD_PARTIAL_FRACTION * not_ok_count)
7243 goto cleanup;
7245 if (ok_count < GCSE_AFTER_RELOAD_CRITICAL_FRACTION * critical_count)
7246 goto cleanup;
7248 /* Generate moves to the loaded register from where
7249 the memory is available. */
7250 for (occr = avail_occrs; occr; occr = occr->next)
7252 avail_insn = occr->insn;
7253 pred = occr->pred;
7254 /* Set avail_reg to be the register having the value of the
7255 memory. */
7256 avail_reg = get_avail_load_store_reg (avail_insn);
7257 if (! avail_reg)
7258 abort ();
7260 insert_insn_on_edge (gen_move_insn (copy_rtx (dest),
7261 copy_rtx (avail_reg)),
7262 pred);
7264 if (gcse_file)
7265 fprintf (gcse_file,
7266 "GCSE AFTER reload generating move from %d to %d on \
7267 edge from %d to %d\n",
7268 REGNO (avail_reg),
7269 REGNO (dest),
7270 pred->src->index,
7271 pred->dest->index);
7274 /* Regenerate loads where the memory is unavailable. */
7275 for (unoccr = unavail_occrs; unoccr; unoccr = unoccr->next)
7277 pred = unoccr->pred;
7278 insert_insn_on_edge (copy_insn (PATTERN (insn)), pred);
7280 if (gcse_file)
7281 fprintf (gcse_file,
7282 "GCSE AFTER reload: generating on edge from %d to %d\
7283 a copy of load:\n",
7284 pred->src->index,
7285 pred->dest->index);
7288 /* Delete the insn if it is not available in this block and mark it
7289 for deletion if it is available. If insn is available it may help
7290 discover additional redundancies, so mark it for later deletion.*/
7291 for (a_occr = get_bb_avail_insn (bb, expr->avail_occr);
7292 a_occr && (a_occr->insn != insn);
7293 a_occr = get_bb_avail_insn (bb, a_occr->next));
7295 if (!a_occr)
7296 delete_insn (insn);
7297 else
7298 a_occr->deleted_p = 1;
7300 cleanup:
7302 while (unavail_occrs)
7304 struct unoccr *temp = unavail_occrs->next;
7305 free (unavail_occrs);
7306 unavail_occrs = temp;
7309 while (avail_occrs)
7311 struct unoccr *temp = avail_occrs->next;
7312 free (avail_occrs);
7313 avail_occrs = temp;
7317 /* Performing the redundancy elimination as described before. */
7319 static void
7320 gcse_after_reload (void)
7322 unsigned int i;
7323 rtx insn;
7324 basic_block bb;
7325 struct expr *expr;
7326 struct occr *occr;
7328 /* Note we start at block 1. */
7330 if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
7331 return;
7333 FOR_BB_BETWEEN (bb,
7334 ENTRY_BLOCK_PTR->next_bb->next_bb,
7335 EXIT_BLOCK_PTR,
7336 next_bb)
7338 if (! bb_has_well_behaved_predecessors (bb))
7339 continue;
7341 /* Do not try this optimization on cold basic blocks. */
7342 if (probably_cold_bb_p (bb))
7343 continue;
7345 reset_opr_set_tables ();
7347 for (insn = BB_HEAD (bb);
7348 insn != NULL
7349 && insn != NEXT_INSN (BB_END (bb));
7350 insn = NEXT_INSN (insn))
7352 /* Is it a load - of the form (set (reg) (mem))? */
7353 if (NONJUMP_INSN_P (insn)
7354 && GET_CODE (PATTERN (insn)) == SET
7355 && REG_P (SET_DEST (PATTERN (insn)))
7356 && MEM_P (SET_SRC (PATTERN (insn))))
7358 rtx pat = PATTERN (insn);
7359 rtx src = SET_SRC (pat);
7360 struct expr *expr;
7362 if (general_operand (src, GET_MODE (src))
7363 /* Is the expression recorded? */
7364 && (expr = lookup_expr (src, &expr_hash_table)) != NULL
7365 /* Are the operands unchanged since the start of the
7366 block? */
7367 && oprs_not_set_p (src, insn)
7368 && ! MEM_VOLATILE_P (src)
7369 && GET_MODE (src) != BLKmode
7370 && !(flag_non_call_exceptions && may_trap_p (src))
7371 && !side_effects_p (src))
7373 /* We now have a load (insn) and an available memory at
7374 its BB start (expr). Try to remove the loads if it is
7375 redundant. */
7376 eliminate_partially_redundant_loads (bb, insn, expr);
7380 /* Keep track of everything modified by this insn. */
7381 if (INSN_P (insn))
7382 mark_oprs_set (insn);
7386 commit_edge_insertions ();
7388 /* Go over the expression hash table and delete insns that were
7389 marked for later deletion. */
7390 for (i = 0; i < expr_hash_table.size; i++)
7392 for (expr = expr_hash_table.table[i];
7393 expr != NULL;
7394 expr = expr->next_same_hash)
7395 for (occr = expr->avail_occr; occr; occr = occr->next)
7396 if (occr->deleted_p)
7397 delete_insn (occr->insn);
7401 /* Scan pattern PAT of INSN and add an entry to the hash TABLE.
7402 After reload we are interested in loads/stores only. */
7404 static void
7405 hash_scan_set_after_reload (rtx pat, rtx insn, struct hash_table *table)
7407 rtx src = SET_SRC (pat);
7408 rtx dest = SET_DEST (pat);
7410 if (! MEM_P (src) && ! MEM_P (dest))
7411 return;
7413 if (REG_P (dest))
7415 if (/* Don't GCSE something if we can't do a reg/reg copy. */
7416 can_copy_p (GET_MODE (dest))
7417 /* GCSE commonly inserts instruction after the insn. We can't
7418 do that easily for EH_REGION notes so disable GCSE on these
7419 for now. */
7420 && ! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
7421 /* Is SET_SRC something we want to gcse? */
7422 && general_operand (src, GET_MODE (src))
7423 /* Don't CSE a nop. */
7424 && ! set_noop_p (pat)
7425 && ! JUMP_P (insn))
7427 /* An expression is not available if its operands are
7428 subsequently modified, including this insn. */
7429 if (oprs_available_p (src, insn))
7430 insert_expr_in_table (src, GET_MODE (dest), insn, 0, 1, table);
7433 else if (REG_P (src))
7435 /* Only record sets of pseudo-regs in the hash table. */
7436 if (/* Don't GCSE something if we can't do a reg/reg copy. */
7437 can_copy_p (GET_MODE (src))
7438 /* GCSE commonly inserts instruction after the insn. We can't
7439 do that easily for EH_REGION notes so disable GCSE on these
7440 for now. */
7441 && ! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
7442 /* Is SET_DEST something we want to gcse? */
7443 && general_operand (dest, GET_MODE (dest))
7444 /* Don't CSE a nop. */
7445 && ! set_noop_p (pat)
7446 &&! JUMP_P (insn)
7447 && ! (flag_float_store && FLOAT_MODE_P (GET_MODE (dest)))
7448 /* Check if the memory expression is killed after insn. */
7449 && ! load_killed_in_block_p (BLOCK_FOR_INSN (insn),
7450 INSN_CUID (insn) + 1,
7451 dest,
7453 && oprs_unchanged_p (XEXP (dest, 0), insn, 1))
7455 insert_expr_in_table (dest, GET_MODE (dest), insn, 0, 1, table);
7461 /* Create hash table of memory expressions available at end of basic
7462 blocks. */
7464 static void
7465 compute_hash_table_after_reload (struct hash_table *table)
7467 unsigned int i;
7469 table->set_p = 0;
7471 /* Initialize count of number of entries in hash table. */
7472 table->n_elems = 0;
7473 memset ((char *) table->table, 0,
7474 table->size * sizeof (struct expr *));
7476 /* While we compute the hash table we also compute a bit array of which
7477 registers are set in which blocks. */
7478 sbitmap_vector_zero (reg_set_in_block, last_basic_block);
7480 /* Re-cache any INSN_LIST nodes we have allocated. */
7481 clear_modify_mem_tables ();
7483 /* Some working arrays used to track first and last set in each block. */
7484 reg_avail_info = gmalloc (max_gcse_regno * sizeof (struct reg_avail_info));
7486 for (i = 0; i < max_gcse_regno; ++i)
7487 reg_avail_info[i].last_bb = NULL;
7489 FOR_EACH_BB (current_bb)
7491 rtx insn;
7492 unsigned int regno;
7494 /* First pass over the instructions records information used to
7495 determine when registers and memory are first and last set. */
7496 for (insn = BB_HEAD (current_bb);
7497 insn && insn != NEXT_INSN (BB_END (current_bb));
7498 insn = NEXT_INSN (insn))
7500 if (! INSN_P (insn))
7501 continue;
7503 if (CALL_P (insn))
7505 bool clobbers_all = false;
7507 #ifdef NON_SAVING_SETJMP
7508 if (NON_SAVING_SETJMP
7509 && find_reg_note (insn, REG_SETJMP, NULL_RTX))
7510 clobbers_all = true;
7511 #endif
7513 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7514 if (clobbers_all
7515 || TEST_HARD_REG_BIT (regs_invalidated_by_call,
7516 regno))
7517 record_last_reg_set_info (insn, regno);
7519 mark_call (insn);
7522 note_stores (PATTERN (insn), record_last_set_info, insn);
7524 if (GET_CODE (PATTERN (insn)) == SET)
7526 rtx src, dest;
7528 src = SET_SRC (PATTERN (insn));
7529 dest = SET_DEST (PATTERN (insn));
7530 if (MEM_P (src) && auto_inc_p (XEXP (src, 0)))
7532 regno = REGNO (XEXP (XEXP (src, 0), 0));
7533 record_last_reg_set_info (insn, regno);
7535 if (MEM_P (dest) && auto_inc_p (XEXP (dest, 0)))
7537 regno = REGNO (XEXP (XEXP (dest, 0), 0));
7538 record_last_reg_set_info (insn, regno);
7543 /* The next pass builds the hash table. */
7544 for (insn = BB_HEAD (current_bb);
7545 insn && insn != NEXT_INSN (BB_END (current_bb));
7546 insn = NEXT_INSN (insn))
7547 if (INSN_P (insn) && GET_CODE (PATTERN (insn)) == SET)
7548 if (! find_reg_note (insn, REG_LIBCALL, NULL_RTX))
7549 hash_scan_set_after_reload (PATTERN (insn), insn, table);
7552 free (reg_avail_info);
7553 reg_avail_info = NULL;
7557 /* Main entry point of the GCSE after reload - clean some redundant loads
7558 due to spilling. */
7560 void
7561 gcse_after_reload_main (rtx f, FILE* file)
7563 gcse_subst_count = 0;
7564 gcse_create_count = 0;
7566 gcse_file = file;
7568 gcc_obstack_init (&gcse_obstack);
7569 bytes_used = 0;
7571 /* We need alias. */
7572 init_alias_analysis ();
7574 max_gcse_regno = max_reg_num ();
7576 alloc_reg_set_mem (max_gcse_regno);
7577 alloc_gcse_mem (f);
7578 alloc_hash_table (max_cuid, &expr_hash_table, 0);
7579 compute_hash_table_after_reload (&expr_hash_table);
7581 if (gcse_file)
7582 dump_hash_table (gcse_file, "Expression", &expr_hash_table);
7584 if (expr_hash_table.n_elems > 0)
7585 gcse_after_reload ();
7587 free_hash_table (&expr_hash_table);
7589 free_gcse_mem ();
7590 free_reg_set_mem ();
7592 /* We are finished with alias. */
7593 end_alias_analysis ();
7595 obstack_free (&gcse_obstack, NULL);
7598 #include "gt-gcse.h"