/* Global common subexpression elimination/Partial redundancy elimination
   and global constant/copy propagation for GNU compiler.
   Copyright (C) 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005,
   2006, 2007 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */
/* TODO
   - reordering of memory allocation and freeing to be more space efficient
   - do rough calc of how many regs are needed in each block, and a rough
     calc of how many regs are available in each class and use that to
     throttle back the code in cases where RTX_COST is minimal.
   - a store to the same address as a load does not kill the load if the
     source of the store is also the destination of the load.  Handling this
     allows more load motion, particularly out of loops.
   - ability to realloc sbitmap vectors would allow one initial computation
     of reg_set_in_block with only subsequent additions, rather than
     recomputing it for each pass.  */
/* References searched while implementing this.

   Compilers Principles, Techniques and Tools
   Aho, Sethi, Ullman
   Addison-Wesley, 1988

   Global Optimization by Suppression of Partial Redundancies
   E. Morel, C. Renvoise
   Communications of the ACM, Vol. 22, Num. 2, Feb. 1979

   A Portable Machine-Independent Global Optimizer - Design and Measurements
   Frederick Chow
   Stanford Ph.D. thesis, Dec. 1983

   A Fast Algorithm for Code Movement Optimization
   D.M. Dhamdhere
   SIGPLAN Notices, Vol. 23, Num. 10, Oct. 1988

   A Solution to a Problem with Morel and Renvoise's
   Global Optimization by Suppression of Partial Redundancies
   K-H Drechsler, M.P. Stadel
   ACM TOPLAS, Vol. 10, Num. 4, Oct. 1988

   Practical Adaptation of the Global Optimization
   Algorithm of Morel and Renvoise
   D.M. Dhamdhere
   ACM TOPLAS, Vol. 13, Num. 2, Apr. 1991

   Efficiently Computing Static Single Assignment Form and the Control
   Dependence Graph
   R. Cytron, J. Ferrante, B.K. Rosen, M.N. Wegman, and F.K. Zadeck
   ACM TOPLAS, Vol. 13, Num. 4, Oct. 1991

   Lazy Code Motion
   J. Knoop, O. Ruthing, B. Steffen
   ACM SIGPLAN Notices Vol. 27, Num. 7, Jul. 1992, '92 Conference on PLDI

   What's In a Region?  Or Computing Control Dependence Regions in Near-Linear
   Time for Reducible Flow Control
   Thomas Ball
   ACM Letters on Programming Languages and Systems,
   Vol. 2, Num. 1-4, Mar-Dec 1993

   An Efficient Representation for Sparse Sets
   Preston Briggs, Linda Torczon
   ACM Letters on Programming Languages and Systems,
   Vol. 2, Num. 1-4, Mar-Dec 1993

   A Variation of Knoop, Ruthing, and Steffen's Lazy Code Motion
   K-H Drechsler, M.P. Stadel
   ACM SIGPLAN Notices, Vol. 28, Num. 5, May 1993

   Partial Dead Code Elimination
   J. Knoop, O. Ruthing, B. Steffen
   ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994

   Effective Partial Redundancy Elimination
   P. Briggs, K.D. Cooper
   ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994

   The Program Structure Tree: Computing Control Regions in Linear Time
   R. Johnson, D. Pearson, K. Pingali
   ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994

   Optimal Code Motion: Theory and Practice
   J. Knoop, O. Ruthing, B. Steffen
   ACM TOPLAS, Vol. 16, Num. 4, Jul. 1994

   The Power of Assignment Motion
   J. Knoop, O. Ruthing, B. Steffen
   ACM SIGPLAN Notices Vol. 30, Num. 6, Jun. 1995, '95 Conference on PLDI

   Global Code Motion / Global Value Numbering
   C. Click
   ACM SIGPLAN Notices Vol. 30, Num. 6, Jun. 1995, '95 Conference on PLDI

   Value Driven Redundancy Elimination
   L.T. Simpson
   Rice University Ph.D. thesis, Apr. 1996

   Value Numbering
   L.T. Simpson
   Massively Scalar Compiler Project, Rice University, Sep. 1996

   High Performance Compilers for Parallel Computing
   Michael Wolfe
   Addison-Wesley, 1996

   Advanced Compiler Design and Implementation
   Steven Muchnick
   Morgan Kaufmann, 1997

   Building an Optimizing Compiler
   Robert Morgan
   Digital Press, 1998

   People wishing to speed up the code here should read:
     Elimination Algorithms for Data Flow Analysis
     B.G. Ryder, M.C. Paull
     ACM Computing Surveys, Vol. 18, Num. 3, Sep. 1986

     How to Analyze Large Programs Efficiently and Informatively
     D.M. Dhamdhere, B.K. Rosen, F.K. Zadeck
     ACM SIGPLAN Notices Vol. 27, Num. 7, Jul. 1992, '92 Conference on PLDI

   People wishing to do something different can find various possibilities
   in the above papers and elsewhere.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "toplev.h"

#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "flags.h"
#include "real.h"
#include "insn-config.h"
#include "recog.h"
#include "basic-block.h"
#include "output.h"
#include "function.h"
#include "expr.h"
#include "except.h"
#include "ggc.h"
#include "params.h"
#include "cselib.h"
#include "intl.h"
#include "obstack.h"
#include "timevar.h"
#include "tree-pass.h"
#include "hashtab.h"
/* Propagate flow information through back edges and thus enable PRE's
   moving loop invariant calculations out of loops.

   Originally this tended to create worse overall code, but several
   improvements during the development of PRE seem to have made following
   back edges generally a win.

   Note much of the loop invariant code motion done here would normally
   be done by loop.c, which has more heuristics for when to move invariants
   out of loops.  At some point we might need to move some of those
   heuristics into gcse.c.  */
/* We support GCSE via Partial Redundancy Elimination.  PRE optimizations
   are a superset of those done by GCSE.

   We perform the following steps:

   1) Compute basic block information.

   2) Compute table of places where registers are set.

   3) Perform copy/constant propagation.

   4) Perform global cse using lazy code motion if not optimizing
      for size, or code hoisting if we are.

   5) Perform another pass of copy/constant propagation.

   Two passes of copy/constant propagation are done because the first one
   enables more GCSE and the second one helps to clean up the copies that
   GCSE creates.  This is needed more for PRE than for Classic because Classic
   GCSE will try to use an existing register containing the common
   subexpression rather than create a new one.  This is harder to do for PRE
   because of the code motion (which Classic GCSE doesn't do).

   Expressions we are interested in GCSE-ing are of the form
   (set (pseudo-reg) (expression)).
   Function want_to_gcse_p says what these are.

   PRE handles moving invariant expressions out of loops (by treating them as
   partially redundant).

   Eventually it would be nice to replace cse.c/gcse.c with SSA (static single
   assignment) based GVN (global value numbering).  L. T. Simpson's paper
   (Rice University) on value numbering is a useful reference for this.

   **********************

   We used to support multiple passes but there are diminishing returns in
   doing so.  The first pass usually makes 90% of the changes that are doable.
   A second pass can make a few more changes made possible by the first pass.
   Experiments show any further passes don't make enough changes to justify
   the expense.

   A study of spec92 using an unlimited number of passes:
   [1 pass] = 1208 substitutions, [2] = 577, [3] = 202, [4] = 192, [5] = 83,
   [6] = 34, [7] = 17, [8] = 9, [9] = 4, [10] = 4, [11] = 2,
   [12] = 2, [13] = 1, [15] = 1, [16] = 2, [41] = 1

   It was found doing copy propagation between each pass enables further
   substitutions.

   PRE is quite expensive in complicated functions because the DFA can take
   a while to converge.  Hence we only perform one pass.  The parameter
   max-gcse-passes can be modified if one wants to experiment.

   **********************

   The steps for PRE are:

   1) Build the hash table of expressions we wish to GCSE (expr_hash_table).

   2) Perform the data flow analysis for PRE.

   3) Delete the redundant instructions.

   4) Insert the required copies [if any] that make the partially
      redundant instructions fully redundant.

   5) For other reaching expressions, insert an instruction to copy the value
      to a newly created pseudo that will reach the redundant instruction.

   The deletion is done first so that when we do insertions we
   know which pseudo reg to use.

   Various papers have argued that PRE DFA is expensive (O(n^2)) and others
   argue it is not.  The number of iterations for the algorithm to converge
   is typically 2-4 so I don't view it as that expensive (relatively speaking).

   PRE GCSE depends heavily on the second CSE pass to clean up the copies
   we create.  To make an expression reach the place where it's redundant,
   the result of the expression is copied to a new register, and the redundant
   expression is deleted by replacing it with this new register.  Classic GCSE
   doesn't have this problem as much as it computes the reaching defs of
   each register in each block and thus can try to use an existing
   register.  */
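
/* As a hedged illustration (an editor's sketch, not from the original
   sources), consider the classic diamond CFG in which (a + b) is only
   partially redundant:

	B1: x = a + b;      B2: (no computation of a + b)
	      \             /
	       B3: y = a + b;

   Step 4 above inserts a copy of the computation on the path through B2,
   and steps 3/5 rewrite both computations to use a single new pseudo T:

	B1: T = a + b;      B2: T = a + b;
	    x = T;          /
	      \            /
	       B3: y = T;

   making the occurrence in B3 fully redundant and deletable.  */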
/* GCSE global vars.  */

/* Note whether or not we should run jump optimization after gcse.  We
   want to do this for two cases.

    * If we changed any jumps via cprop.

    * If we added any labels via edge splitting.  */
static int run_jump_opt_after_gcse;

/* An obstack for our working variables.  */
static struct obstack gcse_obstack;

struct reg_use { rtx reg_rtx; };
/* Hash table of expressions.  */

struct expr
{
  /* The expression (SET_SRC for expressions, PATTERN for assignments).  */
  rtx expr;
  /* Index in the available expression bitmaps.  */
  int bitmap_index;
  /* Next entry with the same hash.  */
  struct expr *next_same_hash;
  /* List of anticipatable occurrences in basic blocks in the function.
     An "anticipatable occurrence" is one that is the first occurrence in the
     basic block, the operands are not modified in the basic block prior
     to the occurrence and the output is not used between the start of
     the block and the occurrence.  */
  struct occr *antic_occr;
  /* List of available occurrences in basic blocks in the function.
     An "available occurrence" is one that is the last occurrence in the
     basic block and the operands are not modified by following statements in
     the basic block [including this insn].  */
  struct occr *avail_occr;
  /* Non-null if the computation is PRE redundant.
     The value is the newly created pseudo-reg to record a copy of the
     expression in all the places that reach the redundant copy.  */
  rtx reaching_reg;
};
/* Occurrence of an expression.
   There is one per basic block.  If a pattern appears more than once the
   last appearance is used [or first for anticipatable expressions].  */

struct occr
{
  /* Next occurrence of this expression.  */
  struct occr *next;
  /* The insn that computes the expression.  */
  rtx insn;
  /* Nonzero if this [anticipatable] occurrence has been deleted.  */
  char deleted_p;
  /* Nonzero if this [available] occurrence has been copied to
     reaching_reg.  */
  /* ??? This is mutually exclusive with deleted_p, so they could share
     the same byte.  */
  char copied_p;
};
/* Expression and copy propagation hash tables.
   Each hash table is an array of buckets.
   ??? It is known that if it were an array of entries, structure elements
   `next_same_hash' and `bitmap_index' wouldn't be necessary.  However, it is
   not clear whether in the final analysis a sufficient amount of memory would
   be saved as the size of the available expression bitmaps would be larger
   [one could build a mapping table without holes afterwards though].
   Someday I'll perform the computation and figure it out.  */

struct hash_table
{
  /* The table itself.
     This is an array of `expr_hash_table_size' elements.  */
  struct expr **table;

  /* Size of the hash table, in elements.  */
  unsigned int size;

  /* Number of hash table elements.  */
  unsigned int n_elems;

  /* Nonzero if this is the set (copy propagation) hash table rather than
     the expression hash table.  */
  int set_p;
};
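
/* A hedged usage sketch (an editor's addition, mirroring the probe loops
   in insert_expr_in_table and insert_set_in_table below): a lookup walks
   one bucket's collision chain via next_same_hash,

	unsigned int hash = hash_expr (x, mode, &do_not_record_p, table->size);
	struct expr *e;

	for (e = table->table[hash]; e != NULL; e = e->next_same_hash)
	  if (expr_equiv_p (e->expr, x))
	    break;

   and e->bitmap_index then names the expression's column in the dataflow
   bitmaps.  */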
/* Expression hash table.  */
static struct hash_table expr_hash_table;

/* Copy propagation hash table.  */
static struct hash_table set_hash_table;

/* Mapping of uids to cuids.
   Only real insns get cuids.  */
static int *uid_cuid;

/* Highest UID in UID_CUID.  */
static int max_uid;

/* Get the cuid of an insn.  */
#ifdef ENABLE_CHECKING
#define INSN_CUID(INSN) \
  (gcc_assert (INSN_UID (INSN) <= max_uid), uid_cuid[INSN_UID (INSN)])
#else
#define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])
#endif

/* Number of cuids.  */
static int max_cuid;

/* Mapping of cuids to insns.  */
static rtx *cuid_insn;

/* Get insn from cuid.  */
#define CUID_INSN(CUID) (cuid_insn[CUID])
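
/* A hedged note (editor's addition): because cuids are dense and increase
   monotonically in insn-stream order, comparing INSN_CUID values is a cheap
   ordering test, e.g.

	if (INSN_CUID (a) < INSN_CUID (b))
	  ... A precedes B in the insn stream ...

   which is how oprs_unchanged_p and load_killed_in_block_p below bound
   their searches within a block.  */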
/* Maximum register number in function prior to doing gcse + 1.
   Registers created during this pass have regno >= max_gcse_regno.
   This is named with "gcse" to not collide with global of same name.  */
static unsigned int max_gcse_regno;
/* Table of registers that are modified.

   For each register, each element is a list of places where the pseudo-reg
   is set.

   For simplicity, GCSE is done on sets of pseudo-regs only.  PRE GCSE only
   requires knowledge of which blocks kill which regs [and thus could use
   a bitmap instead of the lists `reg_set_table' uses].

   `reg_set_table' could be turned into an array of bitmaps (num-bbs x
   num-regs) [however perhaps it may be useful to keep the data as is].  One
   advantage of recording things this way is that `reg_set_table' is fairly
   sparse with respect to pseudo regs but for hard regs could be fairly dense
   [relatively speaking].  And recording sets of pseudo-regs in lists speeds
   up functions like compute_transp since in the case of pseudo-regs we only
   need to iterate over the number of times a pseudo-reg is set, not over the
   number of basic blocks [clearly there is a bit of a slow down in the cases
   where a pseudo is set more than once in a block, however it is believed
   that the net effect is to speed things up].  This isn't done for hard-regs
   because recording call-clobbered hard-regs in `reg_set_table' at each
   function call can consume a fair bit of memory, and iterating over
   hard-regs stored this way in compute_transp will be more expensive.  */

typedef struct reg_set
{
  /* The next setting of this register.  */
  struct reg_set *next;
  /* The index of the block where it was set.  */
  int bb_index;
} reg_set;
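
/* A hedged traversal sketch (editor's addition): a client that wants the
   blocks in which pseudo REGNO is set walks the per-register chain,

	struct reg_set *r;

	for (r = reg_set_table[regno]; r != NULL; r = r->next)
	  ... r->bb_index is a block that sets REGNO ...

   visiting one node per recorded set rather than one per basic block,
   which is the speedup the comment above describes.  */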
static reg_set **reg_set_table;

/* Size of `reg_set_table'.
   The table starts out at max_gcse_regno + slop, and is enlarged as
   necessary.  */
static int reg_set_table_size;

/* Amount to grow `reg_set_table' by when it's full.  */
#define REG_SET_TABLE_SLOP 100
/* This is a list of expressions which are MEMs and will be used by load
   or store motion.
   Load motion tracks MEMs which aren't killed by anything except
   themselves, i.e., loads and stores to a single location.
   We can then allow movement of these MEM refs with a little special
   allowance (all stores copy the same value to the reaching reg used
   for the loads).  This means all values used to store into memory must have
   no side effects so we can re-issue the setter value.
   Store Motion uses this structure as an expression table to track stores
   which look interesting, and might be moveable towards the exit block.  */

struct ls_expr
{
  struct expr * expr;		/* Gcse expression reference for LM.  */
  rtx pattern;			/* Pattern of this mem.  */
  rtx pattern_regs;		/* List of registers mentioned by the mem.  */
  rtx loads;			/* INSN list of loads seen.  */
  rtx stores;			/* INSN list of stores seen.  */
  struct ls_expr * next;	/* Next in the list.  */
  int invalid;			/* Invalid for some reason.  */
  int index;			/* If it maps to a bitmap index.  */
  unsigned int hash_index;	/* Index when in a hash table.  */
  rtx reaching_reg;		/* Register to use when re-writing.  */
};
/* Array of implicit set patterns indexed by basic block index.  */
static rtx *implicit_sets;

/* Head of the list of load/store memory refs.  */
static struct ls_expr * pre_ldst_mems = NULL;

/* Hashtable for the load/store memory refs.  */
static htab_t pre_ldst_table = NULL;

/* Bitmap containing one bit for each register in the program.
   Used when performing GCSE to track which registers have been set since
   the start of the basic block.  */
static regset reg_set_bitmap;
/* For each block, a bitmap of registers set in the block.
   This is used by compute_transp.
   It is computed during hash table computation and not by compute_sets
   as it includes registers added since the last pass (or between cprop and
   gcse) and it's currently not easy to realloc sbitmap vectors.  */
static sbitmap *reg_set_in_block;

/* Array, indexed by basic block number for a list of insns which modify
   memory within that block.  */
static rtx * modify_mem_list;
static bitmap modify_mem_list_set;

/* This array parallels modify_mem_list, but is kept canonicalized.  */
static rtx * canon_modify_mem_list;

/* Bitmap indexed by block numbers to record which blocks contain
   function calls.  */
static bitmap blocks_with_calls;

/* Various variables for statistics gathering.  */

/* Memory used in a pass.
   This isn't intended to be absolutely precise.  Its intent is only
   to keep an eye on memory usage.  */
static int bytes_used;

/* GCSE substitutions made.  */
static int gcse_subst_count;
/* Number of copy instructions created.  */
static int gcse_create_count;
/* Number of local constants propagated.  */
static int local_const_prop_count;
/* Number of local copies propagated.  */
static int local_copy_prop_count;
/* Number of global constants propagated.  */
static int global_const_prop_count;
/* Number of global copies propagated.  */
static int global_copy_prop_count;

/* For available exprs.  */
static sbitmap *ae_kill, *ae_gen;
static void compute_can_copy (void);
static void *gmalloc (size_t) ATTRIBUTE_MALLOC;
static void *gcalloc (size_t, size_t) ATTRIBUTE_MALLOC;
static void *grealloc (void *, size_t);
static void *gcse_alloc (unsigned long);
static void alloc_gcse_mem (void);
static void free_gcse_mem (void);
static void alloc_reg_set_mem (int);
static void free_reg_set_mem (void);
static void record_one_set (int, rtx);
static void record_set_info (rtx, rtx, void *);
static void compute_sets (void);
static void hash_scan_insn (rtx, struct hash_table *, int);
static void hash_scan_set (rtx, rtx, struct hash_table *);
static void hash_scan_clobber (rtx, rtx, struct hash_table *);
static void hash_scan_call (rtx, rtx, struct hash_table *);
static int want_to_gcse_p (rtx);
static bool can_assign_to_reg_p (rtx);
static bool gcse_constant_p (rtx);
static int oprs_unchanged_p (rtx, rtx, int);
static int oprs_anticipatable_p (rtx, rtx);
static int oprs_available_p (rtx, rtx);
static void insert_expr_in_table (rtx, enum machine_mode, rtx, int, int,
				  struct hash_table *);
static void insert_set_in_table (rtx, rtx, struct hash_table *);
static unsigned int hash_expr (rtx, enum machine_mode, int *, int);
static unsigned int hash_set (int, int);
static int expr_equiv_p (rtx, rtx);
static void record_last_reg_set_info (rtx, int);
static void record_last_mem_set_info (rtx);
static void record_last_set_info (rtx, rtx, void *);
static void compute_hash_table (struct hash_table *);
static void alloc_hash_table (int, struct hash_table *, int);
static void free_hash_table (struct hash_table *);
static void compute_hash_table_work (struct hash_table *);
static void dump_hash_table (FILE *, const char *, struct hash_table *);
static struct expr *lookup_set (unsigned int, struct hash_table *);
static struct expr *next_set (unsigned int, struct expr *);
static void reset_opr_set_tables (void);
static int oprs_not_set_p (rtx, rtx);
static void mark_call (rtx);
static void mark_set (rtx, rtx);
static void mark_clobber (rtx, rtx);
static void mark_oprs_set (rtx);
static void alloc_cprop_mem (int, int);
static void free_cprop_mem (void);
static void compute_transp (rtx, int, sbitmap *, int);
static void compute_transpout (void);
static void compute_local_properties (sbitmap *, sbitmap *, sbitmap *,
				      struct hash_table *);
static void compute_cprop_data (void);
static void find_used_regs (rtx *, void *);
static int try_replace_reg (rtx, rtx, rtx);
static struct expr *find_avail_set (int, rtx);
static int cprop_jump (basic_block, rtx, rtx, rtx, rtx);
static void mems_conflict_for_gcse_p (rtx, rtx, void *);
static int load_killed_in_block_p (basic_block, int, rtx, int);
static void canon_list_insert (rtx, rtx, void *);
static int cprop_insn (rtx, int);
static int cprop (int);
static void find_implicit_sets (void);
static int one_cprop_pass (int, bool, bool);
static bool constprop_register (rtx, rtx, rtx, bool);
static struct expr *find_bypass_set (int, int);
static bool reg_killed_on_edge (rtx, edge);
static int bypass_block (basic_block, rtx, rtx);
static int bypass_conditional_jumps (void);
static void alloc_pre_mem (int, int);
static void free_pre_mem (void);
static void compute_pre_data (void);
static int pre_expr_reaches_here_p (basic_block, struct expr *,
				    basic_block);
static void insert_insn_end_bb (struct expr *, basic_block, int);
static void pre_insert_copy_insn (struct expr *, rtx);
static void pre_insert_copies (void);
static int pre_delete (void);
static int pre_gcse (void);
static int one_pre_gcse_pass (int);
static void add_label_notes (rtx, rtx);
static void alloc_code_hoist_mem (int, int);
static void free_code_hoist_mem (void);
static void compute_code_hoist_vbeinout (void);
static void compute_code_hoist_data (void);
static int hoist_expr_reaches_here_p (basic_block, int, basic_block, char *);
static void hoist_code (void);
static int one_code_hoisting_pass (void);
static rtx process_insert_insn (struct expr *);
static int pre_edge_insert (struct edge_list *, struct expr **);
static int pre_expr_reaches_here_p_work (basic_block, struct expr *,
					 basic_block, char *);
static struct ls_expr * ldst_entry (rtx);
static void free_ldst_entry (struct ls_expr *);
static void free_ldst_mems (void);
static void print_ldst_list (FILE *);
static struct ls_expr * find_rtx_in_ldst (rtx);
static int enumerate_ldsts (void);
static inline struct ls_expr * first_ls_expr (void);
static inline struct ls_expr * next_ls_expr (struct ls_expr *);
static int simple_mem (rtx);
static void invalidate_any_buried_refs (rtx);
static void compute_ld_motion_mems (void);
static void trim_ld_motion_mems (void);
static void update_ld_motion_stores (struct expr *);
static void reg_set_info (rtx, rtx, void *);
static void reg_clear_last_set (rtx, rtx, void *);
static bool store_ops_ok (rtx, int *);
static rtx extract_mentioned_regs (rtx);
static rtx extract_mentioned_regs_helper (rtx, rtx);
static void find_moveable_store (rtx, int *, int *);
static int compute_store_table (void);
static bool load_kills_store (rtx, rtx, int);
static bool find_loads (rtx, rtx, int);
static bool store_killed_in_insn (rtx, rtx, rtx, int);
static bool store_killed_after (rtx, rtx, rtx, basic_block, int *, rtx *);
static bool store_killed_before (rtx, rtx, rtx, basic_block, int *);
static void build_store_vectors (void);
static void insert_insn_start_bb (rtx, basic_block);
static int insert_store (struct ls_expr *, edge);
static void remove_reachable_equiv_notes (basic_block, struct ls_expr *);
static void replace_store_insn (rtx, rtx, basic_block, struct ls_expr *);
static void delete_store (struct ls_expr *, basic_block);
static void free_store_memory (void);
static void store_motion (void);
static void free_insn_expr_list_list (rtx *);
static void clear_modify_mem_tables (void);
static void free_modify_mem_tables (void);
static rtx gcse_emit_move_after (rtx, rtx, rtx);
static void local_cprop_find_used_regs (rtx *, void *);
static bool do_local_cprop (rtx, rtx, bool, rtx *);
static bool adjust_libcall_notes (rtx, rtx, rtx, rtx *);
static void local_cprop_pass (bool);
static bool is_too_expensive (const char *);
/* Entry point for global common subexpression elimination.
   F is the first instruction in the function.  Return nonzero if a
   change is made.  */
static int
gcse_main (rtx f ATTRIBUTE_UNUSED)
{
  int changed, pass;
  /* Bytes used at start of pass.  */
  int initial_bytes_used;
  /* Maximum number of bytes used by a pass.  */
  int max_pass_bytes;
  /* Point to release obstack data from for each pass.  */
  char *gcse_obstack_bottom;

  /* We do not construct an accurate cfg in functions which call
     setjmp, so just punt to be safe.  */
  if (current_function_calls_setjmp)
    return 0;

  /* Assume that we do not need to run jump optimizations after gcse.  */
  run_jump_opt_after_gcse = 0;

  /* Identify the basic block information for this function, including
     successors and predecessors.  */
  max_gcse_regno = max_reg_num ();

  if (dump_file)
    dump_flow_info (dump_file, dump_flags);

  /* Return if there's nothing to do, or it is too expensive.  */
  if (n_basic_blocks <= NUM_FIXED_BLOCKS + 1
      || is_too_expensive (_("GCSE disabled")))
    return 0;

  gcc_obstack_init (&gcse_obstack);
  bytes_used = 0;

  /* We need alias.  */
  init_alias_analysis ();
  /* Record where pseudo-registers are set.  This data is kept accurate
     during each pass.  ??? We could also record hard-reg information here
     [since it's unchanging], however it is currently done during hash table
     computation.

     It may be tempting to compute MEM set information here too, but MEM sets
     will be subject to code motion one day and thus we need to compute
     information about memory sets when we build the hash tables.  */

  alloc_reg_set_mem (max_gcse_regno);
  compute_sets ();

  pass = 0;
  initial_bytes_used = bytes_used;
  max_pass_bytes = 0;
  gcse_obstack_bottom = gcse_alloc (1);
  changed = 1;
  while (changed && pass < MAX_GCSE_PASSES)
    {
      changed = 0;
      if (dump_file)
	fprintf (dump_file, "GCSE pass %d\n\n", pass + 1);

      /* Initialize bytes_used to the space for the pred/succ lists,
	 and the reg_set_table data.  */
      bytes_used = initial_bytes_used;

      /* Each pass may create new registers, so recalculate each time.  */
      max_gcse_regno = max_reg_num ();

      alloc_gcse_mem ();

      /* Don't allow constant propagation to modify jumps
	 during this pass.  */
      timevar_push (TV_CPROP1);
      changed = one_cprop_pass (pass + 1, false, false);
      timevar_pop (TV_CPROP1);

      if (optimize_size)
	/* Do nothing.  */ ;
      else
	{
	  timevar_push (TV_PRE);
	  changed |= one_pre_gcse_pass (pass + 1);
	  /* We may have just created new basic blocks.  Release and
	     recompute various things which are sized on the number of
	     basic blocks.  */
	  if (changed)
	    {
	      free_modify_mem_tables ();
	      modify_mem_list = gcalloc (last_basic_block, sizeof (rtx));
	      canon_modify_mem_list = gcalloc (last_basic_block, sizeof (rtx));
	    }
	  free_reg_set_mem ();
	  alloc_reg_set_mem (max_reg_num ());
	  compute_sets ();
	  run_jump_opt_after_gcse = 1;
	  timevar_pop (TV_PRE);
	}

      if (max_pass_bytes < bytes_used)
	max_pass_bytes = bytes_used;

      /* Free up memory, then reallocate for code hoisting.  We can
	 not re-use the existing allocated memory because the tables
	 will not have info for the insns or registers created by
	 partial redundancy elimination.  */
      free_gcse_mem ();

      /* It does not make sense to run code hoisting unless we are optimizing
	 for code size -- it rarely makes programs faster, and can make
	 them bigger if we did partial redundancy elimination (when optimizing
	 for space, we don't run the partial redundancy algorithms).  */
      if (optimize_size)
	{
	  timevar_push (TV_HOIST);
	  max_gcse_regno = max_reg_num ();
	  alloc_gcse_mem ();
	  changed |= one_code_hoisting_pass ();
	  free_gcse_mem ();

	  if (max_pass_bytes < bytes_used)
	    max_pass_bytes = bytes_used;
	  timevar_pop (TV_HOIST);
	}

      if (dump_file)
	{
	  fprintf (dump_file, "\n");
	  fflush (dump_file);
	}

      obstack_free (&gcse_obstack, gcse_obstack_bottom);
      pass++;
    }

  /* Do one last pass of copy propagation, including cprop into
     conditional jumps.  */

  max_gcse_regno = max_reg_num ();
  alloc_gcse_mem ();
  /* This time, go ahead and allow cprop to alter jumps.  */
  timevar_push (TV_CPROP2);
  one_cprop_pass (pass + 1, true, false);
  timevar_pop (TV_CPROP2);
  free_gcse_mem ();

  if (dump_file)
    {
      fprintf (dump_file, "GCSE of %s: %d basic blocks, ",
	       current_function_name (), n_basic_blocks);
      fprintf (dump_file, "%d pass%s, %d bytes\n\n",
	       pass, pass > 1 ? "es" : "", max_pass_bytes);
    }

  obstack_free (&gcse_obstack, NULL);
  free_reg_set_mem ();

  /* We are finished with alias.  */
  end_alias_analysis ();
  allocate_reg_info (max_reg_num (), FALSE, FALSE);

  if (!optimize_size && flag_gcse_sm)
    {
      timevar_push (TV_LSM);
      store_motion ();
      timevar_pop (TV_LSM);
    }
  return run_jump_opt_after_gcse;
}
/* Misc. utilities.  */

/* Nonzero for each mode that supports (set (reg) (reg)).
   This is trivially true for integer and floating point values.
   It may or may not be true for condition codes.  */
static char can_copy[(int) NUM_MACHINE_MODES];

/* Compute which modes support reg/reg copy operations.  */

static void
compute_can_copy (void)
{
  int i;
#ifndef AVOID_CCMODE_COPIES
  rtx reg, insn;
#endif
  memset (can_copy, 0, NUM_MACHINE_MODES);

  start_sequence ();
  for (i = 0; i < NUM_MACHINE_MODES; i++)
    if (GET_MODE_CLASS (i) == MODE_CC)
      {
#ifdef AVOID_CCMODE_COPIES
	can_copy[i] = 0;
#else
	reg = gen_rtx_REG ((enum machine_mode) i, LAST_VIRTUAL_REGISTER + 1);
	insn = emit_insn (gen_rtx_SET (VOIDmode, reg, reg));
	if (recog (PATTERN (insn), insn, NULL) >= 0)
	  can_copy[i] = 1;
#endif
      }
    else
      can_copy[i] = 1;

  end_sequence ();
}
/* Returns whether the mode supports reg/reg copy operations.  */

bool
can_copy_p (enum machine_mode mode)
{
  static bool can_copy_init_p = false;

  if (! can_copy_init_p)
    {
      compute_can_copy ();
      can_copy_init_p = true;
    }

  return can_copy[mode] != 0;
}
/* Cover function to xmalloc to record bytes allocated.  */

static void *
gmalloc (size_t size)
{
  bytes_used += size;
  return xmalloc (size);
}

/* Cover function to xcalloc to record bytes allocated.  */

static void *
gcalloc (size_t nelem, size_t elsize)
{
  bytes_used += nelem * elsize;
  return xcalloc (nelem, elsize);
}

/* Cover function to xrealloc.
   We don't record the additional size since we don't know it.
   It won't affect memory usage stats much anyway.  */

static void *
grealloc (void *ptr, size_t size)
{
  return xrealloc (ptr, size);
}

/* Cover function to obstack_alloc.  */

static void *
gcse_alloc (unsigned long size)
{
  bytes_used += size;
  return obstack_alloc (&gcse_obstack, size);
}
/* Allocate memory for the cuid mapping array,
   and reg/memory set tracking tables.

   This is called at the start of each pass.  */

static void
alloc_gcse_mem (void)
{
  int i;
  basic_block bb;
  rtx insn;

  /* Find the largest UID and create a mapping from UIDs to CUIDs.
     CUIDs are like UIDs except they increase monotonically, have no gaps,
     and only apply to real insns.
     (Actually, there are gaps, for insns that are not inside a basic block,
     but we should never see those anyway, so this is OK.)  */

  max_uid = get_max_uid ();
  uid_cuid = gcalloc (max_uid + 1, sizeof (int));
  i = 0;
  FOR_EACH_BB (bb)
    FOR_BB_INSNS (bb, insn)
      {
	if (INSN_P (insn))
	  uid_cuid[INSN_UID (insn)] = i++;
	else
	  uid_cuid[INSN_UID (insn)] = i;
      }

  /* Create a table mapping cuids to insns.  */

  max_cuid = i;
  cuid_insn = gcalloc (max_cuid + 1, sizeof (rtx));
  i = 0;
  FOR_EACH_BB (bb)
    FOR_BB_INSNS (bb, insn)
      if (INSN_P (insn))
	CUID_INSN (i++) = insn;

  /* Allocate vars to track sets of regs.  */
  reg_set_bitmap = BITMAP_ALLOC (NULL);

  /* Allocate vars to track sets of regs, memory per block.  */
  reg_set_in_block = sbitmap_vector_alloc (last_basic_block, max_gcse_regno);
  /* Allocate array to keep a list of insns which modify memory in each
     basic block.  */
  modify_mem_list = gcalloc (last_basic_block, sizeof (rtx));
  canon_modify_mem_list = gcalloc (last_basic_block, sizeof (rtx));
  modify_mem_list_set = BITMAP_ALLOC (NULL);
  blocks_with_calls = BITMAP_ALLOC (NULL);
}
/* Free memory allocated by alloc_gcse_mem.  */

static void
free_gcse_mem (void)
{
  free (uid_cuid);
  free (cuid_insn);

  BITMAP_FREE (reg_set_bitmap);

  sbitmap_vector_free (reg_set_in_block);
  free_modify_mem_tables ();
  BITMAP_FREE (modify_mem_list_set);
  BITMAP_FREE (blocks_with_calls);
}
/* Compute the local properties of each recorded expression.

   Local properties are those that are defined by the block, irrespective of
   other blocks.

   An expression is transparent in a block if its operands are not modified
   in the block.

   An expression is computed (locally available) in a block if it is computed
   at least once and the expression would contain the same value if the
   computation was moved to the end of the block.

   An expression is locally anticipatable in a block if it is computed at
   least once and the expression would contain the same value if the
   computation was moved to the beginning of the block.

   We call this routine for cprop, pre and code hoisting.  They all compute
   basically the same information and thus can easily share this code.

   TRANSP, COMP, and ANTLOC are destination sbitmaps for recording local
   properties.  If NULL, then it is not necessary to compute or record that
   particular property.

   TABLE controls which hash table to look at.  If it is the set hash table,
   additionally, TRANSP is computed as ~TRANSP, since this is really cprop's
   ABSALTERED.  */
static void
compute_local_properties (sbitmap *transp, sbitmap *comp, sbitmap *antloc,
			  struct hash_table *table)
{
  unsigned int i;

  /* Initialize any bitmaps that were passed in.  */
  if (transp)
    {
      if (table->set_p)
	sbitmap_vector_zero (transp, last_basic_block);
      else
	sbitmap_vector_ones (transp, last_basic_block);
    }

  if (comp)
    sbitmap_vector_zero (comp, last_basic_block);
  if (antloc)
    sbitmap_vector_zero (antloc, last_basic_block);

  for (i = 0; i < table->size; i++)
    {
      struct expr *expr;

      for (expr = table->table[i]; expr != NULL; expr = expr->next_same_hash)
	{
	  int indx = expr->bitmap_index;
	  struct occr *occr;

	  /* The expression is transparent in this block if it is not killed.
	     We start by assuming all are transparent [none are killed], and
	     then reset the bits for those that are.  */
	  if (transp)
	    compute_transp (expr->expr, indx, transp, table->set_p);

	  /* The occurrences recorded in antic_occr are exactly those that
	     we want to set to nonzero in ANTLOC.  */
	  if (antloc)
	    for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
	      {
		SET_BIT (antloc[BLOCK_NUM (occr->insn)], indx);

		/* While we're scanning the table, this is a good place to
		   initialize this.  */
		occr->deleted_p = 0;
	      }

	  /* The occurrences recorded in avail_occr are exactly those that
	     we want to set to nonzero in COMP.  */
	  if (comp)
	    for (occr = expr->avail_occr; occr != NULL; occr = occr->next)
	      {
		SET_BIT (comp[BLOCK_NUM (occr->insn)], indx);

		/* While we're scanning the table, this is a good place to
		   initialize this.  */
		occr->copied_p = 0;
	      }

	  /* While we're scanning the table, this is a good place to
	     initialize this.  */
	  expr->reaching_reg = 0;
	}
    }
}
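
/* For orientation (a hedged editor's note, not from the original sources):
   the local sets computed above feed the lazy-code-motion solver in lcm.c,
   which iterates dataflow equations of roughly this shape to a fixed point:

	ANTOUT[b] = intersection over all successors S of B of ANTIN[S]
	ANTIN[b]  = ANTLOC[b] | (ANTOUT[b] & TRANSP[b])

   (with ANTOUT of the exit block empty), plus analogous equations for
   availability and earliest/later insertion-point placement.  */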
/* Register set information.

   `reg_set_table' records where each register is set or otherwise
   modified.  */

static struct obstack reg_set_obstack;

static void
alloc_reg_set_mem (int n_regs)
{
  reg_set_table_size = n_regs + REG_SET_TABLE_SLOP;
  reg_set_table = gcalloc (reg_set_table_size, sizeof (struct reg_set *));

  gcc_obstack_init (&reg_set_obstack);
}

static void
free_reg_set_mem (void)
{
  free (reg_set_table);
  obstack_free (&reg_set_obstack, NULL);
}
/* Record REGNO in the reg_set table.  */

static void
record_one_set (int regno, rtx insn)
{
  /* Allocate a new reg_set element and link it onto the list.  */
  struct reg_set *new_reg_info;

  /* If the table isn't big enough, enlarge it.  */
  if (regno >= reg_set_table_size)
    {
      int new_size = regno + REG_SET_TABLE_SLOP;

      reg_set_table = grealloc (reg_set_table,
				new_size * sizeof (struct reg_set *));
      memset (reg_set_table + reg_set_table_size, 0,
	      (new_size - reg_set_table_size) * sizeof (struct reg_set *));
      reg_set_table_size = new_size;
    }

  new_reg_info = obstack_alloc (&reg_set_obstack, sizeof (struct reg_set));
  bytes_used += sizeof (struct reg_set);
  new_reg_info->bb_index = BLOCK_NUM (insn);
  new_reg_info->next = reg_set_table[regno];
  reg_set_table[regno] = new_reg_info;
}
/* Called from compute_sets via note_stores to handle one SET or CLOBBER in
   an insn.  The DATA is really the instruction in which the SET is
   occurring.  */

static void
record_set_info (rtx dest, rtx setter ATTRIBUTE_UNUSED, void *data)
{
  rtx record_set_insn = (rtx) data;

  if (REG_P (dest) && REGNO (dest) >= FIRST_PSEUDO_REGISTER)
    record_one_set (REGNO (dest), record_set_insn);
}

/* Scan the function and record each set of each pseudo-register.

   This is called once, at the start of the gcse pass.  See the comments for
   `reg_set_table' for further documentation.  */

static void
compute_sets (void)
{
  basic_block bb;
  rtx insn;

  FOR_EACH_BB (bb)
    FOR_BB_INSNS (bb, insn)
      if (INSN_P (insn))
	note_stores (PATTERN (insn), record_set_info, insn);
}
/* Hash table support.  */

struct reg_avail_info
{
  basic_block last_bb;
  int first_set;
  int last_set;
};

static struct reg_avail_info *reg_avail_info;
static basic_block current_bb;
/* See whether X, the source of a set, is something we want to consider for
   GCSE.  */

static int
want_to_gcse_p (rtx x)
{
#ifdef STACK_REGS
  /* On register stack architectures, don't GCSE constants from the
     constant pool, as the benefits are often swamped by the overhead
     of shuffling the register stack between basic blocks.  */
  if (IS_STACK_MODE (GET_MODE (x)))
    x = avoid_constant_pool_reference (x);
#endif

  switch (GET_CODE (x))
    {
    case REG:
    case SUBREG:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CALL:
      return 0;

    default:
      return can_assign_to_reg_p (x);
    }
}
/* Used internally by can_assign_to_reg_p.  */

static GTY(()) rtx test_insn;

/* Return true if we can assign X to a pseudo register.  */

static bool
can_assign_to_reg_p (rtx x)
{
  int num_clobbers = 0;
  int icode;

  /* If this is a valid operand, we are OK.  If it's VOIDmode, we aren't.  */
  if (general_operand (x, GET_MODE (x)))
    return 1;
  else if (GET_MODE (x) == VOIDmode)
    return 0;

  /* Otherwise, check if we can make a valid insn from it.  First initialize
     our test insn if we haven't already.  */
  if (test_insn == 0)
    {
      test_insn
	= make_insn_raw (gen_rtx_SET (VOIDmode,
				      gen_rtx_REG (word_mode,
						   FIRST_PSEUDO_REGISTER * 2),
				      const0_rtx));
      NEXT_INSN (test_insn) = PREV_INSN (test_insn) = 0;
    }

  /* Now make an insn like the one we would make when GCSE'ing and see if
     valid.  */
  PUT_MODE (SET_DEST (PATTERN (test_insn)), GET_MODE (x));
  SET_SRC (PATTERN (test_insn)) = x;
  return ((icode = recog (PATTERN (test_insn), test_insn, &num_clobbers)) >= 0
	  && (num_clobbers == 0 || ! added_clobbers_hard_reg_p (icode)));
}
/* Return nonzero if the operands of expression X are unchanged from the
   start of INSN's basic block up to but not including INSN (if AVAIL_P == 0),
   or from INSN to the end of INSN's basic block (if AVAIL_P != 0).  */

static int
oprs_unchanged_p (rtx x, rtx insn, int avail_p)
{
  int i, j;
  enum rtx_code code;
  const char *fmt;

  if (x == 0)
    return 1;

  code = GET_CODE (x);
  switch (code)
    {
    case REG:
      {
	struct reg_avail_info *info = &reg_avail_info[REGNO (x)];

	if (info->last_bb != current_bb)
	  return 1;
	if (avail_p)
	  return info->last_set < INSN_CUID (insn);
	else
	  return info->first_set >= INSN_CUID (insn);
      }

    case MEM:
      if (load_killed_in_block_p (current_bb, INSN_CUID (insn),
				  x, avail_p))
	return 0;
      else
	return oprs_unchanged_p (XEXP (x, 0), insn, avail_p);

    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
    case PRE_MODIFY:
    case POST_MODIFY:
      return 0;

    case PC:
    case CC0: /*FIXME*/
    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return 1;

    default:
      break;
    }

  for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  /* If we are about to do the last recursive call needed at this
	     level, change it into iteration.  This function is called enough
	     to be worth it.  */
	  if (i == 0)
	    return oprs_unchanged_p (XEXP (x, i), insn, avail_p);

	  else if (! oprs_unchanged_p (XEXP (x, i), insn, avail_p))
	    return 0;
	}
      else if (fmt[i] == 'E')
	for (j = 0; j < XVECLEN (x, i); j++)
	  if (! oprs_unchanged_p (XVECEXP (x, i, j), insn, avail_p))
	    return 0;
    }

  return 1;
}
/* Used for communication between mems_conflict_for_gcse_p and
   load_killed_in_block_p.  Nonzero if mems_conflict_for_gcse_p finds a
   conflict between two memory references.  */
static int gcse_mems_conflict_p;

/* Used for communication between mems_conflict_for_gcse_p and
   load_killed_in_block_p.  A memory reference for a load instruction;
   mems_conflict_for_gcse_p will see if a memory store conflicts with
   this memory load.  */
static rtx gcse_mem_operand;

/* DEST is the output of an instruction.  If it is a memory reference, and
   possibly conflicts with the load found in gcse_mem_operand, then set
   gcse_mems_conflict_p to a nonzero value.  */

static void
mems_conflict_for_gcse_p (rtx dest, rtx setter ATTRIBUTE_UNUSED,
			  void *data ATTRIBUTE_UNUSED)
{
  while (GET_CODE (dest) == SUBREG
	 || GET_CODE (dest) == ZERO_EXTRACT
	 || GET_CODE (dest) == STRICT_LOW_PART)
    dest = XEXP (dest, 0);

  /* If DEST is not a MEM, then it will not conflict with the load.  Note
     that function calls are assumed to clobber memory, but are handled
     elsewhere.  */
  if (! MEM_P (dest))
    return;

  /* If we are setting a MEM in our list of specially recognized MEMs,
     don't mark as killed this time.  */

  if (expr_equiv_p (dest, gcse_mem_operand) && pre_ldst_mems != NULL)
    {
      if (!find_rtx_in_ldst (dest))
	gcse_mems_conflict_p = 1;
      return;
    }

  if (true_dependence (dest, GET_MODE (dest), gcse_mem_operand,
		       rtx_addr_varies_p))
    gcse_mems_conflict_p = 1;
}
/* Return nonzero if the expression in X (a memory reference) is killed
   in block BB before or after the insn with the CUID in UID_LIMIT.
   AVAIL_P is nonzero for kills after UID_LIMIT, and zero for kills
   before UID_LIMIT.

   To check the entire block, set UID_LIMIT to max_uid + 1 and
   AVAIL_P to 0.  */

static int
load_killed_in_block_p (basic_block bb, int uid_limit, rtx x, int avail_p)
{
  rtx list_entry = modify_mem_list[bb->index];

  /* If this is a readonly MEM, then we aren't going to be changing it.  */
  if (MEM_READONLY_P (x))
    return 0;

  while (list_entry)
    {
      rtx setter;
      /* Ignore entries in the list that do not apply.  */
      if ((avail_p
	   && INSN_CUID (XEXP (list_entry, 0)) < uid_limit)
	  || (! avail_p
	      && INSN_CUID (XEXP (list_entry, 0)) > uid_limit))
	{
	  list_entry = XEXP (list_entry, 1);
	  continue;
	}

      setter = XEXP (list_entry, 0);

      /* If SETTER is a call everything is clobbered.  Note that calls
	 to pure functions are never put on the list, so we need not
	 worry about them.  */
      if (CALL_P (setter))
	return 1;

      /* SETTER must be an INSN of some kind that sets memory.  Call
	 note_stores to examine each hunk of memory that is modified.

	 The note_stores interface is pretty limited, so we have to
	 communicate via global variables.  Yuk.  */
      gcse_mem_operand = x;
      gcse_mems_conflict_p = 0;
      note_stores (PATTERN (setter), mems_conflict_for_gcse_p, NULL);
      if (gcse_mems_conflict_p)
	return 1;
      list_entry = XEXP (list_entry, 1);
    }
  return 0;
}
/* Return nonzero if the operands of expression X are unchanged from
   the start of INSN's basic block up to but not including INSN.  */

static int
oprs_anticipatable_p (rtx x, rtx insn)
{
  return oprs_unchanged_p (x, insn, 0);
}

/* Return nonzero if the operands of expression X are unchanged from
   INSN to the end of INSN's basic block.  */

static int
oprs_available_p (rtx x, rtx insn)
{
  return oprs_unchanged_p (x, insn, 1);
}
/* Hash expression X.

   MODE is only used if X is a CONST_INT.  DO_NOT_RECORD_P is a boolean
   indicating if a volatile operand is found or if the expression contains
   something we don't want to insert in the table.  HASH_TABLE_SIZE is
   the current size of the hash table to be probed.  */

static unsigned int
hash_expr (rtx x, enum machine_mode mode, int *do_not_record_p,
	   int hash_table_size)
{
  unsigned int hash;

  *do_not_record_p = 0;

  hash = hash_rtx (x, mode, do_not_record_p,
		   NULL, /*have_reg_qty=*/false);
  return hash % hash_table_size;
}
/* Hash a set of register REGNO.

   Sets are hashed on the register that is set.  This simplifies the PRE copy
   propagation code.

   ??? May need to make things more elaborate.  Later, as necessary.  */

static unsigned int
hash_set (int regno, int hash_table_size)
{
  unsigned int hash;

  hash = regno;
  return hash % hash_table_size;
}
/* Return nonzero if X is equivalent to Y.  */

static int
expr_equiv_p (rtx x, rtx y)
{
  return exp_equiv_p (x, y, 0, true);
}
/* Insert expression X in INSN in the hash TABLE.
   If it is already present, record it as the last occurrence in INSN's
   basic block.

   MODE is the mode of the value X is being stored into.
   It is only used if X is a CONST_INT.

   ANTIC_P is nonzero if X is an anticipatable expression.
   AVAIL_P is nonzero if X is an available expression.  */

static void
insert_expr_in_table (rtx x, enum machine_mode mode, rtx insn, int antic_p,
		      int avail_p, struct hash_table *table)
{
  int found, do_not_record_p;
  unsigned int hash;
  struct expr *cur_expr, *last_expr = NULL;
  struct occr *antic_occr, *avail_occr;

  hash = hash_expr (x, mode, &do_not_record_p, table->size);

  /* Do not insert expression in table if it contains volatile operands,
     or if hash_expr determines the expression is something we don't want
     to or can't handle.  */
  if (do_not_record_p)
    return;

  cur_expr = table->table[hash];
  found = 0;

  while (cur_expr && 0 == (found = expr_equiv_p (cur_expr->expr, x)))
    {
      /* If the expression isn't found, save a pointer to the end of
	 the list.  */
      last_expr = cur_expr;
      cur_expr = cur_expr->next_same_hash;
    }

  if (! found)
    {
      cur_expr = gcse_alloc (sizeof (struct expr));
      bytes_used += sizeof (struct expr);
      if (table->table[hash] == NULL)
	/* This is the first pattern that hashed to this index.  */
	table->table[hash] = cur_expr;
      else
	/* Add EXPR to end of this hash chain.  */
	last_expr->next_same_hash = cur_expr;

      /* Set the fields of the expr element.  */
      cur_expr->expr = x;
      cur_expr->bitmap_index = table->n_elems++;
      cur_expr->next_same_hash = NULL;
      cur_expr->antic_occr = NULL;
      cur_expr->avail_occr = NULL;
    }

  /* Now record the occurrence(s).  */
  if (antic_p)
    {
      antic_occr = cur_expr->antic_occr;

      if (antic_occr && BLOCK_NUM (antic_occr->insn) != BLOCK_NUM (insn))
	antic_occr = NULL;

      if (antic_occr)
	/* Found another instance of the expression in the same basic block.
	   Prefer the currently recorded one.  We want the first one in the
	   block and the block is scanned from start to end.  */
	; /* nothing to do */
      else
	{
	  /* First occurrence of this expression in this basic block.  */
	  antic_occr = gcse_alloc (sizeof (struct occr));
	  bytes_used += sizeof (struct occr);
	  antic_occr->insn = insn;
	  antic_occr->next = cur_expr->antic_occr;
	  antic_occr->deleted_p = 0;
	  cur_expr->antic_occr = antic_occr;
	}
    }

  if (avail_p)
    {
      avail_occr = cur_expr->avail_occr;

      if (avail_occr && BLOCK_NUM (avail_occr->insn) == BLOCK_NUM (insn))
	{
	  /* Found another instance of the expression in the same basic block.
	     Prefer this occurrence to the currently recorded one.  We want
	     the last one in the block and the block is scanned from start
	     to end.  */
	  avail_occr->insn = insn;
	}
      else
	{
	  /* First occurrence of this expression in this basic block.  */
	  avail_occr = gcse_alloc (sizeof (struct occr));
	  bytes_used += sizeof (struct occr);
	  avail_occr->insn = insn;
	  avail_occr->next = cur_expr->avail_occr;
	  avail_occr->deleted_p = 0;
	  cur_expr->avail_occr = avail_occr;
	}
    }
}
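
/* A hedged illustration (editor's note, not from the original sources):
   if one block computes the same expression three times, only the first
   computation can land on the antic_occr chain (its operands are unmodified
   from the block start) and only the last on the avail_occr chain
   (unmodified through the block end); a middle occurrence is recorded on
   neither list.  */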
/* Insert pattern X in INSN in the hash table.
   X is a SET of a reg to either another reg or a constant.
   If it is already present, record it as the last occurrence in INSN's
   basic block.  */

static void
insert_set_in_table (rtx x, rtx insn, struct hash_table *table)
{
  int found;
  unsigned int hash;
  struct expr *cur_expr, *last_expr = NULL;
  struct occr *cur_occr;

  gcc_assert (GET_CODE (x) == SET && REG_P (SET_DEST (x)));

  hash = hash_set (REGNO (SET_DEST (x)), table->size);

  cur_expr = table->table[hash];
  found = 0;

  while (cur_expr && 0 == (found = expr_equiv_p (cur_expr->expr, x)))
    {
      /* If the expression isn't found, save a pointer to the end of
	 the list.  */
      last_expr = cur_expr;
      cur_expr = cur_expr->next_same_hash;
    }

  if (! found)
    {
      cur_expr = gcse_alloc (sizeof (struct expr));
      bytes_used += sizeof (struct expr);
      if (table->table[hash] == NULL)
	/* This is the first pattern that hashed to this index.  */
	table->table[hash] = cur_expr;
      else
	/* Add EXPR to end of this hash chain.  */
	last_expr->next_same_hash = cur_expr;

      /* Set the fields of the expr element.
	 We must copy X because it can be modified when copy propagation is
	 performed on its operands.  */
      cur_expr->expr = copy_rtx (x);
      cur_expr->bitmap_index = table->n_elems++;
      cur_expr->next_same_hash = NULL;
      cur_expr->antic_occr = NULL;
      cur_expr->avail_occr = NULL;
    }

  /* Now record the occurrence.  */
  cur_occr = cur_expr->avail_occr;

  if (cur_occr && BLOCK_NUM (cur_occr->insn) == BLOCK_NUM (insn))
    {
      /* Found another instance of the expression in the same basic block.
	 Prefer this occurrence to the currently recorded one.  We want
	 the last one in the block and the block is scanned from start
	 to end.  */
      cur_occr->insn = insn;
    }
  else
    {
      /* First occurrence of this expression in this basic block.  */
      cur_occr = gcse_alloc (sizeof (struct occr));
      bytes_used += sizeof (struct occr);

      cur_occr->insn = insn;
      cur_occr->next = cur_expr->avail_occr;
      cur_occr->deleted_p = 0;
      cur_expr->avail_occr = cur_occr;
    }
}
/* Determine whether the rtx X should be treated as a constant for
   the purposes of GCSE's constant propagation.  */

static bool
gcse_constant_p (rtx x)
{
  /* Consider a COMPARE of two integers constant.  */
  if (GET_CODE (x) == COMPARE
      && GET_CODE (XEXP (x, 0)) == CONST_INT
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    return true;

  /* Consider a COMPARE of the same registers constant if they are not
     floating point registers; a floating point register compared with
     itself need not yield a constant result in the presence of NaNs.  */
  if (GET_CODE (x) == COMPARE
      && REG_P (XEXP (x, 0)) && REG_P (XEXP (x, 1))
      && REGNO (XEXP (x, 0)) == REGNO (XEXP (x, 1))
      && ! FLOAT_MODE_P (GET_MODE (XEXP (x, 0)))
      && ! FLOAT_MODE_P (GET_MODE (XEXP (x, 1))))
    return true;

  return CONSTANT_P (x);
}
1680 /* Scan pattern PAT of INSN and add an entry to the hash TABLE (set or
1681 expression one). */
1683 static void
1684 hash_scan_set (rtx pat, rtx insn, struct hash_table *table)
1686 rtx src = SET_SRC (pat);
1687 rtx dest = SET_DEST (pat);
1688 rtx note;
1690 if (GET_CODE (src) == CALL)
1691 hash_scan_call (src, insn, table);
1693 else if (REG_P (dest))
1695 unsigned int regno = REGNO (dest);
1696 rtx tmp;
1698 /* See if a REG_NOTE shows this equivalent to a simpler expression.
1699 This allows us to do a single GCSE pass and still eliminate
1700 redundant constants, addresses or other expressions that are
1701 constructed with multiple instructions. */
1702 note = find_reg_equal_equiv_note (insn);
1703 if (note != 0
1704 && (table->set_p
1705 ? gcse_constant_p (XEXP (note, 0))
1706 : want_to_gcse_p (XEXP (note, 0))))
1707 src = XEXP (note, 0), pat = gen_rtx_SET (VOIDmode, dest, src);
1709 /* Only record sets of pseudo-regs in the hash table. */
1710 if (! table->set_p
1711 && regno >= FIRST_PSEUDO_REGISTER
1712 /* Don't GCSE something if we can't do a reg/reg copy. */
1713 && can_copy_p (GET_MODE (dest))
1714 /* GCSE commonly inserts instruction after the insn. We can't
1715 do that easily for EH_REGION notes so disable GCSE on these
1716 for now. */
1717 && !find_reg_note (insn, REG_EH_REGION, NULL_RTX)
1718 /* Is SET_SRC something we want to gcse? */
1719 && want_to_gcse_p (src)
1720 /* Don't CSE a nop. */
1721 && ! set_noop_p (pat)
1722 /* Don't GCSE if it has attached REG_EQUIV note.
1723 At this point this only function parameters should have
1724 REG_EQUIV notes and if the argument slot is used somewhere
1725 explicitly, it means address of parameter has been taken,
1726 so we should not extend the lifetime of the pseudo. */
1727 && (note == NULL_RTX || ! MEM_P (XEXP (note, 0))))
1729 /* An expression is not anticipatable if its operands are
1730 modified before this insn or if this is not the only SET in
1731 this insn. */
1732 int antic_p = oprs_anticipatable_p (src, insn) && single_set (insn);
1733 /* An expression is not available if its operands are
1734 subsequently modified, including this insn. It's also not
1735 available if this is a branch, because we can't insert
1736 a set after the branch. */
1737 int avail_p = (oprs_available_p (src, insn)
1738 && ! JUMP_P (insn));
1740 insert_expr_in_table (src, GET_MODE (dest), insn, antic_p, avail_p, table);
1743 /* Record sets for constant/copy propagation. */
1744 else if (table->set_p
1745 && regno >= FIRST_PSEUDO_REGISTER
1746 && ((REG_P (src)
1747 && REGNO (src) >= FIRST_PSEUDO_REGISTER
1748 && can_copy_p (GET_MODE (dest))
1749 && REGNO (src) != regno)
1750 || gcse_constant_p (src))
1751 /* A copy is not available if its src or dest is subsequently
1752 modified. Here we want to search from INSN+1 on, but
1753 oprs_available_p searches from INSN on. */
1754 && (insn == BB_END (BLOCK_FOR_INSN (insn))
1755 || ((tmp = next_nonnote_insn (insn)) != NULL_RTX
1756 && oprs_available_p (pat, tmp))))
1757 insert_set_in_table (pat, insn, table);
1759 /* In the case of a store we want to consider the memory value as available
1760 in the REG stored in that memory. This makes it possible to remove
1761 redundant loads due to stores to the same location. */
1762 else if (flag_gcse_las && REG_P (src) && MEM_P (dest))
1764 unsigned int regno = REGNO (src);
1766 /* Do not do this for constant/copy propagation. */
1767 if (! table->set_p
1768 /* Only record sets of pseudo-regs in the hash table. */
1769 && regno >= FIRST_PSEUDO_REGISTER
1770 /* Don't GCSE something if we can't do a reg/reg copy. */
1771 && can_copy_p (GET_MODE (src))
1772 /* GCSE commonly inserts instructions after the insn. We can't
1773 do that easily for EH_REGION notes so disable GCSE on these
1774 for now. */
1775 && ! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
1776 /* Is SET_DEST something we want to gcse? */
1777 && want_to_gcse_p (dest)
1778 /* Don't CSE a nop. */
1779 && ! set_noop_p (pat)
1780 /* Don't GCSE if it has an attached REG_EQUIV note.
1781 At this point only function parameters should have
1782 REG_EQUIV notes, and if the argument slot is used somewhere
1783 explicitly, it means the address of the parameter has been taken,
1784 so we should not extend the lifetime of the pseudo. */
1785 && ((note = find_reg_note (insn, REG_EQUIV, NULL_RTX)) == 0
1786 || ! MEM_P (XEXP (note, 0))))
1788 /* Stores are never anticipatable. */
1789 int antic_p = 0;
1790 /* An expression is not available if its operands are
1791 subsequently modified, including this insn. It's also not
1792 available if this is a branch, because we can't insert
1793 a set after the branch. */
1794 int avail_p = oprs_available_p (dest, insn)
1795 && ! JUMP_P (insn);
1797 /* Record the memory expression (DEST) in the hash table. */
1798 insert_expr_in_table (dest, GET_MODE (dest), insn,
1799 antic_p, avail_p, table);
1804 static void
1805 hash_scan_clobber (rtx x ATTRIBUTE_UNUSED, rtx insn ATTRIBUTE_UNUSED,
1806 struct hash_table *table ATTRIBUTE_UNUSED)
1808 /* Currently nothing to do. */
1811 static void
1812 hash_scan_call (rtx x ATTRIBUTE_UNUSED, rtx insn ATTRIBUTE_UNUSED,
1813 struct hash_table *table ATTRIBUTE_UNUSED)
1815 /* Currently nothing to do. */
1818 /* Process INSN and add hash table entries as appropriate.
1820 Only available expressions that set a single pseudo-reg are recorded.
1822 Single sets in a PARALLEL could be handled, but it's an extra complication
1823 that isn't dealt with right now. The trick is handling the CLOBBERs that
1824 are also in the PARALLEL. Later.
1826 If SET_P is nonzero, this is for the assignment hash table,
1827 otherwise it is for the expression hash table.
1828 If IN_LIBCALL_BLOCK is nonzero, we are in a libcall block and should
1829 not record any expressions. */
1831 static void
1832 hash_scan_insn (rtx insn, struct hash_table *table, int in_libcall_block)
1834 rtx pat = PATTERN (insn);
1835 int i;
1837 if (in_libcall_block)
1838 return;
1840 /* Pick out the sets of INSN and for other forms of instructions record
1841 what's been modified. */
1843 if (GET_CODE (pat) == SET)
1844 hash_scan_set (pat, insn, table);
1845 else if (GET_CODE (pat) == PARALLEL)
1846 for (i = 0; i < XVECLEN (pat, 0); i++)
1848 rtx x = XVECEXP (pat, 0, i);
1850 if (GET_CODE (x) == SET)
1851 hash_scan_set (x, insn, table);
1852 else if (GET_CODE (x) == CLOBBER)
1853 hash_scan_clobber (x, insn, table);
1854 else if (GET_CODE (x) == CALL)
1855 hash_scan_call (x, insn, table);
1858 else if (GET_CODE (pat) == CLOBBER)
1859 hash_scan_clobber (pat, insn, table);
1860 else if (GET_CODE (pat) == CALL)
1861 hash_scan_call (pat, insn, table);
1864 static void
1865 dump_hash_table (FILE *file, const char *name, struct hash_table *table)
1867 int i;
1868 /* Flattened out table, so it's printed in proper order. */
1869 struct expr **flat_table;
1870 unsigned int *hash_val;
1871 struct expr *expr;
1873 flat_table = xcalloc (table->n_elems, sizeof (struct expr *));
1874 hash_val = xmalloc (table->n_elems * sizeof (unsigned int));
1876 for (i = 0; i < (int) table->size; i++)
1877 for (expr = table->table[i]; expr != NULL; expr = expr->next_same_hash)
1879 flat_table[expr->bitmap_index] = expr;
1880 hash_val[expr->bitmap_index] = i;
1883 fprintf (file, "%s hash table (%d buckets, %d entries)\n",
1884 name, table->size, table->n_elems);
1886 for (i = 0; i < (int) table->n_elems; i++)
1887 if (flat_table[i] != 0)
1889 expr = flat_table[i];
1890 fprintf (file, "Index %d (hash value %d)\n ",
1891 expr->bitmap_index, hash_val[i]);
1892 print_rtl (file, expr->expr);
1893 fprintf (file, "\n");
1896 fprintf (file, "\n");
1898 free (flat_table);
1899 free (hash_val);
1902 /* Record register first/last/block set information for REGNO in INSN.
1904 first_set records the first place in the block where the register
1905 is set and is used to compute "anticipatability".
1907 last_set records the last place in the block where the register
1908 is set and is used to compute "availability".
1910 last_bb records the block for which first_set and last_set are
1911 valid, as a quick test to invalidate them.
1913 reg_set_in_block records whether the register is set in the block
1914 and is used to compute "transparency". */
1916 static void
1917 record_last_reg_set_info (rtx insn, int regno)
1919 struct reg_avail_info *info = &reg_avail_info[regno];
1920 int cuid = INSN_CUID (insn);
1922 info->last_set = cuid;
1923 if (info->last_bb != current_bb)
1925 info->last_bb = current_bb;
1926 info->first_set = cuid;
1927 SET_BIT (reg_set_in_block[current_bb->index], regno);
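/* Illustrative aside, not part of gcse.c: a standalone sketch of the
   first_set/last_set/last_bb bookkeeping above.  The last_bb field lets
   the pass lazily invalidate a register's record when scanning moves to
   a new block, instead of clearing the whole array between blocks.  All
   names here are hypothetical.  */

#include <stdio.h>

struct avail_info { int first_set, last_set, last_bb; };
static struct avail_info infos[16];   /* One record per register.  */

static void
record_set (int regno, int cuid, int bb)
{
  struct avail_info *info = &infos[regno];

  info->last_set = cuid;        /* Always remember the latest set.  */
  if (info->last_bb != bb)      /* Record stale from another block?  */
    {
      info->last_bb = bb;       /* Revalidate it for this block...  */
      info->first_set = cuid;   /* ...whose first set is this one.  */
    }
}

int
main (void)
{
  record_set (3, 10, 1);        /* reg 3 set at insn 10 in block 1.  */
  record_set (3, 12, 1);        /* Set again later in the same block.  */
  printf ("first=%d last=%d\n", infos[3].first_set, infos[3].last_set);
  /* Prints "first=10 last=12".  */
  return 0;
}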
1932 /* Record all of the canonicalized MEMs of record_last_mem_set_info's insn.
1933 Note we store a pair of elements in the list, so they have to be
1934 taken off pairwise. */
1936 static void
1937 canon_list_insert (rtx dest ATTRIBUTE_UNUSED, rtx unused1 ATTRIBUTE_UNUSED,
1938 void * v_insn)
1940 rtx dest_addr, insn;
1941 int bb;
1943 while (GET_CODE (dest) == SUBREG
1944 || GET_CODE (dest) == ZERO_EXTRACT
1945 || GET_CODE (dest) == STRICT_LOW_PART)
1946 dest = XEXP (dest, 0);
1948 /* If DEST is not a MEM, then it will not conflict with a load. Note
1949 that function calls are assumed to clobber memory, but are handled
1950 elsewhere. */
1952 if (! MEM_P (dest))
1953 return;
1955 dest_addr = get_addr (XEXP (dest, 0));
1956 dest_addr = canon_rtx (dest_addr);
1957 insn = (rtx) v_insn;
1958 bb = BLOCK_NUM (insn);
1960 canon_modify_mem_list[bb] =
1961 alloc_EXPR_LIST (VOIDmode, dest_addr, canon_modify_mem_list[bb]);
1962 canon_modify_mem_list[bb] =
1963 alloc_EXPR_LIST (VOIDmode, dest, canon_modify_mem_list[bb]);
1966 /* Record memory modification information for INSN. We do not actually care
1967 about the memory location(s) that are set, or even how they are set (consider
1968 a CALL_INSN). We merely need to record which insns modify memory. */
1970 static void
1971 record_last_mem_set_info (rtx insn)
1973 int bb = BLOCK_NUM (insn);
1975 /* load_killed_in_block_p will handle the case of calls clobbering
1976 everything. */
1977 modify_mem_list[bb] = alloc_INSN_LIST (insn, modify_mem_list[bb]);
1978 bitmap_set_bit (modify_mem_list_set, bb);
1980 if (CALL_P (insn))
1982 /* Note that traversals of this loop (other than for free-ing)
1983 will break after encountering a CALL_INSN. So, there's no
1984 need to insert a pair of items, as canon_list_insert does. */
1985 canon_modify_mem_list[bb] =
1986 alloc_INSN_LIST (insn, canon_modify_mem_list[bb]);
1987 bitmap_set_bit (blocks_with_calls, bb);
1989 else
1990 note_stores (PATTERN (insn), canon_list_insert, (void*) insn);
1993 /* Called from compute_hash_table via note_stores to handle one
1994 SET or CLOBBER in an insn. DATA is really the instruction in which
1995 the SET is taking place. */
1997 static void
1998 record_last_set_info (rtx dest, rtx setter ATTRIBUTE_UNUSED, void *data)
2000 rtx last_set_insn = (rtx) data;
2002 if (GET_CODE (dest) == SUBREG)
2003 dest = SUBREG_REG (dest);
2005 if (REG_P (dest))
2006 record_last_reg_set_info (last_set_insn, REGNO (dest));
2007 else if (MEM_P (dest)
2008 /* Ignore pushes, they clobber nothing. */
2009 && ! push_operand (dest, GET_MODE (dest)))
2010 record_last_mem_set_info (last_set_insn);
2013 /* Top level function to create an expression or assignment hash table.
2015 Expression entries are placed in the hash table if
2016 - they are of the form (set (pseudo-reg) src),
2017 - src is something we want to perform GCSE on,
2018 - none of the operands are subsequently modified in the block
2020 Assignment entries are placed in the hash table if
2021 - they are of the form (set (pseudo-reg) src),
2022 - src is something we want to perform const/copy propagation on,
2023 - none of the operands or target are subsequently modified in the block
2025 Currently src must be a pseudo-reg or a const_int.
2027 TABLE is the table computed. */
2029 static void
2030 compute_hash_table_work (struct hash_table *table)
2032 unsigned int i;
2034 /* While we compute the hash table we also compute a bit array of which
2035 registers are set in which blocks.
2036 ??? This isn't needed during const/copy propagation, but it's cheap to
2037 compute. Later. */
2038 sbitmap_vector_zero (reg_set_in_block, last_basic_block);
2040 /* Re-cache any INSN_LIST nodes we have allocated. */
2041 clear_modify_mem_tables ();
2042 /* Some working arrays used to track first and last set in each block. */
2043 reg_avail_info = gmalloc (max_gcse_regno * sizeof (struct reg_avail_info));
2045 for (i = 0; i < max_gcse_regno; ++i)
2046 reg_avail_info[i].last_bb = NULL;
2048 FOR_EACH_BB (current_bb)
2050 rtx insn;
2051 unsigned int regno;
2052 int in_libcall_block;
2054 /* First pass over the instructions records information used to
2055 determine when registers and memory are first and last set.
2056 ??? hard-reg reg_set_in_block computation
2057 could be moved to compute_sets since they currently don't change. */
2059 FOR_BB_INSNS (current_bb, insn)
2061 if (! INSN_P (insn))
2062 continue;
2064 if (CALL_P (insn))
2066 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2067 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
2068 record_last_reg_set_info (insn, regno);
2070 mark_call (insn);
2073 note_stores (PATTERN (insn), record_last_set_info, insn);
2076 /* Insert implicit sets in the hash table. */
2077 if (table->set_p
2078 && implicit_sets[current_bb->index] != NULL_RTX)
2079 hash_scan_set (implicit_sets[current_bb->index],
2080 BB_HEAD (current_bb), table);
2082 /* The next pass builds the hash table. */
2083 in_libcall_block = 0;
2084 FOR_BB_INSNS (current_bb, insn)
2085 if (INSN_P (insn))
2087 if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
2088 in_libcall_block = 1;
2089 else if (table->set_p && find_reg_note (insn, REG_RETVAL, NULL_RTX))
2090 in_libcall_block = 0;
2091 hash_scan_insn (insn, table, in_libcall_block);
2092 if (!table->set_p && find_reg_note (insn, REG_RETVAL, NULL_RTX))
2093 in_libcall_block = 0;
2097 free (reg_avail_info);
2098 reg_avail_info = NULL;
2101 /* Allocate space for the set/expr hash TABLE.
2102 N_INSNS is the number of instructions in the function.
2103 It is used to determine the number of buckets to use.
2104 SET_P determines whether a set or an expression table will
2105 be created. */
2107 static void
2108 alloc_hash_table (int n_insns, struct hash_table *table, int set_p)
2110 int n;
2112 table->size = n_insns / 4;
2113 if (table->size < 11)
2114 table->size = 11;
2116 /* Attempt to maintain efficient use of hash table.
2117 Making it an odd number is simplest for now.
2118 ??? Later take some measurements. */
2119 table->size |= 1;
2120 n = table->size * sizeof (struct expr *);
2121 table->table = gmalloc (n);
2122 table->set_p = set_p;
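/* Illustrative aside, not part of gcse.c: the bucket-count rule above in
   isolation -- roughly one bucket per four insns, at least 11 buckets,
   and an odd count (the "| 1") so that hash values spread over the
   buckets reasonably well.  A standalone sketch:  */

#include <stdio.h>

static int
bucket_count (int n_insns)
{
  int size = n_insns / 4;
  if (size < 11)
    size = 11;
  return size | 1;      /* Force the count to be odd.  */
}

int
main (void)
{
  printf ("%d %d %d\n",
          bucket_count (10), bucket_count (100), bucket_count (1000));
  /* Prints "11 25 251".  */
  return 0;
}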
2125 /* Free things allocated by alloc_hash_table. */
2127 static void
2128 free_hash_table (struct hash_table *table)
2130 free (table->table);
2133 /* Compute the contents of the hash TABLE, either the set table for
2134 copy/const propagation or the expression hash table. */
2136 static void
2137 compute_hash_table (struct hash_table *table)
2139 /* Initialize count of number of entries in hash table. */
2140 table->n_elems = 0;
2141 memset (table->table, 0, table->size * sizeof (struct expr *));
2143 compute_hash_table_work (table);
2146 /* Expression tracking support. */
2148 /* Lookup REGNO in the set TABLE. The result is a pointer to the
2149 table entry, or NULL if not found. */
2151 static struct expr *
2152 lookup_set (unsigned int regno, struct hash_table *table)
2154 unsigned int hash = hash_set (regno, table->size);
2155 struct expr *expr;
2157 expr = table->table[hash];
2159 while (expr && REGNO (SET_DEST (expr->expr)) != regno)
2160 expr = expr->next_same_hash;
2162 return expr;
2165 /* Return the next entry for REGNO in list EXPR. */
2167 static struct expr *
2168 next_set (unsigned int regno, struct expr *expr)
2170 do
2171 expr = expr->next_same_hash;
2172 while (expr && REGNO (SET_DEST (expr->expr)) != regno);
2174 return expr;
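/* Illustrative aside, not part of gcse.c: lookup_set and next_set walk a
   separate-chaining bucket while skipping entries whose destination
   register differs, since several registers can hash to the same bucket.
   A standalone sketch over plain structs (names hypothetical):  */

#include <stdio.h>
#include <stddef.h>

struct entry { int regno; const char *src; struct entry *next_same_hash; };

static struct entry *
first_for (struct entry *e, int regno)
{
  while (e && e->regno != regno)
    e = e->next_same_hash;
  return e;
}

static struct entry *
next_for (struct entry *e, int regno)
{
  do
    e = e->next_same_hash;
  while (e && e->regno != regno);
  return e;
}

int
main (void)
{
  struct entry c = { 7, "const 4", NULL };
  struct entry b = { 5, "reg 9", &c };    /* Different reg, same bucket.  */
  struct entry a = { 7, "reg 5", &b };    /* Bucket head.  */
  struct entry *e;

  for (e = first_for (&a, 7); e; e = next_for (e, 7))
    printf ("reg 7 <- %s\n", e->src);
  /* Prints the two sets of reg 7 and skips the reg-5 entry.  */
  return 0;
}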
2177 /* Like free_INSN_LIST_list or free_EXPR_LIST_list, except that the node
2178 types may be mixed. */
2180 static void
2181 free_insn_expr_list_list (rtx *listp)
2183 rtx list, next;
2185 for (list = *listp; list ; list = next)
2187 next = XEXP (list, 1);
2188 if (GET_CODE (list) == EXPR_LIST)
2189 free_EXPR_LIST_node (list);
2190 else
2191 free_INSN_LIST_node (list);
2194 *listp = NULL;
2197 /* Clear canon_modify_mem_list and modify_mem_list tables. */
2198 static void
2199 clear_modify_mem_tables (void)
2201 unsigned i;
2202 bitmap_iterator bi;
2204 EXECUTE_IF_SET_IN_BITMAP (modify_mem_list_set, 0, i, bi)
2206 free_INSN_LIST_list (modify_mem_list + i);
2207 free_insn_expr_list_list (canon_modify_mem_list + i);
2209 bitmap_clear (modify_mem_list_set);
2210 bitmap_clear (blocks_with_calls);
2213 /* Release memory used by modify_mem_list_set. */
2215 static void
2216 free_modify_mem_tables (void)
2218 clear_modify_mem_tables ();
2219 free (modify_mem_list);
2220 free (canon_modify_mem_list);
2221 modify_mem_list = 0;
2222 canon_modify_mem_list = 0;
2225 /* Reset tables used to keep track of what's still available [since the
2226 start of the block]. */
2228 static void
2229 reset_opr_set_tables (void)
2231 /* Maintain a bitmap of which regs have been set since beginning of
2232 the block. */
2233 CLEAR_REG_SET (reg_set_bitmap);
2235 /* Also keep a record of the last instruction to modify memory.
2236 For now this is very trivial, we only record whether any memory
2237 location has been modified. */
2238 clear_modify_mem_tables ();
2241 /* Return nonzero if the operands of X are not set before INSN in
2242 INSN's basic block. */
2244 static int
2245 oprs_not_set_p (rtx x, rtx insn)
2247 int i, j;
2248 enum rtx_code code;
2249 const char *fmt;
2251 if (x == 0)
2252 return 1;
2254 code = GET_CODE (x);
2255 switch (code)
2257 case PC:
2258 case CC0:
2259 case CONST:
2260 case CONST_INT:
2261 case CONST_DOUBLE:
2262 case CONST_VECTOR:
2263 case SYMBOL_REF:
2264 case LABEL_REF:
2265 case ADDR_VEC:
2266 case ADDR_DIFF_VEC:
2267 return 1;
2269 case MEM:
2270 if (load_killed_in_block_p (BLOCK_FOR_INSN (insn),
2271 INSN_CUID (insn), x, 0))
2272 return 0;
2273 else
2274 return oprs_not_set_p (XEXP (x, 0), insn);
2276 case REG:
2277 return ! REGNO_REG_SET_P (reg_set_bitmap, REGNO (x));
2279 default:
2280 break;
2283 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
2285 if (fmt[i] == 'e')
2287 /* If we are about to do the last recursive call
2288 needed at this level, change it into iteration.
2289 This function is called enough to be worth it. */
2290 if (i == 0)
2291 return oprs_not_set_p (XEXP (x, i), insn);
2293 if (! oprs_not_set_p (XEXP (x, i), insn))
2294 return 0;
2296 else if (fmt[i] == 'E')
2297 for (j = 0; j < XVECLEN (x, i); j++)
2298 if (! oprs_not_set_p (XVECEXP (x, i, j), insn))
2299 return 0;
2302 return 1;
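/* Illustrative aside, not part of gcse.c: the "turn the last recursive
   call into iteration" idiom used above, shown on a toy two-operand
   expression tree.  Only operand 1 causes real recursion; operand 0, the
   last one visited, is handled by looping, which bounds stack depth on
   long chains.  Names are hypothetical.  */

#include <stdio.h>
#include <stddef.h>

struct node { int set_p; struct node *op[2]; };

/* Return 1 if no node in the tree rooted at X is marked as set.  */
static int
none_set (struct node *x)
{
  for (;;)
    {
      if (x == NULL)
        return 1;
      if (x->set_p)
        return 0;
      if (x->op[1] && ! none_set (x->op[1]))   /* Real recursion here.  */
        return 0;
      x = x->op[0];                /* Iteration replaces the last call.  */
    }
}

int
main (void)
{
  struct node leaf = { 1, { NULL, NULL } };
  struct node root = { 0, { NULL, &leaf } };
  printf ("%d\n", none_set (&root));   /* Prints "0": the leaf is set.  */
  return 0;
}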
2305 /* Mark things set by a CALL. */
2307 static void
2308 mark_call (rtx insn)
2310 if (! CONST_OR_PURE_CALL_P (insn))
2311 record_last_mem_set_info (insn);
2314 /* Mark things set by a SET. */
2316 static void
2317 mark_set (rtx pat, rtx insn)
2319 rtx dest = SET_DEST (pat);
2321 while (GET_CODE (dest) == SUBREG
2322 || GET_CODE (dest) == ZERO_EXTRACT
2323 || GET_CODE (dest) == STRICT_LOW_PART)
2324 dest = XEXP (dest, 0);
2326 if (REG_P (dest))
2327 SET_REGNO_REG_SET (reg_set_bitmap, REGNO (dest));
2328 else if (MEM_P (dest))
2329 record_last_mem_set_info (insn);
2331 if (GET_CODE (SET_SRC (pat)) == CALL)
2332 mark_call (insn);
2335 /* Record things set by a CLOBBER. */
2337 static void
2338 mark_clobber (rtx pat, rtx insn)
2340 rtx clob = XEXP (pat, 0);
2342 while (GET_CODE (clob) == SUBREG || GET_CODE (clob) == STRICT_LOW_PART)
2343 clob = XEXP (clob, 0);
2345 if (REG_P (clob))
2346 SET_REGNO_REG_SET (reg_set_bitmap, REGNO (clob));
2347 else
2348 record_last_mem_set_info (insn);
2351 /* Record things set by INSN.
2352 This data is used by oprs_not_set_p. */
2354 static void
2355 mark_oprs_set (rtx insn)
2357 rtx pat = PATTERN (insn);
2358 int i;
2360 if (GET_CODE (pat) == SET)
2361 mark_set (pat, insn);
2362 else if (GET_CODE (pat) == PARALLEL)
2363 for (i = 0; i < XVECLEN (pat, 0); i++)
2365 rtx x = XVECEXP (pat, 0, i);
2367 if (GET_CODE (x) == SET)
2368 mark_set (x, insn);
2369 else if (GET_CODE (x) == CLOBBER)
2370 mark_clobber (x, insn);
2371 else if (GET_CODE (x) == CALL)
2372 mark_call (insn);
2375 else if (GET_CODE (pat) == CLOBBER)
2376 mark_clobber (pat, insn);
2377 else if (GET_CODE (pat) == CALL)
2378 mark_call (insn);
2382 /* Compute copy/constant propagation working variables. */
2384 /* Local properties of assignments. */
2385 static sbitmap *cprop_pavloc;
2386 static sbitmap *cprop_absaltered;
2388 /* Global properties of assignments (computed from the local properties). */
2389 static sbitmap *cprop_avin;
2390 static sbitmap *cprop_avout;
2392 /* Allocate vars used for copy/const propagation. N_BLOCKS is the number of
2393 basic blocks. N_SETS is the number of sets. */
2395 static void
2396 alloc_cprop_mem (int n_blocks, int n_sets)
2398 cprop_pavloc = sbitmap_vector_alloc (n_blocks, n_sets);
2399 cprop_absaltered = sbitmap_vector_alloc (n_blocks, n_sets);
2401 cprop_avin = sbitmap_vector_alloc (n_blocks, n_sets);
2402 cprop_avout = sbitmap_vector_alloc (n_blocks, n_sets);
2405 /* Free vars used by copy/const propagation. */
2407 static void
2408 free_cprop_mem (void)
2410 sbitmap_vector_free (cprop_pavloc);
2411 sbitmap_vector_free (cprop_absaltered);
2412 sbitmap_vector_free (cprop_avin);
2413 sbitmap_vector_free (cprop_avout);
2416 /* For each block, compute whether X is transparent. X is either an
2417 expression or an assignment [though we don't care which, for this context
2418 an assignment is treated as an expression]. For each block where an
2419 element of X is modified, set (SET_P == 1) or reset (SET_P == 0) the INDX
2420 bit in BMAP. */
2422 static void
2423 compute_transp (rtx x, int indx, sbitmap *bmap, int set_p)
2425 int i, j;
2426 basic_block bb;
2427 enum rtx_code code;
2428 reg_set *r;
2429 const char *fmt;
2431 /* repeat is used to turn tail-recursion into iteration since GCC
2432 can't do it when there's no return value. */
2433 repeat:
2435 if (x == 0)
2436 return;
2438 code = GET_CODE (x);
2439 switch (code)
2441 case REG:
2442 if (set_p)
2444 if (REGNO (x) < FIRST_PSEUDO_REGISTER)
2446 FOR_EACH_BB (bb)
2447 if (TEST_BIT (reg_set_in_block[bb->index], REGNO (x)))
2448 SET_BIT (bmap[bb->index], indx);
2450 else
2452 for (r = reg_set_table[REGNO (x)]; r != NULL; r = r->next)
2453 SET_BIT (bmap[r->bb_index], indx);
2456 else
2458 if (REGNO (x) < FIRST_PSEUDO_REGISTER)
2460 FOR_EACH_BB (bb)
2461 if (TEST_BIT (reg_set_in_block[bb->index], REGNO (x)))
2462 RESET_BIT (bmap[bb->index], indx);
2464 else
2466 for (r = reg_set_table[REGNO (x)]; r != NULL; r = r->next)
2467 RESET_BIT (bmap[r->bb_index], indx);
2471 return;
2473 case MEM:
2474 if (! MEM_READONLY_P (x))
2476 bitmap_iterator bi;
2477 unsigned bb_index;
2479 /* First handle all the blocks with calls. We don't need to
2480 do any list walking for them. */
2481 EXECUTE_IF_SET_IN_BITMAP (blocks_with_calls, 0, bb_index, bi)
2483 if (set_p)
2484 SET_BIT (bmap[bb_index], indx);
2485 else
2486 RESET_BIT (bmap[bb_index], indx);
2489 /* Now iterate over the blocks which have memory modifications
2490 but which do not have any calls. */
2491 EXECUTE_IF_AND_COMPL_IN_BITMAP (modify_mem_list_set,
2492 blocks_with_calls,
2493 0, bb_index, bi)
2495 rtx list_entry = canon_modify_mem_list[bb_index];
2497 while (list_entry)
2499 rtx dest, dest_addr;
2501 /* LIST_ENTRY must be an INSN of some kind that sets memory.
2502 Examine each hunk of memory that is modified. */
2504 dest = XEXP (list_entry, 0);
2505 list_entry = XEXP (list_entry, 1);
2506 dest_addr = XEXP (list_entry, 0);
2508 if (canon_true_dependence (dest, GET_MODE (dest), dest_addr,
2509 x, rtx_addr_varies_p))
2511 if (set_p)
2512 SET_BIT (bmap[bb_index], indx);
2513 else
2514 RESET_BIT (bmap[bb_index], indx);
2515 break;
2517 list_entry = XEXP (list_entry, 1);
2522 x = XEXP (x, 0);
2523 goto repeat;
2525 case PC:
2526 case CC0: /*FIXME*/
2527 case CONST:
2528 case CONST_INT:
2529 case CONST_DOUBLE:
2530 case CONST_VECTOR:
2531 case SYMBOL_REF:
2532 case LABEL_REF:
2533 case ADDR_VEC:
2534 case ADDR_DIFF_VEC:
2535 return;
2537 default:
2538 break;
2541 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
2543 if (fmt[i] == 'e')
2545 /* If we are about to do the last recursive call
2546 needed at this level, change it into iteration.
2547 This function is called enough to be worth it. */
2548 if (i == 0)
2550 x = XEXP (x, i);
2551 goto repeat;
2554 compute_transp (XEXP (x, i), indx, bmap, set_p);
2556 else if (fmt[i] == 'E')
2557 for (j = 0; j < XVECLEN (x, i); j++)
2558 compute_transp (XVECEXP (x, i, j), indx, bmap, set_p);
2562 /* Top level routine to do the dataflow analysis needed by copy/const
2563 propagation. */
2565 static void
2566 compute_cprop_data (void)
2568 compute_local_properties (cprop_absaltered, cprop_pavloc, NULL, &set_hash_table);
2569 compute_available (cprop_pavloc, cprop_absaltered,
2570 cprop_avout, cprop_avin);
2573 /* Copy/constant propagation. */
2575 /* Maximum number of register uses in an insn that we handle. */
2576 #define MAX_USES 8
2578 /* Table of uses found in an insn.
2579 Allocated statically to avoid alloc/free complexity and overhead. */
2580 static struct reg_use reg_use_table[MAX_USES];
2582 /* Index into `reg_use_table' while building it. */
2583 static int reg_use_count;
2585 /* Set up a list of register numbers used in INSN. The found uses are stored
2586 in `reg_use_table'. `reg_use_count' is initialized to zero before entry,
2587 and contains the number of uses in the table upon exit.
2589 ??? If a register appears multiple times we will record it multiple times.
2590 This doesn't hurt anything but it will slow things down. */
2592 static void
2593 find_used_regs (rtx *xptr, void *data ATTRIBUTE_UNUSED)
2595 int i, j;
2596 enum rtx_code code;
2597 const char *fmt;
2598 rtx x = *xptr;
2600 /* repeat is used to turn tail-recursion into iteration since GCC
2601 can't do it when there's no return value. */
2602 repeat:
2603 if (x == 0)
2604 return;
2606 code = GET_CODE (x);
2607 if (REG_P (x))
2609 if (reg_use_count == MAX_USES)
2610 return;
2612 reg_use_table[reg_use_count].reg_rtx = x;
2613 reg_use_count++;
2616 /* Recursively scan the operands of this expression. */
2618 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
2620 if (fmt[i] == 'e')
2622 /* If we are about to do the last recursive call
2623 needed at this level, change it into iteration.
2624 This function is called enough to be worth it. */
2625 if (i == 0)
2627 x = XEXP (x, 0);
2628 goto repeat;
2631 find_used_regs (&XEXP (x, i), data);
2633 else if (fmt[i] == 'E')
2634 for (j = 0; j < XVECLEN (x, i); j++)
2635 find_used_regs (&XVECEXP (x, i, j), data);
2639 /* Try to replace all non-SET_DEST occurrences of FROM in INSN with TO.
2640 Returns nonzero if successful. */
2642 static int
2643 try_replace_reg (rtx from, rtx to, rtx insn)
2645 rtx note = find_reg_equal_equiv_note (insn);
2646 rtx src = 0;
2647 int success = 0;
2648 rtx set = single_set (insn);
2650 /* Usually we substitute easy stuff, so we won't copy everything.
2651 We do, however, need to take care not to duplicate non-trivial
2652 CONST expressions. */
2653 to = copy_rtx (to);
2655 validate_replace_src_group (from, to, insn);
2656 if (num_changes_pending () && apply_change_group ())
2657 success = 1;
2659 /* Try to simplify SET_SRC if we have substituted a constant. */
2660 if (success && set && CONSTANT_P (to))
2662 src = simplify_rtx (SET_SRC (set));
2664 if (src)
2665 validate_change (insn, &SET_SRC (set), src, 0);
2668 /* If there is already a REG_EQUAL note, update the expression in it
2669 with our replacement. */
2670 if (note != 0 && REG_NOTE_KIND (note) == REG_EQUAL)
2671 set_unique_reg_note (insn, REG_EQUAL,
2672 simplify_replace_rtx (XEXP (note, 0), from, to));
2674 if (!success && set && reg_mentioned_p (from, SET_SRC (set)))
2676 /* If the above failed and this is a single set, try to simplify the
2677 source of the set given our substitution. We could perhaps try
2678 this for multiple SETs, but it probably won't buy us anything. */
2679 src = simplify_replace_rtx (SET_SRC (set), from, to);
2681 if (!rtx_equal_p (src, SET_SRC (set))
2682 && validate_change (insn, &SET_SRC (set), src, 0))
2683 success = 1;
2685 /* If we've failed to do replacement, have a single SET, don't already
2686 have a note, and have no special SET, add a REG_EQUAL note to not
2687 lose information. */
2688 if (!success && note == 0 && set != 0
2689 && GET_CODE (SET_DEST (set)) != ZERO_EXTRACT
2690 && GET_CODE (SET_DEST (set)) != STRICT_LOW_PART)
2691 note = set_unique_reg_note (insn, REG_EQUAL, copy_rtx (src));
2694 /* A REG_EQUAL note may get simplified into a register. We don't
2695 allow that; remove such a note. This case ought not to happen,
2696 because earlier code ought to have synthesized a reg-reg move,
2697 but be on the safe side. */
2698 if (note && REG_NOTE_KIND (note) == REG_EQUAL && REG_P (XEXP (note, 0)))
2699 remove_note (insn, note);
2701 return success;
2704 /* Find a set of REGNO that is available on entry to INSN's block.
2705 Returns NULL if no such set is found. */
2707 static struct expr *
2708 find_avail_set (int regno, rtx insn)
2710 /* SET1 contains the last set found that can be returned to the caller for
2711 use in a substitution. */
2712 struct expr *set1 = 0;
2714 /* Loops are not possible here. To get a loop we would need two sets
2715 available at the start of the block containing INSN. i.e. we would
2716 need two sets like this available at the start of the block:
2718 (set (reg X) (reg Y))
2719 (set (reg Y) (reg X))
2721 This cannot happen since the set of (reg Y) would have killed the
2722 set of (reg X) making it unavailable at the start of this block. */
2723 while (1)
2725 rtx src;
2726 struct expr *set = lookup_set (regno, &set_hash_table);
2728 /* Find a set that is available at the start of the block
2729 which contains INSN. */
2730 while (set)
2732 if (TEST_BIT (cprop_avin[BLOCK_NUM (insn)], set->bitmap_index))
2733 break;
2734 set = next_set (regno, set);
2737 /* If no available set was found we've reached the end of the
2738 (possibly empty) copy chain. */
2739 if (set == 0)
2740 break;
2742 gcc_assert (GET_CODE (set->expr) == SET);
2744 src = SET_SRC (set->expr);
2746 /* We know the set is available.
2747 Now check that SRC is ANTLOC (i.e. none of the source operands
2748 have changed since the start of the block).
2750 If the source operand changed, we may still use it for the next
2751 iteration of this loop, but we may not use it for substitutions. */
2753 if (gcse_constant_p (src) || oprs_not_set_p (src, insn))
2754 set1 = set;
2756 /* If the source of the set is anything except a register, then
2757 we have reached the end of the copy chain. */
2758 if (! REG_P (src))
2759 break;
2761 /* Follow the copy chain, i.e. start another iteration of the loop
2762 and see if we have an available copy into SRC. */
2763 regno = REGNO (src);
2766 /* SET1 holds the last set that was available and anticipatable at
2767 INSN. */
2768 return set1;
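/* Illustrative aside, not part of gcse.c: following a copy chain the way
   find_avail_set does, minus the availability and ANTLOC checks.  Each
   register maps to the source of its single recorded set; chains such as
   r5 <- r3 <- r2 <- const terminate at a constant or an unset register,
   and, as argued above, availability rules out cycles.  The encoding
   below is hypothetical.  */

#include <stdio.h>

enum { NO_SET = -1, IS_CONST = -2 };

/* chain[r] >= 0: a set copies register chain[r] into r;
   IS_CONST: a set loads a constant into r.  */
static int chain[8];

static int
last_in_chain (int regno)
{
  int best = NO_SET;

  for (;;)
    {
      int s = chain[regno];
      if (s == NO_SET)
        break;
      best = regno;     /* Remember the deepest usable set.  */
      if (s == IS_CONST)
        break;          /* A constant ends the chain.  */
      regno = s;        /* Follow the copy back to its source.  */
    }
  return best;
}

int
main (void)
{
  int i;
  for (i = 0; i < 8; i++)
    chain[i] = NO_SET;
  chain[2] = IS_CONST;  /* r2 <- const  */
  chain[3] = 2;         /* r3 <- r2     */
  chain[5] = 3;         /* r5 <- r3     */
  printf ("use the set of r%d\n", last_in_chain (5));
  /* Prints "use the set of r2", the constant at the end of the chain.  */
  return 0;
}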
2771 /* Subroutine of cprop_insn that tries to propagate constants into
2772 JUMP_INSNS. JUMP must be a conditional jump. If SETCC is non-NULL
2773 it is the instruction that immediately precedes JUMP, and must be a
2774 single SET of a register. FROM is what we will try to replace,
2775 SRC is the constant we will try to substitute for it. Returns nonzero
2776 if a change was made. */
2778 static int
2779 cprop_jump (basic_block bb, rtx setcc, rtx jump, rtx from, rtx src)
2781 rtx new, set_src, note_src;
2782 rtx set = pc_set (jump);
2783 rtx note = find_reg_equal_equiv_note (jump);
2785 if (note)
2787 note_src = XEXP (note, 0);
2788 if (GET_CODE (note_src) == EXPR_LIST)
2789 note_src = NULL_RTX;
2791 else note_src = NULL_RTX;
2793 /* Prefer REG_EQUAL notes except those containing EXPR_LISTs. */
2794 set_src = note_src ? note_src : SET_SRC (set);
2796 /* First substitute the SETCC condition into the JUMP instruction,
2797 then substitute the known values into this expanded JUMP. */
2798 if (setcc != NULL_RTX
2799 && !modified_between_p (from, setcc, jump)
2800 && !modified_between_p (src, setcc, jump))
2802 rtx setcc_src;
2803 rtx setcc_set = single_set (setcc);
2804 rtx setcc_note = find_reg_equal_equiv_note (setcc);
2805 setcc_src = (setcc_note && GET_CODE (XEXP (setcc_note, 0)) != EXPR_LIST)
2806 ? XEXP (setcc_note, 0) : SET_SRC (setcc_set);
2807 set_src = simplify_replace_rtx (set_src, SET_DEST (setcc_set),
2808 setcc_src);
2810 else
2811 setcc = NULL_RTX;
2813 new = simplify_replace_rtx (set_src, from, src);
2815 /* If no simplification can be made, then try the next register. */
2816 if (rtx_equal_p (new, SET_SRC (set)))
2817 return 0;
2819 /* If this is now a no-op delete it, otherwise this must be a valid insn. */
2820 if (new == pc_rtx)
2821 delete_insn (jump);
2822 else
2824 /* Ensure the value computed inside the jump insn is equivalent
2825 to the one computed by setcc. */
2826 if (setcc && modified_in_p (new, setcc))
2827 return 0;
2828 if (! validate_change (jump, &SET_SRC (set), new, 0))
2830 /* When (some) constants are not valid in a comparison, and there
2831 are two registers to be replaced by constants before the entire
2832 comparison can be folded into a constant, we need to keep
2833 intermediate information in REG_EQUAL notes. For targets with
2834 separate compare insns, such notes are added by try_replace_reg.
2835 When we have a combined compare-and-branch instruction, however,
2836 we need to attach a note to the branch itself to make this
2837 optimization work. */
2839 if (!rtx_equal_p (new, note_src))
2840 set_unique_reg_note (jump, REG_EQUAL, copy_rtx (new));
2841 return 0;
2844 /* Remove REG_EQUAL note after simplification. */
2845 if (note_src)
2846 remove_note (jump, note);
2848 /* If this has turned into an unconditional jump,
2849 then put a barrier after it so that the unreachable
2850 code will be deleted. */
2851 if (GET_CODE (SET_SRC (set)) == LABEL_REF)
2852 emit_barrier_after (jump);
2855 #ifdef HAVE_cc0
2856 /* Delete the cc0 setter. */
2857 if (setcc != NULL && CC0_P (SET_DEST (single_set (setcc))))
2858 delete_insn (setcc);
2859 #endif
2861 run_jump_opt_after_gcse = 1;
2863 global_const_prop_count++;
2864 if (dump_file != NULL)
2866 fprintf (dump_file,
2867 "GLOBAL CONST-PROP: Replacing reg %d in jump_insn %d with constant ",
2868 REGNO (from), INSN_UID (jump));
2869 print_rtl (dump_file, src);
2870 fprintf (dump_file, "\n");
2872 purge_dead_edges (bb);
2874 return 1;
2877 static bool
2878 constprop_register (rtx insn, rtx from, rtx to, bool alter_jumps)
2880 rtx sset;
2882 /* Check for reg or cc0 setting instructions followed by
2883 conditional branch instructions first. */
2884 if (alter_jumps
2885 && (sset = single_set (insn)) != NULL
2886 && NEXT_INSN (insn)
2887 && any_condjump_p (NEXT_INSN (insn)) && onlyjump_p (NEXT_INSN (insn)))
2889 rtx dest = SET_DEST (sset);
2890 if ((REG_P (dest) || CC0_P (dest))
2891 && cprop_jump (BLOCK_FOR_INSN (insn), insn, NEXT_INSN (insn), from, to))
2892 return 1;
2895 /* Handle normal insns next. */
2896 if (NONJUMP_INSN_P (insn)
2897 && try_replace_reg (from, to, insn))
2898 return 1;
2900 /* Try to propagate a CONST_INT into a conditional jump.
2901 We're pretty specific about what we will handle in this
2902 code, we can extend this as necessary over time.
2904 Right now the insn in question must look like
2905 (set (pc) (if_then_else ...)) */
2906 else if (alter_jumps && any_condjump_p (insn) && onlyjump_p (insn))
2907 return cprop_jump (BLOCK_FOR_INSN (insn), NULL, insn, from, to);
2908 return 0;
2911 /* Perform constant and copy propagation on INSN.
2912 The result is nonzero if a change was made. */
2914 static int
2915 cprop_insn (rtx insn, int alter_jumps)
2917 struct reg_use *reg_used;
2918 int changed = 0;
2919 rtx note;
2921 if (!INSN_P (insn))
2922 return 0;
2924 reg_use_count = 0;
2925 note_uses (&PATTERN (insn), find_used_regs, NULL);
2927 note = find_reg_equal_equiv_note (insn);
2929 /* We may win even when propagating constants into notes. */
2930 if (note)
2931 find_used_regs (&XEXP (note, 0), NULL);
2933 for (reg_used = &reg_use_table[0]; reg_use_count > 0;
2934 reg_used++, reg_use_count--)
2936 unsigned int regno = REGNO (reg_used->reg_rtx);
2937 rtx pat, src;
2938 struct expr *set;
2940 /* Ignore registers created by GCSE.
2941 We do this because ... */
2942 if (regno >= max_gcse_regno)
2943 continue;
2945 /* If the register has already been set in this block, there's
2946 nothing we can do. */
2947 if (! oprs_not_set_p (reg_used->reg_rtx, insn))
2948 continue;
2950 /* Find an assignment that sets reg_used and is available
2951 at the start of the block. */
2952 set = find_avail_set (regno, insn);
2953 if (! set)
2954 continue;
2956 pat = set->expr;
2957 /* ??? We might be able to handle PARALLELs. Later. */
2958 gcc_assert (GET_CODE (pat) == SET);
2960 src = SET_SRC (pat);
2962 /* Constant propagation. */
2963 if (gcse_constant_p (src))
2965 if (constprop_register (insn, reg_used->reg_rtx, src, alter_jumps))
2967 changed = 1;
2968 global_const_prop_count++;
2969 if (dump_file != NULL)
2971 fprintf (dump_file, "GLOBAL CONST-PROP: Replacing reg %d in ", regno);
2972 fprintf (dump_file, "insn %d with constant ", INSN_UID (insn));
2973 print_rtl (dump_file, src);
2974 fprintf (dump_file, "\n");
2976 if (INSN_DELETED_P (insn))
2977 return 1;
2980 else if (REG_P (src)
2981 && REGNO (src) >= FIRST_PSEUDO_REGISTER
2982 && REGNO (src) != regno)
2984 if (try_replace_reg (reg_used->reg_rtx, src, insn))
2986 changed = 1;
2987 global_copy_prop_count++;
2988 if (dump_file != NULL)
2990 fprintf (dump_file, "GLOBAL COPY-PROP: Replacing reg %d in insn %d",
2991 regno, INSN_UID (insn));
2992 fprintf (dump_file, " with reg %d\n", REGNO (src));
2995 /* The original insn setting reg_used may or may not now be
2996 deletable. We leave the deletion to flow. */
2997 /* FIXME: If it turns out that the insn isn't deletable,
2998 then we may have unnecessarily extended register lifetimes
2999 and made things worse. */
3004 return changed;
3007 /* Like find_used_regs, but avoid recording uses that appear in
3008 input-output contexts such as zero_extract or pre_dec. This
3009 restricts the cases we consider to those for which local cprop
3010 can legitimately make replacements. */
3012 static void
3013 local_cprop_find_used_regs (rtx *xptr, void *data)
3015 rtx x = *xptr;
3017 if (x == 0)
3018 return;
3020 switch (GET_CODE (x))
3022 case ZERO_EXTRACT:
3023 case SIGN_EXTRACT:
3024 case STRICT_LOW_PART:
3025 return;
3027 case PRE_DEC:
3028 case PRE_INC:
3029 case POST_DEC:
3030 case POST_INC:
3031 case PRE_MODIFY:
3032 case POST_MODIFY:
3033 /* Can only legitimately appear this early in the context of
3034 stack pushes for function arguments, but handle all of the
3035 codes nonetheless. */
3036 return;
3038 case SUBREG:
3039 /* Setting a subreg of a register larger than word_mode leaves
3040 the non-written words unchanged. */
3041 if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) > BITS_PER_WORD)
3042 return;
3043 break;
3045 default:
3046 break;
3049 find_used_regs (xptr, data);
3052 /* LIBCALL_SP is a zero-terminated array of insns at the end of a libcall;
3053 their REG_EQUAL notes need updating. */
3055 static bool
3056 do_local_cprop (rtx x, rtx insn, bool alter_jumps, rtx *libcall_sp)
3058 rtx newreg = NULL, newcnst = NULL;
3060 /* Rule out USE instructions and ASM statements as we don't want to
3061 change the hard registers mentioned. */
3062 if (REG_P (x)
3063 && (REGNO (x) >= FIRST_PSEUDO_REGISTER
3064 || (GET_CODE (PATTERN (insn)) != USE
3065 && asm_noperands (PATTERN (insn)) < 0)))
3067 cselib_val *val = cselib_lookup (x, GET_MODE (x), 0);
3068 struct elt_loc_list *l;
3070 if (!val)
3071 return false;
3072 for (l = val->locs; l; l = l->next)
3074 rtx this_rtx = l->loc;
3075 rtx note;
3077 /* Don't CSE non-constant values out of libcall blocks. */
3078 if (l->in_libcall && ! CONSTANT_P (this_rtx))
3079 continue;
3081 if (gcse_constant_p (this_rtx))
3082 newcnst = this_rtx;
3083 if (REG_P (this_rtx) && REGNO (this_rtx) >= FIRST_PSEUDO_REGISTER
3084 /* Don't copy propagate if it has an attached REG_EQUIV note.
3085 At this point only function parameters should have
3086 REG_EQUIV notes, and if the argument slot is used somewhere
3087 explicitly, it means the address of the parameter has been taken,
3088 so we should not extend the lifetime of the pseudo. */
3089 && (!(note = find_reg_note (l->setting_insn, REG_EQUIV, NULL_RTX))
3090 || ! MEM_P (XEXP (note, 0))))
3091 newreg = this_rtx;
3093 if (newcnst && constprop_register (insn, x, newcnst, alter_jumps))
3095 /* If we find a case where we can't fix the retval REG_EQUAL notes
3096 to match the new register, we either have to abandon this
3097 replacement or fix delete_trivially_dead_insns to preserve the
3098 setting insn, or make it delete the REG_EQUAL note, and fix up
3099 all passes that require the REG_EQUAL note there. */
3100 bool adjusted;
3102 adjusted = adjust_libcall_notes (x, newcnst, insn, libcall_sp);
3103 gcc_assert (adjusted);
3105 if (dump_file != NULL)
3107 fprintf (dump_file, "LOCAL CONST-PROP: Replacing reg %d in ",
3108 REGNO (x));
3109 fprintf (dump_file, "insn %d with constant ",
3110 INSN_UID (insn));
3111 print_rtl (dump_file, newcnst);
3112 fprintf (dump_file, "\n");
3114 local_const_prop_count++;
3115 return true;
3117 else if (newreg && newreg != x && try_replace_reg (x, newreg, insn))
3119 adjust_libcall_notes (x, newreg, insn, libcall_sp);
3120 if (dump_file != NULL)
3122 fprintf (dump_file,
3123 "LOCAL COPY-PROP: Replacing reg %d in insn %d",
3124 REGNO (x), INSN_UID (insn));
3125 fprintf (dump_file, " with reg %d\n", REGNO (newreg));
3127 local_copy_prop_count++;
3128 return true;
3131 return false;
3134 /* LIBCALL_SP is a zero-terminated array of insns at the end of a libcall;
3135 their REG_EQUAL notes need updating to reflect that OLDREG has been
3136 replaced with NEWVAL in INSN. Return true if all substitutions could
3137 be made. */
3138 static bool
3139 adjust_libcall_notes (rtx oldreg, rtx newval, rtx insn, rtx *libcall_sp)
3141 rtx end;
3143 while ((end = *libcall_sp++))
3145 rtx note = find_reg_equal_equiv_note (end);
3147 if (! note)
3148 continue;
3150 if (REG_P (newval))
3152 if (reg_set_between_p (newval, PREV_INSN (insn), end))
3156 note = find_reg_equal_equiv_note (end);
3157 if (! note)
3158 continue;
3159 if (reg_mentioned_p (newval, XEXP (note, 0)))
3160 return false;
3162 while ((end = *libcall_sp++));
3163 return true;
3166 XEXP (note, 0) = simplify_replace_rtx (XEXP (note, 0), oldreg, newval);
3167 insn = end;
3169 return true;
3172 #define MAX_NESTED_LIBCALLS 9
3174 /* Do local const/copy propagation (i.e. within each basic block).
3175 If ALTER_JUMPS is true, allow propagating into jump insns, which
3176 could modify the CFG. */
3178 static void
3179 local_cprop_pass (bool alter_jumps)
3181 basic_block bb;
3182 rtx insn;
3183 struct reg_use *reg_used;
3184 rtx libcall_stack[MAX_NESTED_LIBCALLS + 1], *libcall_sp;
3185 bool changed = false;
3187 cselib_init (false);
3188 libcall_sp = &libcall_stack[MAX_NESTED_LIBCALLS];
3189 *libcall_sp = 0;
3190 FOR_EACH_BB (bb)
3192 FOR_BB_INSNS (bb, insn)
3194 if (INSN_P (insn))
3196 rtx note = find_reg_note (insn, REG_LIBCALL, NULL_RTX);
3198 if (note)
3200 gcc_assert (libcall_sp != libcall_stack);
3201 *--libcall_sp = XEXP (note, 0);
3203 note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
3204 if (note)
3205 libcall_sp++;
3206 note = find_reg_equal_equiv_note (insn);
3209 reg_use_count = 0;
3210 note_uses (&PATTERN (insn), local_cprop_find_used_regs,
3211 NULL);
3212 if (note)
3213 local_cprop_find_used_regs (&XEXP (note, 0), NULL);
3215 for (reg_used = &reg_use_table[0]; reg_use_count > 0;
3216 reg_used++, reg_use_count--)
3217 if (do_local_cprop (reg_used->reg_rtx, insn, alter_jumps,
3218 libcall_sp))
3220 changed = true;
3221 break;
3223 if (INSN_DELETED_P (insn))
3224 break;
3226 while (reg_use_count);
3228 cselib_process_insn (insn);
3231 /* Forget everything at the end of a basic block. Make sure we are
3232 not inside a libcall; libcalls should never cross basic blocks. */
3233 cselib_clear_table ();
3234 gcc_assert (libcall_sp == &libcall_stack[MAX_NESTED_LIBCALLS]);
3237 cselib_finish ();
3239 /* Global analysis may get into infinite loops for unreachable blocks. */
3240 if (changed && alter_jumps)
3242 delete_unreachable_blocks ();
3243 free_reg_set_mem ();
3244 alloc_reg_set_mem (max_reg_num ());
3245 compute_sets ();
3249 /* Forward propagate copies. This includes copies and constants. Return
3250 nonzero if a change was made. */
3252 static int
3253 cprop (int alter_jumps)
3255 int changed;
3256 basic_block bb;
3257 rtx insn;
3259 /* Note we start at block 1. */
3260 if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
3262 if (dump_file != NULL)
3263 fprintf (dump_file, "\n");
3264 return 0;
3267 changed = 0;
3268 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb->next_bb, EXIT_BLOCK_PTR, next_bb)
3270 /* Reset tables used to keep track of what's still valid [since the
3271 start of the block]. */
3272 reset_opr_set_tables ();
3274 FOR_BB_INSNS (bb, insn)
3275 if (INSN_P (insn))
3277 changed |= cprop_insn (insn, alter_jumps);
3279 /* Keep track of everything modified by this insn. */
3280 /* ??? Need to be careful w.r.t. mods done to INSN. Don't
3281 call mark_oprs_set if we turned the insn into a NOTE. */
3282 if (! NOTE_P (insn))
3283 mark_oprs_set (insn);
3287 if (dump_file != NULL)
3288 fprintf (dump_file, "\n");
3290 return changed;
3293 /* Similar to get_condition, only the resulting condition must be
3294 valid at JUMP, instead of at EARLIEST.
3296 This differs from noce_get_condition in ifcvt.c in that we prefer not to
3297 settle for the condition variable in the jump instruction being integral.
3298 We prefer to be able to record the value of a user variable, rather than
3299 the value of a temporary used in a condition. This could be solved by
3300 recording the value of *every* register scanned by canonicalize_condition,
3301 but this would require some code reorganization. */
3303 static rtx
3304 fis_get_condition (rtx jump)
3306 return get_condition (jump, NULL, false, true);
3309 /* Check the comparison COND to see if we can safely form an implicit set from
3310 it. COND is either an EQ or NE comparison. */
3312 static bool
3313 implicit_set_cond_p (rtx cond)
3315 enum machine_mode mode = GET_MODE (XEXP (cond, 0));
3316 rtx cst = XEXP (cond, 1);
3318 /* We can't perform this optimization if either operand might be or might
3319 contain a signed zero. */
3320 if (HONOR_SIGNED_ZEROS (mode))
3322 /* It is sufficient to check if CST is or contains a zero. We must
3323 handle float, complex, and vector. If any subpart is a zero, then
3324 the optimization can't be performed. */
3325 /* ??? The complex and vector checks are not implemented yet. We just
3326 always return zero for them. */
3327 if (GET_CODE (cst) == CONST_DOUBLE)
3329 REAL_VALUE_TYPE d;
3330 REAL_VALUE_FROM_CONST_DOUBLE (d, cst);
3331 if (REAL_VALUES_EQUAL (d, dconst0))
3332 return 0;
3334 else
3335 return 0;
3338 return gcse_constant_p (cst);
3341 /* Find the implicit sets of a function. An "implicit set" is a constraint
3342 on the value of a variable, implied by a conditional jump. For example,
3343 following "if (x == 2)", the then branch may be optimized as though the
3344 conditional performed an "explicit set", in this example, "x = 2". This
3345 function records the set patterns that are implicit at the start of each
3346 basic block. */
3348 static void
3349 find_implicit_sets (void)
3351 basic_block bb, dest;
3352 unsigned int count;
3353 rtx cond, new;
3355 count = 0;
3356 FOR_EACH_BB (bb)
3357 /* Check for more than one successor. */
3358 if (EDGE_COUNT (bb->succs) > 1)
3360 cond = fis_get_condition (BB_END (bb));
3362 if (cond
3363 && (GET_CODE (cond) == EQ || GET_CODE (cond) == NE)
3364 && REG_P (XEXP (cond, 0))
3365 && REGNO (XEXP (cond, 0)) >= FIRST_PSEUDO_REGISTER
3366 && implicit_set_cond_p (cond))
3368 dest = GET_CODE (cond) == EQ ? BRANCH_EDGE (bb)->dest
3369 : FALLTHRU_EDGE (bb)->dest;
3371 if (dest && single_pred_p (dest)
3372 && dest != EXIT_BLOCK_PTR)
3374 new = gen_rtx_SET (VOIDmode, XEXP (cond, 0),
3375 XEXP (cond, 1));
3376 implicit_sets[dest->index] = new;
3377 if (dump_file)
3379 fprintf (dump_file, "Implicit set of reg %d in ",
3380 REGNO (XEXP (cond, 0)));
3381 fprintf (dump_file, "basic block %d\n", dest->index);
3383 count++;
3388 if (dump_file)
3389 fprintf (dump_file, "Found %d implicit sets\n", count);
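/* Illustrative aside, not part of gcse.c: what an implicit set buys us,
   written out at the source level.  No insn assigns 2 to x inside the
   branch, yet recording the implicit set "x = 2" at its head lets
   constant propagation fold the body.  */

#include <stdio.h>

int
main (void)
{
  int x = 2, y = 0;

  if (x == 2)
    y = x + 1;   /* Implicit set: x is known to be 2 here, so this
                    assignment can be folded to y = 3.  */
  printf ("%d\n", y);   /* Prints "3".  */
  return 0;
}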
3392 /* Perform one copy/constant propagation pass.
3393 PASS is the pass count. If CPROP_JUMPS is true, perform constant
3394 propagation into conditional jumps. If BYPASS_JUMPS is true,
3395 perform conditional jump bypassing optimizations. */
3397 static int
3398 one_cprop_pass (int pass, bool cprop_jumps, bool bypass_jumps)
3400 int changed = 0;
3402 global_const_prop_count = local_const_prop_count = 0;
3403 global_copy_prop_count = local_copy_prop_count = 0;
3405 if (cprop_jumps)
3406 local_cprop_pass (cprop_jumps);
3408 /* Determine implicit sets. */
3409 implicit_sets = XCNEWVEC (rtx, last_basic_block);
3410 find_implicit_sets ();
3412 alloc_hash_table (max_cuid, &set_hash_table, 1);
3413 compute_hash_table (&set_hash_table);
3415 /* Free implicit_sets before peak usage. */
3416 free (implicit_sets);
3417 implicit_sets = NULL;
3419 if (dump_file)
3420 dump_hash_table (dump_file, "SET", &set_hash_table);
3421 if (set_hash_table.n_elems > 0)
3423 alloc_cprop_mem (last_basic_block, set_hash_table.n_elems);
3424 compute_cprop_data ();
3425 changed = cprop (cprop_jumps);
3426 if (bypass_jumps)
3427 changed |= bypass_conditional_jumps ();
3428 free_cprop_mem ();
3431 free_hash_table (&set_hash_table);
3433 if (dump_file)
3435 fprintf (dump_file, "CPROP of %s, pass %d: %d bytes needed, ",
3436 current_function_name (), pass, bytes_used);
3437 fprintf (dump_file, "%d local const props, %d local copy props, ",
3438 local_const_prop_count, local_copy_prop_count);
3439 fprintf (dump_file, "%d global const props, %d global copy props\n\n",
3440 global_const_prop_count, global_copy_prop_count);
3442 /* Global analysis may get into infinite loops for unreachable blocks. */
3443 if (changed && cprop_jumps)
3444 delete_unreachable_blocks ();
3446 return changed;
3449 /* Bypass conditional jumps. */
3451 /* The value of last_basic_block at the beginning of the jump_bypass
3452 pass. The use of redirect_edge_and_branch_force may introduce new
3453 basic blocks, but the data flow analysis is only valid for basic
3454 block indices less than bypass_last_basic_block. */
3456 static int bypass_last_basic_block;
3458 /* Find a set of REGNO to a constant that is available at the end of basic
3459 block BB. Returns NULL if no such set is found. Based heavily upon
3460 find_avail_set. */
3462 static struct expr *
3463 find_bypass_set (int regno, int bb)
3465 struct expr *result = 0;
3467 for (;;)
3469 rtx src;
3470 struct expr *set = lookup_set (regno, &set_hash_table);
3472 while (set)
3474 if (TEST_BIT (cprop_avout[bb], set->bitmap_index))
3475 break;
3476 set = next_set (regno, set);
3479 if (set == 0)
3480 break;
3482 gcc_assert (GET_CODE (set->expr) == SET);
3484 src = SET_SRC (set->expr);
3485 if (gcse_constant_p (src))
3486 result = set;
3488 if (! REG_P (src))
3489 break;
3491 regno = REGNO (src);
3493 return result;
3497 /* Subroutine of bypass_block that checks whether a pseudo is killed by
3498 any of the instructions inserted on an edge. Jump bypassing places
3499 condition code setters on CFG edges using insert_insn_on_edge. This
3500 function is required to check that our data flow analysis is still
3501 valid prior to commit_edge_insertions. */
3503 static bool
3504 reg_killed_on_edge (rtx reg, edge e)
3506 rtx insn;
3508 for (insn = e->insns.r; insn; insn = NEXT_INSN (insn))
3509 if (INSN_P (insn) && reg_set_p (reg, insn))
3510 return true;
3512 return false;
3515 /* Subroutine of bypass_conditional_jumps that attempts to bypass the given
3516 basic block BB which has more than one predecessor. If not NULL, SETCC
3517 is the first instruction of BB, which is immediately followed by JUMP_INSN
3518 JUMP. Otherwise, SETCC is NULL, and JUMP is the first insn of BB.
3519 Returns nonzero if a change was made.
3521 During the jump bypassing pass, we may place copies of SETCC instructions
3522 on CFG edges. The following routine must be careful to pay attention to
3523 these inserted insns when performing its transformations. */
3525 static int
3526 bypass_block (basic_block bb, rtx setcc, rtx jump)
3528 rtx insn, note;
3529 edge e, edest;
3530 int i, change;
3531 int may_be_loop_header;
3532 unsigned removed_p;
3533 edge_iterator ei;
3535 insn = (setcc != NULL) ? setcc : jump;
3537 /* Determine set of register uses in INSN. */
3538 reg_use_count = 0;
3539 note_uses (&PATTERN (insn), find_used_regs, NULL);
3540 note = find_reg_equal_equiv_note (insn);
3541 if (note)
3542 find_used_regs (&XEXP (note, 0), NULL);
3544 may_be_loop_header = false;
3545 FOR_EACH_EDGE (e, ei, bb->preds)
3546 if (e->flags & EDGE_DFS_BACK)
3548 may_be_loop_header = true;
3549 break;
3552 change = 0;
3553 for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
3555 removed_p = 0;
3557 if (e->flags & EDGE_COMPLEX)
3559 ei_next (&ei);
3560 continue;
3563 /* We can't redirect edges from new basic blocks. */
3564 if (e->src->index >= bypass_last_basic_block)
3566 ei_next (&ei);
3567 continue;
3570 /* The irreducible loops created by redirecting edges entering the
3571 loop from outside would decrease the effectiveness of some of the
3572 following optimizations, so prevent this. */
3573 if (may_be_loop_header
3574 && !(e->flags & EDGE_DFS_BACK))
3576 ei_next (&ei);
3577 continue;
3580 for (i = 0; i < reg_use_count; i++)
3582 struct reg_use *reg_used = &reg_use_table[i];
3583 unsigned int regno = REGNO (reg_used->reg_rtx);
3584 basic_block dest, old_dest;
3585 struct expr *set;
3586 rtx src, new;
3588 if (regno >= max_gcse_regno)
3589 continue;
3591 set = find_bypass_set (regno, e->src->index);
3593 if (! set)
3594 continue;
3596 /* Check the data flow is valid after edge insertions. */
3597 if (e->insns.r && reg_killed_on_edge (reg_used->reg_rtx, e))
3598 continue;
3600 src = SET_SRC (pc_set (jump));
3602 if (setcc != NULL)
3603 src = simplify_replace_rtx (src,
3604 SET_DEST (PATTERN (setcc)),
3605 SET_SRC (PATTERN (setcc)));
3607 new = simplify_replace_rtx (src, reg_used->reg_rtx,
3608 SET_SRC (set->expr));
3610 /* Jump bypassing may have already placed instructions on
3611 edges of the CFG. We can't bypass an outgoing edge that
3612 has instructions associated with it, as these insns won't
3613 get executed if the incoming edge is redirected. */
3615 if (new == pc_rtx)
3617 edest = FALLTHRU_EDGE (bb);
3618 dest = edest->insns.r ? NULL : edest->dest;
3620 else if (GET_CODE (new) == LABEL_REF)
3622 dest = BLOCK_FOR_INSN (XEXP (new, 0));
3623 /* Don't bypass edges containing instructions. */
3624 edest = find_edge (bb, dest);
3625 if (edest && edest->insns.r)
3626 dest = NULL;
3628 else
3629 dest = NULL;
3631 /* Avoid unification of the edge with other edges from the original
3632 branch. We would end up emitting the instruction on "both"
3633 edges. */
3635 if (dest && setcc && !CC0_P (SET_DEST (PATTERN (setcc)))
3636 && find_edge (e->src, dest))
3637 dest = NULL;
3639 old_dest = e->dest;
3640 if (dest != NULL
3641 && dest != old_dest
3642 && dest != EXIT_BLOCK_PTR)
3644 redirect_edge_and_branch_force (e, dest);
3646 /* Copy the register setter to the redirected edge.
3647 Don't copy CC0 setters, as CC0 is dead after jump. */
3648 if (setcc)
3650 rtx pat = PATTERN (setcc);
3651 if (!CC0_P (SET_DEST (pat)))
3652 insert_insn_on_edge (copy_insn (pat), e);
3655 if (dump_file != NULL)
3657 fprintf (dump_file, "JUMP-BYPASS: Proved reg %d "
3658 "in jump_insn %d equals constant ",
3659 regno, INSN_UID (jump));
3660 print_rtl (dump_file, SET_SRC (set->expr));
3661 fprintf (dump_file, "\nBypass edge from %d->%d to %d\n",
3662 e->src->index, old_dest->index, dest->index);
3664 change = 1;
3665 removed_p = 1;
3666 break;
3669 if (!removed_p)
3670 ei_next (&ei);
3672 return change;
3675 /* Find basic blocks with more than one predecessor that only contain a
3676 single conditional jump. If the result of the comparison is known at
3677 compile-time from any incoming edge, redirect that edge to the
3678 appropriate target. Returns nonzero if a change was made.
3680 This function is now mis-named, because we also handle indirect jumps. */
3682 static int
3683 bypass_conditional_jumps (void)
3685 basic_block bb;
3686 int changed;
3687 rtx setcc;
3688 rtx insn;
3689 rtx dest;
3691 /* Note we start at block 1. */
3692 if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
3693 return 0;
3695 bypass_last_basic_block = last_basic_block;
3696 mark_dfs_back_edges ();
3698 changed = 0;
3699 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb->next_bb,
3700 EXIT_BLOCK_PTR, next_bb)
3702 /* Check for more than one predecessor. */
3703 if (!single_pred_p (bb))
3705 setcc = NULL_RTX;
3706 FOR_BB_INSNS (bb, insn)
3707 if (NONJUMP_INSN_P (insn))
3709 if (setcc)
3710 break;
3711 if (GET_CODE (PATTERN (insn)) != SET)
3712 break;
3714 dest = SET_DEST (PATTERN (insn));
3715 if (REG_P (dest) || CC0_P (dest))
3716 setcc = insn;
3717 else
3718 break;
3720 else if (JUMP_P (insn))
3722 if ((any_condjump_p (insn) || computed_jump_p (insn))
3723 && onlyjump_p (insn))
3724 changed |= bypass_block (bb, setcc, insn);
3725 break;
3727 else if (INSN_P (insn))
3728 break;
3732 /* If we bypassed any register setting insns, we inserted a
3733 copy on the redirected edge. These need to be committed. */
3734 if (changed)
3735 commit_edge_insertions ();
3737 return changed;
3740 /* Compute PRE+LCM working variables. */
3742 /* Local properties of expressions. */
3743 /* Nonzero for expressions that are transparent in the block. */
3744 static sbitmap *transp;
3746 /* Nonzero for expressions that are transparent at the end of the block.
3747 This is only zero for expressions killed by an abnormal critical edge
3748 created by a call. */
3749 static sbitmap *transpout;
3751 /* Nonzero for expressions that are computed (available) in the block. */
3752 static sbitmap *comp;
3754 /* Nonzero for expressions that are locally anticipatable in the block. */
3755 static sbitmap *antloc;
3757 /* Nonzero for expressions where this block is an optimal computation
3758 point. */
3759 static sbitmap *pre_optimal;
3761 /* Nonzero for expressions which are redundant in a particular block. */
3762 static sbitmap *pre_redundant;
3764 /* Nonzero for expressions which should be inserted on a specific edge. */
3765 static sbitmap *pre_insert_map;
3767 /* Nonzero for expressions which should be deleted in a specific block. */
3768 static sbitmap *pre_delete_map;
3770 /* Contains the edge_list returned by pre_edge_lcm. */
3771 static struct edge_list *edge_list;
3773 /* Redundant insns. */
3774 static sbitmap pre_redundant_insns;
3776 /* Allocate vars used for PRE analysis. */
3778 static void
3779 alloc_pre_mem (int n_blocks, int n_exprs)
3781 transp = sbitmap_vector_alloc (n_blocks, n_exprs);
3782 comp = sbitmap_vector_alloc (n_blocks, n_exprs);
3783 antloc = sbitmap_vector_alloc (n_blocks, n_exprs);
3785 pre_optimal = NULL;
3786 pre_redundant = NULL;
3787 pre_insert_map = NULL;
3788 pre_delete_map = NULL;
3789 ae_kill = sbitmap_vector_alloc (n_blocks, n_exprs);
3791 /* pre_insert_map and pre_delete_map are allocated later. */
3794 /* Free vars used for PRE analysis. */
3796 static void
3797 free_pre_mem (void)
3799 sbitmap_vector_free (transp);
3800 sbitmap_vector_free (comp);
3802 /* ANTLOC and AE_KILL are freed just after pre_edge_lcm finishes. */
3804 if (pre_optimal)
3805 sbitmap_vector_free (pre_optimal);
3806 if (pre_redundant)
3807 sbitmap_vector_free (pre_redundant);
3808 if (pre_insert_map)
3809 sbitmap_vector_free (pre_insert_map);
3810 if (pre_delete_map)
3811 sbitmap_vector_free (pre_delete_map);
3813 transp = comp = NULL;
3814 pre_optimal = pre_redundant = pre_insert_map = pre_delete_map = NULL;
3817 /* Top level routine to do the dataflow analysis needed by PRE. */
3819 static void
3820 compute_pre_data (void)
3822 sbitmap trapping_expr;
3823 basic_block bb;
3824 unsigned int ui;
3826 compute_local_properties (transp, comp, antloc, &expr_hash_table);
3827 sbitmap_vector_zero (ae_kill, last_basic_block);
3829 /* Collect expressions which might trap. */
3830 trapping_expr = sbitmap_alloc (expr_hash_table.n_elems);
3831 sbitmap_zero (trapping_expr);
3832 for (ui = 0; ui < expr_hash_table.size; ui++)
3834 struct expr *e;
3835 for (e = expr_hash_table.table[ui]; e != NULL; e = e->next_same_hash)
3836 if (may_trap_p (e->expr))
3837 SET_BIT (trapping_expr, e->bitmap_index);
3840 /* Compute ae_kill for each basic block using ~(TRANSP | COMP). */
3845 FOR_EACH_BB (bb)
3847 edge e;
3848 edge_iterator ei;
3850 /* If the current block is the destination of an abnormal edge, we
3851 kill all trapping expressions because we won't be able to properly
3852 place the instruction on the edge. So make them neither
3853 anticipatable nor transparent. This is fairly conservative. */
3854 FOR_EACH_EDGE (e, ei, bb->preds)
3855 if (e->flags & EDGE_ABNORMAL)
3857 sbitmap_difference (antloc[bb->index], antloc[bb->index], trapping_expr);
3858 sbitmap_difference (transp[bb->index], transp[bb->index], trapping_expr);
3859 break;
3862 sbitmap_a_or_b (ae_kill[bb->index], transp[bb->index], comp[bb->index]);
3863 sbitmap_not (ae_kill[bb->index], ae_kill[bb->index]);
3866 edge_list = pre_edge_lcm (expr_hash_table.n_elems, transp, comp, antloc,
3867 ae_kill, &pre_insert_map, &pre_delete_map);
3868 sbitmap_vector_free (antloc);
3869 antloc = NULL;
3870 sbitmap_vector_free (ae_kill);
3871 ae_kill = NULL;
3872 sbitmap_free (trapping_expr);
3875 /* PRE utilities */
3877 /* Return nonzero if an occurrence of expression EXPR in OCCR_BB would reach
3878 block BB.
3880 VISITED is a pointer to a working buffer for tracking which BB's have
3881 been visited. It is NULL for the top-level call.
3883 We treat reaching expressions that go through blocks containing the same
3884 reaching expression as "not reaching". E.g. if EXPR is generated in blocks
3885 2 and 3, INSN is in block 4, and 2->3->4, we treat the expression in block
3886 2 as not reaching. The intent is to improve the probability of finding
3887 only one reaching expression and to reduce register lifetimes by picking
3888 the closest such expression. */
3890 static int
3891 pre_expr_reaches_here_p_work (basic_block occr_bb, struct expr *expr, basic_block bb, char *visited)
3893 edge pred;
3894 edge_iterator ei;
3896 FOR_EACH_EDGE (pred, ei, bb->preds)
3898 basic_block pred_bb = pred->src;
3900 if (pred->src == ENTRY_BLOCK_PTR
3901 /* Has this predecessor already been visited? */
3902 || visited[pred_bb->index])
3903 ;/* Nothing to do. */
3905 /* Does this predecessor generate this expression? */
3906 else if (TEST_BIT (comp[pred_bb->index], expr->bitmap_index))
3908 /* Is this the occurrence we're looking for?
3909 Note that there's only one generating occurrence per block
3910 so we just need to check the block number. */
3911 if (occr_bb == pred_bb)
3912 return 1;
3914 visited[pred_bb->index] = 1;
3916 /* Ignore this predecessor if it kills the expression. */
3917 else if (! TEST_BIT (transp[pred_bb->index], expr->bitmap_index))
3918 visited[pred_bb->index] = 1;
3920 /* Neither gen nor kill. */
3921 else
3923 visited[pred_bb->index] = 1;
3924 if (pre_expr_reaches_here_p_work (occr_bb, expr, pred_bb, visited))
3925 return 1;
3929 /* All paths have been checked. */
3930 return 0;
3933 /* The wrapper for pre_expr_reaches_here_p_work that ensures that any
3934 memory allocated for that function is freed. */
3936 static int
3937 pre_expr_reaches_here_p (basic_block occr_bb, struct expr *expr, basic_block bb)
3939 int rval;
3940 char *visited = XCNEWVEC (char, last_basic_block);
3942 rval = pre_expr_reaches_here_p_work (occr_bb, expr, bb, visited);
3944 free (visited);
3945 return rval;
3949 /* Given an expr, generate RTL which we can insert at the end of a BB,
3950 or on an edge. Set the block number of any insns generated to
3951 the value of BB. */
3953 static rtx
3954 process_insert_insn (struct expr *expr)
3956 rtx reg = expr->reaching_reg;
3957 rtx exp = copy_rtx (expr->expr);
3958 rtx pat;
3960 start_sequence ();
3962 /* If the expression is something that's an operand, like a constant,
3963 just copy it to a register. */
3964 if (general_operand (exp, GET_MODE (reg)))
3965 emit_move_insn (reg, exp);
3967 /* Otherwise, make a new insn to compute this expression and make sure the
3968 insn will be recognized (this also adds any needed CLOBBERs). Copy the
3969 expression to make sure we don't have any sharing issues. */
3970 else
3972 rtx insn = emit_insn (gen_rtx_SET (VOIDmode, reg, exp));
3974 if (insn_invalid_p (insn))
3975 gcc_unreachable ();
3979 pat = get_insns ();
3980 end_sequence ();
3982 return pat;
3985 /* Add EXPR to the end of basic block BB.
3987 This is used by both PRE and code hoisting.
3989 For PRE, we want to verify that the expr is either transparent
3990 or locally anticipatable in the target block. This check makes
3991 no sense for code hoisting. */
3993 static void
3994 insert_insn_end_bb (struct expr *expr, basic_block bb, int pre)
3996 rtx insn = BB_END (bb);
3997 rtx new_insn;
3998 rtx reg = expr->reaching_reg;
3999 int regno = REGNO (reg);
4000 rtx pat, pat_end;
4002 pat = process_insert_insn (expr);
4003 gcc_assert (pat && INSN_P (pat));
4005 pat_end = pat;
4006 while (NEXT_INSN (pat_end) != NULL_RTX)
4007 pat_end = NEXT_INSN (pat_end);
4009 /* If the last insn is a jump, insert EXPR in front [taking care to
4010 handle cc0, etc. properly]. Similarly we need to take care of trapping
4011 instructions in the presence of non-call exceptions. */
4013 if (JUMP_P (insn)
4014 || (NONJUMP_INSN_P (insn)
4015 && (!single_succ_p (bb)
4016 || single_succ_edge (bb)->flags & EDGE_ABNORMAL)))
4018 #ifdef HAVE_cc0
4019 rtx note;
4020 #endif
4021 /* It should always be the case that we can put these instructions
4022 anywhere in the basic block when performing PRE optimizations.
4023 Check this. */
4024 gcc_assert (!NONJUMP_INSN_P (insn) || !pre
4025 || TEST_BIT (antloc[bb->index], expr->bitmap_index)
4026 || TEST_BIT (transp[bb->index], expr->bitmap_index));
4028 /* If this is a jump table, then we can't insert stuff here. Since
4029 we know the previous real insn must be the tablejump, we insert
4030 the new instruction just before the tablejump. */
4031 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
4032 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
4033 insn = prev_real_insn (insn);
4035 #ifdef HAVE_cc0
4036 /* FIXME: 'twould be nice to call prev_cc0_setter here but it aborts
4037 if cc0 isn't set. */
4038 note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
4039 if (note)
4040 insn = XEXP (note, 0);
4041 else
4043 rtx maybe_cc0_setter = prev_nonnote_insn (insn);
4044 if (maybe_cc0_setter
4045 && INSN_P (maybe_cc0_setter)
4046 && sets_cc0_p (PATTERN (maybe_cc0_setter)))
4047 insn = maybe_cc0_setter;
4049 #endif
4050 /* FIXME: What if something in cc0/jump uses value set in new insn? */
4051 new_insn = emit_insn_before_noloc (pat, insn);
4054 /* Likewise if the last insn is a call, as will happen in the presence
4055 of exception handling. */
4056 else if (CALL_P (insn)
4057 && (!single_succ_p (bb)
4058 || single_succ_edge (bb)->flags & EDGE_ABNORMAL))
4060 /* Keeping in mind SMALL_REGISTER_CLASSES and parameters in registers,
4061 we search backward and place the instructions before the first
4062 parameter is loaded. Do this for everyone for consistency and a
4063 presumption that we'll get better code elsewhere as well.
4065 It should always be the case that we can put these instructions
4066 anywhere in the basic block when performing PRE optimizations.
4067 Check this. */
4069 gcc_assert (!pre
4070 || TEST_BIT (antloc[bb->index], expr->bitmap_index)
4071 || TEST_BIT (transp[bb->index], expr->bitmap_index));
4073 /* Since different machines initialize their parameter registers
4074 in different orders, assume nothing. Collect the set of all
4075 parameter registers. */
4076 insn = find_first_parameter_load (insn, BB_HEAD (bb));
4078 /* If we found all the parameter loads, then we want to insert
4079 before the first parameter load.
4081 If we did not find all the parameter loads, then we might have
4082 stopped on the head of the block, which could be a CODE_LABEL.
4083 If we inserted before the CODE_LABEL, then we would be putting
4084 the insn in the wrong basic block. In that case, put the insn
4085 after the CODE_LABEL. Also, respect NOTE_INSN_BASIC_BLOCK. */
4086 while (LABEL_P (insn)
4087 || NOTE_INSN_BASIC_BLOCK_P (insn))
4088 insn = NEXT_INSN (insn);
4090 new_insn = emit_insn_before_noloc (pat, insn);
4092 else
4093 new_insn = emit_insn_after_noloc (pat, insn);
4095 while (1)
4097 if (INSN_P (pat))
4099 add_label_notes (PATTERN (pat), new_insn);
4100 note_stores (PATTERN (pat), record_set_info, pat);
4102 if (pat == pat_end)
4103 break;
4104 pat = NEXT_INSN (pat);
4107 gcse_create_count++;
4109 if (dump_file)
4111 fprintf (dump_file, "PRE/HOIST: end of bb %d, insn %d, ",
4112 bb->index, INSN_UID (new_insn));
4113 fprintf (dump_file, "copying expression %d to reg %d\n",
4114 expr->bitmap_index, regno);
4118 /* Insert partially redundant expressions on edges in the CFG to make
4119 the expressions fully redundant. */
4121 static int
4122 pre_edge_insert (struct edge_list *edge_list, struct expr **index_map)
4124 int e, i, j, num_edges, set_size, did_insert = 0;
4125 sbitmap *inserted;
4127 /* Where PRE_INSERT_MAP is nonzero, we add the expression on that edge
4128 if it reaches any of the deleted expressions. */
4130 set_size = pre_insert_map[0]->size;
4131 num_edges = NUM_EDGES (edge_list);
4132 inserted = sbitmap_vector_alloc (num_edges, expr_hash_table.n_elems);
4133 sbitmap_vector_zero (inserted, num_edges);
4135 for (e = 0; e < num_edges; e++)
4137 int indx;
4138 basic_block bb = INDEX_EDGE_PRED_BB (edge_list, e);
4140 for (i = indx = 0; i < set_size; i++, indx += SBITMAP_ELT_BITS)
4142 SBITMAP_ELT_TYPE insert = pre_insert_map[e]->elms[i];
4144 for (j = indx; insert && j < (int) expr_hash_table.n_elems; j++, insert >>= 1)
4145 if ((insert & 1) != 0 && index_map[j]->reaching_reg != NULL_RTX)
4147 struct expr *expr = index_map[j];
4148 struct occr *occr;
4150 /* Now look at each deleted occurrence of this expression. */
4151 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
4153 if (! occr->deleted_p)
4154 continue;
4156 /* Insert this expression on this edge if it would
4157 reach the deleted occurrence in BB. */
4158 if (!TEST_BIT (inserted[e], j))
4160 rtx insn;
4161 edge eg = INDEX_EDGE (edge_list, e);
4163 /* We can't insert anything on an abnormal and
4164 critical edge, so we insert the insn at the end of
4165 the previous block. There are several alternatives
4166 detailed in Morgan's book, P277 (sec 10.5), for
4167 handling this situation. This one is easiest for
4168 now. */
4170 if (eg->flags & EDGE_ABNORMAL)
4171 insert_insn_end_bb (index_map[j], bb, 0);
4172 else
4174 insn = process_insert_insn (index_map[j]);
4175 insert_insn_on_edge (insn, eg);
4178 if (dump_file)
4180 fprintf (dump_file, "PRE/HOIST: edge (%d,%d), ",
4181 bb->index,
4182 INDEX_EDGE_SUCC_BB (edge_list, e)->index);
4183 fprintf (dump_file, "copy expression %d\n",
4184 expr->bitmap_index);
4187 update_ld_motion_stores (expr);
4188 SET_BIT (inserted[e], j);
4189 did_insert = 1;
4190 gcse_create_count++;
4197 sbitmap_vector_free (inserted);
4198 return did_insert;
4201 /* Copy the result of EXPR->EXPR generated by INSN to EXPR->REACHING_REG.
4202 Given "old_reg <- expr" (INSN), instead of adding after it
4203 reaching_reg <- old_reg
4204 it's better to do the following:
4205 reaching_reg <- expr
4206 old_reg <- reaching_reg
4207 because this way copy propagation can discover additional PRE
4208 opportunities. But if this fails, we try the old way.
4209 When "expr" is a store, i.e.
4210 given "MEM <- old_reg", instead of adding after it
4211 reaching_reg <- old_reg
4212 it's better to add it before as follows:
4213 reaching_reg <- old_reg
4214 MEM <- reaching_reg. */
4216 static void
4217 pre_insert_copy_insn (struct expr *expr, rtx insn)
4219 rtx reg = expr->reaching_reg;
4220 int regno = REGNO (reg);
4221 int indx = expr->bitmap_index;
4222 rtx pat = PATTERN (insn);
4223 rtx set, first_set, new_insn;
4224 rtx old_reg;
4225 int i;
4227 /* This block matches the logic in hash_scan_insn. */
4228 switch (GET_CODE (pat))
4230 case SET:
4231 set = pat;
4232 break;
4234 case PARALLEL:
4235 /* Search through the parallel looking for the set whose
4236 source was the expression that we're interested in. */
4237 first_set = NULL_RTX;
4238 set = NULL_RTX;
4239 for (i = 0; i < XVECLEN (pat, 0); i++)
4241 rtx x = XVECEXP (pat, 0, i);
4242 if (GET_CODE (x) == SET)
4244 /* If the source was a REG_EQUAL or REG_EQUIV note, we
4245 may not find an equivalent expression, but in this
4246 case the PARALLEL will have a single set. */
4247 if (first_set == NULL_RTX)
4248 first_set = x;
4249 if (expr_equiv_p (SET_SRC (x), expr->expr))
4251 set = x;
4252 break;
4257 gcc_assert (first_set);
4258 if (set == NULL_RTX)
4259 set = first_set;
4260 break;
4262 default:
4263 gcc_unreachable ();
4266 if (REG_P (SET_DEST (set)))
4268 old_reg = SET_DEST (set);
4269 /* Check if we can modify the set destination in the original insn. */
4270 if (validate_change (insn, &SET_DEST (set), reg, 0))
4272 new_insn = gen_move_insn (old_reg, reg);
4273 new_insn = emit_insn_after (new_insn, insn);
4275 /* Keep register set table up to date. */
4276 record_one_set (regno, insn);
4278 else
4280 new_insn = gen_move_insn (reg, old_reg);
4281 new_insn = emit_insn_after (new_insn, insn);
4283 /* Keep register set table up to date. */
4284 record_one_set (regno, new_insn);
4287 else /* This is possible only in the case of a store to memory. */
4289 old_reg = SET_SRC (set);
4290 new_insn = gen_move_insn (reg, old_reg);
4292 /* Check if we can modify the set source in the original insn. */
4293 if (validate_change (insn, &SET_SRC (set), reg, 0))
4294 new_insn = emit_insn_before (new_insn, insn);
4295 else
4296 new_insn = emit_insn_after (new_insn, insn);
4298 /* Keep register set table up to date. */
4299 record_one_set (regno, new_insn);
4302 gcse_create_count++;
4304 if (dump_file)
4305 fprintf (dump_file,
4306 "PRE: bb %d, insn %d, copy expression %d in insn %d to reg %d\n",
4307 BLOCK_NUM (insn), INSN_UID (new_insn), indx,
4308 INSN_UID (insn), regno);
4311 /* Copy available expressions that reach the redundant expression
4312 to `reaching_reg'. */
4314 static void
4315 pre_insert_copies (void)
4317 unsigned int i, added_copy;
4318 struct expr *expr;
4319 struct occr *occr;
4320 struct occr *avail;
4322 /* For each available expression in the table, copy the result to
4323 `reaching_reg' if the expression reaches a deleted one.
4325 ??? The current algorithm is rather brute force.
4326 Need to do some profiling. */
4328 for (i = 0; i < expr_hash_table.size; i++)
4329 for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash)
4331 /* If the basic block isn't reachable, PPOUT will be TRUE. However,
4332 we don't want to insert a copy here because the expression may not
4333 really be redundant. So only insert an insn if the expression was
4334 deleted. This test also avoids further processing if the
4335 expression wasn't deleted anywhere. */
4336 if (expr->reaching_reg == NULL)
4337 continue;
4339 /* Set when we add a copy for that expression. */
4340 added_copy = 0;
4342 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
4344 if (! occr->deleted_p)
4345 continue;
4347 for (avail = expr->avail_occr; avail != NULL; avail = avail->next)
4349 rtx insn = avail->insn;
4351 /* No need to handle this one if handled already. */
4352 if (avail->copied_p)
4353 continue;
4355 /* Don't handle this one if it's a redundant one. */
4356 if (TEST_BIT (pre_redundant_insns, INSN_CUID (insn)))
4357 continue;
4359 /* Or if the expression doesn't reach the deleted one. */
4360 if (! pre_expr_reaches_here_p (BLOCK_FOR_INSN (avail->insn),
4361 expr,
4362 BLOCK_FOR_INSN (occr->insn)))
4363 continue;
4365 added_copy = 1;
4367 /* Copy the result of avail to reaching_reg. */
4368 pre_insert_copy_insn (expr, insn);
4369 avail->copied_p = 1;
4373 if (added_copy)
4374 update_ld_motion_stores (expr);
4378 /* Emit move from SRC to DEST noting the equivalence with expression computed
4379 in INSN. */
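/* For illustration (hypothetical RTL, not from a real dump): if INSN is

     (set (reg 70) (plus (reg 60) (reg 61)))

   and the reaching register is (reg 100), the emitted copy is

     (set (reg 70) (reg 100))

   placed after INSN, carrying a REG_EQUAL note for
   (plus (reg 60) (reg 61)) so that a later CSE pass still knows the
   value held by reg 70. */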
4380 static rtx
4381 gcse_emit_move_after (rtx src, rtx dest, rtx insn)
4383 rtx new;
4384 rtx set = single_set (insn), set2;
4385 rtx note;
4386 rtx eqv;
4388 /* This should never fail since we're creating a reg->reg copy
4389 we've verified to be valid. */
4391 new = emit_insn_after (gen_move_insn (dest, src), insn);
4393 /* Note the equivalence for local CSE pass. */
4394 set2 = single_set (new);
4395 if (!set2 || !rtx_equal_p (SET_DEST (set2), dest))
4396 return new;
4397 if ((note = find_reg_equal_equiv_note (insn)))
4398 eqv = XEXP (note, 0);
4399 else
4400 eqv = SET_SRC (set);
4402 set_unique_reg_note (new, REG_EQUAL, copy_insn_1 (eqv));
4404 return new;
4407 /* Delete redundant computations.
4408 Deletion is done by changing the insn to copy the `reaching_reg' of
4409 the expression into the result of the SET. It is left to later passes
4410 (cprop, cse2, flow, combine, regmove) to propagate the copy or eliminate it.
4412 Returns nonzero if a change is made. */
4414 static int
4415 pre_delete (void)
4417 unsigned int i;
4418 int changed;
4419 struct expr *expr;
4420 struct occr *occr;
4422 changed = 0;
4423 for (i = 0; i < expr_hash_table.size; i++)
4424 for (expr = expr_hash_table.table[i];
4425 expr != NULL;
4426 expr = expr->next_same_hash)
4428 int indx = expr->bitmap_index;
4430 /* We only need to search antic_occr since we require
4431 ANTLOC != 0. */
4433 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
4435 rtx insn = occr->insn;
4436 rtx set;
4437 basic_block bb = BLOCK_FOR_INSN (insn);
4439 /* We only delete insns that have a single_set. */
4440 if (TEST_BIT (pre_delete_map[bb->index], indx)
4441 && (set = single_set (insn)) != 0)
4443 /* Create a pseudo-reg to store the result of reaching
4444 expressions into. Get the mode for the new pseudo from
4445 the mode of the original destination pseudo. */
4446 if (expr->reaching_reg == NULL)
4447 expr->reaching_reg
4448 = gen_reg_rtx (GET_MODE (SET_DEST (set)));
4450 gcse_emit_move_after (expr->reaching_reg, SET_DEST (set), insn);
4451 delete_insn (insn);
4452 occr->deleted_p = 1;
4453 SET_BIT (pre_redundant_insns, INSN_CUID (insn));
4454 changed = 1;
4455 gcse_subst_count++;
4457 if (dump_file)
4459 fprintf (dump_file,
4460 "PRE: redundant insn %d (expression %d) in ",
4461 INSN_UID (insn), indx);
4462 fprintf (dump_file, "bb %d, reaching reg is %d\n",
4463 bb->index, REGNO (expr->reaching_reg));
4469 return changed;
4472 /* Perform GCSE optimizations using PRE.
4473 This is called by one_pre_gcse_pass after all the dataflow analysis
4474 has been done.
4476 This is based on the original Morel-Renvoise paper, Fred Chow's thesis, and
4477 lazy code motion from Knoop, Ruthing and Steffen as described in Advanced
4478 Compiler Design and Implementation.
4480 ??? A new pseudo reg is created to hold the reaching expression. The nice
4481 thing about the classical approach is that it would try to use an existing
4482 reg. If the register can't be adequately optimized [i.e. we introduce
4483 reload problems], one could add a pass here to propagate the new register
4484 through the block.
4486 ??? We don't handle single sets in PARALLELs because we're [currently] not
4487 able to copy the rest of the parallel when we insert copies to create full
4488 redundancies from partial redundancies. However, there's no reason why we
4489 can't handle PARALLELs in the cases where there are no partial
4490 redundancies. */
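/* As a (hypothetical) source-level sketch of the overall effect, with
   t standing for the new reaching pseudo:

     if (cond)
       x = a + b;
     y = a + b;          <- partially redundant

   becomes

     if (cond)
       { t = a + b; x = t; }
     else
       t = a + b;        <- inserted on the edge by pre_edge_insert
     y = t;              <- redundant computation deleted by pre_delete

   leaving later passes to propagate or eliminate the copies. */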
4492 static int
4493 pre_gcse (void)
4495 unsigned int i;
4496 int did_insert, changed;
4497 struct expr **index_map;
4498 struct expr *expr;
4500 /* Compute a mapping from expression number (`bitmap_index') to
4501 hash table entry. */
4503 index_map = XCNEWVEC (struct expr *, expr_hash_table.n_elems);
4504 for (i = 0; i < expr_hash_table.size; i++)
4505 for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash)
4506 index_map[expr->bitmap_index] = expr;
4508 /* Reset bitmap used to track which insns are redundant. */
4509 pre_redundant_insns = sbitmap_alloc (max_cuid);
4510 sbitmap_zero (pre_redundant_insns);
4512 /* Delete the redundant insns first so that
4513 - we know what register to use for the new insns and for the other
4514 ones with reaching expressions
4515 - we know which insns are redundant when we go to create copies */
4517 changed = pre_delete ();
4519 did_insert = pre_edge_insert (edge_list, index_map);
4521 /* In other places with reaching expressions, copy the expression to the
4522 specially allocated pseudo-reg that reaches the redundant expr. */
4523 pre_insert_copies ();
4524 if (did_insert)
4526 commit_edge_insertions ();
4527 changed = 1;
4530 free (index_map);
4531 sbitmap_free (pre_redundant_insns);
4532 return changed;
4535 /* Top level routine to perform one PRE GCSE pass.
4537 Return nonzero if a change was made. */
4539 static int
4540 one_pre_gcse_pass (int pass)
4542 int changed = 0;
4544 gcse_subst_count = 0;
4545 gcse_create_count = 0;
4547 alloc_hash_table (max_cuid, &expr_hash_table, 0);
4548 add_noreturn_fake_exit_edges ();
4549 if (flag_gcse_lm)
4550 compute_ld_motion_mems ();
4552 compute_hash_table (&expr_hash_table);
4553 trim_ld_motion_mems ();
4554 if (dump_file)
4555 dump_hash_table (dump_file, "Expression", &expr_hash_table);
4557 if (expr_hash_table.n_elems > 0)
4559 alloc_pre_mem (last_basic_block, expr_hash_table.n_elems);
4560 compute_pre_data ();
4561 changed |= pre_gcse ();
4562 free_edge_list (edge_list);
4563 free_pre_mem ();
4566 free_ldst_mems ();
4567 remove_fake_exit_edges ();
4568 free_hash_table (&expr_hash_table);
4570 if (dump_file)
4572 fprintf (dump_file, "\nPRE GCSE of %s, pass %d: %d bytes needed, ",
4573 current_function_name (), pass, bytes_used);
4574 fprintf (dump_file, "%d substs, %d insns created\n",
4575 gcse_subst_count, gcse_create_count);
4578 return changed;
4581 /* If X contains any LABEL_REF's, add REG_LABEL notes for them to INSN.
4582 If notes are added to an insn which references a CODE_LABEL, the
4583 LABEL_NUSES count is incremented. We have to add REG_LABEL notes,
4584 because the following loop optimization pass requires them. */
4586 /* ??? If there was a jump optimization pass after gcse and before loop,
4587 then we would not need to do this here, because jump would add the
4588 necessary REG_LABEL notes. */
4590 static void
4591 add_label_notes (rtx x, rtx insn)
4593 enum rtx_code code = GET_CODE (x);
4594 int i, j;
4595 const char *fmt;
4597 if (code == LABEL_REF && !LABEL_REF_NONLOCAL_P (x))
4599 /* This code used to ignore labels that referred to dispatch tables to
4600 avoid flow generating (slightly) worse code.
4602 We no longer ignore such label references (see LABEL_REF handling in
4603 mark_jump_label for additional information). */
4605 REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_LABEL, XEXP (x, 0),
4606 REG_NOTES (insn));
4607 if (LABEL_P (XEXP (x, 0)))
4608 LABEL_NUSES (XEXP (x, 0))++;
4609 return;
4612 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
4614 if (fmt[i] == 'e')
4615 add_label_notes (XEXP (x, i), insn);
4616 else if (fmt[i] == 'E')
4617 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4618 add_label_notes (XVECEXP (x, i, j), insn);
4622 /* Compute transparent outgoing information for each block.
4624 An expression is transparent to an edge unless it is killed by
4625 the edge itself. This can only happen with abnormal control flow,
4626 when the edge is traversed through a call. This happens with
4627 non-local labels and exceptions.
4629 This would not be necessary if we split the edge. While this is
4630 normally impossible for abnormal critical edges, with some effort
4631 it should be possible with exception handling, since we still have
4632 control over which handler should be invoked. But due to increased
4633 EH table sizes, this may not be worthwhile. */
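/* For example (hypothetical): if a block ends with

     a[0] = 1;
     foo ();

   then a MEM expression for a[0] is not transparent at the block exit,
   since without interprocedural alias analysis we must assume the call
   can store to it; a constant-pool load, by contrast, cannot be killed
   by the call and stays transparent. */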
4635 static void
4636 compute_transpout (void)
4638 basic_block bb;
4639 unsigned int i;
4640 struct expr *expr;
4642 sbitmap_vector_ones (transpout, last_basic_block);
4644 FOR_EACH_BB (bb)
4646 /* Note that flow inserted a nop at the end of basic blocks that
4647 end in call instructions for reasons other than abnormal
4648 control flow. */
4649 if (! CALL_P (BB_END (bb)))
4650 continue;
4652 for (i = 0; i < expr_hash_table.size; i++)
4653 for (expr = expr_hash_table.table[i]; expr ; expr = expr->next_same_hash)
4654 if (MEM_P (expr->expr))
4656 if (GET_CODE (XEXP (expr->expr, 0)) == SYMBOL_REF
4657 && CONSTANT_POOL_ADDRESS_P (XEXP (expr->expr, 0)))
4658 continue;
4660 /* ??? Optimally, we would use interprocedural alias
4661 analysis to determine if this mem is actually killed
4662 by this call. */
4663 RESET_BIT (transpout[bb->index], expr->bitmap_index);
4668 /* Code Hoisting variables and subroutines. */
4670 /* Very busy expressions. */
4671 static sbitmap *hoist_vbein;
4672 static sbitmap *hoist_vbeout;
4674 /* Hoistable expressions. */
4675 static sbitmap *hoist_exprs;
4677 /* ??? We could compute post dominators and run this algorithm in
4678 reverse to perform tail merging; doing so would probably be
4679 more effective than the tail merging code in jump.c.
4681 It's unclear if tail merging could be run in parallel with
4682 code hoisting. It would be nice. */
4684 /* Allocate vars used for code hoisting analysis. */
4686 static void
4687 alloc_code_hoist_mem (int n_blocks, int n_exprs)
4689 antloc = sbitmap_vector_alloc (n_blocks, n_exprs);
4690 transp = sbitmap_vector_alloc (n_blocks, n_exprs);
4691 comp = sbitmap_vector_alloc (n_blocks, n_exprs);
4693 hoist_vbein = sbitmap_vector_alloc (n_blocks, n_exprs);
4694 hoist_vbeout = sbitmap_vector_alloc (n_blocks, n_exprs);
4695 hoist_exprs = sbitmap_vector_alloc (n_blocks, n_exprs);
4696 transpout = sbitmap_vector_alloc (n_blocks, n_exprs);
4699 /* Free vars used for code hoisting analysis. */
4701 static void
4702 free_code_hoist_mem (void)
4704 sbitmap_vector_free (antloc);
4705 sbitmap_vector_free (transp);
4706 sbitmap_vector_free (comp);
4708 sbitmap_vector_free (hoist_vbein);
4709 sbitmap_vector_free (hoist_vbeout);
4710 sbitmap_vector_free (hoist_exprs);
4711 sbitmap_vector_free (transpout);
4713 free_dominance_info (CDI_DOMINATORS);
4716 /* Compute the very busy expressions at entry/exit from each block.
4718 An expression is very busy if all paths from a given point
4719 compute the expression. */
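/* In dataflow terms, the fixed-point loop below computes

     VBEIN[bb]  = ANTLOC[bb] | (VBEOUT[bb] & TRANSP[bb])
     VBEOUT[bb] = intersection of VBEIN[s] over all successors s of bb

   with VBEOUT left empty where the guard below skips the block laid
   out just before the exit block. */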
4721 static void
4722 compute_code_hoist_vbeinout (void)
4724 int changed, passes;
4725 basic_block bb;
4727 sbitmap_vector_zero (hoist_vbeout, last_basic_block);
4728 sbitmap_vector_zero (hoist_vbein, last_basic_block);
4730 passes = 0;
4731 changed = 1;
4733 while (changed)
4735 changed = 0;
4737 /* We scan the blocks in the reverse order to speed up
4738 the convergence. */
4739 FOR_EACH_BB_REVERSE (bb)
4741 changed |= sbitmap_a_or_b_and_c_cg (hoist_vbein[bb->index], antloc[bb->index],
4742 hoist_vbeout[bb->index], transp[bb->index]);
4743 if (bb->next_bb != EXIT_BLOCK_PTR)
4744 sbitmap_intersection_of_succs (hoist_vbeout[bb->index], hoist_vbein, bb->index);
4747 passes++;
4750 if (dump_file)
4751 fprintf (dump_file, "hoisting vbeinout computation: %d passes\n", passes);
4754 /* Top level routine to do the dataflow analysis needed by code hoisting. */
4756 static void
4757 compute_code_hoist_data (void)
4759 compute_local_properties (transp, comp, antloc, &expr_hash_table);
4760 compute_transpout ();
4761 compute_code_hoist_vbeinout ();
4762 calculate_dominance_info (CDI_DOMINATORS);
4763 if (dump_file)
4764 fprintf (dump_file, "\n");
4767 /* Determine if the expression identified by EXPR_INDEX would
4768 reach BB unimpaired if it was placed at the end of EXPR_BB.
4770 It's unclear exactly what Muchnick meant by "unimpaired". It seems
4771 to me that the expression must either be computed or transparent in
4772 *every* block in the path(s) from EXPR_BB to BB. Any other definition
4773 would allow the expression to be hoisted out of loops, even if
4774 the expression wasn't a loop invariant.
4776 Contrast this to reachability for PRE where an expression is
4777 considered reachable if *any* path reaches instead of *all*
4778 paths. */
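/* For instance (a hypothetical case): if "a + i" is computed in a loop
   body that also increments i, the paths that wrap around the loop
   back to that computation pass through a block where "a + i" is
   neither computed nor transparent (the block that sets i).  Under the
   all-paths definition the expression does not reach unimpaired, so it
   is not hoisted out of the loop. */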
4780 static int
4781 hoist_expr_reaches_here_p (basic_block expr_bb, int expr_index, basic_block bb, char *visited)
4783 edge pred;
4784 edge_iterator ei;
4785 int visited_allocated_locally = 0;
4788 if (visited == NULL)
4790 visited_allocated_locally = 1;
4791 visited = XCNEWVEC (char, last_basic_block);
4794 FOR_EACH_EDGE (pred, ei, bb->preds)
4796 basic_block pred_bb = pred->src;
4798 if (pred->src == ENTRY_BLOCK_PTR)
4799 break;
4800 else if (pred_bb == expr_bb)
4801 continue;
4802 else if (visited[pred_bb->index])
4803 continue;
4805 /* Does this predecessor generate this expression? */
4806 else if (TEST_BIT (comp[pred_bb->index], expr_index))
4807 break;
4808 else if (! TEST_BIT (transp[pred_bb->index], expr_index))
4809 break;
4811 /* Not killed. */
4812 else
4814 visited[pred_bb->index] = 1;
4815 if (! hoist_expr_reaches_here_p (expr_bb, expr_index,
4816 pred_bb, visited))
4817 break;
4820 if (visited_allocated_locally)
4821 free (visited);
4823 return (pred == NULL);
4826 /* Actually perform code hoisting. */
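/* A (hypothetical) source-level sketch of a successful hoist, with t
   standing for the new reaching pseudo:

     if (cond)
       x = a * b;
     else
       y = a * b;

   becomes

     t = a * b;
     if (cond)
       x = t;
     else
       y = t;

   subject to the "hoistable > 1" threshold applied below. */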
4828 static void
4829 hoist_code (void)
4831 basic_block bb, dominated;
4832 basic_block *domby;
4833 unsigned int domby_len;
4834 unsigned int i,j;
4835 struct expr **index_map;
4836 struct expr *expr;
4838 sbitmap_vector_zero (hoist_exprs, last_basic_block);
4840 /* Compute a mapping from expression number (`bitmap_index') to
4841 hash table entry. */
4843 index_map = XCNEWVEC (struct expr *, expr_hash_table.n_elems);
4844 for (i = 0; i < expr_hash_table.size; i++)
4845 for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash)
4846 index_map[expr->bitmap_index] = expr;
4848 /* Walk over each basic block looking for potentially hoistable
4849 expressions; nothing gets hoisted from the entry block. */
4850 FOR_EACH_BB (bb)
4852 int found = 0;
4853 int insn_inserted_p;
4855 domby_len = get_dominated_by (CDI_DOMINATORS, bb, &domby);
4856 /* Examine each expression that is very busy at the exit of this
4857 block. These are the potentially hoistable expressions. */
4858 for (i = 0; i < hoist_vbeout[bb->index]->n_bits; i++)
4860 int hoistable = 0;
4862 if (TEST_BIT (hoist_vbeout[bb->index], i)
4863 && TEST_BIT (transpout[bb->index], i))
4865 /* We've found a potentially hoistable expression; now
4866 we look at every block BB dominates to see if it
4867 computes the expression. */
4868 for (j = 0; j < domby_len; j++)
4870 dominated = domby[j];
4871 /* Ignore self dominance. */
4872 if (bb == dominated)
4873 continue;
4874 /* We've found a dominated block; now see if it computes
4875 the busy expression and whether or not moving that
4876 expression to the "beginning" of that block is safe. */
4877 if (!TEST_BIT (antloc[dominated->index], i))
4878 continue;
4880 /* Note if the expression would reach the dominated block
4881 unimpaired if it was placed at the end of BB.
4883 Keep track of how many times this expression is hoistable
4884 from a dominated block into BB. */
4885 if (hoist_expr_reaches_here_p (bb, i, dominated, NULL))
4886 hoistable++;
4889 /* If we found more than one hoistable occurrence of this
4890 expression, then note it in the bitmap of expressions to
4891 hoist. It makes no sense to hoist things which are computed
4892 in only one BB, and doing so tends to pessimize register
4893 allocation. One could increase this value to try harder
4894 to avoid any possible code expansion due to register
4895 allocation issues; however experiments have shown that
4896 the vast majority of hoistable expressions are only movable
4897 from two successors, so raising this threshold is likely
4898 to nullify any benefit we get from code hoisting. */
4899 if (hoistable > 1)
4901 SET_BIT (hoist_exprs[bb->index], i);
4902 found = 1;
4906 /* If we found nothing to hoist, then quit now. */
4907 if (! found)
4909 free (domby);
4910 continue;
4913 /* Loop over all the hoistable expressions. */
4914 for (i = 0; i < hoist_exprs[bb->index]->n_bits; i++)
4916 /* We want to insert the expression into BB only once, so
4917 note when we've inserted it. */
4918 insn_inserted_p = 0;
4920 /* These tests should be the same as the tests above. */
4921 if (TEST_BIT (hoist_exprs[bb->index], i))
4923 /* We've found a potentially hoistable expression; now
4924 we look at every block BB dominates to see if it
4925 computes the expression. */
4926 for (j = 0; j < domby_len; j++)
4928 dominated = domby[j];
4929 /* Ignore self dominance. */
4930 if (bb == dominated)
4931 continue;
4933 /* We've found a dominated block; now see if it computes
4934 the busy expression and whether or not moving that
4935 expression to the "beginning" of that block is safe. */
4936 if (!TEST_BIT (antloc[dominated->index], i))
4937 continue;
4939 /* The expression is computed in the dominated block and
4940 it would be safe to compute it at the start of the
4941 dominated block. Now we have to determine if the
4942 expression would reach the dominated block if it was
4943 placed at the end of BB. */
4944 if (hoist_expr_reaches_here_p (bb, i, dominated, NULL))
4946 struct expr *expr = index_map[i];
4947 struct occr *occr = expr->antic_occr;
4948 rtx insn;
4949 rtx set;
4951 /* Find the right occurrence of this expression. */
4952 while (occr && BLOCK_FOR_INSN (occr->insn) != dominated)
4953 occr = occr->next;
4955 gcc_assert (occr);
4956 insn = occr->insn;
4957 set = single_set (insn);
4958 gcc_assert (set);
4960 /* Create a pseudo-reg to store the result of reaching
4961 expressions into. Get the mode for the new pseudo
4962 from the mode of the original destination pseudo. */
4963 if (expr->reaching_reg == NULL)
4964 expr->reaching_reg
4965 = gen_reg_rtx (GET_MODE (SET_DEST (set)));
4967 gcse_emit_move_after (expr->reaching_reg, SET_DEST (set), insn);
4968 delete_insn (insn);
4969 occr->deleted_p = 1;
4970 if (!insn_inserted_p)
4972 insert_insn_end_bb (index_map[i], bb, 0);
4973 insn_inserted_p = 1;
4979 free (domby);
4982 free (index_map);
4985 /* Top level routine to perform one code hoisting (aka unification) pass.
4987 Return nonzero if a change was made. */
4989 static int
4990 one_code_hoisting_pass (void)
4992 int changed = 0;
4994 alloc_hash_table (max_cuid, &expr_hash_table, 0);
4995 compute_hash_table (&expr_hash_table);
4996 if (dump_file)
4997 dump_hash_table (dump_file, "Code Hoisting Expressions", &expr_hash_table);
4999 if (expr_hash_table.n_elems > 0)
5001 alloc_code_hoist_mem (last_basic_block, expr_hash_table.n_elems);
5002 compute_code_hoist_data ();
5003 hoist_code ();
5004 free_code_hoist_mem ();
5007 free_hash_table (&expr_hash_table);
5009 return changed;
5012 /* Here we provide the things required to do store motion towards
5013 the exit. In order for this to be effective, gcse also needed to
5014 be taught how to move a load when it is killed only by a store to itself.
5016 int i;
5017 float a[10];
5019 void foo(float scale)
5021 for (i=0; i<10; i++)
5022 a[i] *= scale;
5025 'i' is both loaded and stored to in the loop. Normally, gcse cannot move
5026 the load out since it's live around the loop, and stored at the bottom
5027 of the loop.
5029 The 'Load Motion' referred to and implemented in this file is
5030 an enhancement to gcse which, when using edge based lcm, recognizes
5031 this situation and allows gcse to move the load out of the loop.
5033 Once gcse has hoisted the load, store motion can then push this
5034 load towards the exit, and we end up with no loads or stores of 'i'
5035 in the loop. */
5037 static hashval_t
5038 pre_ldst_expr_hash (const void *p)
5040 int do_not_record_p = 0;
5041 const struct ls_expr *x = p;
5042 return hash_rtx (x->pattern, GET_MODE (x->pattern), &do_not_record_p, NULL, false);
5045 static int
5046 pre_ldst_expr_eq (const void *p1, const void *p2)
5048 const struct ls_expr *ptr1 = p1, *ptr2 = p2;
5049 return expr_equiv_p (ptr1->pattern, ptr2->pattern);
5052 /* This will search the ldst list for a matching expression. If it
5053 doesn't find one, we create one and initialize it. */
5055 static struct ls_expr *
5056 ldst_entry (rtx x)
5058 int do_not_record_p = 0;
5059 struct ls_expr * ptr;
5060 unsigned int hash;
5061 void **slot;
5062 struct ls_expr e;
5064 hash = hash_rtx (x, GET_MODE (x), &do_not_record_p,
5065 NULL, /*have_reg_qty=*/false);
5067 e.pattern = x;
5068 slot = htab_find_slot_with_hash (pre_ldst_table, &e, hash, INSERT);
5069 if (*slot)
5070 return (struct ls_expr *)*slot;
5072 ptr = XNEW (struct ls_expr);
5074 ptr->next = pre_ldst_mems;
5075 ptr->expr = NULL;
5076 ptr->pattern = x;
5077 ptr->pattern_regs = NULL_RTX;
5078 ptr->loads = NULL_RTX;
5079 ptr->stores = NULL_RTX;
5080 ptr->reaching_reg = NULL_RTX;
5081 ptr->invalid = 0;
5082 ptr->index = 0;
5083 ptr->hash_index = hash;
5084 pre_ldst_mems = ptr;
5085 *slot = ptr;
5087 return ptr;
5090 /* Free up an individual ldst entry. */
5092 static void
5093 free_ldst_entry (struct ls_expr * ptr)
5095 free_INSN_LIST_list (& ptr->loads);
5096 free_INSN_LIST_list (& ptr->stores);
5098 free (ptr);
5101 /* Free up all memory associated with the ldst list. */
5103 static void
5104 free_ldst_mems (void)
5106 if (pre_ldst_table)
5107 htab_delete (pre_ldst_table);
5108 pre_ldst_table = NULL;
5110 while (pre_ldst_mems)
5112 struct ls_expr * tmp = pre_ldst_mems;
5114 pre_ldst_mems = pre_ldst_mems->next;
5116 free_ldst_entry (tmp);
5119 pre_ldst_mems = NULL;
5122 /* Dump debugging info about the ldst list. */
5124 static void
5125 print_ldst_list (FILE * file)
5127 struct ls_expr * ptr;
5129 fprintf (file, "LDST list: \n");
5131 for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
5133 fprintf (file, " Pattern (%3d): ", ptr->index);
5135 print_rtl (file, ptr->pattern);
5137 fprintf (file, "\n Loads : ");
5139 if (ptr->loads)
5140 print_rtl (file, ptr->loads);
5141 else
5142 fprintf (file, "(nil)");
5144 fprintf (file, "\n Stores : ");
5146 if (ptr->stores)
5147 print_rtl (file, ptr->stores);
5148 else
5149 fprintf (file, "(nil)");
5151 fprintf (file, "\n\n");
5154 fprintf (file, "\n");
5157 /* Return the entry for X in the list of ldst only expressions, or NULL if there is none or it is invalid. */
5159 static struct ls_expr *
5160 find_rtx_in_ldst (rtx x)
5162 struct ls_expr e;
5163 void **slot;
5164 if (!pre_ldst_table)
5165 return NULL;
5166 e.pattern = x;
5167 slot = htab_find_slot (pre_ldst_table, &e, NO_INSERT);
5168 if (!slot || ((struct ls_expr *)*slot)->invalid)
5169 return NULL;
5170 return *slot;
5173 /* Assign each element of the list of mems a monotonically increasing value. */
5175 static int
5176 enumerate_ldsts (void)
5178 struct ls_expr * ptr;
5179 int n = 0;
5181 for (ptr = pre_ldst_mems; ptr != NULL; ptr = ptr->next)
5182 ptr->index = n++;
5184 return n;
5187 /* Return first item in the list. */
5189 static inline struct ls_expr *
5190 first_ls_expr (void)
5192 return pre_ldst_mems;
5195 /* Return the next item in the list after the specified one. */
5197 static inline struct ls_expr *
5198 next_ls_expr (struct ls_expr * ptr)
5200 return ptr->next;
5203 /* Load Motion for loads which only kill themselves. */
5205 /* Return true if x is a simple MEM operation, with no registers or
5206 side effects. These are the types of loads we consider for the
5207 ld_motion list; otherwise we let the usual aliasing take care of it. */
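/* For instance (sketched RTL, for illustration only),
   (mem:SI (symbol_ref:SI ("i"))) qualifies, while a volatile MEM, a
   BLKmode MEM, or a stack slot such as
   (mem:SI (plus:SI (reg:SI sp) (const_int 4))) does not, the last
   because it mentions the stack pointer. */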
5209 static int
5210 simple_mem (rtx x)
5212 if (! MEM_P (x))
5213 return 0;
5215 if (MEM_VOLATILE_P (x))
5216 return 0;
5218 if (GET_MODE (x) == BLKmode)
5219 return 0;
5221 /* If we are handling exceptions, we must be careful with memory references
5222 that may trap. If we are not, the behavior is undefined, so we may just
5223 continue. */
5224 if (flag_non_call_exceptions && may_trap_p (x))
5225 return 0;
5227 if (side_effects_p (x))
5228 return 0;
5230 /* Do not consider function arguments passed on the stack. */
5231 if (reg_mentioned_p (stack_pointer_rtx, x))
5232 return 0;
5234 if (flag_float_store && FLOAT_MODE_P (GET_MODE (x)))
5235 return 0;
5237 return 1;
5240 /* Make sure there isn't a buried reference in this pattern anywhere.
5241 If there is, invalidate the entry for it since we're not capable
5242 of fixing it up just yet. We have to be sure we know about ALL
5243 loads since the aliasing code will allow all entries in the
5244 ld_motion list to not-alias themselves. If we miss a load, we will get
5245 the wrong value since gcse might common it and we won't know to
5246 fix it up. */
5248 static void
5249 invalidate_any_buried_refs (rtx x)
5251 const char * fmt;
5252 int i, j;
5253 struct ls_expr * ptr;
5255 /* Invalidate it in the list. */
5256 if (MEM_P (x) && simple_mem (x))
5258 ptr = ldst_entry (x);
5259 ptr->invalid = 1;
5262 /* Recursively process the insn. */
5263 fmt = GET_RTX_FORMAT (GET_CODE (x));
5265 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
5267 if (fmt[i] == 'e')
5268 invalidate_any_buried_refs (XEXP (x, i));
5269 else if (fmt[i] == 'E')
5270 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
5271 invalidate_any_buried_refs (XVECEXP (x, i, j));
5275 /* Find all the 'simple' MEMs which are used in LOADs and STORES. Simple
5276 being defined as MEM loads and stores to symbols, with no side effects
5277 and no registers in the expression. For a MEM destination, we also
5278 check that the insn is still valid if we replace the destination with a
5279 REG, as is done in update_ld_motion_stores. If there are any uses/defs
5280 which don't match these criteria, they are invalidated and trimmed out
5281 later. */
5283 static void
5284 compute_ld_motion_mems (void)
5286 struct ls_expr * ptr;
5287 basic_block bb;
5288 rtx insn;
5290 pre_ldst_mems = NULL;
5291 pre_ldst_table = htab_create (13, pre_ldst_expr_hash,
5292 pre_ldst_expr_eq, NULL);
5294 FOR_EACH_BB (bb)
5296 FOR_BB_INSNS (bb, insn)
5298 if (INSN_P (insn))
5300 if (GET_CODE (PATTERN (insn)) == SET)
5302 rtx src = SET_SRC (PATTERN (insn));
5303 rtx dest = SET_DEST (PATTERN (insn));
5305 /* Check for a simple LOAD... */
5306 if (MEM_P (src) && simple_mem (src))
5308 ptr = ldst_entry (src);
5309 if (REG_P (dest))
5310 ptr->loads = alloc_INSN_LIST (insn, ptr->loads);
5311 else
5312 ptr->invalid = 1;
5314 else
5316 /* Make sure there isn't a buried load somewhere. */
5317 invalidate_any_buried_refs (src);
5320 /* Check for stores. Don't worry about aliased ones; they
5321 will block any movement we might do later. We only care
5322 about this exact pattern since those are the only
5323 circumstances in which we will ignore the aliasing info. */
5324 if (MEM_P (dest) && simple_mem (dest))
5326 ptr = ldst_entry (dest);
5328 if (! MEM_P (src)
5329 && GET_CODE (src) != ASM_OPERANDS
5330 /* Check for REG manually since want_to_gcse_p
5331 returns 0 for all REGs. */
5332 && can_assign_to_reg_p (src))
5333 ptr->stores = alloc_INSN_LIST (insn, ptr->stores);
5334 else
5335 ptr->invalid = 1;
5338 else
5339 invalidate_any_buried_refs (PATTERN (insn));
5345 /* Remove any references that have either been invalidated or are not in
5346 the expression list for PRE gcse. */
5348 static void
5349 trim_ld_motion_mems (void)
5351 struct ls_expr * * last = & pre_ldst_mems;
5352 struct ls_expr * ptr = pre_ldst_mems;
5354 while (ptr != NULL)
5356 struct expr * expr;
5358 /* Delete if entry has been made invalid. */
5359 if (! ptr->invalid)
5361 /* Delete if we cannot find this mem in the expression list. */
5362 unsigned int hash = ptr->hash_index % expr_hash_table.size;
5364 for (expr = expr_hash_table.table[hash];
5365 expr != NULL;
5366 expr = expr->next_same_hash)
5367 if (expr_equiv_p (expr->expr, ptr->pattern))
5368 break;
5370 else
5371 expr = (struct expr *) 0;
5373 if (expr)
5375 /* Set the expression field if we are keeping it. */
5376 ptr->expr = expr;
5377 last = & ptr->next;
5378 ptr = ptr->next;
5380 else
5382 *last = ptr->next;
5383 htab_remove_elt_with_hash (pre_ldst_table, ptr, ptr->hash_index);
5384 free_ldst_entry (ptr);
5385 ptr = * last;
5389 /* Show the world what we've found. */
5390 if (dump_file && pre_ldst_mems != NULL)
5391 print_ldst_list (dump_file);
5394 /* This routine will take an expression which we are replacing with
5395 a reaching register, and update any stores that are needed if
5396 that expression is in the ld_motion list. Stores are updated by
5397 copying their SRC to the reaching register, and then storing
5398 the reaching register into the store location. This keeps the
5399 correct value in the reaching register for the loads. */
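/* Concretely (a hypothetical sketch): if the loads of "i" were given
   reaching register r, the store

     i = j + 1;

   is rewritten as

     r = j + 1;
     i = r;

   so r keeps holding the value stored to "i". */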
5401 static void
5402 update_ld_motion_stores (struct expr * expr)
5404 struct ls_expr * mem_ptr;
5406 if ((mem_ptr = find_rtx_in_ldst (expr->expr)))
5408 /* We can try to find just the REACHED stores, but it shouldn't
5409 matter if we set the reaching reg everywhere... some might be
5410 dead and should be eliminated later. */
5412 /* We replace (set mem expr) with (set reg expr) (set mem reg)
5413 where reg is the reaching reg used in the load. We checked in
5414 compute_ld_motion_mems that we can replace (set mem expr) with
5415 (set reg expr) in that insn. */
5416 rtx list = mem_ptr->stores;
5418 for ( ; list != NULL_RTX; list = XEXP (list, 1))
5420 rtx insn = XEXP (list, 0);
5421 rtx pat = PATTERN (insn);
5422 rtx src = SET_SRC (pat);
5423 rtx reg = expr->reaching_reg;
5424 rtx copy, new;
5426 /* If we've already copied it, continue. */
5427 if (expr->reaching_reg == src)
5428 continue;
5430 if (dump_file)
5432 fprintf (dump_file, "PRE: store updated with reaching reg ");
5433 print_rtl (dump_file, expr->reaching_reg);
5434 fprintf (dump_file, ":\n ");
5435 print_inline_rtx (dump_file, insn, 8);
5436 fprintf (dump_file, "\n");
5439 copy = gen_move_insn (reg, copy_rtx (SET_SRC (pat)));
5440 new = emit_insn_before (copy, insn);
5441 record_one_set (REGNO (reg), new);
5442 SET_SRC (pat) = reg;
5444 /* Un-recognize this pattern since it's probably different now. */
5445 INSN_CODE (insn) = -1;
5446 gcse_create_count++;
5451 /* Store motion code. */
5453 #define ANTIC_STORE_LIST(x) ((x)->loads)
5454 #define AVAIL_STORE_LIST(x) ((x)->stores)
5455 #define LAST_AVAIL_CHECK_FAILURE(x) ((x)->reaching_reg)
5457 /* This is used to communicate the target vector we want to use in the
5458 reg_set_info routine when called via the note_stores mechanism. */
5459 static int * regvec;
5461 /* And current insn, for the same routine. */
5462 static rtx compute_store_table_current_insn;
5464 /* Used in computing the reverse edge graph bit vectors. */
5465 static sbitmap * st_antloc;
5467 /* Global holding the number of store expressions we are dealing with. */
5468 static int num_stores;
5470 /* Checks to see if we need to mark a register set. Called from
5471 note_stores. */
5473 static void
5474 reg_set_info (rtx dest, rtx setter ATTRIBUTE_UNUSED,
5475 void *data)
5477 sbitmap bb_reg = data;
5479 if (GET_CODE (dest) == SUBREG)
5480 dest = SUBREG_REG (dest);
5482 if (REG_P (dest))
5484 regvec[REGNO (dest)] = INSN_UID (compute_store_table_current_insn);
5485 if (bb_reg)
5486 SET_BIT (bb_reg, REGNO (dest));
5490 /* Clear any mark that says that this insn sets dest. Called from
5491 note_stores. */
5493 static void
5494 reg_clear_last_set (rtx dest, rtx setter ATTRIBUTE_UNUSED,
5495 void *data)
5497 int *dead_vec = data;
5499 if (GET_CODE (dest) == SUBREG)
5500 dest = SUBREG_REG (dest);
5502 if (REG_P (dest) &&
5503 dead_vec[REGNO (dest)] == INSN_UID (compute_store_table_current_insn))
5504 dead_vec[REGNO (dest)] = 0;
5507 /* Return false if some of the registers in list X are killed, i.e.
5508 marked as set in the REGS_SET array; return true otherwise. */
5510 static bool
5511 store_ops_ok (rtx x, int *regs_set)
5513 rtx reg;
5515 for (; x; x = XEXP (x, 1))
5517 reg = XEXP (x, 0);
5518 if (regs_set[REGNO(reg)])
5519 return false;
5522 return true;
5525 /* Returns a list of registers mentioned in X. */
5526 static rtx
5527 extract_mentioned_regs (rtx x)
5529 return extract_mentioned_regs_helper (x, NULL_RTX);
5532 /* Helper for extract_mentioned_regs; ACCUM is used to accumulate used
5533 registers. */
5534 static rtx
5535 extract_mentioned_regs_helper (rtx x, rtx accum)
5537 int i;
5538 enum rtx_code code;
5539 const char * fmt;
5541 /* Repeat is used to turn tail-recursion into iteration. */
5542 repeat:
5544 if (x == 0)
5545 return accum;
5547 code = GET_CODE (x);
5548 switch (code)
5550 case REG:
5551 return alloc_EXPR_LIST (0, x, accum);
5553 case MEM:
5554 x = XEXP (x, 0);
5555 goto repeat;
5557 case PRE_DEC:
5558 case PRE_INC:
5559 case POST_DEC:
5560 case POST_INC:
5561 /* We do not run this function with arguments having side effects. */
5562 gcc_unreachable ();
5564 case PC:
5565 case CC0: /*FIXME*/
5566 case CONST:
5567 case CONST_INT:
5568 case CONST_DOUBLE:
5569 case CONST_VECTOR:
5570 case SYMBOL_REF:
5571 case LABEL_REF:
5572 case ADDR_VEC:
5573 case ADDR_DIFF_VEC:
5574 return accum;
5576 default:
5577 break;
5580 i = GET_RTX_LENGTH (code) - 1;
5581 fmt = GET_RTX_FORMAT (code);
5583 for (; i >= 0; i--)
5585 if (fmt[i] == 'e')
5587 rtx tem = XEXP (x, i);
5589 /* If we are about to do the last recursive call
5590 needed at this level, change it into iteration. */
5591 if (i == 0)
5593 x = tem;
5594 goto repeat;
5597 accum = extract_mentioned_regs_helper (tem, accum);
5599 else if (fmt[i] == 'E')
5601 int j;
5603 for (j = 0; j < XVECLEN (x, i); j++)
5604 accum = extract_mentioned_regs_helper (XVECEXP (x, i, j), accum);
5608 return accum;
5611 /* Determine whether INSN is a MEM store pattern that we will consider moving.
5612 REGS_SET_BEFORE is bitmap of registers set before (and including) the
5613 current insn, REGS_SET_AFTER is bitmap of registers set after (and
5614 including) the insn in this basic block. We must be passing through BB from
5615 head to end, as we are using this fact to speed things up.
5617 The results are stored this way:
5619 -- the first anticipatable expression is added into ANTIC_STORE_LIST
5620 -- if the processed expression is not anticipatable, NULL_RTX is added
5621 there instead, so that we can use it as an indicator that no further
5622 expression of this type may be anticipatable
5623 -- if the expression is available, it is added as head of AVAIL_STORE_LIST;
5624 consequently, all of them but this head are dead and may be deleted.
5625 -- if the expression is not available, the insn due to which it fails to be
5626 available is stored in reaching_reg.
5628 Things are complicated a bit by the fact that there may already be stores
5629 to the same MEM from other blocks; also the caller must take care of the
5630 necessary cleanup of the temporary markers after the end of the basic block.
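/* A (hypothetical) single-block illustration for stores to the same
   MEM "m":

     m = x;       <- anticipatable: nothing kills m before it
     ... = m;     <- this load kills availability of the store above
     m = y;       <- available: nothing kills m between it and block end

   Only the first store is recorded in ANTIC_STORE_LIST, and only the
   last becomes the head of AVAIL_STORE_LIST. */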
5633 static void
5634 find_moveable_store (rtx insn, int *regs_set_before, int *regs_set_after)
5636 struct ls_expr * ptr;
5637 rtx dest, set, tmp;
5638 int check_anticipatable, check_available;
5639 basic_block bb = BLOCK_FOR_INSN (insn);
5641 set = single_set (insn);
5642 if (!set)
5643 return;
5645 dest = SET_DEST (set);
5647 if (! MEM_P (dest) || MEM_VOLATILE_P (dest)
5648 || GET_MODE (dest) == BLKmode)
5649 return;
5651 if (side_effects_p (dest))
5652 return;
5654 /* If we are handling exceptions, we must be careful with memory references
5655 that may trap. If we are not, the behavior is undefined, so we may just
5656 continue. */
5657 if (flag_non_call_exceptions && may_trap_p (dest))
5658 return;
5660 /* Even if the destination cannot trap, the source may. In this case we'd
5661 need to handle updating the REG_EH_REGION note. */
5662 if (find_reg_note (insn, REG_EH_REGION, NULL_RTX))
5663 return;
5665 /* Make sure that the SET_SRC of this store insn can be assigned to
5666 a register, or we will fail later on in replace_store_insn, which
5667 assumes that we can do this. But sometimes the target machine has
5668 oddities like MEM read-modify-write instructions. See for example
5669 PR24257. */
5670 if (!can_assign_to_reg_p (SET_SRC (set)))
5671 return;
5673 ptr = ldst_entry (dest);
5674 if (!ptr->pattern_regs)
5675 ptr->pattern_regs = extract_mentioned_regs (dest);
5677 /* Do not check for anticipatability if we either found one anticipatable
5678 store already, or tested for one and found out that it was killed. */
5679 check_anticipatable = 0;
5680 if (!ANTIC_STORE_LIST (ptr))
5681 check_anticipatable = 1;
5682 else
5684 tmp = XEXP (ANTIC_STORE_LIST (ptr), 0);
5685 if (tmp != NULL_RTX
5686 && BLOCK_FOR_INSN (tmp) != bb)
5687 check_anticipatable = 1;
5689 if (check_anticipatable)
5691 if (store_killed_before (dest, ptr->pattern_regs, insn, bb, regs_set_before))
5692 tmp = NULL_RTX;
5693 else
5694 tmp = insn;
5695 ANTIC_STORE_LIST (ptr) = alloc_INSN_LIST (tmp,
5696 ANTIC_STORE_LIST (ptr));
5699 /* It is not necessary to check whether the store is available if we did
5700 it successfully before; if we failed before, do not bother to check
5701 until we reach the insn that caused us to fail. */
5702 check_available = 0;
5703 if (!AVAIL_STORE_LIST (ptr))
5704 check_available = 1;
5705 else
5707 tmp = XEXP (AVAIL_STORE_LIST (ptr), 0);
5708 if (BLOCK_FOR_INSN (tmp) != bb)
5709 check_available = 1;
5711 if (check_available)
5713 /* Check whether we have already reached the insn at which the check
5714 failed last time. */
5715 if (LAST_AVAIL_CHECK_FAILURE (ptr))
5717 for (tmp = BB_END (bb);
5718 tmp != insn && tmp != LAST_AVAIL_CHECK_FAILURE (ptr);
5719 tmp = PREV_INSN (tmp))
5720 continue;
5721 if (tmp == insn)
5722 check_available = 0;
5724 else
5725 check_available = store_killed_after (dest, ptr->pattern_regs, insn,
5726 bb, regs_set_after,
5727 &LAST_AVAIL_CHECK_FAILURE (ptr));
5729 if (!check_available)
5730 AVAIL_STORE_LIST (ptr) = alloc_INSN_LIST (insn, AVAIL_STORE_LIST (ptr));
/* Find available and anticipatable stores.  */

static int
compute_store_table (void)
{
  int ret;
  basic_block bb;
  unsigned regno;
  rtx insn, pat, tmp;
  int *last_set_in, *already_set;
  struct ls_expr * ptr, **prev_next_ptr_ptr;

  max_gcse_regno = max_reg_num ();

  reg_set_in_block = sbitmap_vector_alloc (last_basic_block,
                                           max_gcse_regno);
  sbitmap_vector_zero (reg_set_in_block, last_basic_block);
  pre_ldst_mems = 0;
  pre_ldst_table = htab_create (13, pre_ldst_expr_hash,
                                pre_ldst_expr_eq, NULL);
  last_set_in = XCNEWVEC (int, max_gcse_regno);
  already_set = XNEWVEC (int, max_gcse_regno);

  /* Find all the stores we care about.  */
  FOR_EACH_BB (bb)
    {
      /* First compute the registers set in this block.  */
      regvec = last_set_in;

      FOR_BB_INSNS (bb, insn)
        {
          if (! INSN_P (insn))
            continue;

          if (CALL_P (insn))
            {
              for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
                if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
                  {
                    last_set_in[regno] = INSN_UID (insn);
                    SET_BIT (reg_set_in_block[bb->index], regno);
                  }
            }

          pat = PATTERN (insn);
          compute_store_table_current_insn = insn;
          note_stores (pat, reg_set_info, reg_set_in_block[bb->index]);
        }

      /* Now find the stores.  */
      memset (already_set, 0, sizeof (int) * max_gcse_regno);
      regvec = already_set;
      FOR_BB_INSNS (bb, insn)
        {
          if (! INSN_P (insn))
            continue;

          if (CALL_P (insn))
            {
              for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
                if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
                  already_set[regno] = 1;
            }

          pat = PATTERN (insn);
          note_stores (pat, reg_set_info, NULL);

          /* Now that we've marked regs, look for stores.  */
          find_moveable_store (insn, already_set, last_set_in);

          /* Unmark regs that are no longer set.  */
          compute_store_table_current_insn = insn;
          note_stores (pat, reg_clear_last_set, last_set_in);
          if (CALL_P (insn))
            {
              for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
                if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno)
                    && last_set_in[regno] == INSN_UID (insn))
                  last_set_in[regno] = 0;
            }
        }

#ifdef ENABLE_CHECKING
      /* last_set_in should now be all-zero.  */
      for (regno = 0; regno < max_gcse_regno; regno++)
        gcc_assert (!last_set_in[regno]);
#endif

      /* Clear temporary marks.  */
      for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
        {
          LAST_AVAIL_CHECK_FAILURE (ptr) = NULL_RTX;
          if (ANTIC_STORE_LIST (ptr)
              && (tmp = XEXP (ANTIC_STORE_LIST (ptr), 0)) == NULL_RTX)
            ANTIC_STORE_LIST (ptr) = XEXP (ANTIC_STORE_LIST (ptr), 1);
        }
    }

  /* Remove the stores that are not available anywhere, as there will
     be no opportunity to optimize them.  */
  for (ptr = pre_ldst_mems, prev_next_ptr_ptr = &pre_ldst_mems;
       ptr != NULL;
       ptr = *prev_next_ptr_ptr)
    {
      if (!AVAIL_STORE_LIST (ptr))
        {
          *prev_next_ptr_ptr = ptr->next;
          htab_remove_elt_with_hash (pre_ldst_table, ptr, ptr->hash_index);
          free_ldst_entry (ptr);
        }
      else
        prev_next_ptr_ptr = &ptr->next;
    }

  ret = enumerate_ldsts ();

  if (dump_file)
    {
      fprintf (dump_file, "ST_avail and ST_antic (shown under loads..)\n");
      print_ldst_list (dump_file);
    }

  free (last_set_in);
  free (already_set);
  return ret;
}
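
/* A note on the bookkeeping above (a sketch of the invariants, as the
   arrays are used): during the second walk over a block, already_set[r]
   is nonzero once some insn at or before the current one sets register
   r, while last_set_in[r] holds the UID of the last insn in the block
   that sets r and is cleared as soon as that insn has been passed.  The
   two arrays thus serve find_moveable_store as its REGS_SET_BEFORE and
   REGS_SET_AFTER arguments for the current insn.  */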
/* Check to see if the load X is aliased with STORE_PATTERN.
   AFTER is true if we are checking the case when STORE_PATTERN occurs
   after X.  */

static bool
load_kills_store (rtx x, rtx store_pattern, int after)
{
  if (after)
    return anti_dependence (x, store_pattern);
  else
    return true_dependence (store_pattern, GET_MODE (store_pattern), x,
                            rtx_addr_varies_p);
}
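
/* In dependence terms (an informal sketch): when STORE_PATTERN executes
   after the load X, the pair is a write-after-read, so anti_dependence
   applies; when it executes before X, the load may read the stored
   value, a read-after-write, so true_dependence applies.  Either way,
   an aliasing load means the store cannot be moved across it.  */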
/* Go through the entire insn X, looking for any loads which might alias
   STORE_PATTERN.  Return true if found.
   AFTER is true if we are checking the case when STORE_PATTERN occurs
   after the insn X.  */

static bool
find_loads (rtx x, rtx store_pattern, int after)
{
  const char * fmt;
  int i, j;
  int ret = false;

  if (!x)
    return false;

  if (GET_CODE (x) == SET)
    x = SET_SRC (x);

  if (MEM_P (x))
    {
      if (load_kills_store (x, store_pattern, after))
        return true;
    }

  /* Recursively process the insn.  */
  fmt = GET_RTX_FORMAT (GET_CODE (x));

  for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0 && !ret; i--)
    {
      if (fmt[i] == 'e')
        ret |= find_loads (XEXP (x, i), store_pattern, after);
      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          ret |= find_loads (XVECEXP (x, i, j), store_pattern, after);
    }
  return ret;
}
/* Check if INSN kills the store pattern X (is aliased with it).
   AFTER is true if we are checking the case when store X occurs
   after the insn.  Return true if it does.  */

static bool
store_killed_in_insn (rtx x, rtx x_regs, rtx insn, int after)
{
  rtx reg, base, note;

  if (!INSN_P (insn))
    return false;

  if (CALL_P (insn))
    {
      /* A normal or pure call might read from the pattern,
         but a const call will not.  */
      if (! CONST_OR_PURE_CALL_P (insn) || pure_call_p (insn))
        return true;

      /* But even a const call reads its parameters.  Check whether the
         base of some of the registers used in the mem is the stack
         pointer.  */
      for (reg = x_regs; reg; reg = XEXP (reg, 1))
        {
          base = find_base_term (XEXP (reg, 0));
          if (!base
              || (GET_CODE (base) == ADDRESS
                  && GET_MODE (base) == Pmode
                  && XEXP (base, 0) == stack_pointer_rtx))
            return true;
        }

      return false;
    }

  if (GET_CODE (PATTERN (insn)) == SET)
    {
      rtx pat = PATTERN (insn);
      rtx dest = SET_DEST (pat);

      if (GET_CODE (dest) == ZERO_EXTRACT)
        dest = XEXP (dest, 0);

      /* Check for memory stores to aliased objects.  */
      if (MEM_P (dest)
          && !expr_equiv_p (dest, x))
        {
          if (after)
            {
              if (output_dependence (dest, x))
                return true;
            }
          else
            {
              if (output_dependence (x, dest))
                return true;
            }
        }
      if (find_loads (SET_SRC (pat), x, after))
        return true;
    }
  else if (find_loads (PATTERN (insn), x, after))
    return true;

  /* If this insn has a REG_EQUAL or REG_EQUIV note referencing a memory
     location aliased with X, then this insn kills X.  */
  note = find_reg_equal_equiv_note (insn);
  if (! note)
    return false;
  note = XEXP (note, 0);

  /* However, if the note represents a must alias rather than a may
     alias relationship, then it does not kill X.  */
  if (expr_equiv_p (note, x))
    return false;

  /* See if there are any aliased loads in the note.  */
  return find_loads (note, x, after);
}
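
/* An example of the REG_EQUAL handling above (a sketch): if an insn
   "r = x" carries a REG_EQUAL note that is exactly the stored mem, the
   note is a must alias of the store and is harmless; a note mentioning
   a different but possibly overlapping mem may describe a load of the
   stored location, so it kills the store.  */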
/* Returns true if the expression X is loaded or clobbered on or after INSN
   within basic block BB.  REGS_SET_AFTER is a bitmap of registers set in
   or after the insn.  X_REGS is the list of registers mentioned in X.
   If the store is killed, return in FAIL_INSN the last insn in which the
   kill occurs.  */

static bool
store_killed_after (rtx x, rtx x_regs, rtx insn, basic_block bb,
                    int *regs_set_after, rtx *fail_insn)
{
  rtx last = BB_END (bb), act;

  if (!store_ops_ok (x_regs, regs_set_after))
    {
      /* We do not know where it will happen.  */
      if (fail_insn)
        *fail_insn = NULL_RTX;
      return true;
    }

  /* Scan from the end, so that fail_insn is determined correctly.  */
  for (act = last; act != PREV_INSN (insn); act = PREV_INSN (act))
    if (store_killed_in_insn (x, x_regs, act, false))
      {
        if (fail_insn)
          *fail_insn = act;
        return true;
      }

  return false;
}
/* Returns true if the expression X is loaded or clobbered on or before INSN
   within basic block BB.  X_REGS is the list of registers mentioned in X.
   REGS_SET_BEFORE is a bitmap of registers set before or in this insn.  */
static bool
store_killed_before (rtx x, rtx x_regs, rtx insn, basic_block bb,
                     int *regs_set_before)
{
  rtx first = BB_HEAD (bb);

  if (!store_ops_ok (x_regs, regs_set_before))
    return true;

  for ( ; insn != PREV_INSN (first); insn = PREV_INSN (insn))
    if (store_killed_in_insn (x, x_regs, insn, true))
      return true;

  return false;
}
/* Fill in available, anticipatable, transparent and kill vectors in
   STORE_DATA, based on lists of available and anticipatable stores.  */
static void
build_store_vectors (void)
{
  basic_block bb;
  int *regs_set_in_block;
  rtx insn, st;
  struct ls_expr * ptr;
  unsigned regno;

  /* Build the gen_vector.  This is any store in the table which is not killed
     by aliasing later in its block.  */
  ae_gen = sbitmap_vector_alloc (last_basic_block, num_stores);
  sbitmap_vector_zero (ae_gen, last_basic_block);

  st_antloc = sbitmap_vector_alloc (last_basic_block, num_stores);
  sbitmap_vector_zero (st_antloc, last_basic_block);

  for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
    {
      for (st = AVAIL_STORE_LIST (ptr); st != NULL; st = XEXP (st, 1))
        {
          insn = XEXP (st, 0);
          bb = BLOCK_FOR_INSN (insn);

          /* If we've already seen an available expression in this block,
             we can delete this one (it occurs earlier in the block).  We'll
             copy the SRC expression to an unused register in case there
             are any side effects.  */
          if (TEST_BIT (ae_gen[bb->index], ptr->index))
            {
              rtx r = gen_reg_rtx (GET_MODE (ptr->pattern));
              if (dump_file)
                fprintf (dump_file, "Removing redundant store:\n");
              replace_store_insn (r, XEXP (st, 0), bb, ptr);
              continue;
            }
          SET_BIT (ae_gen[bb->index], ptr->index);
        }

      for (st = ANTIC_STORE_LIST (ptr); st != NULL; st = XEXP (st, 1))
        {
          insn = XEXP (st, 0);
          bb = BLOCK_FOR_INSN (insn);
          SET_BIT (st_antloc[bb->index], ptr->index);
        }
    }

  ae_kill = sbitmap_vector_alloc (last_basic_block, num_stores);
  sbitmap_vector_zero (ae_kill, last_basic_block);

  transp = sbitmap_vector_alloc (last_basic_block, num_stores);
  sbitmap_vector_zero (transp, last_basic_block);
  regs_set_in_block = XNEWVEC (int, max_gcse_regno);

  FOR_EACH_BB (bb)
    {
      for (regno = 0; regno < max_gcse_regno; regno++)
        regs_set_in_block[regno] = TEST_BIT (reg_set_in_block[bb->index], regno);

      for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
        {
          if (store_killed_after (ptr->pattern, ptr->pattern_regs, BB_HEAD (bb),
                                  bb, regs_set_in_block, NULL))
            {
              /* It should not be necessary to consider the expression
                 killed if it is both anticipatable and available.  */
              if (!TEST_BIT (st_antloc[bb->index], ptr->index)
                  || !TEST_BIT (ae_gen[bb->index], ptr->index))
                SET_BIT (ae_kill[bb->index], ptr->index);
            }
          else
            SET_BIT (transp[bb->index], ptr->index);
        }
    }

  free (regs_set_in_block);

  if (dump_file)
    {
      dump_sbitmap_vector (dump_file, "st_antloc", "", st_antloc, last_basic_block);
      dump_sbitmap_vector (dump_file, "st_kill", "", ae_kill, last_basic_block);
      dump_sbitmap_vector (dump_file, "Transpt", "", transp, last_basic_block);
      dump_sbitmap_vector (dump_file, "st_avloc", "", ae_gen, last_basic_block);
    }
}
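
/* How these vectors drive the optimization (a summary, not new
   machinery): for each store expression and block, st_antloc marks
   blocks where the store occurs before any kill, ae_gen marks blocks
   whose end the store reaches unkilled, ae_kill marks blocks that
   clobber the stored location, and transp marks blocks the store could
   move through unharmed.  store_motion hands all four to
   pre_edge_rev_lcm, which solves the reverse LCM dataflow problem to
   decide where stores are deleted and inserted.  */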
/* Insert an instruction at the beginning of a basic block, and update
   the BB_HEAD if needed.  */

static void
insert_insn_start_bb (rtx insn, basic_block bb)
{
  /* Insert at start of successor block.  */
  rtx prev = PREV_INSN (BB_HEAD (bb));
  rtx before = BB_HEAD (bb);
  while (before != 0)
    {
      if (! LABEL_P (before)
          && (! NOTE_P (before)
              || NOTE_LINE_NUMBER (before) != NOTE_INSN_BASIC_BLOCK))
        break;
      prev = before;
      if (prev == BB_END (bb))
        break;
      before = NEXT_INSN (before);
    }

  insn = emit_insn_after_noloc (insn, prev);

  if (dump_file)
    {
      fprintf (dump_file, "STORE_MOTION insert store at start of BB %d:\n",
               bb->index);
      print_inline_rtx (dump_file, insn, 6);
      fprintf (dump_file, "\n");
    }
}
/* This routine will insert a store on an edge.  EXPR is the ldst entry for
   the memory reference, and E is the edge to insert it on.  Returns nonzero
   if an edge insertion was performed.  */

static int
insert_store (struct ls_expr * expr, edge e)
{
  rtx reg, insn;
  basic_block bb;
  edge tmp;
  edge_iterator ei;

  /* We did all the deletions before this insertion, so if we didn't delete
     a store, then we haven't set the reaching reg yet either.  */
  if (expr->reaching_reg == NULL_RTX)
    return 0;

  if (e->flags & EDGE_FAKE)
    return 0;

  reg = expr->reaching_reg;
  insn = gen_move_insn (copy_rtx (expr->pattern), reg);

  /* If we are inserting this expression on ALL predecessor edges of a BB,
     insert it at the start of the BB, and reset the insert bits on the other
     edges so we don't try to insert it on the other edges.  */
  bb = e->dest;
  FOR_EACH_EDGE (tmp, ei, e->dest->preds)
    if (!(tmp->flags & EDGE_FAKE))
      {
        int index = EDGE_INDEX (edge_list, tmp->src, tmp->dest);

        gcc_assert (index != EDGE_INDEX_NO_EDGE);
        if (! TEST_BIT (pre_insert_map[index], expr->index))
          break;
      }

  /* If tmp is NULL, we found an insertion on every edge, blank the
     insertion vector for these edges, and insert at the start of the BB.  */
  if (!tmp && bb != EXIT_BLOCK_PTR)
    {
      FOR_EACH_EDGE (tmp, ei, e->dest->preds)
        {
          int index = EDGE_INDEX (edge_list, tmp->src, tmp->dest);
          RESET_BIT (pre_insert_map[index], expr->index);
        }
      insert_insn_start_bb (insn, bb);
      return 0;
    }

  /* We can't put stores in the front of blocks pointed to by abnormal
     edges since that may put a store where one didn't use to be.  */
  gcc_assert (!(e->flags & EDGE_ABNORMAL));

  insert_insn_on_edge (insn, e);

  if (dump_file)
    {
      fprintf (dump_file, "STORE_MOTION insert insn on edge (%d, %d):\n",
               e->src->index, e->dest->index);
      print_inline_rtx (dump_file, insn, 6);
      fprintf (dump_file, "\n");
    }

  return 1;
}
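
/* For instance (a hypothetical CFG): if LCM asks for the store on every
   edge into a join block B, the code above clears all of those insertion
   bits and emits a single "mem = reaching_reg" at the head of B instead,
   which is equivalent and avoids duplicating the store on each edge.  */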
/* Remove any REG_EQUAL or REG_EQUIV notes containing a reference to the
   memory location in SMEXPR set in basic block BB.

   This could be rather expensive.  */

static void
remove_reachable_equiv_notes (basic_block bb, struct ls_expr *smexpr)
{
  edge_iterator *stack, ei;
  int sp;
  edge act;
  sbitmap visited = sbitmap_alloc (last_basic_block);
  rtx last, insn, note;
  rtx mem = smexpr->pattern;

  stack = XNEWVEC (edge_iterator, n_basic_blocks);
  sp = 0;
  ei = ei_start (bb->succs);

  sbitmap_zero (visited);

  act = (EDGE_COUNT (ei_container (ei)) > 0 ? EDGE_I (ei_container (ei), 0) : NULL);
  while (1)
    {
      if (!act)
        {
          if (!sp)
            {
              free (stack);
              sbitmap_free (visited);
              return;
            }
          act = ei_edge (stack[--sp]);
        }
      bb = act->dest;

      if (bb == EXIT_BLOCK_PTR
          || TEST_BIT (visited, bb->index))
        {
          if (!ei_end_p (ei))
            ei_next (&ei);
          act = (! ei_end_p (ei)) ? ei_edge (ei) : NULL;
          continue;
        }
      SET_BIT (visited, bb->index);

      if (TEST_BIT (st_antloc[bb->index], smexpr->index))
        {
          for (last = ANTIC_STORE_LIST (smexpr);
               BLOCK_FOR_INSN (XEXP (last, 0)) != bb;
               last = XEXP (last, 1))
            continue;
          last = XEXP (last, 0);
        }
      else
        last = NEXT_INSN (BB_END (bb));

      for (insn = BB_HEAD (bb); insn != last; insn = NEXT_INSN (insn))
        if (INSN_P (insn))
          {
            note = find_reg_equal_equiv_note (insn);
            if (!note || !expr_equiv_p (XEXP (note, 0), mem))
              continue;

            if (dump_file)
              fprintf (dump_file, "STORE_MOTION drop REG_EQUAL note at insn %d:\n",
                       INSN_UID (insn));
            remove_note (insn, note);
          }

      if (!ei_end_p (ei))
        ei_next (&ei);
      act = (! ei_end_p (ei)) ? ei_edge (ei) : NULL;

      if (EDGE_COUNT (bb->succs) > 0)
        {
          if (act)
            stack[sp++] = ei;
          ei = ei_start (bb->succs);
          act = (EDGE_COUNT (ei_container (ei)) > 0 ? EDGE_I (ei_container (ei), 0) : NULL);
        }
    }
}
/* This routine will replace a store with a SET to a specified register.  */

static void
replace_store_insn (rtx reg, rtx del, basic_block bb, struct ls_expr *smexpr)
{
  rtx insn, mem, note, set, ptr, pair;

  mem = smexpr->pattern;
  insn = gen_move_insn (reg, SET_SRC (single_set (del)));
  insn = emit_insn_after (insn, del);

  if (dump_file)
    {
      fprintf (dump_file,
               "STORE_MOTION delete insn in BB %d:\n      ", bb->index);
      print_inline_rtx (dump_file, del, 6);
      fprintf (dump_file, "\nSTORE MOTION replaced with insn:\n      ");
      print_inline_rtx (dump_file, insn, 6);
      fprintf (dump_file, "\n");
    }

  for (ptr = ANTIC_STORE_LIST (smexpr); ptr; ptr = XEXP (ptr, 1))
    if (XEXP (ptr, 0) == del)
      {
        XEXP (ptr, 0) = insn;
        break;
      }

  /* Move the notes from the deleted insn to its replacement, and patch
     up the LIBCALL notes.  */
  REG_NOTES (insn) = REG_NOTES (del);

  note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
  if (note)
    {
      pair = XEXP (note, 0);
      note = find_reg_note (pair, REG_LIBCALL, NULL_RTX);
      XEXP (note, 0) = insn;
    }
  note = find_reg_note (insn, REG_LIBCALL, NULL_RTX);
  if (note)
    {
      pair = XEXP (note, 0);
      note = find_reg_note (pair, REG_RETVAL, NULL_RTX);
      XEXP (note, 0) = insn;
    }

  delete_insn (del);

  /* Now we must handle REG_EQUAL notes whose contents are equal to the mem;
     they are no longer accurate provided that they are reached by this
     definition, so drop them.  */
  for (; insn != NEXT_INSN (BB_END (bb)); insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      {
        set = single_set (insn);
        if (!set)
          continue;
        if (expr_equiv_p (SET_DEST (set), mem))
          return;
        note = find_reg_equal_equiv_note (insn);
        if (!note || !expr_equiv_p (XEXP (note, 0), mem))
          continue;

        if (dump_file)
          fprintf (dump_file, "STORE_MOTION drop REG_EQUAL note at insn %d:\n",
                   INSN_UID (insn));
        remove_note (insn, note);
      }
  remove_reachable_equiv_notes (bb, smexpr);
}
/* Delete a store, but copy the value that would have been stored into
   the reaching_reg for later storing.  */

static void
delete_store (struct ls_expr * expr, basic_block bb)
{
  rtx reg, i, del;

  if (expr->reaching_reg == NULL_RTX)
    expr->reaching_reg = gen_reg_rtx (GET_MODE (expr->pattern));

  reg = expr->reaching_reg;

  for (i = AVAIL_STORE_LIST (expr); i; i = XEXP (i, 1))
    {
      del = XEXP (i, 0);
      if (BLOCK_FOR_INSN (del) == bb)
        {
          /* We know there is only one since we deleted redundant
             ones during the available computation.  */
          replace_store_insn (reg, del, bb, expr);
          break;
        }
    }
}
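
/* delete_store and insert_store work as a pair (a sketch of the net
   effect): a deleted store "mem = x" is rewritten by replace_store_insn
   into "reaching_reg = x", and insert_store later emits
   "mem = reaching_reg" on the edges (or block heads) chosen by LCM, so
   the stored value is preserved across the move.  */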
/* Free memory used by store motion.  */

static void
free_store_memory (void)
{
  free_ldst_mems ();

  if (ae_gen)
    sbitmap_vector_free (ae_gen);
  if (ae_kill)
    sbitmap_vector_free (ae_kill);
  if (transp)
    sbitmap_vector_free (transp);
  if (st_antloc)
    sbitmap_vector_free (st_antloc);
  if (pre_insert_map)
    sbitmap_vector_free (pre_insert_map);
  if (pre_delete_map)
    sbitmap_vector_free (pre_delete_map);
  if (reg_set_in_block)
    sbitmap_vector_free (reg_set_in_block);

  ae_gen = ae_kill = transp = st_antloc = NULL;
  pre_insert_map = pre_delete_map = reg_set_in_block = NULL;
}
/* Perform store motion.  Much like gcse, except we move expressions the
   other way by looking at the flowgraph in reverse.  */

static void
store_motion (void)
{
  basic_block bb;
  int x;
  struct ls_expr * ptr;
  int update_flow = 0;

  if (dump_file)
    {
      fprintf (dump_file, "before store motion\n");
      print_rtl (dump_file, get_insns ());
    }

  init_alias_analysis ();

  /* Find all the available and anticipatable stores.  */
  num_stores = compute_store_table ();
  if (num_stores == 0)
    {
      htab_delete (pre_ldst_table);
      pre_ldst_table = NULL;
      sbitmap_vector_free (reg_set_in_block);
      end_alias_analysis ();
      return;
    }

  /* Now compute kill & transp vectors.  */
  build_store_vectors ();
  add_noreturn_fake_exit_edges ();
  connect_infinite_loops_to_exit ();

  edge_list = pre_edge_rev_lcm (num_stores, transp, ae_gen,
                                st_antloc, ae_kill, &pre_insert_map,
                                &pre_delete_map);

  /* Now we want to insert the new stores which are going to be needed.  */
  for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
    {
      /* If any of the edges we have above are abnormal, we can't move this
         store.  */
      for (x = NUM_EDGES (edge_list) - 1; x >= 0; x--)
        if (TEST_BIT (pre_insert_map[x], ptr->index)
            && (INDEX_EDGE (edge_list, x)->flags & EDGE_ABNORMAL))
          break;

      if (x >= 0)
        {
          if (dump_file != NULL)
            fprintf (dump_file,
                     "Can't replace store %d: abnormal edge from %d to %d\n",
                     ptr->index, INDEX_EDGE (edge_list, x)->src->index,
                     INDEX_EDGE (edge_list, x)->dest->index);
          continue;
        }

      /* Delete the stores that LCM found redundant, then insert the
         compensating stores on the edges where they are needed.  */

      FOR_EACH_BB (bb)
        if (TEST_BIT (pre_delete_map[bb->index], ptr->index))
          delete_store (ptr, bb);

      for (x = 0; x < NUM_EDGES (edge_list); x++)
        if (TEST_BIT (pre_insert_map[x], ptr->index))
          update_flow |= insert_store (ptr, INDEX_EDGE (edge_list, x));
    }

  if (update_flow)
    commit_edge_insertions ();

  free_store_memory ();
  free_edge_list (edge_list);
  remove_fake_exit_edges ();
  end_alias_analysis ();
}
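
/* The classic effect of this pass (a sketch on hypothetical source,
   assuming *p is not read inside the loop and p is loop-invariant):

       while (cond)             while (cond)
         *p = f (x);    ==>       t = f (x);
                                *p = t;

   The store inside the loop is deleted (delete_store) and a single
   store is inserted on the loop exit edge (insert_store), with t being
   the expression's reaching_reg.  */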
/* Entry point for jump bypassing optimization pass.  */

static int
bypass_jumps (void)
{
  int changed;

  /* We do not construct an accurate cfg in functions which call
     setjmp, so just punt to be safe.  */
  if (current_function_calls_setjmp)
    return 0;

  /* Identify the basic block information for this function, including
     successors and predecessors.  */
  max_gcse_regno = max_reg_num ();

  if (dump_file)
    dump_flow_info (dump_file, dump_flags);

  /* Return if there's nothing to do, or it is too expensive.  */
  if (n_basic_blocks <= NUM_FIXED_BLOCKS + 1
      || is_too_expensive (_("jump bypassing disabled")))
    return 0;

  gcc_obstack_init (&gcse_obstack);
  bytes_used = 0;

  /* We need alias.  */
  init_alias_analysis ();

  /* Record where pseudo-registers are set.  This data is kept accurate
     during each pass.  ??? We could also record hard-reg information here
     [since it's unchanging], however it is currently done during hash table
     computation.

     It may be tempting to compute MEM set information here too, but MEM sets
     will be subject to code motion one day and thus we need to compute
     information about memory sets when we build the hash tables.  */

  alloc_reg_set_mem (max_gcse_regno);
  compute_sets ();

  max_gcse_regno = max_reg_num ();
  alloc_gcse_mem ();
  changed = one_cprop_pass (MAX_GCSE_PASSES + 2, true, true);
  free_gcse_mem ();

  if (dump_file)
    {
      fprintf (dump_file, "BYPASS of %s: %d basic blocks, ",
               current_function_name (), n_basic_blocks);
      fprintf (dump_file, "%d bytes\n\n", bytes_used);
    }

  obstack_free (&gcse_obstack, NULL);
  free_reg_set_mem ();

  /* We are finished with alias.  */
  end_alias_analysis ();
  allocate_reg_info (max_reg_num (), FALSE, FALSE);

  return changed;
}
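
/* Note (an observation on the call above, not new machinery): jump
   bypassing is implemented as a variant of constant/copy propagation.
   The one_cprop_pass call runs with both of its boolean arguments set,
   which here enables jump cpropping and bypassing, so jumps whose
   conditions become known along particular incoming edges can be
   redirected.  */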
/* Return true if the graph is too expensive to optimize.  PASS is the
   optimization about to be performed.  */

static bool
is_too_expensive (const char *pass)
{
  /* Trying to perform global optimizations on flow graphs which have
     a high connectivity will take a long time and is unlikely to be
     particularly useful.

     In normal circumstances a cfg should have about twice as many
     edges as blocks.  But we do not want to punish small functions
     which have a couple of switch statements.  Rather than simply
     thresholding the number of blocks, use something with more
     graceful degradation.  */
  if (n_edges > 20000 + n_basic_blocks * 4)
    {
      warning (OPT_Wdisabled_optimization,
               "%s: %d basic blocks and %d edges/basic block",
               pass, n_basic_blocks, n_edges / n_basic_blocks);

      return true;
    }

  /* If allocating memory for the cprop bitmap would take up too much
     storage it's better just to disable the optimization.  */
  if ((n_basic_blocks
       * SBITMAP_SET_SIZE (max_reg_num ())
       * sizeof (SBITMAP_ELT_TYPE)) > MAX_GCSE_MEMORY)
    {
      warning (OPT_Wdisabled_optimization,
               "%s: %d basic blocks and %d registers",
               pass, n_basic_blocks, max_reg_num ());

      return true;
    }

  return false;
}
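
/* A worked example for the connectivity test above: a function with
   1000 basic blocks is allowed up to 20000 + 4 * 1000 = 24000 edges, so
   the limit amounts to roughly four edges per block plus a fixed
   allowance of 20000 that keeps small, switch-heavy functions from
   being penalized.  */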
static bool
gate_handle_jump_bypass (void)
{
  return optimize > 0 && flag_gcse;
}

/* Perform jump bypassing and control flow optimizations.  */
static unsigned int
rest_of_handle_jump_bypass (void)
{
  cleanup_cfg (CLEANUP_EXPENSIVE);
  reg_scan (get_insns (), max_reg_num ());

  if (bypass_jumps ())
    {
      rebuild_jump_labels (get_insns ());
      cleanup_cfg (CLEANUP_EXPENSIVE);
      delete_trivially_dead_insns (get_insns (), max_reg_num ());
    }
  return 0;
}

struct tree_opt_pass pass_jump_bypass =
{
  "bypass",                             /* name */
  gate_handle_jump_bypass,              /* gate */
  rest_of_handle_jump_bypass,           /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_BYPASS,                            /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func |
  TODO_ggc_collect | TODO_verify_flow,  /* todo_flags_finish */
  'G'                                   /* letter */
};
static bool
gate_handle_gcse (void)
{
  return optimize > 0 && flag_gcse;
}


static unsigned int
rest_of_handle_gcse (void)
{
  int save_csb, save_cfj;
  int tem2 = 0, tem;

  tem = gcse_main (get_insns ());
  rebuild_jump_labels (get_insns ());
  delete_trivially_dead_insns (get_insns (), max_reg_num ());

  save_csb = flag_cse_skip_blocks;
  save_cfj = flag_cse_follow_jumps;
  flag_cse_skip_blocks = flag_cse_follow_jumps = 0;

  /* If -fexpensive-optimizations, re-run CSE to clean up things done
     by gcse.  */
  if (flag_expensive_optimizations)
    {
      timevar_push (TV_CSE);
      reg_scan (get_insns (), max_reg_num ());
      tem2 = cse_main (get_insns (), max_reg_num ());
      purge_all_dead_edges ();
      delete_trivially_dead_insns (get_insns (), max_reg_num ());
      timevar_pop (TV_CSE);
      cse_not_expected = !flag_rerun_cse_after_loop;
    }

  /* If gcse or cse altered any jumps, rerun jump optimizations to clean
     things up.  */
  if (tem || tem2)
    {
      timevar_push (TV_JUMP);
      rebuild_jump_labels (get_insns ());
      delete_dead_jumptables ();
      cleanup_cfg (CLEANUP_EXPENSIVE);
      timevar_pop (TV_JUMP);
    }

  flag_cse_skip_blocks = save_csb;
  flag_cse_follow_jumps = save_cfj;
  return 0;
}

struct tree_opt_pass pass_gcse =
{
  "gcse1",                              /* name */
  gate_handle_gcse,                     /* gate */
  rest_of_handle_gcse,                  /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_GCSE,                              /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func |
  TODO_verify_flow | TODO_ggc_collect,  /* todo_flags_finish */
  'G'                                   /* letter */
};

#include "gt-gcse.h"