/* Global common subexpression elimination/Partial redundancy elimination
   and global constant/copy propagation for GNU compiler.
   Copyright (C) 1997, 1998, 1999, 2000, 2001, 2002, 2003
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
/* TODO
   - reordering of memory allocation and freeing to be more space efficient
   - do rough calc of how many regs are needed in each block, and a rough
     calc of how many regs are available in each class and use that to
     throttle back the code in cases where RTX_COST is minimal.
   - a store to the same address as a load does not kill the load if the
     source of the store is also the destination of the load.  Handling this
     allows more load motion, particularly out of loops.
   - ability to realloc sbitmap vectors would allow one initial computation
     of reg_set_in_block with only subsequent additions, rather than
     recomputing it for each pass
*/
/* References searched while implementing this.

   Compilers Principles, Techniques and Tools
   Aho, Sethi, Ullman
   Addison-Wesley, 1988

   Global Optimization by Suppression of Partial Redundancies
   E. Morel, C. Renvoise
   Communications of the ACM, Vol. 22, Num. 2, Feb. 1979

   A Portable Machine-Independent Global Optimizer - Design and Measurements
   Frederick Chow
   Stanford Ph.D. thesis, Dec. 1983

   A Fast Algorithm for Code Movement Optimization
   D.M. Dhamdhere
   SIGPLAN Notices, Vol. 23, Num. 10, Oct. 1988

   A Solution to a Problem with Morel and Renvoise's
   Global Optimization by Suppression of Partial Redundancies
   K-H Drechsler, M.P. Stadel
   ACM TOPLAS, Vol. 10, Num. 4, Oct. 1988

   Practical Adaptation of the Global Optimization
   Algorithm of Morel and Renvoise
   D.M. Dhamdhere
   ACM TOPLAS, Vol. 13, Num. 2, Apr. 1991

   Efficiently Computing Static Single Assignment Form and the Control
   Dependence Graph
   R. Cytron, J. Ferrante, B.K. Rosen, M.N. Wegman, and F.K. Zadeck
   ACM TOPLAS, Vol. 13, Num. 4, Oct. 1991

   Lazy Code Motion
   J. Knoop, O. Ruthing, B. Steffen
   ACM SIGPLAN Notices Vol. 27, Num. 7, Jul. 1992, '92 Conference on PLDI

   What's In a Region? Or Computing Control Dependence Regions in Near-Linear
   Time for Reducible Flow Control
   Thomas Ball
   ACM Letters on Programming Languages and Systems,
   Vol. 2, Num. 1-4, Mar-Dec 1993

   An Efficient Representation for Sparse Sets
   Preston Briggs, Linda Torczon
   ACM Letters on Programming Languages and Systems,
   Vol. 2, Num. 1-4, Mar-Dec 1993

   A Variation of Knoop, Ruthing, and Steffen's Lazy Code Motion
   K-H Drechsler, M.P. Stadel
   ACM SIGPLAN Notices, Vol. 28, Num. 5, May 1993

   Partial Dead Code Elimination
   J. Knoop, O. Ruthing, B. Steffen
   ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994

   Effective Partial Redundancy Elimination
   P. Briggs, K.D. Cooper
   ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994

   The Program Structure Tree: Computing Control Regions in Linear Time
   R. Johnson, D. Pearson, K. Pingali
   ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994

   Optimal Code Motion: Theory and Practice
   J. Knoop, O. Ruthing, B. Steffen
   ACM TOPLAS, Vol. 16, Num. 4, Jul. 1994

   The power of assignment motion
   J. Knoop, O. Ruthing, B. Steffen
   ACM SIGPLAN Notices Vol. 30, Num. 6, Jun. 1995, '95 Conference on PLDI

   Global code motion / global value numbering
   C. Click
   ACM SIGPLAN Notices Vol. 30, Num. 6, Jun. 1995, '95 Conference on PLDI

   Value Driven Redundancy Elimination
   L.T. Simpson
   Rice University Ph.D. thesis, Apr. 1996

   Value Numbering
   L.T. Simpson
   Massively Scalar Compiler Project, Rice University, Sep. 1996

   High Performance Compilers for Parallel Computing
   Michael Wolfe
   Addison-Wesley, 1996

   Advanced Compiler Design and Implementation
   Steven Muchnick
   Morgan Kaufmann, 1997

   Building an Optimizing Compiler
   Robert Morgan
   Digital Press, 1998

   People wishing to speed up the code here should read:
     Elimination Algorithms for Data Flow Analysis
     B.G. Ryder, M.C. Paull
     ACM Computing Surveys, Vol. 18, Num. 3, Sep. 1986

     How to Analyze Large Programs Efficiently and Informatively
     D.M. Dhamdhere, B.K. Rosen, F.K. Zadeck
     ACM SIGPLAN Notices Vol. 27, Num. 7, Jul. 1992, '92 Conference on PLDI

   People wishing to do something different can find various possibilities
   in the above papers and elsewhere.
*/
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "toplev.h"

#include "rtl.h"
#include "tm_p.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "flags.h"
#include "real.h"
#include "insn-config.h"
#include "recog.h"
#include "basic-block.h"
#include "output.h"
#include "function.h"
#include "expr.h"
#include "except.h"
#include "ggc.h"
#include "params.h"
#include "cselib.h"

#include "obstack.h"
/* Propagate flow information through back edges and thus enable PRE's
   moving loop invariant calculations out of loops.

   Originally this tended to create worse overall code, but several
   improvements during the development of PRE seem to have made following
   back edges generally a win.

   Note much of the loop invariant code motion done here would normally
   be done by loop.c, which has more heuristics for when to move invariants
   out of loops.  At some point we might need to move some of those
   heuristics into gcse.c.  */
/* We support GCSE via Partial Redundancy Elimination.  PRE optimizations
   are a superset of those done by GCSE.

   We perform the following steps:

   1) Compute basic block information.

   2) Compute table of places where registers are set.

   3) Perform copy/constant propagation.

   4) Perform global cse.

   5) Perform another pass of copy/constant propagation.

   Two passes of copy/constant propagation are done because the first one
   enables more GCSE and the second one helps to clean up the copies that
   GCSE creates.  This is needed more for PRE than for Classic because Classic
   GCSE will try to use an existing register containing the common
   subexpression rather than create a new one.  This is harder to do for PRE
   because of the code motion (which Classic GCSE doesn't do).

   Expressions we are interested in GCSE-ing are of the form
   (set (pseudo-reg) (expression)).
   Function want_to_gcse_p says what these are.

   PRE handles moving invariant expressions out of loops (by treating them as
   partially redundant).

   Eventually it would be nice to replace cse.c/gcse.c with SSA (static single
   assignment) based GVN (global value numbering).  L. T. Simpson's paper
   (Rice University) on value numbering is a useful reference for this.
   **********************

   We used to support multiple passes but there are diminishing returns in
   doing so.  The first pass usually makes 90% of the changes that are doable.
   A second pass can make a few more changes made possible by the first pass.
   Experiments show any further passes don't make enough changes to justify
   the expense.

   A study of spec92 using an unlimited number of passes:
   [1 pass] = 1208 substitutions, [2] = 577, [3] = 202, [4] = 192, [5] = 83,
   [6] = 34, [7] = 17, [8] = 9, [9] = 4, [10] = 4, [11] = 2,
   [12] = 2, [13] = 1, [15] = 1, [16] = 2, [41] = 1

   It was found doing copy propagation between each pass enables further
   substitutions.

   PRE is quite expensive in complicated functions because the DFA can take
   a while to converge.  Hence we only perform one pass.  The parameter
   max-gcse-passes can be modified if one wants to experiment.
   **********************

   The steps for PRE are:

   1) Build the hash table of expressions we wish to GCSE (expr_hash_table).

   2) Perform the data flow analysis for PRE.

   3) Delete the redundant instructions.

   4) Insert the required copies [if any] that make the partially
      redundant instructions fully redundant.

   5) For other reaching expressions, insert an instruction to copy the value
      to a newly created pseudo that will reach the redundant instruction.

   The deletion is done first so that when we do insertions we
   know which pseudo reg to use.  A small illustration follows.
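
   As a worked illustration (ours, not part of the original commentary;
   pseudo numbers are invented), consider a diamond B1 -> {B2, B3} -> B4
   where B2 and the join block B4 both compute r103 + r104:

     before PRE                        after PRE

     B2: r105 = r103 + r104            B2: r105 = r103 + r104
     B3: (no computation)                  r107 = r105            [step 5]
     B4: r106 = r103 + r104            B3: r107 = r103 + r104     [step 4]
                                       B4: r106 = r107            [step 3]

   The occurrence in B4 is redundant along the path through B2 but not
   along the path through B3, i.e. it is only partially redundant.
   Inserting the expression in B3 makes it fully redundant, and the
   occurrence in B4 is deleted in favor of a copy from the new pseudo
   r107, which reaches B4 along both paths.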
   Various papers have argued that PRE DFA is expensive (O(n^2)) and others
   argue it is not.  The number of iterations for the algorithm to converge
   is typically 2-4 so I don't view it as that expensive (relatively speaking).

   PRE GCSE depends heavily on the second CSE pass to clean up the copies
   we create.  To make an expression reach the place where it's redundant,
   the result of the expression is copied to a new register, and the redundant
   expression is deleted by replacing it with this new register.  Classic GCSE
   doesn't have this problem as much as it computes the reaching defs of
   each register in each block and thus can try to use an existing register.

   **********************

   A fair bit of simplicity is gained by creating small functions for simple
   tasks, even when the function is only called in one place.  This may
   measurably slow things down [or may not] by creating more function call
   overhead than is necessary.  The source is laid out so that it's trivial
   to make the affected functions inline so that one can measure what speed
   up, if any, can be achieved, and maybe later when things settle things can
   be rearranged.

   Help stamp out big monolithic functions!  */
/* GCSE global vars.  */

/* -dG dump file.  */
static FILE *gcse_file;

/* Note whether or not we should run jump optimization after gcse.  We
   want to do this for two cases.

    * If we changed any jumps via cprop.

    * If we added any labels via edge splitting.  */

static int run_jump_opt_after_gcse;

/* Bitmaps are normally not included in debugging dumps.
   However it's useful to be able to print them from GDB.
   We could create special functions for this, but it's simpler to
   just allow passing stderr to the dump_foo fns.  Since stderr can
   be a macro, we store a copy here.  */
static FILE *debug_stderr;

/* An obstack for our working variables.  */
static struct obstack gcse_obstack;

struct reg_use {rtx reg_rtx; };
/* Hash table of expressions.  */

struct expr
{
  /* The expression (SET_SRC for expressions, PATTERN for assignments).  */
  rtx expr;
  /* Index in the available expression bitmaps.  */
  int bitmap_index;
  /* Next entry with the same hash.  */
  struct expr *next_same_hash;
  /* List of anticipatable occurrences in basic blocks in the function.
     An "anticipatable occurrence" is one that is the first occurrence in the
     basic block, the operands are not modified in the basic block prior
     to the occurrence and the output is not used between the start of
     the block and the occurrence.  */
  struct occr *antic_occr;
  /* List of available occurrences in basic blocks in the function.
     An "available occurrence" is one that is the last occurrence in the
     basic block and the operands are not modified by following statements in
     the basic block [including this insn].  */
  struct occr *avail_occr;
  /* Non-null if the computation is PRE redundant.
     The value is the newly created pseudo-reg to record a copy of the
     expression in all the places that reach the redundant copy.  */
  rtx reaching_reg;
};
/* Occurrence of an expression.
   There is one per basic block.  If a pattern appears more than once the
   last appearance is used [or first for anticipatable expressions].  */

struct occr
{
  /* Next occurrence of this expression.  */
  struct occr *next;
  /* The insn that computes the expression.  */
  rtx insn;
  /* Nonzero if this [anticipatable] occurrence has been deleted.  */
  char deleted_p;
  /* Nonzero if this [available] occurrence has been copied to
     reaching_reg.  */
  /* ??? This is mutually exclusive with deleted_p, so they could share
     the same byte.  */
  char copied_p;
};
/* Expression and copy propagation hash tables.
   Each hash table is an array of buckets.
   ??? It is known that if it were an array of entries, structure elements
   `next_same_hash' and `bitmap_index' wouldn't be necessary.  However, it is
   not clear whether in the final analysis a sufficient amount of memory would
   be saved as the size of the available expression bitmaps would be larger
   [one could build a mapping table without holes afterwards though].
   Someday I'll perform the computation and figure it out.  */

struct hash_table
{
  /* The table itself.
     This is an array of `expr_hash_table_size' elements.  */
  struct expr **table;

  /* Size of the hash table, in elements.  */
  unsigned int size;

  /* Number of hash table elements.  */
  unsigned int n_elems;

  /* Whether the table is the expression one or the copy propagation one.  */
  int set_p;
};
/* Expression hash table.  */
static struct hash_table expr_hash_table;

/* Copy propagation hash table.  */
static struct hash_table set_hash_table;

/* Mapping of uids to cuids.
   Only real insns get cuids.  */
static int *uid_cuid;

/* Highest UID in UID_CUID.  */
static int max_uid;

/* Get the cuid of an insn.  */
#ifdef ENABLE_CHECKING
#define INSN_CUID(INSN) (INSN_UID (INSN) > max_uid ? (abort (), 0) : uid_cuid[INSN_UID (INSN)])
#else
#define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])
#endif

/* Number of cuids.  */
static int max_cuid;

/* Mapping of cuids to insns.  */
static rtx *cuid_insn;

/* Get insn from cuid.  */
#define CUID_INSN(CUID) (cuid_insn[CUID])

/* Maximum register number in function prior to doing gcse + 1.
   Registers created during this pass have regno >= max_gcse_regno.
   This is named with "gcse" to not collide with global of same name.  */
static unsigned int max_gcse_regno;
407 static unsigned int max_gcse_regno;
409 /* Table of registers that are modified.
411 For each register, each element is a list of places where the pseudo-reg
412 is set.
414 For simplicity, GCSE is done on sets of pseudo-regs only. PRE GCSE only
415 requires knowledge of which blocks kill which regs [and thus could use
416 a bitmap instead of the lists `reg_set_table' uses].
418 `reg_set_table' and could be turned into an array of bitmaps (num-bbs x
419 num-regs) [however perhaps it may be useful to keep the data as is]. One
420 advantage of recording things this way is that `reg_set_table' is fairly
421 sparse with respect to pseudo regs but for hard regs could be fairly dense
422 [relatively speaking]. And recording sets of pseudo-regs in lists speeds
423 up functions like compute_transp since in the case of pseudo-regs we only
424 need to iterate over the number of times a pseudo-reg is set, not over the
425 number of basic blocks [clearly there is a bit of a slow down in the cases
426 where a pseudo is set more than once in a block, however it is believed
427 that the net effect is to speed things up]. This isn't done for hard-regs
428 because recording call-clobbered hard-regs in `reg_set_table' at each
429 function call can consume a fair bit of memory, and iterating over
430 hard-regs stored this way in compute_transp will be more expensive. */
typedef struct reg_set
{
  /* The next setting of this register.  */
  struct reg_set *next;
  /* The insn where it was set.  */
  rtx insn;
} reg_set;
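
/* For illustration (our invented example, not from the original
   commentary): if pseudo-reg 110 is set first by insn I7 and later by
   insn I42, then after compute_sets has scanned the function,
   reg_set_table[110] is the two-element list

       reg_set_table[110] -> { insn = I42 } -> { insn = I7 } -> NULL

   since record_one_set, below, links each new entry at the head.  */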
static reg_set **reg_set_table;

/* Size of `reg_set_table'.
   The table starts out at max_gcse_regno + slop, and is enlarged as
   necessary.  */
static int reg_set_table_size;

/* Amount to grow `reg_set_table' by when it's full.  */
#define REG_SET_TABLE_SLOP 100
/* This is a list of expressions which are MEMs and will be used by load
   or store motion.
   Load motion tracks MEMs which aren't killed by anything except
   themselves (i.e., loads and stores to a single location).
   We can then allow movement of these MEM refs with a little special
   allowance.  (all stores copy the same value to the reaching reg used
   for the loads).  This means all values used to store into memory must have
   no side effects so we can re-issue the setter value.
   Store Motion uses this structure as an expression table to track stores
   which look interesting, and might be moveable towards the exit block.  */

struct ls_expr
{
  struct expr * expr;           /* Gcse expression reference for LM.  */
  rtx pattern;                  /* Pattern of this mem.  */
  rtx pattern_regs;             /* List of registers mentioned by the mem.  */
  rtx loads;                    /* INSN list of loads seen.  */
  rtx stores;                   /* INSN list of stores seen.  */
  struct ls_expr * next;        /* Next in the list.  */
  int invalid;                  /* Invalid for some reason.  */
  int index;                    /* If it maps to a bitmap index.  */
  int hash_index;               /* Index when in a hash table.  */
  rtx reaching_reg;             /* Register to use when re-writing.  */
};
/* Array of implicit set patterns indexed by basic block index.  */
static rtx *implicit_sets;

/* Head of the list of load/store memory refs.  */
static struct ls_expr * pre_ldst_mems = NULL;

/* Bitmap containing one bit for each register in the program.
   Used when performing GCSE to track which registers have been set since
   the start of the basic block.  */
static regset reg_set_bitmap;

/* For each block, a bitmap of registers set in the block.
   This is used by expr_killed_p and compute_transp.
   It is computed during hash table computation and not by compute_sets
   as it includes registers added since the last pass (or between cprop and
   gcse) and it's currently not easy to realloc sbitmap vectors.  */
static sbitmap *reg_set_in_block;

/* Array, indexed by basic block number for a list of insns which modify
   memory within that block.  */
static rtx * modify_mem_list;
bitmap modify_mem_list_set;

/* This array parallels modify_mem_list, but is kept canonicalized.  */
static rtx * canon_modify_mem_list;
bitmap canon_modify_mem_list_set;

/* Various variables for statistics gathering.  */

/* Memory used in a pass.
   This isn't intended to be absolutely precise.  Its intent is only
   to keep an eye on memory usage.  */
static int bytes_used;

/* GCSE substitutions made.  */
static int gcse_subst_count;
/* Number of copy instructions created.  */
static int gcse_create_count;
/* Number of constants propagated.  */
static int const_prop_count;
/* Number of copies propagated.  */
static int copy_prop_count;
/* These variables are used by classic GCSE.
   Normally they'd be defined a bit later, but `rd_gen' needs to
   be declared sooner.  */

/* Each block has a bitmap of each type.
   The length of each block's bitmap is:

       max_cuid  - for reaching definitions
       n_exprs   - for available expressions

   Thus we view the bitmaps as 2 dimensional arrays, i.e.,
   rd_kill[block_num][cuid_num]
   ae_kill[block_num][expr_num]  */

/* For reaching defs */
static sbitmap *rd_kill, *rd_gen, *reaching_defs, *rd_out;

/* for available exprs */
static sbitmap *ae_kill, *ae_gen, *ae_in, *ae_out;

/* Objects of this type are passed around by the null-pointer check
   removal routines.  */
struct null_pointer_info
{
  /* The basic block being processed.  */
  basic_block current_block;
  /* The first register to be handled in this pass.  */
  unsigned int min_reg;
  /* One greater than the last register to be handled in this pass.  */
  unsigned int max_reg;
  sbitmap *nonnull_local;
  sbitmap *nonnull_killed;
};
static void compute_can_copy (void);
static void *gmalloc (unsigned int);
static void *grealloc (void *, unsigned int);
static void *gcse_alloc (unsigned long);
static void alloc_gcse_mem (rtx);
static void free_gcse_mem (void);
static void alloc_reg_set_mem (int);
static void free_reg_set_mem (void);
static int get_bitmap_width (int, int, int);
static void record_one_set (int, rtx);
static void record_set_info (rtx, rtx, void *);
static void compute_sets (rtx);
static void hash_scan_insn (rtx, struct hash_table *, int);
static void hash_scan_set (rtx, rtx, struct hash_table *);
static void hash_scan_clobber (rtx, rtx, struct hash_table *);
static void hash_scan_call (rtx, rtx, struct hash_table *);
static int want_to_gcse_p (rtx);
static bool gcse_constant_p (rtx);
static int oprs_unchanged_p (rtx, rtx, int);
static int oprs_anticipatable_p (rtx, rtx);
static int oprs_available_p (rtx, rtx);
static void insert_expr_in_table (rtx, enum machine_mode, rtx, int, int,
                                  struct hash_table *);
static void insert_set_in_table (rtx, rtx, struct hash_table *);
static unsigned int hash_expr (rtx, enum machine_mode, int *, int);
static unsigned int hash_expr_1 (rtx, enum machine_mode, int *);
static unsigned int hash_string_1 (const char *);
static unsigned int hash_set (int, int);
static int expr_equiv_p (rtx, rtx);
static void record_last_reg_set_info (rtx, int);
static void record_last_mem_set_info (rtx);
static void record_last_set_info (rtx, rtx, void *);
static void compute_hash_table (struct hash_table *);
static void alloc_hash_table (int, struct hash_table *, int);
static void free_hash_table (struct hash_table *);
static void compute_hash_table_work (struct hash_table *);
static void dump_hash_table (FILE *, const char *, struct hash_table *);
static struct expr *lookup_expr (rtx, struct hash_table *);
static struct expr *lookup_set (unsigned int, struct hash_table *);
static struct expr *next_set (unsigned int, struct expr *);
static void reset_opr_set_tables (void);
static int oprs_not_set_p (rtx, rtx);
static void mark_call (rtx);
static void mark_set (rtx, rtx);
static void mark_clobber (rtx, rtx);
static void mark_oprs_set (rtx);
static void alloc_cprop_mem (int, int);
static void free_cprop_mem (void);
static void compute_transp (rtx, int, sbitmap *, int);
static void compute_transpout (void);
static void compute_local_properties (sbitmap *, sbitmap *, sbitmap *,
                                      struct hash_table *);
static void compute_cprop_data (void);
static void find_used_regs (rtx *, void *);
static int try_replace_reg (rtx, rtx, rtx);
static struct expr *find_avail_set (int, rtx);
static int cprop_jump (basic_block, rtx, rtx, rtx, rtx);
static void mems_conflict_for_gcse_p (rtx, rtx, void *);
static int load_killed_in_block_p (basic_block, int, rtx, int);
static void canon_list_insert (rtx, rtx, void *);
static int cprop_insn (rtx, int);
static int cprop (int);
static void find_implicit_sets (void);
static int one_cprop_pass (int, int, int);
static bool constprop_register (rtx, rtx, rtx, int);
static struct expr *find_bypass_set (int, int);
static bool reg_killed_on_edge (rtx, edge);
static int bypass_block (basic_block, rtx, rtx);
static int bypass_conditional_jumps (void);
static void alloc_pre_mem (int, int);
static void free_pre_mem (void);
static void compute_pre_data (void);
static int pre_expr_reaches_here_p (basic_block, struct expr *,
                                    basic_block);
static void insert_insn_end_bb (struct expr *, basic_block, int);
static void pre_insert_copy_insn (struct expr *, rtx);
static void pre_insert_copies (void);
static int pre_delete (void);
static int pre_gcse (void);
static int one_pre_gcse_pass (int);
static void add_label_notes (rtx, rtx);
static void alloc_code_hoist_mem (int, int);
static void free_code_hoist_mem (void);
static void compute_code_hoist_vbeinout (void);
static void compute_code_hoist_data (void);
static int hoist_expr_reaches_here_p (basic_block, int, basic_block, char *);
static void hoist_code (void);
static int one_code_hoisting_pass (void);
static void alloc_rd_mem (int, int);
static void free_rd_mem (void);
static void handle_rd_kill_set (rtx, int, basic_block);
static void compute_kill_rd (void);
static void compute_rd (void);
static void alloc_avail_expr_mem (int, int);
static void free_avail_expr_mem (void);
static void compute_ae_gen (struct hash_table *);
static int expr_killed_p (rtx, basic_block);
static void compute_ae_kill (sbitmap *, sbitmap *, struct hash_table *);
static int expr_reaches_here_p (struct occr *, struct expr *, basic_block,
                                int);
static rtx computing_insn (struct expr *, rtx);
static int def_reaches_here_p (rtx, rtx);
static int can_disregard_other_sets (struct reg_set **, rtx, int);
static int handle_avail_expr (rtx, struct expr *);
static int classic_gcse (void);
static int one_classic_gcse_pass (int);
static void invalidate_nonnull_info (rtx, rtx, void *);
static int delete_null_pointer_checks_1 (unsigned int *, sbitmap *, sbitmap *,
                                         struct null_pointer_info *);
static rtx process_insert_insn (struct expr *);
static int pre_edge_insert (struct edge_list *, struct expr **);
static int expr_reaches_here_p_work (struct occr *, struct expr *,
                                     basic_block, int, char *);
static int pre_expr_reaches_here_p_work (basic_block, struct expr *,
                                         basic_block, char *);
static struct ls_expr * ldst_entry (rtx);
static void free_ldst_entry (struct ls_expr *);
static void free_ldst_mems (void);
static void print_ldst_list (FILE *);
static struct ls_expr * find_rtx_in_ldst (rtx);
static int enumerate_ldsts (void);
static inline struct ls_expr * first_ls_expr (void);
static inline struct ls_expr * next_ls_expr (struct ls_expr *);
static int simple_mem (rtx);
static void invalidate_any_buried_refs (rtx);
static void compute_ld_motion_mems (void);
static void trim_ld_motion_mems (void);
static void update_ld_motion_stores (struct expr *);
static void reg_set_info (rtx, rtx, void *);
static bool store_ops_ok (rtx, int *);
static rtx extract_mentioned_regs (rtx);
static rtx extract_mentioned_regs_helper (rtx, rtx);
static void find_moveable_store (rtx, int *, int *);
static int compute_store_table (void);
static bool load_kills_store (rtx, rtx, int);
static bool find_loads (rtx, rtx, int);
static bool store_killed_in_insn (rtx, rtx, rtx, int);
static bool store_killed_after (rtx, rtx, rtx, basic_block, int *, rtx *);
static bool store_killed_before (rtx, rtx, rtx, basic_block, int *);
static void build_store_vectors (void);
static void insert_insn_start_bb (rtx, basic_block);
static int insert_store (struct ls_expr *, edge);
static void replace_store_insn (rtx, rtx, basic_block);
static void delete_store (struct ls_expr *, basic_block);
static void free_store_memory (void);
static void store_motion (void);
static void free_insn_expr_list_list (rtx *);
static void clear_modify_mem_tables (void);
static void free_modify_mem_tables (void);
static rtx gcse_emit_move_after (rtx, rtx, rtx);
static void local_cprop_find_used_regs (rtx *, void *);
static bool do_local_cprop (rtx, rtx, int, rtx*);
static bool adjust_libcall_notes (rtx, rtx, rtx, rtx*);
static void local_cprop_pass (int);
/* Entry point for global common subexpression elimination.
   F is the first instruction in the function.  */

int
gcse_main (rtx f, FILE *file)
{
  int changed, pass;
  /* Bytes used at start of pass.  */
  int initial_bytes_used;
  /* Maximum number of bytes used by a pass.  */
  int max_pass_bytes;
  /* Point to release obstack data from for each pass.  */
  char *gcse_obstack_bottom;

  /* We do not construct an accurate cfg in functions which call
     setjmp, so just punt to be safe.  */
  if (current_function_calls_setjmp)
    return 0;

  /* Assume that we do not need to run jump optimizations after gcse.  */
  run_jump_opt_after_gcse = 0;

  /* For calling dump_foo fns from gdb.  */
  debug_stderr = stderr;
  gcse_file = file;

  /* Identify the basic block information for this function, including
     successors and predecessors.  */
  max_gcse_regno = max_reg_num ();

  if (file)
    dump_flow_info (file);

  /* Return if there's nothing to do.  */
  if (n_basic_blocks <= 1)
    return 0;

  /* Trying to perform global optimizations on flow graphs which have
     a high connectivity will take a long time and is unlikely to be
     particularly useful.

     In normal circumstances a cfg should have about twice as many edges
     as blocks.  But we do not want to punish small functions which have
     a couple switch statements.  So we require a relatively large number
     of basic blocks and the ratio of edges to blocks to be high.  */
  if (n_basic_blocks > 1000 && n_edges / n_basic_blocks >= 20)
    {
      if (warn_disabled_optimization)
        warning ("GCSE disabled: %d > 1000 basic blocks and %d >= 20 edges/basic block",
                 n_basic_blocks, n_edges / n_basic_blocks);
      return 0;
    }

  /* If allocating memory for the cprop bitmap would take up too much
     storage it's better just to disable the optimization.  */
  if ((n_basic_blocks
       * SBITMAP_SET_SIZE (max_gcse_regno)
       * sizeof (SBITMAP_ELT_TYPE)) > MAX_GCSE_MEMORY)
    {
      if (warn_disabled_optimization)
        warning ("GCSE disabled: %d basic blocks and %d registers",
                 n_basic_blocks, max_gcse_regno);

      return 0;
    }

  gcc_obstack_init (&gcse_obstack);
  bytes_used = 0;

  /* We need alias.  */
  init_alias_analysis ();
  /* Record where pseudo-registers are set.  This data is kept accurate
     during each pass.  ??? We could also record hard-reg information here
     [since it's unchanging], however it is currently done during hash table
     computation.

     It may be tempting to compute MEM set information here too, but MEM sets
     will be subject to code motion one day and thus we need to compute
     information about memory sets when we build the hash tables.  */

  alloc_reg_set_mem (max_gcse_regno);
  compute_sets (f);

  pass = 0;
  initial_bytes_used = bytes_used;
  max_pass_bytes = 0;
  gcse_obstack_bottom = gcse_alloc (1);
  changed = 1;
  while (changed && pass < MAX_GCSE_PASSES)
    {
      changed = 0;
      if (file)
        fprintf (file, "GCSE pass %d\n\n", pass + 1);

      /* Initialize bytes_used to the space for the pred/succ lists,
         and the reg_set_table data.  */
      bytes_used = initial_bytes_used;

      /* Each pass may create new registers, so recalculate each time.  */
      max_gcse_regno = max_reg_num ();

      alloc_gcse_mem (f);

      /* Don't allow constant propagation to modify jumps
         during this pass.  */
      changed = one_cprop_pass (pass + 1, 0, 0);

      if (optimize_size)
        changed |= one_classic_gcse_pass (pass + 1);
      else
        {
          changed |= one_pre_gcse_pass (pass + 1);
          /* We may have just created new basic blocks.  Release and
             recompute various things which are sized on the number of
             basic blocks.  */
          if (changed)
            {
              free_modify_mem_tables ();
              modify_mem_list = gmalloc (last_basic_block * sizeof (rtx));
              canon_modify_mem_list
                = gmalloc (last_basic_block * sizeof (rtx));
              memset (modify_mem_list, 0, last_basic_block * sizeof (rtx));
              memset (canon_modify_mem_list, 0,
                      last_basic_block * sizeof (rtx));
            }
          free_reg_set_mem ();
          alloc_reg_set_mem (max_reg_num ());
          compute_sets (f);
          run_jump_opt_after_gcse = 1;
        }

      if (max_pass_bytes < bytes_used)
        max_pass_bytes = bytes_used;

      /* Free up memory, then reallocate for code hoisting.  We can
         not re-use the existing allocated memory because the tables
         will not have info for the insns or registers created by
         partial redundancy elimination.  */
      free_gcse_mem ();

      /* It does not make sense to run code hoisting unless we are optimizing
         for code size -- it rarely makes programs faster, and can make
         them bigger if we did partial redundancy elimination (when optimizing
         for space, we use a classic gcse algorithm instead of partial
         redundancy algorithms).  */
      if (optimize_size)
        {
          max_gcse_regno = max_reg_num ();
          alloc_gcse_mem (f);
          changed |= one_code_hoisting_pass ();
          free_gcse_mem ();

          if (max_pass_bytes < bytes_used)
            max_pass_bytes = bytes_used;
        }

      if (file)
        {
          fprintf (file, "\n");
          fflush (file);
        }

      obstack_free (&gcse_obstack, gcse_obstack_bottom);
      pass++;
    }

  /* Do one last pass of copy propagation, including cprop into
     conditional jumps.  */

  max_gcse_regno = max_reg_num ();
  alloc_gcse_mem (f);
  /* This time, go ahead and allow cprop to alter jumps.  */
  one_cprop_pass (pass + 1, 1, 0);
  free_gcse_mem ();

  if (file)
    {
      fprintf (file, "GCSE of %s: %d basic blocks, ",
               current_function_name, n_basic_blocks);
      fprintf (file, "%d pass%s, %d bytes\n\n",
               pass, pass > 1 ? "es" : "", max_pass_bytes);
    }

  obstack_free (&gcse_obstack, NULL);
  free_reg_set_mem ();

  /* We are finished with alias.  */
  end_alias_analysis ();
  allocate_reg_info (max_reg_num (), FALSE, FALSE);

  if (!optimize_size && flag_gcse_sm)
    store_motion ();

  /* Record where pseudo-registers are set.  */
  return run_jump_opt_after_gcse;
}
/* Misc. utilities.  */

/* Nonzero for each mode that supports (set (reg) (reg)).
   This is trivially true for integer and floating point values.
   It may or may not be true for condition codes.  */
static char can_copy[(int) NUM_MACHINE_MODES];

/* Compute which modes support reg/reg copy operations.  */

static void
compute_can_copy (void)
{
  int i;
#ifndef AVOID_CCMODE_COPIES
  rtx reg, insn;
#endif
  memset (can_copy, 0, NUM_MACHINE_MODES);

  start_sequence ();
  for (i = 0; i < NUM_MACHINE_MODES; i++)
    if (GET_MODE_CLASS (i) == MODE_CC)
      {
#ifdef AVOID_CCMODE_COPIES
        can_copy[i] = 0;
#else
        reg = gen_rtx_REG ((enum machine_mode) i, LAST_VIRTUAL_REGISTER + 1);
        insn = emit_insn (gen_rtx_SET (VOIDmode, reg, reg));
        if (recog (PATTERN (insn), insn, NULL) >= 0)
          can_copy[i] = 1;
#endif
      }
    else
      can_copy[i] = 1;

  end_sequence ();
}

/* Returns whether the mode supports reg/reg copy operations.  */

bool
can_copy_p (enum machine_mode mode)
{
  static bool can_copy_init_p = false;

  if (! can_copy_init_p)
    {
      compute_can_copy ();
      can_copy_init_p = true;
    }

  return can_copy[mode] != 0;
}
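
/* For example (our note, not from the original sources): can_copy_p
   (SImode) is true on essentially every target, whereas can_copy_p of a
   condition-code mode is false when the target defines
   AVOID_CCMODE_COPIES or when no insn pattern matches
   (set (reg:CC) (reg:CC)).  */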
/* Cover function to xmalloc to record bytes allocated.  */

static void *
gmalloc (unsigned int size)
{
  bytes_used += size;
  return xmalloc (size);
}

/* Cover function to xrealloc.
   We don't record the additional size since we don't know it.
   It won't affect memory usage stats much anyway.  */

static void *
grealloc (void *ptr, unsigned int size)
{
  return xrealloc (ptr, size);
}

/* Cover function to obstack_alloc.  */

static void *
gcse_alloc (unsigned long size)
{
  bytes_used += size;
  return obstack_alloc (&gcse_obstack, size);
}
/* Allocate memory for the cuid mapping array,
   and reg/memory set tracking tables.

   This is called at the start of each pass.  */

static void
alloc_gcse_mem (rtx f)
{
  int i, n;
  rtx insn;

  /* Find the largest UID and create a mapping from UIDs to CUIDs.
     CUIDs are like UIDs except they increase monotonically, have no gaps,
     and only apply to real insns.  */

  max_uid = get_max_uid ();
  n = (max_uid + 1) * sizeof (int);
  uid_cuid = gmalloc (n);
  memset (uid_cuid, 0, n);
  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    {
      if (INSN_P (insn))
        uid_cuid[INSN_UID (insn)] = i++;
      else
        uid_cuid[INSN_UID (insn)] = i;
    }

  /* Create a table mapping cuids to insns.  */

  max_cuid = i;
  n = (max_cuid + 1) * sizeof (rtx);
  cuid_insn = gmalloc (n);
  memset (cuid_insn, 0, n);
  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      CUID_INSN (i++) = insn;

  /* Allocate vars to track sets of regs.  */
  reg_set_bitmap = BITMAP_XMALLOC ();

  /* Allocate vars to track sets of regs, memory per block.  */
  reg_set_in_block = sbitmap_vector_alloc (last_basic_block, max_gcse_regno);
  /* Allocate array to keep a list of insns which modify memory in each
     basic block.  */
  modify_mem_list = gmalloc (last_basic_block * sizeof (rtx));
  canon_modify_mem_list = gmalloc (last_basic_block * sizeof (rtx));
  memset (modify_mem_list, 0, last_basic_block * sizeof (rtx));
  memset (canon_modify_mem_list, 0, last_basic_block * sizeof (rtx));
  modify_mem_list_set = BITMAP_XMALLOC ();
  canon_modify_mem_list_set = BITMAP_XMALLOC ();
}
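
/* To illustrate the uid -> cuid mapping built above (our invented
   example): if the insn stream holds a real insn with UID 3, a note with
   UID 4 and a real insn with UID 7, then uid_cuid[3] == 0,
   uid_cuid[4] == 1 and uid_cuid[7] == 1; a non-insn simply shares the
   cuid of the next real insn, and cuid_insn maps 0 -> the insn with
   UID 3 and 1 -> the insn with UID 7.  */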
/* Free memory allocated by alloc_gcse_mem.  */

static void
free_gcse_mem (void)
{
  free (uid_cuid);
  free (cuid_insn);

  BITMAP_XFREE (reg_set_bitmap);

  sbitmap_vector_free (reg_set_in_block);
  free_modify_mem_tables ();
  BITMAP_XFREE (modify_mem_list_set);
  BITMAP_XFREE (canon_modify_mem_list_set);
}
/* Many of the global optimization algorithms work by solving dataflow
   equations for various expressions.  Initially, some local value is
   computed for each expression in each block.  Then, the values across the
   various blocks are combined (by following flow graph edges) to arrive at
   global values.  Conceptually, each set of equations is independent.  We
   may therefore solve all the equations in parallel, solve them one at a
   time, or pick any intermediate approach.

   When you're going to need N two-dimensional bitmaps, each X (say, the
   number of blocks) by Y (say, the number of expressions), call this
   function.  It's not important what X and Y represent; only that Y
   correspond to the things that can be done in parallel.  This function will
   return an appropriate chunking factor C; you should solve C sets of
   equations in parallel.  By going through this function, we can easily
   trade space against time; by solving fewer equations in parallel we use
   less space.  */

static int
get_bitmap_width (int n, int x, int y)
{
  /* It's not really worth figuring out *exactly* how much memory will
     be used by a particular choice.  The important thing is to get
     something approximately right.  */
  size_t max_bitmap_memory = 10 * 1024 * 1024;

  /* The number of bytes we'd use for a single column of minimum
     width.  */
  size_t column_size = n * x * sizeof (SBITMAP_ELT_TYPE);

  /* Often, it's reasonable just to solve all the equations in
     parallel.  */
  if (column_size * SBITMAP_SET_SIZE (y) <= max_bitmap_memory)
    return y;

  /* Otherwise, pick the largest width we can, without going over the
     limit.  */
  return SBITMAP_ELT_BITS * ((max_bitmap_memory + column_size - 1)
                             / column_size);
}
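
/* A worked example of the above (ours; it assumes 64-bit sbitmap
   elements, i.e. SBITMAP_ELT_BITS == 64 and
   sizeof (SBITMAP_ELT_TYPE) == 8): with N == 2 equation sets,
   X == 5000 blocks and Y == 20000 expressions, column_size is
   2 * 5000 * 8 = 80000 bytes.  Solving all 20000 equations in parallel
   would need 80000 * SBITMAP_SET_SIZE (20000) = 80000 * 313 bytes,
   about 24 MB, which exceeds the 10 MB cap, so we instead return
   64 * ((10485760 + 79999) / 80000) = 64 * 132 = 8448 equations per
   chunk.  */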
/* Compute the local properties of each recorded expression.

   Local properties are those that are defined by the block, irrespective of
   other blocks.

   An expression is transparent in a block if its operands are not modified
   in the block.

   An expression is computed (locally available) in a block if it is computed
   at least once and the expression would contain the same value if the
   computation was moved to the end of the block.

   An expression is locally anticipatable in a block if it is computed at
   least once and the expression would contain the same value if the
   computation was moved to the beginning of the block.

   We call this routine for cprop, pre and code hoisting.  They all compute
   basically the same information and thus can easily share this code.

   TRANSP, COMP, and ANTLOC are destination sbitmaps for recording local
   properties.  If NULL, then it is not necessary to compute or record that
   particular property.

   TABLE controls which hash table to look at.  If it is the set hash table,
   additionally, TRANSP is computed as ~TRANSP, since this is really cprop's
   ABSALTERED.  */

static void
compute_local_properties (sbitmap *transp, sbitmap *comp, sbitmap *antloc,
                          struct hash_table *table)
{
  unsigned int i;

  /* Initialize any bitmaps that were passed in.  */
  if (transp)
    {
      if (table->set_p)
        sbitmap_vector_zero (transp, last_basic_block);
      else
        sbitmap_vector_ones (transp, last_basic_block);
    }

  if (comp)
    sbitmap_vector_zero (comp, last_basic_block);
  if (antloc)
    sbitmap_vector_zero (antloc, last_basic_block);

  for (i = 0; i < table->size; i++)
    {
      struct expr *expr;

      for (expr = table->table[i]; expr != NULL; expr = expr->next_same_hash)
        {
          int indx = expr->bitmap_index;
          struct occr *occr;

          /* The expression is transparent in this block if it is not killed.
             We start by assuming all are transparent [none are killed], and
             then reset the bits for those that are.  */
          if (transp)
            compute_transp (expr->expr, indx, transp, table->set_p);

          /* The occurrences recorded in antic_occr are exactly those that
             we want to set to nonzero in ANTLOC.  */
          if (antloc)
            for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
              {
                SET_BIT (antloc[BLOCK_NUM (occr->insn)], indx);

                /* While we're scanning the table, this is a good place to
                   initialize this.  */
                occr->deleted_p = 0;
              }

          /* The occurrences recorded in avail_occr are exactly those that
             we want to set to nonzero in COMP.  */
          if (comp)
            for (occr = expr->avail_occr; occr != NULL; occr = occr->next)
              {
                SET_BIT (comp[BLOCK_NUM (occr->insn)], indx);

                /* While we're scanning the table, this is a good place to
                   initialize this.  */
                occr->copied_p = 0;
              }

          /* While we're scanning the table, this is a good place to
             initialize this.  */
          expr->reaching_reg = 0;
        }
    }
}
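
/* A small worked case for the local properties (ours): in a block
   containing only

       r105 = r103 + r104
       r103 = r103 + 1

   the expression (plus r103 r104) is locally anticipatable (it is the
   first occurrence and its operands are unmodified before it) but not
   locally available (r103 changes afterwards), and the block is not
   transparent for it (an operand is modified within the block).  */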
/* Register set information.

   `reg_set_table' records where each register is set or otherwise
   modified.  */

static struct obstack reg_set_obstack;

static void
alloc_reg_set_mem (int n_regs)
{
  unsigned int n;

  reg_set_table_size = n_regs + REG_SET_TABLE_SLOP;
  n = reg_set_table_size * sizeof (struct reg_set *);
  reg_set_table = gmalloc (n);
  memset (reg_set_table, 0, n);

  gcc_obstack_init (&reg_set_obstack);
}

static void
free_reg_set_mem (void)
{
  free (reg_set_table);
  obstack_free (&reg_set_obstack, NULL);
}
/* Record REGNO in the reg_set table.  */

static void
record_one_set (int regno, rtx insn)
{
  /* Allocate a new reg_set element and link it onto the list.  */
  struct reg_set *new_reg_info;

  /* If the table isn't big enough, enlarge it.  */
  if (regno >= reg_set_table_size)
    {
      int new_size = regno + REG_SET_TABLE_SLOP;

      reg_set_table = grealloc (reg_set_table,
                                new_size * sizeof (struct reg_set *));
      memset (reg_set_table + reg_set_table_size, 0,
              (new_size - reg_set_table_size) * sizeof (struct reg_set *));
      reg_set_table_size = new_size;
    }

  new_reg_info = obstack_alloc (&reg_set_obstack, sizeof (struct reg_set));
  bytes_used += sizeof (struct reg_set);
  new_reg_info->insn = insn;
  new_reg_info->next = reg_set_table[regno];
  reg_set_table[regno] = new_reg_info;
}
/* Called from compute_sets via note_stores to handle one SET or CLOBBER in
   an insn.  The DATA is really the instruction in which the SET is
   occurring.  */

static void
record_set_info (rtx dest, rtx setter ATTRIBUTE_UNUSED, void *data)
{
  rtx record_set_insn = (rtx) data;

  if (GET_CODE (dest) == REG && REGNO (dest) >= FIRST_PSEUDO_REGISTER)
    record_one_set (REGNO (dest), record_set_insn);
}

/* Scan the function and record each set of each pseudo-register.

   This is called once, at the start of the gcse pass.  See the comments for
   `reg_set_table' for further documentation.  */

static void
compute_sets (rtx f)
{
  rtx insn;

  for (insn = f; insn != 0; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      note_stores (PATTERN (insn), record_set_info, insn);
}
/* Hash table support.  */

struct reg_avail_info
{
  basic_block last_bb;
  int first_set;
  int last_set;
};

static struct reg_avail_info *reg_avail_info;
static basic_block current_bb;
/* See whether X, the source of a set, is something we want to consider for
   GCSE.  */

static GTY(()) rtx test_insn;
static int
want_to_gcse_p (rtx x)
{
  int num_clobbers = 0;
  int icode;

  switch (GET_CODE (x))
    {
    case REG:
    case SUBREG:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CALL:
    case CONSTANT_P_RTX:
      return 0;

    default:
      break;
    }

  /* If this is a valid operand, we are OK.  If it's VOIDmode, we aren't.  */
  if (general_operand (x, GET_MODE (x)))
    return 1;
  else if (GET_MODE (x) == VOIDmode)
    return 0;

  /* Otherwise, check if we can make a valid insn from it.  First initialize
     our test insn if we haven't already.  */
  if (test_insn == 0)
    {
      test_insn
        = make_insn_raw (gen_rtx_SET (VOIDmode,
                                      gen_rtx_REG (word_mode,
                                                   FIRST_PSEUDO_REGISTER * 2),
                                      const0_rtx));
      NEXT_INSN (test_insn) = PREV_INSN (test_insn) = 0;
    }

  /* Now make an insn like the one we would make when GCSE'ing and see if
     valid.  */
  PUT_MODE (SET_DEST (PATTERN (test_insn)), GET_MODE (x));
  SET_SRC (PATTERN (test_insn)) = x;
  return ((icode = recog (PATTERN (test_insn), test_insn, &num_clobbers)) >= 0
          && (num_clobbers == 0 || ! added_clobbers_hard_reg_p (icode)));
}
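
/* For instance (our note): want_to_gcse_p rejects a bare (reg 103) or
   (const_int 4) -- propagating those is cprop's job -- while an
   arithmetic source such as (plus:SI (reg 103) (const_int 4)) is
   typically accepted because the synthesized test SET above is
   recognized by the target.  */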
/* Return nonzero if the operands of expression X are unchanged from the
   start of INSN's basic block up to but not including INSN (if AVAIL_P == 0),
   or from INSN to the end of INSN's basic block (if AVAIL_P != 0).  */

static int
oprs_unchanged_p (rtx x, rtx insn, int avail_p)
{
  int i, j;
  enum rtx_code code;
  const char *fmt;

  if (x == 0)
    return 1;

  code = GET_CODE (x);
  switch (code)
    {
    case REG:
      {
        struct reg_avail_info *info = &reg_avail_info[REGNO (x)];

        if (info->last_bb != current_bb)
          return 1;
        if (avail_p)
          return info->last_set < INSN_CUID (insn);
        else
          return info->first_set >= INSN_CUID (insn);
      }

    case MEM:
      if (load_killed_in_block_p (current_bb, INSN_CUID (insn),
                                  x, avail_p))
        return 0;
      else
        return oprs_unchanged_p (XEXP (x, 0), insn, avail_p);

    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
    case PRE_MODIFY:
    case POST_MODIFY:
      return 0;

    case PC:
    case CC0: /*FIXME*/
    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return 1;

    default:
      break;
    }

  for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          /* If we are about to do the last recursive call needed at this
             level, change it into iteration.  This function is called enough
             to be worth it.  */
          if (i == 0)
            return oprs_unchanged_p (XEXP (x, i), insn, avail_p);

          else if (! oprs_unchanged_p (XEXP (x, i), insn, avail_p))
            return 0;
        }
      else if (fmt[i] == 'E')
        for (j = 0; j < XVECLEN (x, i); j++)
          if (! oprs_unchanged_p (XVECEXP (x, i, j), insn, avail_p))
            return 0;
    }

  return 1;
}
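
/* Concretely (our invented example): if reg 103 is set only by the insn
   with cuid 5 in the current block, then for an insn with cuid 8 in that
   block, oprs_unchanged_p on (reg 103) returns 0 with AVAIL_P == 0 (the
   set at cuid 5 precedes cuid 8, so the operand is not unchanged from
   the block start) and 1 with AVAIL_P != 0 (nothing sets it after
   cuid 8, so it is unchanged to the block end).  */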
/* Used for communication between mems_conflict_for_gcse_p and
   load_killed_in_block_p.  Nonzero if mems_conflict_for_gcse_p finds a
   conflict between two memory references.  */
static int gcse_mems_conflict_p;

/* Used for communication between mems_conflict_for_gcse_p and
   load_killed_in_block_p.  A memory reference for a load instruction;
   mems_conflict_for_gcse_p will see if a memory store conflicts with
   this memory load.  */
static rtx gcse_mem_operand;

/* DEST is the output of an instruction.  If it is a memory reference, and
   possibly conflicts with the load found in gcse_mem_operand, then set
   gcse_mems_conflict_p to a nonzero value.  */

static void
mems_conflict_for_gcse_p (rtx dest, rtx setter ATTRIBUTE_UNUSED,
                          void *data ATTRIBUTE_UNUSED)
{
  while (GET_CODE (dest) == SUBREG
         || GET_CODE (dest) == ZERO_EXTRACT
         || GET_CODE (dest) == SIGN_EXTRACT
         || GET_CODE (dest) == STRICT_LOW_PART)
    dest = XEXP (dest, 0);

  /* If DEST is not a MEM, then it will not conflict with the load.  Note
     that function calls are assumed to clobber memory, but are handled
     elsewhere.  */
  if (GET_CODE (dest) != MEM)
    return;

  /* If we are setting a MEM in our list of specially recognized MEMs,
     don't mark as killed this time.  */

  if (expr_equiv_p (dest, gcse_mem_operand) && pre_ldst_mems != NULL)
    {
      if (!find_rtx_in_ldst (dest))
        gcse_mems_conflict_p = 1;
      return;
    }

  if (true_dependence (dest, GET_MODE (dest), gcse_mem_operand,
                       rtx_addr_varies_p))
    gcse_mems_conflict_p = 1;
}
/* Return nonzero if the expression in X (a memory reference) is killed
   in block BB before or after the insn with the CUID in UID_LIMIT.
   AVAIL_P is nonzero for kills after UID_LIMIT, and zero for kills
   before UID_LIMIT.

   To check the entire block, set UID_LIMIT to max_uid + 1 and
   AVAIL_P to 0.  */

static int
load_killed_in_block_p (basic_block bb, int uid_limit, rtx x, int avail_p)
{
  rtx list_entry = modify_mem_list[bb->index];

  while (list_entry)
    {
      rtx setter;
      /* Ignore entries in the list that do not apply.  */
      if ((avail_p
           && INSN_CUID (XEXP (list_entry, 0)) < uid_limit)
          || (! avail_p
              && INSN_CUID (XEXP (list_entry, 0)) > uid_limit))
        {
          list_entry = XEXP (list_entry, 1);
          continue;
        }

      setter = XEXP (list_entry, 0);

      /* If SETTER is a call everything is clobbered.  Note that calls
         to pure functions are never put on the list, so we need not
         worry about them.  */
      if (GET_CODE (setter) == CALL_INSN)
        return 1;

      /* SETTER must be an INSN of some kind that sets memory.  Call
         note_stores to examine each hunk of memory that is modified.

         The note_stores interface is pretty limited, so we have to
         communicate via global variables.  Yuk.  */
      gcse_mem_operand = x;
      gcse_mems_conflict_p = 0;
      note_stores (PATTERN (setter), mems_conflict_for_gcse_p, NULL);
      if (gcse_mems_conflict_p)
        return 1;
      list_entry = XEXP (list_entry, 1);
    }
  return 0;
}
/* Return nonzero if the operands of expression X are unchanged from
   the start of INSN's basic block up to but not including INSN.  */

static int
oprs_anticipatable_p (rtx x, rtx insn)
{
  return oprs_unchanged_p (x, insn, 0);
}

/* Return nonzero if the operands of expression X are unchanged from
   INSN to the end of INSN's basic block.  */

static int
oprs_available_p (rtx x, rtx insn)
{
  return oprs_unchanged_p (x, insn, 1);
}
/* Hash expression X.

   MODE is only used if X is a CONST_INT.  DO_NOT_RECORD_P is a boolean
   indicating if a volatile operand is found or if the expression contains
   something we don't want to insert in the table.

   ??? One might want to merge this with canon_hash.  Later.  */

static unsigned int
hash_expr (rtx x, enum machine_mode mode, int *do_not_record_p,
           int hash_table_size)
{
  unsigned int hash;

  *do_not_record_p = 0;

  hash = hash_expr_1 (x, mode, do_not_record_p);
  return hash % hash_table_size;
}

/* Hash a string.  Just add its bytes up.  */

static inline unsigned
hash_string_1 (const char *ps)
{
  unsigned hash = 0;
  const unsigned char *p = (const unsigned char *) ps;

  if (p)
    while (*p)
      hash += *p++;

  return hash;
}
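
/* For instance (our note): on an ASCII host, hash_string_1 ("ab") is
   'a' + 'b' = 97 + 98 = 195, and hash_string_1 (NULL) is 0.  */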
1556 /* Subroutine of hash_expr to do the actual work. */
1558 static unsigned int
1559 hash_expr_1 (rtx x, enum machine_mode mode, int *do_not_record_p)
1561 int i, j;
1562 unsigned hash = 0;
1563 enum rtx_code code;
1564 const char *fmt;
1566 /* Used to turn recursion into iteration. We can't rely on GCC's
1567 tail-recursion elimination since we need to keep accumulating values
1568 in HASH. */
1570 if (x == 0)
1571 return hash;
1573 repeat:
1574 code = GET_CODE (x);
1575 switch (code)
1577 case REG:
1578 hash += ((unsigned int) REG << 7) + REGNO (x);
1579 return hash;
1581 case CONST_INT:
1582 hash += (((unsigned int) CONST_INT << 7) + (unsigned int) mode
1583 + (unsigned int) INTVAL (x));
1584 return hash;
1586 case CONST_DOUBLE:
1587 /* This is like the general case, except that it only counts
1588 the integers representing the constant. */
1589 hash += (unsigned int) code + (unsigned int) GET_MODE (x);
1590 if (GET_MODE (x) != VOIDmode)
1591 for (i = 2; i < GET_RTX_LENGTH (CONST_DOUBLE); i++)
1592 hash += (unsigned int) XWINT (x, i);
1593 else
1594 hash += ((unsigned int) CONST_DOUBLE_LOW (x)
1595 + (unsigned int) CONST_DOUBLE_HIGH (x));
1596 return hash;
1598 case CONST_VECTOR:
1600 int units;
1601 rtx elt;
1603 units = CONST_VECTOR_NUNITS (x);
1605 for (i = 0; i < units; ++i)
1607 elt = CONST_VECTOR_ELT (x, i);
1608 hash += hash_expr_1 (elt, GET_MODE (elt), do_not_record_p);
1611 return hash;
1614 /* Assume there is only one rtx object for any given label. */
1615 case LABEL_REF:
1616 /* We don't hash on the address of the CODE_LABEL to avoid bootstrap
1617 differences and differences between each stage's debugging dumps. */
1618 hash += (((unsigned int) LABEL_REF << 7)
1619 + CODE_LABEL_NUMBER (XEXP (x, 0)));
1620 return hash;
1622 case SYMBOL_REF:
1624 /* Don't hash on the symbol's address to avoid bootstrap differences.
1625 Different hash values may cause expressions to be recorded in
1626 different orders and thus different registers to be used in the
1627 final assembler. This also avoids differences in the dump files
1628 between various stages. */
1629 unsigned int h = 0;
1630 const unsigned char *p = (const unsigned char *) XSTR (x, 0);
1632 while (*p)
1633 h += (h << 7) + *p++; /* ??? revisit */
1635 hash += ((unsigned int) SYMBOL_REF << 7) + h;
1636 return hash;
1639 case MEM:
1640 if (MEM_VOLATILE_P (x))
1642 *do_not_record_p = 1;
1643 return 0;
1646 hash += (unsigned int) MEM;
1647 /* We used alias set for hashing, but this is not good, since the alias
1648 set may differ in -fprofile-arcs and -fbranch-probabilities compilation
1649 causing the profiles to fail to match. */
1650 x = XEXP (x, 0);
1651 goto repeat;
1653 case PRE_DEC:
1654 case PRE_INC:
1655 case POST_DEC:
1656 case POST_INC:
1657 case PC:
1658 case CC0:
1659 case CALL:
1660 case UNSPEC_VOLATILE:
1661 *do_not_record_p = 1;
1662 return 0;
1664 case ASM_OPERANDS:
1665 if (MEM_VOLATILE_P (x))
1667 *do_not_record_p = 1;
1668 return 0;
1670 else
1672 /* We don't want to take the filename and line into account. */
1673 hash += (unsigned) code + (unsigned) GET_MODE (x)
1674 + hash_string_1 (ASM_OPERANDS_TEMPLATE (x))
1675 + hash_string_1 (ASM_OPERANDS_OUTPUT_CONSTRAINT (x))
1676 + (unsigned) ASM_OPERANDS_OUTPUT_IDX (x);
1678 if (ASM_OPERANDS_INPUT_LENGTH (x))
1680 for (i = 1; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
1682 hash += (hash_expr_1 (ASM_OPERANDS_INPUT (x, i),
1683 GET_MODE (ASM_OPERANDS_INPUT (x, i)),
1684 do_not_record_p)
1685 + hash_string_1 (ASM_OPERANDS_INPUT_CONSTRAINT
1686 (x, i)));
1689 hash += hash_string_1 (ASM_OPERANDS_INPUT_CONSTRAINT (x, 0));
1690 x = ASM_OPERANDS_INPUT (x, 0);
1691 mode = GET_MODE (x);
1692 goto repeat;
1694 return hash;
1697 default:
1698 break;
1701 hash += (unsigned) code + (unsigned) GET_MODE (x);
1702 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
1704 if (fmt[i] == 'e')
1706 /* If we are about to do the last recursive call
1707 needed at this level, change it into iteration.
1708 This function is called enough to be worth it. */
1709 if (i == 0)
1711 x = XEXP (x, i);
1712 goto repeat;
1715 hash += hash_expr_1 (XEXP (x, i), 0, do_not_record_p);
1716 if (*do_not_record_p)
1717 return 0;
1720 else if (fmt[i] == 'E')
1721 for (j = 0; j < XVECLEN (x, i); j++)
1723 hash += hash_expr_1 (XVECEXP (x, i, j), 0, do_not_record_p);
1724 if (*do_not_record_p)
1725 return 0;
1728 else if (fmt[i] == 's')
1729 hash += hash_string_1 (XSTR (x, i));
1730 else if (fmt[i] == 'i')
1731 hash += (unsigned int) XINT (x, i);
1732 else
1733 abort ();
1736 return hash;
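/* Illustrative sketch (hypothetical, compiled out with #if 0): the additive
   shift-and-fold step used for SYMBOL_REF names above, written as a
   standalone string hash.  The function name is invented for illustration;
   hash_expr itself performs the final reduction modulo the table size.  */
#if 0
static unsigned int
example_fold_string (const char *s, unsigned int table_size)
{
  unsigned int h = 0;
  const unsigned char *p = (const unsigned char *) s;

  while (*p)
    h += (h << 7) + *p++;	/* same mixing step as the SYMBOL_REF case */

  return h % table_size;	/* hash_expr reduces modulo the table size */
}
#endif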
1739 /* Hash a set of register REGNO.
1741 Sets are hashed on the register that is set. This simplifies the PRE copy
1742 propagation code.
1744 ??? May need to make things more elaborate. Later, as necessary. */
1746 static unsigned int
1747 hash_set (int regno, int hash_table_size)
1749 unsigned int hash;
1751 hash = regno;
1752 return hash % hash_table_size;
1755 /* Return nonzero if X is equivalent to Y.
1756 ??? Borrowed from cse.c. Might want to remerge with cse.c. Later. */
1758 static int
1759 expr_equiv_p (rtx x, rtx y)
1761 int i, j;
1762 enum rtx_code code;
1763 const char *fmt;
1765 if (x == y)
1766 return 1;
1768 if (x == 0 || y == 0)
1769 return 0;
1771 code = GET_CODE (x);
1772 if (code != GET_CODE (y))
1773 return 0;
1775 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
1776 if (GET_MODE (x) != GET_MODE (y))
1777 return 0;
1779 switch (code)
1781 case PC:
1782 case CC0:
1783 case CONST_INT:
1784 return 0;
1786 case LABEL_REF:
1787 return XEXP (x, 0) == XEXP (y, 0);
1789 case SYMBOL_REF:
1790 return XSTR (x, 0) == XSTR (y, 0);
1792 case REG:
1793 return REGNO (x) == REGNO (y);
1795 case MEM:
1796 /* Can't merge two expressions in different alias sets, since we could
1797 wrongly decide that the expression is transparent in a block when it
1798 isn't, due to it being set with a different alias set. */
1799 if (MEM_ALIAS_SET (x) != MEM_ALIAS_SET (y))
1800 return 0;
1801 break;
1803 /* For commutative operations, check both orders. */
1804 case PLUS:
1805 case MULT:
1806 case AND:
1807 case IOR:
1808 case XOR:
1809 case NE:
1810 case EQ:
1811 return ((expr_equiv_p (XEXP (x, 0), XEXP (y, 0))
1812 && expr_equiv_p (XEXP (x, 1), XEXP (y, 1)))
1813 || (expr_equiv_p (XEXP (x, 0), XEXP (y, 1))
1814 && expr_equiv_p (XEXP (x, 1), XEXP (y, 0))));
1816 case ASM_OPERANDS:
1817 /* We don't use the generic code below because we want to
1818 disregard filename and line numbers. */
1820 /* A volatile asm isn't equivalent to any other. */
1821 if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
1822 return 0;
1824 if (GET_MODE (x) != GET_MODE (y)
1825 || strcmp (ASM_OPERANDS_TEMPLATE (x), ASM_OPERANDS_TEMPLATE (y))
1826 || strcmp (ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
1827 ASM_OPERANDS_OUTPUT_CONSTRAINT (y))
1828 || ASM_OPERANDS_OUTPUT_IDX (x) != ASM_OPERANDS_OUTPUT_IDX (y)
1829 || ASM_OPERANDS_INPUT_LENGTH (x) != ASM_OPERANDS_INPUT_LENGTH (y))
1830 return 0;
1832 if (ASM_OPERANDS_INPUT_LENGTH (x))
1834 for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
1835 if (! expr_equiv_p (ASM_OPERANDS_INPUT (x, i),
1836 ASM_OPERANDS_INPUT (y, i))
1837 || strcmp (ASM_OPERANDS_INPUT_CONSTRAINT (x, i),
1838 ASM_OPERANDS_INPUT_CONSTRAINT (y, i)))
1839 return 0;
1842 return 1;
1844 default:
1845 break;
1848 /* Compare the elements. If any pair of corresponding elements
1849 fails to match, return 0 for the whole thing. */
1851 fmt = GET_RTX_FORMAT (code);
1852 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1854 switch (fmt[i])
1856 case 'e':
1857 if (! expr_equiv_p (XEXP (x, i), XEXP (y, i)))
1858 return 0;
1859 break;
1861 case 'E':
1862 if (XVECLEN (x, i) != XVECLEN (y, i))
1863 return 0;
1864 for (j = 0; j < XVECLEN (x, i); j++)
1865 if (! expr_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j)))
1866 return 0;
1867 break;
1869 case 's':
1870 if (strcmp (XSTR (x, i), XSTR (y, i)))
1871 return 0;
1872 break;
1874 case 'i':
1875 if (XINT (x, i) != XINT (y, i))
1876 return 0;
1877 break;
1879 case 'w':
1880 if (XWINT (x, i) != XWINT (y, i))
1881 return 0;
1882 break;
1884 case '0':
1885 break;
1887 default:
1888 abort ();
1892 return 1;
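/* Examples (informal): the commutative case above makes
     (plus:SI (reg 100) (reg 101)) and (plus:SI (reg 101) (reg 100))
   equivalent, while (mult:SI (reg 100) (reg 101)) and
   (mult:HI (reg 100) (reg 101)) are not, because the mode check
   rejects them first.  */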
1895 /* Insert expression X in INSN in the hash TABLE.
1896 If it is already present, record it as the last occurrence in INSN's
1897 basic block.
1899 MODE is the mode of the value X is being stored into.
1900 It is only used if X is a CONST_INT.
1902 ANTIC_P is nonzero if X is an anticipatable expression.
1903 AVAIL_P is nonzero if X is an available expression. */
1905 static void
1906 insert_expr_in_table (rtx x, enum machine_mode mode, rtx insn, int antic_p,
1907 int avail_p, struct hash_table *table)
1909 int found, do_not_record_p;
1910 unsigned int hash;
1911 struct expr *cur_expr, *last_expr = NULL;
1912 struct occr *antic_occr, *avail_occr;
1913 struct occr *last_occr = NULL;
1915 hash = hash_expr (x, mode, &do_not_record_p, table->size);
1917 /* Do not insert expression in table if it contains volatile operands,
1918 or if hash_expr determines the expression is something we don't want
1919 to or can't handle. */
1920 if (do_not_record_p)
1921 return;
1923 cur_expr = table->table[hash];
1924 found = 0;
1926 while (cur_expr && 0 == (found = expr_equiv_p (cur_expr->expr, x)))
1928 /* If the expression isn't found, save a pointer to the end of
1929 the list. */
1930 last_expr = cur_expr;
1931 cur_expr = cur_expr->next_same_hash;
1934 if (! found)
1936 cur_expr = gcse_alloc (sizeof (struct expr));
1937 bytes_used += sizeof (struct expr);
1938 if (table->table[hash] == NULL)
1939 /* This is the first pattern that hashed to this index. */
1940 table->table[hash] = cur_expr;
1941 else
1942 /* Add EXPR to end of this hash chain. */
1943 last_expr->next_same_hash = cur_expr;
1945 /* Set the fields of the expr element. */
1946 cur_expr->expr = x;
1947 cur_expr->bitmap_index = table->n_elems++;
1948 cur_expr->next_same_hash = NULL;
1949 cur_expr->antic_occr = NULL;
1950 cur_expr->avail_occr = NULL;
1953 /* Now record the occurrence(s). */
1954 if (antic_p)
1956 antic_occr = cur_expr->antic_occr;
1958 /* Search for another occurrence in the same basic block. */
1959 while (antic_occr && BLOCK_NUM (antic_occr->insn) != BLOCK_NUM (insn))
1961 /* If an occurrence isn't found, save a pointer to the end of
1962 the list. */
1963 last_occr = antic_occr;
1964 antic_occr = antic_occr->next;
1967 if (antic_occr)
1968 /* Found another instance of the expression in the same basic block.
1969 Prefer the currently recorded one. We want the first one in the
1970 block and the block is scanned from start to end. */
1971 ; /* nothing to do */
1972 else
1974 /* First occurrence of this expression in this basic block. */
1975 antic_occr = gcse_alloc (sizeof (struct occr));
1976 bytes_used += sizeof (struct occr);
1977 /* First occurrence of this expression in any block? */
1978 if (cur_expr->antic_occr == NULL)
1979 cur_expr->antic_occr = antic_occr;
1980 else
1981 last_occr->next = antic_occr;
1983 antic_occr->insn = insn;
1984 antic_occr->next = NULL;
1988 if (avail_p)
1990 avail_occr = cur_expr->avail_occr;
1992 /* Search for another occurrence in the same basic block. */
1993 while (avail_occr && BLOCK_NUM (avail_occr->insn) != BLOCK_NUM (insn))
1995 /* If an occurrence isn't found, save a pointer to the end of
1996 the list. */
1997 last_occr = avail_occr;
1998 avail_occr = avail_occr->next;
2001 if (avail_occr)
2002 /* Found another instance of the expression in the same basic block.
2003 Prefer this occurrence to the currently recorded one. We want
2004 the last one in the block and the block is scanned from start
2005 to end. */
2006 avail_occr->insn = insn;
2007 else
2009 /* First occurrence of this expression in this basic block. */
2010 avail_occr = gcse_alloc (sizeof (struct occr));
2011 bytes_used += sizeof (struct occr);
2013 /* First occurrence of this expression in any block? */
2014 if (cur_expr->avail_occr == NULL)
2015 cur_expr->avail_occr = avail_occr;
2016 else
2017 last_occr->next = avail_occr;
2019 avail_occr->insn = insn;
2020 avail_occr->next = NULL;
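/* Example (informal, hypothetical CUIDs): if a block computes the same
   expression at insns with CUIDs 10 and 20, the anticipatable occurrence
   kept is insn 10 (first in the block) and the available occurrence kept
   is insn 20 (last in the block), matching the two preferences coded
   above.  */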
2025 /* Insert pattern X in INSN in the hash table.
2026 X is a SET of a reg to either another reg or a constant.
2027 If it is already present, record it as the last occurrence in INSN's
2028 basic block. */
2030 static void
2031 insert_set_in_table (rtx x, rtx insn, struct hash_table *table)
2033 int found;
2034 unsigned int hash;
2035 struct expr *cur_expr, *last_expr = NULL;
2036 struct occr *cur_occr, *last_occr = NULL;
2038 if (GET_CODE (x) != SET
2039 || GET_CODE (SET_DEST (x)) != REG)
2040 abort ();
2042 hash = hash_set (REGNO (SET_DEST (x)), table->size);
2044 cur_expr = table->table[hash];
2045 found = 0;
2047 while (cur_expr && 0 == (found = expr_equiv_p (cur_expr->expr, x)))
2049 /* If the expression isn't found, save a pointer to the end of
2050 the list. */
2051 last_expr = cur_expr;
2052 cur_expr = cur_expr->next_same_hash;
2055 if (! found)
2057 cur_expr = gcse_alloc (sizeof (struct expr));
2058 bytes_used += sizeof (struct expr);
2059 if (table->table[hash] == NULL)
2060 /* This is the first pattern that hashed to this index. */
2061 table->table[hash] = cur_expr;
2062 else
2063 /* Add EXPR to end of this hash chain. */
2064 last_expr->next_same_hash = cur_expr;
2066 /* Set the fields of the expr element.
2067 We must copy X because it can be modified when copy propagation is
2068 performed on its operands. */
2069 cur_expr->expr = copy_rtx (x);
2070 cur_expr->bitmap_index = table->n_elems++;
2071 cur_expr->next_same_hash = NULL;
2072 cur_expr->antic_occr = NULL;
2073 cur_expr->avail_occr = NULL;
2076 /* Now record the occurrence. */
2077 cur_occr = cur_expr->avail_occr;
2079 /* Search for another occurrence in the same basic block. */
2080 while (cur_occr && BLOCK_NUM (cur_occr->insn) != BLOCK_NUM (insn))
2082 /* If an occurrence isn't found, save a pointer to the end of
2083 the list. */
2084 last_occr = cur_occr;
2085 cur_occr = cur_occr->next;
2088 if (cur_occr)
2089 /* Found another instance of the expression in the same basic block.
2090 Prefer this occurrence to the currently recorded one. We want the
2091 last one in the block and the block is scanned from start to end. */
2092 cur_occr->insn = insn;
2093 else
2095 /* First occurrence of this expression in this basic block. */
2096 cur_occr = gcse_alloc (sizeof (struct occr));
2097 bytes_used += sizeof (struct occr);
2099 /* First occurrence of this expression in any block? */
2100 if (cur_expr->avail_occr == NULL)
2101 cur_expr->avail_occr = cur_occr;
2102 else
2103 last_occr->next = cur_occr;
2105 cur_occr->insn = insn;
2106 cur_occr->next = NULL;
2110 /* Determine whether the rtx X should be treated as a constant for
2111 the purposes of GCSE's constant propagation. */
2113 static bool
2114 gcse_constant_p (rtx x)
2116 /* Consider a COMPARE of two integers constant. */
2117 if (GET_CODE (x) == COMPARE
2118 && GET_CODE (XEXP (x, 0)) == CONST_INT
2119 && GET_CODE (XEXP (x, 1)) == CONST_INT)
2120 return true;
2123 /* Consider a COMPARE of the same registers to be a constant
2124 if they are not floating point registers. */
2125 if (GET_CODE (x) == COMPARE
2126 && GET_CODE (XEXP (x, 0)) == REG
2127 && GET_CODE (XEXP (x, 1)) == REG
2128 && REGNO (XEXP (x, 0)) == REGNO (XEXP (x, 1))
2129 && ! FLOAT_MODE_P (GET_MODE (XEXP (x, 0)))
2130 && ! FLOAT_MODE_P (GET_MODE (XEXP (x, 1))))
2131 return true;
2133 if (GET_CODE (x) == CONSTANT_P_RTX)
2134 return false;
2136 return CONSTANT_P (x);
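/* Examples (informal): (compare (const_int 2) (const_int 3)) and
   (compare (reg:SI 100) (reg:SI 100)) count as constants here, but
   (compare (reg:SF 102) (reg:SF 102)) does not, since a floating point
   register compared with itself need not be constant (NaN != NaN).  */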
2139 /* Scan pattern PAT of INSN and add an entry to the hash TABLE (either
2140 the set table or the expression table, per TABLE->set_p). */
2142 static void
2143 hash_scan_set (rtx pat, rtx insn, struct hash_table *table)
2145 rtx src = SET_SRC (pat);
2146 rtx dest = SET_DEST (pat);
2147 rtx note;
2149 if (GET_CODE (src) == CALL)
2150 hash_scan_call (src, insn, table);
2152 else if (GET_CODE (dest) == REG)
2154 unsigned int regno = REGNO (dest);
2155 rtx tmp;
2157 /* If this is a single set and we are doing constant propagation,
2158 see if a REG_NOTE shows this equivalent to a constant. */
2159 if (table->set_p && (note = find_reg_equal_equiv_note (insn)) != 0
2160 && gcse_constant_p (XEXP (note, 0)))
2161 src = XEXP (note, 0), pat = gen_rtx_SET (VOIDmode, dest, src);
2163 /* Only record sets of pseudo-regs in the hash table. */
2164 if (! table->set_p
2165 && regno >= FIRST_PSEUDO_REGISTER
2166 /* Don't GCSE something if we can't do a reg/reg copy. */
2167 && can_copy_p (GET_MODE (dest))
2168 /* GCSE commonly inserts instructions after the insn. We can't
2169 do that easily for EH_REGION notes so disable GCSE on these
2170 for now. */
2171 && !find_reg_note (insn, REG_EH_REGION, NULL_RTX)
2172 /* Is SET_SRC something we want to gcse? */
2173 && want_to_gcse_p (src)
2174 /* Don't CSE a nop. */
2175 && ! set_noop_p (pat)
2176 /* Don't GCSE if it has an attached REG_EQUIV note.
2177 At this point only function parameters should have
2178 REG_EQUIV notes, and if the argument slot is used somewhere
2179 explicitly, it means the address of the parameter has been
2180 taken, so we should not extend the lifetime of the pseudo. */
2181 && ((note = find_reg_note (insn, REG_EQUIV, NULL_RTX)) == 0
2182 || GET_CODE (XEXP (note, 0)) != MEM))
2184 /* An expression is not anticipatable if its operands are
2185 modified before this insn or if this is not the only SET in
2186 this insn. */
2187 int antic_p = oprs_anticipatable_p (src, insn) && single_set (insn);
2188 /* An expression is not available if its operands are
2189 subsequently modified, including this insn. It's also not
2190 available if this is a branch, because we can't insert
2191 a set after the branch. */
2192 int avail_p = (oprs_available_p (src, insn)
2193 && ! JUMP_P (insn));
2195 insert_expr_in_table (src, GET_MODE (dest), insn, antic_p, avail_p, table);
2198 /* Record sets for constant/copy propagation. */
2199 else if (table->set_p
2200 && regno >= FIRST_PSEUDO_REGISTER
2201 && ((GET_CODE (src) == REG
2202 && REGNO (src) >= FIRST_PSEUDO_REGISTER
2203 && can_copy_p (GET_MODE (dest))
2204 && REGNO (src) != regno)
2205 || gcse_constant_p (src))
2206 /* A copy is not available if its src or dest is subsequently
2207 modified. Here we want to search from INSN+1 on, but
2208 oprs_available_p searches from INSN on. */
2209 && (insn == BLOCK_END (BLOCK_NUM (insn))
2210 || ((tmp = next_nonnote_insn (insn)) != NULL_RTX
2211 && oprs_available_p (pat, tmp))))
2212 insert_set_in_table (pat, insn, table);
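/* Example (informal): given (set (reg:SI 105) (plus:SI (reg:SI 100)
   (const_int 4))), the expression table records the PLUS keyed by its
   structure, while for the set table only simple assignments such as
   (set (reg 105) (reg 100)) or (set (reg 105) (const_int 4)) qualify.  */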
2216 static void
2217 hash_scan_clobber (rtx x ATTRIBUTE_UNUSED, rtx insn ATTRIBUTE_UNUSED,
2218 struct hash_table *table ATTRIBUTE_UNUSED)
2220 /* Currently nothing to do. */
2223 static void
2224 hash_scan_call (rtx x ATTRIBUTE_UNUSED, rtx insn ATTRIBUTE_UNUSED,
2225 struct hash_table *table ATTRIBUTE_UNUSED)
2227 /* Currently nothing to do. */
2230 /* Process INSN and add hash table entries as appropriate.
2232 Only available expressions that set a single pseudo-reg are recorded.
2234 Single sets in a PARALLEL could be handled, but it's an extra complication
2235 that isn't dealt with right now. The trick is handling the CLOBBERs that
2236 are also in the PARALLEL. Later.
2238 If SET_P is nonzero, this is for the assignment hash table,
2239 otherwise it is for the expression hash table.
2240 If IN_LIBCALL_BLOCK is nonzero, we are in a libcall block, and should
2241 not record any expressions. */
2243 static void
2244 hash_scan_insn (rtx insn, struct hash_table *table, int in_libcall_block)
2246 rtx pat = PATTERN (insn);
2247 int i;
2249 if (in_libcall_block)
2250 return;
2252 /* Pick out the sets of INSN and for other forms of instructions record
2253 what's been modified. */
2255 if (GET_CODE (pat) == SET)
2256 hash_scan_set (pat, insn, table);
2257 else if (GET_CODE (pat) == PARALLEL)
2258 for (i = 0; i < XVECLEN (pat, 0); i++)
2260 rtx x = XVECEXP (pat, 0, i);
2262 if (GET_CODE (x) == SET)
2263 hash_scan_set (x, insn, table);
2264 else if (GET_CODE (x) == CLOBBER)
2265 hash_scan_clobber (x, insn, table);
2266 else if (GET_CODE (x) == CALL)
2267 hash_scan_call (x, insn, table);
2270 else if (GET_CODE (pat) == CLOBBER)
2271 hash_scan_clobber (pat, insn, table);
2272 else if (GET_CODE (pat) == CALL)
2273 hash_scan_call (pat, insn, table);
2276 static void
2277 dump_hash_table (FILE *file, const char *name, struct hash_table *table)
2279 int i;
2280 /* Flattened out table, so it's printed in proper order. */
2281 struct expr **flat_table;
2282 unsigned int *hash_val;
2283 struct expr *expr;
2285 flat_table = xcalloc (table->n_elems, sizeof (struct expr *));
2286 hash_val = xmalloc (table->n_elems * sizeof (unsigned int));
2288 for (i = 0; i < (int) table->size; i++)
2289 for (expr = table->table[i]; expr != NULL; expr = expr->next_same_hash)
2291 flat_table[expr->bitmap_index] = expr;
2292 hash_val[expr->bitmap_index] = i;
2295 fprintf (file, "%s hash table (%d buckets, %d entries)\n",
2296 name, table->size, table->n_elems);
2298 for (i = 0; i < (int) table->n_elems; i++)
2299 if (flat_table[i] != 0)
2301 expr = flat_table[i];
2302 fprintf (file, "Index %d (hash value %d)\n ",
2303 expr->bitmap_index, hash_val[i]);
2304 print_rtl (file, expr->expr);
2305 fprintf (file, "\n");
2308 fprintf (file, "\n");
2310 free (flat_table);
2311 free (hash_val);
2314 /* Record register first/last/block set information for REGNO in INSN.
2316 first_set records the first place in the block where the register
2317 is set and is used to compute "anticipatability".
2319 last_set records the last place in the block where the register
2320 is set and is used to compute "availability".
2322 last_bb records the block for which first_set and last_set are
2323 valid, as a quick test to invalidate them.
2325 reg_set_in_block records whether the register is set in the block
2326 and is used to compute "transparency". */
2328 static void
2329 record_last_reg_set_info (rtx insn, int regno)
2331 struct reg_avail_info *info = &reg_avail_info[regno];
2332 int cuid = INSN_CUID (insn);
2334 info->last_set = cuid;
2335 if (info->last_bb != current_bb)
2337 info->last_bb = current_bb;
2338 info->first_set = cuid;
2339 SET_BIT (reg_set_in_block[current_bb->index], regno);
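/* Sketch (hypothetical CUIDs): if reg 100 is set at CUIDs 5 and 9 within
   a block, first_set ends up 5 (anticipatability: only uses before CUID 5
   see the incoming value) and last_set ends up 9 (availability: only uses
   after CUID 9 see the block's final value).  */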
2344 /* Record all of the canonicalized MEMs of record_last_mem_set_info's insn.
2345 Note we store a pair of elements in the list, so they have to be
2346 taken off pairwise. */
2348 static void
2349 canon_list_insert (rtx dest ATTRIBUTE_UNUSED, rtx unused1 ATTRIBUTE_UNUSED,
2350 void * v_insn)
2352 rtx dest_addr, insn;
2353 int bb;
2355 while (GET_CODE (dest) == SUBREG
2356 || GET_CODE (dest) == ZERO_EXTRACT
2357 || GET_CODE (dest) == SIGN_EXTRACT
2358 || GET_CODE (dest) == STRICT_LOW_PART)
2359 dest = XEXP (dest, 0);
2361 /* If DEST is not a MEM, then it will not conflict with a load. Note
2362 that function calls are assumed to clobber memory, but are handled
2363 elsewhere. */
2365 if (GET_CODE (dest) != MEM)
2366 return;
2368 dest_addr = get_addr (XEXP (dest, 0));
2369 dest_addr = canon_rtx (dest_addr);
2370 insn = (rtx) v_insn;
2371 bb = BLOCK_NUM (insn);
2373 canon_modify_mem_list[bb] =
2374 alloc_EXPR_LIST (VOIDmode, dest_addr, canon_modify_mem_list[bb]);
2375 canon_modify_mem_list[bb] =
2376 alloc_EXPR_LIST (VOIDmode, dest, canon_modify_mem_list[bb]);
2377 bitmap_set_bit (canon_modify_mem_list_set, bb);
2380 /* Record memory modification information for INSN. We do not actually care
2381 about the memory location(s) that are set, or even how they are set (consider
2382 a CALL_INSN). We merely need to record which insns modify memory. */
2384 static void
2385 record_last_mem_set_info (rtx insn)
2387 int bb = BLOCK_NUM (insn);
2389 /* load_killed_in_block_p will handle the case of calls clobbering
2390 everything. */
2391 modify_mem_list[bb] = alloc_INSN_LIST (insn, modify_mem_list[bb]);
2392 bitmap_set_bit (modify_mem_list_set, bb);
2394 if (GET_CODE (insn) == CALL_INSN)
2396 /* Note that traversals of this loop (other than for freeing)
2397 will break after encountering a CALL_INSN. So, there's no
2398 need to insert a pair of items, as canon_list_insert does. */
2399 canon_modify_mem_list[bb] =
2400 alloc_INSN_LIST (insn, canon_modify_mem_list[bb]);
2401 bitmap_set_bit (canon_modify_mem_list_set, bb);
2403 else
2404 note_stores (PATTERN (insn), canon_list_insert, (void*) insn);
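/* Layout sketch (informal): for a non-call insn storing to MEM D with
   canonicalized address A, canon_list_insert grows the list as
     (expr_list D (expr_list A ...))
   destination first, then its address, which is why consumers of
   canon_modify_mem_list must pop two nodes at a time.  */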
2407 /* Called from compute_hash_table via note_stores to handle one
2408 SET or CLOBBER in an insn. DATA is really the instruction in which
2409 the SET is taking place. */
2411 static void
2412 record_last_set_info (rtx dest, rtx setter ATTRIBUTE_UNUSED, void *data)
2414 rtx last_set_insn = (rtx) data;
2416 if (GET_CODE (dest) == SUBREG)
2417 dest = SUBREG_REG (dest);
2419 if (GET_CODE (dest) == REG)
2420 record_last_reg_set_info (last_set_insn, REGNO (dest));
2421 else if (GET_CODE (dest) == MEM
2422 /* Ignore pushes, they clobber nothing. */
2423 && ! push_operand (dest, GET_MODE (dest)))
2424 record_last_mem_set_info (last_set_insn);
2427 /* Top level function to create an expression or assignment hash table.
2429 Expression entries are placed in the hash table if
2430 - they are of the form (set (pseudo-reg) src),
2431 - src is something we want to perform GCSE on,
2432 - none of the operands are subsequently modified in the block
2434 Assignment entries are placed in the hash table if
2435 - they are of the form (set (pseudo-reg) src),
2436 - src is something we want to perform const/copy propagation on,
2437 - none of the operands or target are subsequently modified in the block
2439 Currently src must be a pseudo-reg or a const_int.
2441 TABLE is the table computed. */
2443 static void
2444 compute_hash_table_work (struct hash_table *table)
2446 unsigned int i;
2448 /* While we compute the hash table we also compute a bit array of which
2449 registers are set in which blocks.
2450 ??? This isn't needed during const/copy propagation, but it's cheap to
2451 compute. Later. */
2452 sbitmap_vector_zero (reg_set_in_block, last_basic_block);
2454 /* Re-cache any INSN_LIST nodes we have allocated. */
2455 clear_modify_mem_tables ();
2456 /* Some working arrays used to track first and last set in each block. */
2457 reg_avail_info = gmalloc (max_gcse_regno * sizeof (struct reg_avail_info));
2459 for (i = 0; i < max_gcse_regno; ++i)
2460 reg_avail_info[i].last_bb = NULL;
2462 FOR_EACH_BB (current_bb)
2464 rtx insn;
2465 unsigned int regno;
2466 int in_libcall_block;
2468 /* First pass over the instructions records information used to
2469 determine when registers and memory are first and last set.
2470 ??? hard-reg reg_set_in_block computation
2471 could be moved to compute_sets since they currently don't change. */
2473 for (insn = current_bb->head;
2474 insn && insn != NEXT_INSN (current_bb->end);
2475 insn = NEXT_INSN (insn))
2477 if (! INSN_P (insn))
2478 continue;
2480 if (GET_CODE (insn) == CALL_INSN)
2482 bool clobbers_all = false;
2483 #ifdef NON_SAVING_SETJMP
2484 if (NON_SAVING_SETJMP
2485 && find_reg_note (insn, REG_SETJMP, NULL_RTX))
2486 clobbers_all = true;
2487 #endif
2489 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2490 if (clobbers_all
2491 || TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
2492 record_last_reg_set_info (insn, regno);
2494 mark_call (insn);
2497 note_stores (PATTERN (insn), record_last_set_info, insn);
2500 /* Insert implicit sets in the hash table. */
2501 if (table->set_p
2502 && implicit_sets[current_bb->index] != NULL_RTX)
2503 hash_scan_set (implicit_sets[current_bb->index],
2504 current_bb->head, table);
2506 /* The next pass builds the hash table. */
2508 for (insn = current_bb->head, in_libcall_block = 0;
2509 insn && insn != NEXT_INSN (current_bb->end);
2510 insn = NEXT_INSN (insn))
2511 if (INSN_P (insn))
2513 if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
2514 in_libcall_block = 1;
2515 else if (table->set_p && find_reg_note (insn, REG_RETVAL, NULL_RTX))
2516 in_libcall_block = 0;
2517 hash_scan_insn (insn, table, in_libcall_block);
2518 if (!table->set_p && find_reg_note (insn, REG_RETVAL, NULL_RTX))
2519 in_libcall_block = 0;
2523 free (reg_avail_info);
2524 reg_avail_info = NULL;
2527 /* Allocate space for the set/expr hash TABLE.
2528 N_INSNS is the number of instructions in the function.
2529 It is used to determine the number of buckets to use.
2530 SET_P determines whether set or expression table will
2531 be created. */
2533 static void
2534 alloc_hash_table (int n_insns, struct hash_table *table, int set_p)
2536 int n;
2538 table->size = n_insns / 4;
2539 if (table->size < 11)
2540 table->size = 11;
2542 /* Attempt to maintain efficient use of hash table.
2543 Making it an odd number is simplest for now.
2544 ??? Later take some measurements. */
2545 table->size |= 1;
2546 n = table->size * sizeof (struct expr *);
2547 table->table = gmalloc (n);
2548 table->set_p = set_p;
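/* Sizing sketch (hypothetical, compiled out): the bucket count chosen
   above, as a standalone function.  For n_insns = 100 this gives 25
   buckets (already odd and >= 11); n_insns = 20 gives 5, raised to the
   minimum of 11; n_insns = 48 gives 12, forced odd to 13.  */
#if 0
static int
example_bucket_count (int n_insns)
{
  int size = n_insns / 4;

  if (size < 11)
    size = 11;			/* enforce the minimum used above */
  return size | 1;		/* force an odd bucket count */
}
#endif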
2551 /* Free things allocated by alloc_hash_table. */
2553 static void
2554 free_hash_table (struct hash_table *table)
2556 free (table->table);
2559 /* Compute the hash TABLE for doing copy/const propagation or
2560 expression hash table. */
2562 static void
2563 compute_hash_table (struct hash_table *table)
2565 /* Initialize count of number of entries in hash table. */
2566 table->n_elems = 0;
2567 memset (table->table, 0, table->size * sizeof (struct expr *));
2569 compute_hash_table_work (table);
2572 /* Expression tracking support. */
2574 /* Lookup pattern PAT in the expression TABLE.
2575 The result is a pointer to the table entry, or NULL if not found. */
2577 static struct expr *
2578 lookup_expr (rtx pat, struct hash_table *table)
2580 int do_not_record_p;
2581 unsigned int hash = hash_expr (pat, GET_MODE (pat), &do_not_record_p,
2582 table->size);
2583 struct expr *expr;
2585 if (do_not_record_p)
2586 return NULL;
2588 expr = table->table[hash];
2590 while (expr && ! expr_equiv_p (expr->expr, pat))
2591 expr = expr->next_same_hash;
2593 return expr;
2596 /* Lookup REGNO in the set TABLE. The result is a pointer to the
2597 table entry, or NULL if not found. */
2599 static struct expr *
2600 lookup_set (unsigned int regno, struct hash_table *table)
2602 unsigned int hash = hash_set (regno, table->size);
2603 struct expr *expr;
2605 expr = table->table[hash];
2607 while (expr && REGNO (SET_DEST (expr->expr)) != regno)
2608 expr = expr->next_same_hash;
2610 return expr;
2613 /* Return the next entry for REGNO in list EXPR. */
2615 static struct expr *
2616 next_set (unsigned int regno, struct expr *expr)
2618 do
2619 expr = expr->next_same_hash;
2620 while (expr && REGNO (SET_DEST (expr->expr)) != regno);
2622 return expr;
2625 /* Like free_INSN_LIST_list or free_EXPR_LIST_list, except that the node
2626 types may be mixed. */
2628 static void
2629 free_insn_expr_list_list (rtx *listp)
2631 rtx list, next;
2633 for (list = *listp; list ; list = next)
2635 next = XEXP (list, 1);
2636 if (GET_CODE (list) == EXPR_LIST)
2637 free_EXPR_LIST_node (list);
2638 else
2639 free_INSN_LIST_node (list);
2642 *listp = NULL;
2645 /* Clear canon_modify_mem_list and modify_mem_list tables. */
2646 static void
2647 clear_modify_mem_tables (void)
2649 int i;
2651 EXECUTE_IF_SET_IN_BITMAP
2652 (modify_mem_list_set, 0, i, free_INSN_LIST_list (modify_mem_list + i));
2653 bitmap_clear (modify_mem_list_set);
2655 EXECUTE_IF_SET_IN_BITMAP
2656 (canon_modify_mem_list_set, 0, i,
2657 free_insn_expr_list_list (canon_modify_mem_list + i));
2658 bitmap_clear (canon_modify_mem_list_set);
2661 /* Release memory used by modify_mem_list_set and canon_modify_mem_list_set. */
2663 static void
2664 free_modify_mem_tables (void)
2666 clear_modify_mem_tables ();
2667 free (modify_mem_list);
2668 free (canon_modify_mem_list);
2669 modify_mem_list = 0;
2670 canon_modify_mem_list = 0;
2673 /* Reset tables used to keep track of what's still available [since the
2674 start of the block]. */
2676 static void
2677 reset_opr_set_tables (void)
2679 /* Maintain a bitmap of which regs have been set since beginning of
2680 the block. */
2681 CLEAR_REG_SET (reg_set_bitmap);
2683 /* Also keep a record of the last instruction to modify memory.
2684 For now this is very trivial, we only record whether any memory
2685 location has been modified. */
2686 clear_modify_mem_tables ();
2689 /* Return nonzero if the operands of X are not set before INSN in
2690 INSN's basic block. */
2692 static int
2693 oprs_not_set_p (rtx x, rtx insn)
2695 int i, j;
2696 enum rtx_code code;
2697 const char *fmt;
2699 if (x == 0)
2700 return 1;
2702 code = GET_CODE (x);
2703 switch (code)
2705 case PC:
2706 case CC0:
2707 case CONST:
2708 case CONST_INT:
2709 case CONST_DOUBLE:
2710 case CONST_VECTOR:
2711 case SYMBOL_REF:
2712 case LABEL_REF:
2713 case ADDR_VEC:
2714 case ADDR_DIFF_VEC:
2715 return 1;
2717 case MEM:
2718 if (load_killed_in_block_p (BLOCK_FOR_INSN (insn),
2719 INSN_CUID (insn), x, 0))
2720 return 0;
2721 else
2722 return oprs_not_set_p (XEXP (x, 0), insn);
2724 case REG:
2725 return ! REGNO_REG_SET_P (reg_set_bitmap, REGNO (x));
2727 default:
2728 break;
2731 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
2733 if (fmt[i] == 'e')
2735 /* If we are about to do the last recursive call
2736 needed at this level, change it into iteration.
2737 This function is called enough to be worth it. */
2738 if (i == 0)
2739 return oprs_not_set_p (XEXP (x, i), insn);
2741 if (! oprs_not_set_p (XEXP (x, i), insn))
2742 return 0;
2744 else if (fmt[i] == 'E')
2745 for (j = 0; j < XVECLEN (x, i); j++)
2746 if (! oprs_not_set_p (XVECEXP (x, i, j), insn))
2747 return 0;
2750 return 1;
2753 /* Mark things set by a CALL. */
2755 static void
2756 mark_call (rtx insn)
2758 if (! CONST_OR_PURE_CALL_P (insn))
2759 record_last_mem_set_info (insn);
2762 /* Mark things set by a SET. */
2764 static void
2765 mark_set (rtx pat, rtx insn)
2767 rtx dest = SET_DEST (pat);
2769 while (GET_CODE (dest) == SUBREG
2770 || GET_CODE (dest) == ZERO_EXTRACT
2771 || GET_CODE (dest) == SIGN_EXTRACT
2772 || GET_CODE (dest) == STRICT_LOW_PART)
2773 dest = XEXP (dest, 0);
2775 if (GET_CODE (dest) == REG)
2776 SET_REGNO_REG_SET (reg_set_bitmap, REGNO (dest));
2777 else if (GET_CODE (dest) == MEM)
2778 record_last_mem_set_info (insn);
2780 if (GET_CODE (SET_SRC (pat)) == CALL)
2781 mark_call (insn);
2784 /* Record things set by a CLOBBER. */
2786 static void
2787 mark_clobber (rtx pat, rtx insn)
2789 rtx clob = XEXP (pat, 0);
2791 while (GET_CODE (clob) == SUBREG || GET_CODE (clob) == STRICT_LOW_PART)
2792 clob = XEXP (clob, 0);
2794 if (GET_CODE (clob) == REG)
2795 SET_REGNO_REG_SET (reg_set_bitmap, REGNO (clob));
2796 else
2797 record_last_mem_set_info (insn);
2800 /* Record things set by INSN.
2801 This data is used by oprs_not_set_p. */
2803 static void
2804 mark_oprs_set (rtx insn)
2806 rtx pat = PATTERN (insn);
2807 int i;
2809 if (GET_CODE (pat) == SET)
2810 mark_set (pat, insn);
2811 else if (GET_CODE (pat) == PARALLEL)
2812 for (i = 0; i < XVECLEN (pat, 0); i++)
2814 rtx x = XVECEXP (pat, 0, i);
2816 if (GET_CODE (x) == SET)
2817 mark_set (x, insn);
2818 else if (GET_CODE (x) == CLOBBER)
2819 mark_clobber (x, insn);
2820 else if (GET_CODE (x) == CALL)
2821 mark_call (insn);
2824 else if (GET_CODE (pat) == CLOBBER)
2825 mark_clobber (pat, insn);
2826 else if (GET_CODE (pat) == CALL)
2827 mark_call (insn);
2831 /* Classic GCSE reaching definition support. */
2833 /* Allocate reaching def variables. */
2835 static void
2836 alloc_rd_mem (int n_blocks, int n_insns)
2838 rd_kill = sbitmap_vector_alloc (n_blocks, n_insns);
2839 sbitmap_vector_zero (rd_kill, n_blocks);
2841 rd_gen = sbitmap_vector_alloc (n_blocks, n_insns);
2842 sbitmap_vector_zero (rd_gen, n_blocks);
2844 reaching_defs = sbitmap_vector_alloc (n_blocks, n_insns);
2845 sbitmap_vector_zero (reaching_defs, n_blocks);
2847 rd_out = sbitmap_vector_alloc (n_blocks, n_insns);
2848 sbitmap_vector_zero (rd_out, n_blocks);
2851 /* Free reaching def variables. */
2853 static void
2854 free_rd_mem (void)
2856 sbitmap_vector_free (rd_kill);
2857 sbitmap_vector_free (rd_gen);
2858 sbitmap_vector_free (reaching_defs);
2859 sbitmap_vector_free (rd_out);
2862 /* Add INSN to the kills of BB. REGNO, set in BB, is killed by INSN. */
2864 static void
2865 handle_rd_kill_set (rtx insn, int regno, basic_block bb)
2867 struct reg_set *this_reg;
2869 for (this_reg = reg_set_table[regno]; this_reg; this_reg = this_reg->next)
2870 if (BLOCK_NUM (this_reg->insn) != BLOCK_NUM (insn))
2871 SET_BIT (rd_kill[bb->index], INSN_CUID (this_reg->insn));
2874 /* Compute the set of kills for reaching definitions. */
2876 static void
2877 compute_kill_rd (void)
2879 int cuid;
2880 unsigned int regno;
2881 int i;
2882 basic_block bb;
2884 /* For each block
2885 For each set bit in `gen' of the block (i.e each insn which
2886 generates a definition in the block)
2887 Call the reg set by the insn corresponding to that bit regx
2888 Look at the linked list starting at reg_set_table[regx]
2889 For each setting of regx in the linked list, which is not in
2890 this block
2891 Set the bit in `kill' corresponding to that insn. */
2892 FOR_EACH_BB (bb)
2893 for (cuid = 0; cuid < max_cuid; cuid++)
2894 if (TEST_BIT (rd_gen[bb->index], cuid))
2896 rtx insn = CUID_INSN (cuid);
2897 rtx pat = PATTERN (insn);
2899 if (GET_CODE (insn) == CALL_INSN)
2901 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2902 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
2903 handle_rd_kill_set (insn, regno, bb);
2906 if (GET_CODE (pat) == PARALLEL)
2908 for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
2910 enum rtx_code code = GET_CODE (XVECEXP (pat, 0, i));
2912 if ((code == SET || code == CLOBBER)
2913 && GET_CODE (XEXP (XVECEXP (pat, 0, i), 0)) == REG)
2914 handle_rd_kill_set (insn,
2915 REGNO (XEXP (XVECEXP (pat, 0, i), 0)),
2916 bb);
2919 else if (GET_CODE (pat) == SET && GET_CODE (SET_DEST (pat)) == REG)
2920 /* Each setting of this register outside of this block
2921 must be marked in the set of kills in this block. */
2922 handle_rd_kill_set (insn, REGNO (SET_DEST (pat)), bb);
2926 /* Compute the reaching definitions as in
2927 Compilers Principles, Techniques, and Tools. Aho, Sethi, Ullman,
2928 Chapter 10. It is the same algorithm as used for computing available
2929 expressions but applied to the gens and kills of reaching definitions. */
2931 static void
2932 compute_rd (void)
2934 int changed, passes;
2935 basic_block bb;
2937 FOR_EACH_BB (bb)
2938 sbitmap_copy (rd_out[bb->index] /*dst*/, rd_gen[bb->index] /*src*/);
2940 passes = 0;
2941 changed = 1;
2942 while (changed)
2944 changed = 0;
2945 FOR_EACH_BB (bb)
2947 sbitmap_union_of_preds (reaching_defs[bb->index], rd_out, bb->index);
2948 changed |= sbitmap_union_of_diff_cg (rd_out[bb->index], rd_gen[bb->index],
2949 reaching_defs[bb->index], rd_kill[bb->index]);
2951 passes++;
2954 if (gcse_file)
2955 fprintf (gcse_file, "reaching def computation: %d passes\n", passes);
2958 /* Classic GCSE available expression support. */
2960 /* Allocate memory for available expression computation. */
2962 static void
2963 alloc_avail_expr_mem (int n_blocks, int n_exprs)
2965 ae_kill = sbitmap_vector_alloc (n_blocks, n_exprs);
2966 sbitmap_vector_zero (ae_kill, n_blocks);
2968 ae_gen = sbitmap_vector_alloc (n_blocks, n_exprs);
2969 sbitmap_vector_zero (ae_gen, n_blocks);
2971 ae_in = sbitmap_vector_alloc (n_blocks, n_exprs);
2972 sbitmap_vector_zero (ae_in, n_blocks);
2974 ae_out = sbitmap_vector_alloc (n_blocks, n_exprs);
2975 sbitmap_vector_zero (ae_out, n_blocks);
2978 static void
2979 free_avail_expr_mem (void)
2981 sbitmap_vector_free (ae_kill);
2982 sbitmap_vector_free (ae_gen);
2983 sbitmap_vector_free (ae_in);
2984 sbitmap_vector_free (ae_out);
2987 /* Compute the set of available expressions generated in each basic block. */
2989 static void
2990 compute_ae_gen (struct hash_table *expr_hash_table)
2992 unsigned int i;
2993 struct expr *expr;
2994 struct occr *occr;
2996 /* For each recorded occurrence of each expression, set ae_gen[bb][expr].
2997 This is all we have to do because an expression is not recorded if it
2998 is not available, and the only expressions we want to work with are the
2999 ones that are recorded. */
3000 for (i = 0; i < expr_hash_table->size; i++)
3001 for (expr = expr_hash_table->table[i]; expr != 0; expr = expr->next_same_hash)
3002 for (occr = expr->avail_occr; occr != 0; occr = occr->next)
3003 SET_BIT (ae_gen[BLOCK_NUM (occr->insn)], expr->bitmap_index);
3006 /* Return nonzero if expression X is killed in BB. */
3008 static int
3009 expr_killed_p (rtx x, basic_block bb)
3011 int i, j;
3012 enum rtx_code code;
3013 const char *fmt;
3015 if (x == 0)
3016 return 1;
3018 code = GET_CODE (x);
3019 switch (code)
3021 case REG:
3022 return TEST_BIT (reg_set_in_block[bb->index], REGNO (x));
3024 case MEM:
3025 if (load_killed_in_block_p (bb, get_max_uid () + 1, x, 0))
3026 return 1;
3027 else
3028 return expr_killed_p (XEXP (x, 0), bb);
3030 case PC:
3031 case CC0: /*FIXME*/
3032 case CONST:
3033 case CONST_INT:
3034 case CONST_DOUBLE:
3035 case CONST_VECTOR:
3036 case SYMBOL_REF:
3037 case LABEL_REF:
3038 case ADDR_VEC:
3039 case ADDR_DIFF_VEC:
3040 return 0;
3042 default:
3043 break;
3046 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
3048 if (fmt[i] == 'e')
3050 /* If we are about to do the last recursive call
3051 needed at this level, change it into iteration.
3052 This function is called enough to be worth it. */
3053 if (i == 0)
3054 return expr_killed_p (XEXP (x, i), bb);
3055 else if (expr_killed_p (XEXP (x, i), bb))
3056 return 1;
3058 else if (fmt[i] == 'E')
3059 for (j = 0; j < XVECLEN (x, i); j++)
3060 if (expr_killed_p (XVECEXP (x, i, j), bb))
3061 return 1;
3064 return 0;
3067 /* Compute the set of available expressions killed in each basic block. */
3069 static void
3070 compute_ae_kill (sbitmap *ae_gen, sbitmap *ae_kill,
3071 struct hash_table *expr_hash_table)
3073 basic_block bb;
3074 unsigned int i;
3075 struct expr *expr;
3077 FOR_EACH_BB (bb)
3078 for (i = 0; i < expr_hash_table->size; i++)
3079 for (expr = expr_hash_table->table[i]; expr; expr = expr->next_same_hash)
3081 /* Skip EXPR if generated in this block. */
3082 if (TEST_BIT (ae_gen[bb->index], expr->bitmap_index))
3083 continue;
3085 if (expr_killed_p (expr->expr, bb))
3086 SET_BIT (ae_kill[bb->index], expr->bitmap_index);
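/* These gen/kill sets feed compute_available, which solves (an informal
   sketch of the standard equations):
     ae_in[B]  = INTERSECTION over predecessors P of ae_out[P]
     ae_out[B] = ae_gen[B] | (ae_in[B] & ~ae_kill[B])  */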
3090 /* Actually perform the Classic GCSE optimizations. */
3092 /* Return nonzero if occurrence OCCR of expression EXPR reaches block BB.
3094 CHECK_SELF_LOOP is nonzero if we should consider a block reaching itself
3095 as a positive reach. We want to do this when there are two computations
3096 of the expression in the block.
3098 VISITED is a pointer to a working buffer for tracking which BB's have
3099 been visited. It is NULL for the top-level call.
3101 We treat reaching expressions that go through blocks containing the same
3102 reaching expression as "not reaching". E.g. if EXPR is generated in blocks
3103 2 and 3, INSN is in block 4, and 2->3->4, we treat the expression in block
3104 2 as not reaching. The intent is to improve the probability of finding
3105 only one reaching expression and to reduce register lifetimes by picking
3106 the closest such expression. */
3108 static int
3109 expr_reaches_here_p_work (struct occr *occr, struct expr *expr,
3110 basic_block bb, int check_self_loop, char *visited)
3112 edge pred;
3114 for (pred = bb->pred; pred != NULL; pred = pred->pred_next)
3116 basic_block pred_bb = pred->src;
3118 if (visited[pred_bb->index])
3119 /* This predecessor has already been visited. Nothing to do. */
3120 ;
3121 else if (pred_bb == bb)
3123 /* BB loops on itself. */
3124 if (check_self_loop
3125 && TEST_BIT (ae_gen[pred_bb->index], expr->bitmap_index)
3126 && BLOCK_NUM (occr->insn) == pred_bb->index)
3127 return 1;
3129 visited[pred_bb->index] = 1;
3132 /* Ignore this predecessor if it kills the expression. */
3133 else if (TEST_BIT (ae_kill[pred_bb->index], expr->bitmap_index))
3134 visited[pred_bb->index] = 1;
3136 /* Does this predecessor generate this expression? */
3137 else if (TEST_BIT (ae_gen[pred_bb->index], expr->bitmap_index))
3139 /* Is this the occurrence we're looking for?
3140 Note that there's only one generating occurrence per block
3141 so we just need to check the block number. */
3142 if (BLOCK_NUM (occr->insn) == pred_bb->index)
3143 return 1;
3145 visited[pred_bb->index] = 1;
3148 /* Neither gen nor kill. */
3149 else
3151 visited[pred_bb->index] = 1;
3152 if (expr_reaches_here_p_work (occr, expr, pred_bb, check_self_loop,
3153 visited))
3155 return 1;
3159 /* All paths have been checked. */
3160 return 0;
3163 /* This wrapper for expr_reaches_here_p_work() is to ensure that any
3164 memory allocated for that function is returned. */
3166 static int
3167 expr_reaches_here_p (struct occr *occr, struct expr *expr, basic_block bb,
3168 int check_self_loop)
3170 int rval;
3171 char *visited = xcalloc (last_basic_block, 1);
3173 rval = expr_reaches_here_p_work (occr, expr, bb, check_self_loop, visited);
3175 free (visited);
3176 return rval;
3179 /* Return the instruction that computes EXPR that reaches INSN's basic block.
3180 If there is more than one such instruction, return NULL.
3182 Called only by handle_avail_expr. */
3184 static rtx
3185 computing_insn (struct expr *expr, rtx insn)
3187 basic_block bb = BLOCK_FOR_INSN (insn);
3189 if (expr->avail_occr->next == NULL)
3191 if (BLOCK_FOR_INSN (expr->avail_occr->insn) == bb)
3192 /* The available expression is actually itself
3193 (i.e. a loop in the flow graph) so do nothing. */
3194 return NULL;
3196 /* (FIXME) Case where we found a pattern that was created by
3197 a substitution that took place. */
3198 return expr->avail_occr->insn;
3200 else
3202 /* Pattern is computed more than once.
3203 Search backwards from this insn to see how many of these
3204 computations actually reach this insn. */
3205 struct occr *occr;
3206 rtx insn_computes_expr = NULL;
3207 int can_reach = 0;
3209 for (occr = expr->avail_occr; occr != NULL; occr = occr->next)
3211 if (BLOCK_FOR_INSN (occr->insn) == bb)
3213 /* The expression is generated in this block.
3214 The only time we care about this is when the expression
3215 is generated later in the block [and thus there's a loop].
3216 We let the normal cse pass handle the other cases. */
3217 if (INSN_CUID (insn) < INSN_CUID (occr->insn)
3218 && expr_reaches_here_p (occr, expr, bb, 1))
3220 can_reach++;
3221 if (can_reach > 1)
3222 return NULL;
3224 insn_computes_expr = occr->insn;
3227 else if (expr_reaches_here_p (occr, expr, bb, 0))
3229 can_reach++;
3230 if (can_reach > 1)
3231 return NULL;
3233 insn_computes_expr = occr->insn;
3237 if (insn_computes_expr == NULL)
3238 abort ();
3240 return insn_computes_expr;
3244 /* Return nonzero if the definition in DEF_INSN can reach INSN.
3245 Only called by can_disregard_other_sets. */
3247 static int
3248 def_reaches_here_p (rtx insn, rtx def_insn)
3250 rtx reg;
3252 if (TEST_BIT (reaching_defs[BLOCK_NUM (insn)], INSN_CUID (def_insn)))
3253 return 1;
3255 if (BLOCK_NUM (insn) == BLOCK_NUM (def_insn))
3257 if (INSN_CUID (def_insn) < INSN_CUID (insn))
3259 if (GET_CODE (PATTERN (def_insn)) == PARALLEL)
3260 return 1;
3261 else if (GET_CODE (PATTERN (def_insn)) == CLOBBER)
3262 reg = XEXP (PATTERN (def_insn), 0);
3263 else if (GET_CODE (PATTERN (def_insn)) == SET)
3264 reg = SET_DEST (PATTERN (def_insn));
3265 else
3266 abort ();
3268 return ! reg_set_between_p (reg, NEXT_INSN (def_insn), insn);
3270 else
3271 return 0;
3274 return 0;
3277 /* Return nonzero if *ADDR_THIS_REG can only have one value at INSN. The
3278 value returned is the number of definitions that reach INSN. Returning a
3279 value of zero means that [maybe] more than one definition reaches INSN and
3280 the caller can't perform whatever optimization it is trying to do; i.e. it is
3281 always safe to return zero. */
3283 static int
3284 can_disregard_other_sets (struct reg_set **addr_this_reg, rtx insn, int for_combine)
3286 int number_of_reaching_defs = 0;
3287 struct reg_set *this_reg;
3289 for (this_reg = *addr_this_reg; this_reg != 0; this_reg = this_reg->next)
3290 if (def_reaches_here_p (insn, this_reg->insn))
3292 number_of_reaching_defs++;
3293 /* Ignore parallels for now. */
3294 if (GET_CODE (PATTERN (this_reg->insn)) == PARALLEL)
3295 return 0;
3297 if (!for_combine
3298 && (GET_CODE (PATTERN (this_reg->insn)) == CLOBBER
3299 || ! rtx_equal_p (SET_SRC (PATTERN (this_reg->insn)),
3300 SET_SRC (PATTERN (insn)))))
3301 /* A setting of the reg to a different value reaches INSN. */
3302 return 0;
3304 if (number_of_reaching_defs > 1)
3306 /* If, in this setting, the value the register is being set to is
3307 equal to the previous value the register was set to, and this
3308 setting reaches the insn we are trying to do the substitution
3309 on, then we are OK. */
3310 if (GET_CODE (PATTERN (this_reg->insn)) == CLOBBER)
3311 return 0;
3312 else if (! rtx_equal_p (SET_SRC (PATTERN (this_reg->insn)),
3313 SET_SRC (PATTERN (insn))))
3314 return 0;
3317 *addr_this_reg = this_reg;
3320 return number_of_reaching_defs;
3323 /* Expression computed by insn is available and the substitution is legal,
3324 so try to perform the substitution.
3326 The result is nonzero if any changes were made. */
3328 static int
3329 handle_avail_expr (rtx insn, struct expr *expr)
3331 rtx pat, insn_computes_expr, expr_set;
3332 rtx to;
3333 struct reg_set *this_reg;
3334 int found_setting, use_src;
3335 int changed = 0;
3337 /* We only handle the case where one computation of the expression
3338 reaches this instruction. */
3339 insn_computes_expr = computing_insn (expr, insn);
3340 if (insn_computes_expr == NULL)
3341 return 0;
3342 expr_set = single_set (insn_computes_expr);
3343 if (!expr_set)
3344 abort ();
3346 found_setting = 0;
3347 use_src = 0;
3349 /* At this point we know only one computation of EXPR outside of this
3350 block reaches this insn. Now try to find a register that the
3351 expression is computed into. */
3352 if (GET_CODE (SET_SRC (expr_set)) == REG)
3354 /* This is the case when the available expression that reaches
3355 here has already been handled as an available expression. */
3356 unsigned int regnum_for_replacing
3357 = REGNO (SET_SRC (expr_set));
3359 /* If the register was created by GCSE we can't use `reg_set_table',
3360 however we know it's set only once. */
3361 if (regnum_for_replacing >= max_gcse_regno
3362 /* If the register the expression is computed into is set only once,
3363 or only one set reaches this insn, we can use it. */
3364 || (((this_reg = reg_set_table[regnum_for_replacing]),
3365 this_reg->next == NULL)
3366 || can_disregard_other_sets (&this_reg, insn, 0)))
3368 use_src = 1;
3369 found_setting = 1;
3373 if (!found_setting)
3375 unsigned int regnum_for_replacing
3376 = REGNO (SET_DEST (expr_set));
3378 /* This shouldn't happen. */
3379 if (regnum_for_replacing >= max_gcse_regno)
3380 abort ();
3382 this_reg = reg_set_table[regnum_for_replacing];
3384 /* If the register the expression is computed into is set only once,
3385 or only one set reaches this insn, use it. */
3386 if (this_reg->next == NULL
3387 || can_disregard_other_sets (&this_reg, insn, 0))
3388 found_setting = 1;
3391 if (found_setting)
3393 pat = PATTERN (insn);
3394 if (use_src)
3395 to = SET_SRC (expr_set);
3396 else
3397 to = SET_DEST (expr_set);
3398 changed = validate_change (insn, &SET_SRC (pat), to, 0);
3400 /* We should be able to ignore the return code from validate_change but
3401 to play it safe we check. */
3402 if (changed)
3404 gcse_subst_count++;
3405 if (gcse_file != NULL)
3407 fprintf (gcse_file, "GCSE: Replacing the source in insn %d with",
3408 INSN_UID (insn));
3409 fprintf (gcse_file, " reg %d %s insn %d\n",
3410 REGNO (to), use_src ? "from" : "set in",
3411 INSN_UID (insn_computes_expr));
3416 /* The register that the expr is computed into is set more than once. */
3417 else if (1 /*expensive_op(this_pattrn->op) && do_expensive_gcse)*/)
3419 /* Insert an insn after INSN_COMPUTES_EXPR that copies the register
3420 set there (call it REGB) into a new pseudo register REGN.
3421 From that insn until the end of the basic block, or until REGB is
3422 set again, all uses of REGB can be replaced with REGN. */
3423 rtx new_insn;
3425 to = gen_reg_rtx (GET_MODE (SET_DEST (expr_set)));
3427 /* Generate the new insn. */
3428 /* ??? If the change fails, we return 0, even though we created
3429 an insn. I think this is ok. */
3430 new_insn
3431 = emit_insn_after (gen_rtx_SET (VOIDmode, to,
3432 SET_DEST (expr_set)),
3433 insn_computes_expr);
3435 /* Keep register set table up to date. */
3436 record_one_set (REGNO (to), new_insn);
3438 gcse_create_count++;
3439 if (gcse_file != NULL)
3441 fprintf (gcse_file, "GCSE: Creating insn %d to copy value of reg %d",
3442 INSN_UID (NEXT_INSN (insn_computes_expr)),
3443 REGNO (SET_SRC (PATTERN (NEXT_INSN (insn_computes_expr)))));
3444 fprintf (gcse_file, ", computed in insn %d,\n",
3445 INSN_UID (insn_computes_expr));
3446 fprintf (gcse_file, " into newly allocated reg %d\n",
3447 REGNO (to));
3450 pat = PATTERN (insn);
3452 /* Do register replacement for INSN. */
3453 changed = validate_change (insn, &SET_SRC (pat),
3454 SET_DEST (PATTERN
3455 (NEXT_INSN (insn_computes_expr))),
3456 0);
3458 /* We should be able to ignore the return code from validate_change but
3459 to play it safe we check. */
3460 if (changed)
3462 gcse_subst_count++;
3463 if (gcse_file != NULL)
3465 fprintf (gcse_file,
3466 "GCSE: Replacing the source in insn %d with reg %d ",
3467 INSN_UID (insn),
3468 REGNO (SET_DEST (PATTERN (NEXT_INSN
3469 (insn_computes_expr)))));
3470 fprintf (gcse_file, "set in insn %d\n",
3471 INSN_UID (insn_computes_expr));
3476 return changed;
3479 /* Perform classic GCSE. This is called by one_classic_gcse_pass after all
3480 the dataflow analysis has been done.
3482 The result is nonzero if a change was made. */
3484 static int
3485 classic_gcse (void)
3487 int changed;
3488 rtx insn;
3489 basic_block bb;
3491 /* Note we start at block 1. */
3493 if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
3494 return 0;
3496 changed = 0;
3497 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb->next_bb, EXIT_BLOCK_PTR, next_bb)
3499 /* Reset tables used to keep track of what's still valid [since the
3500 start of the block]. */
3501 reset_opr_set_tables ();
3503 for (insn = bb->head;
3504 insn != NULL && insn != NEXT_INSN (bb->end);
3505 insn = NEXT_INSN (insn))
3507 /* Is insn of form (set (pseudo-reg) ...)? */
3508 if (GET_CODE (insn) == INSN
3509 && GET_CODE (PATTERN (insn)) == SET
3510 && GET_CODE (SET_DEST (PATTERN (insn))) == REG
3511 && REGNO (SET_DEST (PATTERN (insn))) >= FIRST_PSEUDO_REGISTER)
3513 rtx pat = PATTERN (insn);
3514 rtx src = SET_SRC (pat);
3515 struct expr *expr;
3517 if (want_to_gcse_p (src)
3518 /* Is the expression recorded? */
3519 && ((expr = lookup_expr (src, &expr_hash_table)) != NULL)
3520 /* Is the expression available [at the start of the
3521 block]? */
3522 && TEST_BIT (ae_in[bb->index], expr->bitmap_index)
3523 /* Are the operands unchanged since the start of the
3524 block? */
3525 && oprs_not_set_p (src, insn))
3526 changed |= handle_avail_expr (insn, expr);
3529 /* Keep track of everything modified by this insn. */
3530 /* ??? Need to be careful w.r.t. mods done to INSN. */
3531 if (INSN_P (insn))
3532 mark_oprs_set (insn);
3536 return changed;
3539 /* Top level routine to perform one classic GCSE pass.
3541 Return nonzero if a change was made. */
3543 static int
3544 one_classic_gcse_pass (int pass)
3546 int changed = 0;
3548 gcse_subst_count = 0;
3549 gcse_create_count = 0;
3551 alloc_hash_table (max_cuid, &expr_hash_table, 0);
3552 alloc_rd_mem (last_basic_block, max_cuid);
3553 compute_hash_table (&expr_hash_table);
3554 if (gcse_file)
3555 dump_hash_table (gcse_file, "Expression", &expr_hash_table);
3557 if (expr_hash_table.n_elems > 0)
3559 compute_kill_rd ();
3560 compute_rd ();
3561 alloc_avail_expr_mem (last_basic_block, expr_hash_table.n_elems);
3562 compute_ae_gen (&expr_hash_table);
3563 compute_ae_kill (ae_gen, ae_kill, &expr_hash_table);
3564 compute_available (ae_gen, ae_kill, ae_out, ae_in);
3565 changed = classic_gcse ();
3566 free_avail_expr_mem ();
3569 free_rd_mem ();
3570 free_hash_table (&expr_hash_table);
3572 if (gcse_file)
3574 fprintf (gcse_file, "\n");
3575 fprintf (gcse_file, "GCSE of %s, pass %d: %d bytes needed, %d substs,",
3576 current_function_name, pass, bytes_used, gcse_subst_count);
3577 fprintf (gcse_file, "%d insns created\n", gcse_create_count);
3580 return changed;
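/* Pass shape (summary of the above): build the expression hash table,
   solve reaching definitions (compute_kill_rd, compute_rd), solve
   availability (compute_ae_gen, compute_ae_kill, compute_available),
   and only then let classic_gcse attempt the substitutions.  */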
3583 /* Compute copy/constant propagation working variables. */
3585 /* Local properties of assignments. */
3586 static sbitmap *cprop_pavloc;
3587 static sbitmap *cprop_absaltered;
3589 /* Global properties of assignments (computed from the local properties). */
3590 static sbitmap *cprop_avin;
3591 static sbitmap *cprop_avout;
3593 /* Allocate vars used for copy/const propagation. N_BLOCKS is the number of
3594 basic blocks. N_SETS is the number of sets. */
3596 static void
3597 alloc_cprop_mem (int n_blocks, int n_sets)
3599 cprop_pavloc = sbitmap_vector_alloc (n_blocks, n_sets);
3600 cprop_absaltered = sbitmap_vector_alloc (n_blocks, n_sets);
3602 cprop_avin = sbitmap_vector_alloc (n_blocks, n_sets);
3603 cprop_avout = sbitmap_vector_alloc (n_blocks, n_sets);
3606 /* Free vars used by copy/const propagation. */
3608 static void
3609 free_cprop_mem (void)
3611 sbitmap_vector_free (cprop_pavloc);
3612 sbitmap_vector_free (cprop_absaltered);
3613 sbitmap_vector_free (cprop_avin);
3614 sbitmap_vector_free (cprop_avout);
3617 /* For each block, compute whether X is transparent. X is either an
3618 expression or an assignment [though we don't care which, for this context
3619 an assignment is treated as an expression]. For each block where an
3620 element of X is modified, set (SET_P == 1) or reset (SET_P == 0) the INDX
3621 bit in BMAP. */
3623 static void
3624 compute_transp (rtx x, int indx, sbitmap *bmap, int set_p)
3626 int i, j;
3627 basic_block bb;
3628 enum rtx_code code;
3629 reg_set *r;
3630 const char *fmt;
3632 /* repeat is used to turn tail-recursion into iteration since GCC
3633 can't do it when there's no return value. */
3634 repeat:
3636 if (x == 0)
3637 return;
3639 code = GET_CODE (x);
3640 switch (code)
3642 case REG:
3643 if (set_p)
3645 if (REGNO (x) < FIRST_PSEUDO_REGISTER)
3647 FOR_EACH_BB (bb)
3648 if (TEST_BIT (reg_set_in_block[bb->index], REGNO (x)))
3649 SET_BIT (bmap[bb->index], indx);
3651 else
3653 for (r = reg_set_table[REGNO (x)]; r != NULL; r = r->next)
3654 SET_BIT (bmap[BLOCK_NUM (r->insn)], indx);
3657 else
3659 if (REGNO (x) < FIRST_PSEUDO_REGISTER)
3661 FOR_EACH_BB (bb)
3662 if (TEST_BIT (reg_set_in_block[bb->index], REGNO (x)))
3663 RESET_BIT (bmap[bb->index], indx);
3665 else
3667 for (r = reg_set_table[REGNO (x)]; r != NULL; r = r->next)
3668 RESET_BIT (bmap[BLOCK_NUM (r->insn)], indx);
3672 return;
3674 case MEM:
3675 FOR_EACH_BB (bb)
3677 rtx list_entry = canon_modify_mem_list[bb->index];
3679 while (list_entry)
3681 rtx dest, dest_addr;
3683 if (GET_CODE (XEXP (list_entry, 0)) == CALL_INSN)
3685 if (set_p)
3686 SET_BIT (bmap[bb->index], indx);
3687 else
3688 RESET_BIT (bmap[bb->index], indx);
3689 break;
3691 /* LIST_ENTRY must be an INSN of some kind that sets memory.
3692 Examine each hunk of memory that is modified. */
3694 dest = XEXP (list_entry, 0);
3695 list_entry = XEXP (list_entry, 1);
3696 dest_addr = XEXP (list_entry, 0);
3698 if (canon_true_dependence (dest, GET_MODE (dest), dest_addr,
3699 x, rtx_addr_varies_p))
3701 if (set_p)
3702 SET_BIT (bmap[bb->index], indx);
3703 else
3704 RESET_BIT (bmap[bb->index], indx);
3705 break;
3707 list_entry = XEXP (list_entry, 1);
3711 x = XEXP (x, 0);
3712 goto repeat;
3714 case PC:
3715 case CC0: /*FIXME*/
3716 case CONST:
3717 case CONST_INT:
3718 case CONST_DOUBLE:
3719 case CONST_VECTOR:
3720 case SYMBOL_REF:
3721 case LABEL_REF:
3722 case ADDR_VEC:
3723 case ADDR_DIFF_VEC:
3724 return;
3726 default:
3727 break;
3730 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
3732 if (fmt[i] == 'e')
3734 /* If we are about to do the last recursive call
3735 needed at this level, change it into iteration.
3736 This function is called enough to be worth it. */
3737 if (i == 0)
3739 x = XEXP (x, i);
3740 goto repeat;
3743 compute_transp (XEXP (x, i), indx, bmap, set_p);
3745 else if (fmt[i] == 'E')
3746 for (j = 0; j < XVECLEN (x, i); j++)
3747 compute_transp (XVECEXP (x, i, j), indx, bmap, set_p);
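/* Example (informal): for X = (mem:SI (reg 100)) with SET_P == 0, the
   MEM case above resets BMAP[bb][INDX] in every block whose
   canon_modify_mem_list records a possibly-aliasing store (or any
   CALL_INSN), and the tail call on the address then also resets it in
   blocks that set reg 100.  */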
3751 /* Top level routine to do the dataflow analysis needed by copy/const
3752 propagation. */
3754 static void
3755 compute_cprop_data (void)
3757 compute_local_properties (cprop_absaltered, cprop_pavloc, NULL, &set_hash_table);
3758 compute_available (cprop_pavloc, cprop_absaltered,
3759 cprop_avout, cprop_avin);
3762 /* Copy/constant propagation. */
3764 /* Maximum number of register uses in an insn that we handle. */
3765 #define MAX_USES 8
3767 /* Table of uses found in an insn.
3768 Allocated statically to avoid alloc/free complexity and overhead. */
3769 static struct reg_use reg_use_table[MAX_USES];
3771 /* Index into `reg_use_table' while building it. */
3772 static int reg_use_count;
3774 /* Set up a list of register numbers used in INSN. The found uses are stored
3775 in `reg_use_table'. `reg_use_count' is initialized to zero before entry,
3776 and contains the number of uses in the table upon exit.
3778 ??? If a register appears multiple times we will record it multiple times.
3779 This doesn't hurt anything but it will slow things down. */
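/* A typical caller therefore follows this minimal sketch, where the loop
   body is whatever the pass wants to do with each recorded use:

     reg_use_count = 0;
     note_uses (&PATTERN (insn), find_used_regs, NULL);
     for (i = 0; i < reg_use_count; i++)
       ... use reg_use_table[i].reg_rtx ...

   as done in cprop_insn and bypass_block below.  */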
3781 static void
3782 find_used_regs (rtx *xptr, void *data ATTRIBUTE_UNUSED)
3784 int i, j;
3785 enum rtx_code code;
3786 const char *fmt;
3787 rtx x = *xptr;
3789 /* repeat is used to turn tail-recursion into iteration since GCC
3790 can't do it when there's no return value. */
3791 repeat:
3792 if (x == 0)
3793 return;
3795 code = GET_CODE (x);
3796 if (REG_P (x))
3798 if (reg_use_count == MAX_USES)
3799 return;
3801 reg_use_table[reg_use_count].reg_rtx = x;
3802 reg_use_count++;
3805 /* Recursively scan the operands of this expression. */
3807 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
3809 if (fmt[i] == 'e')
3811 /* If we are about to do the last recursive call
3812 needed at this level, change it into iteration.
3813 This function is called enough to be worth it. */
3814 if (i == 0)
3816 x = XEXP (x, 0);
3817 goto repeat;
3820 find_used_regs (&XEXP (x, i), data);
3822 else if (fmt[i] == 'E')
3823 for (j = 0; j < XVECLEN (x, i); j++)
3824 find_used_regs (&XVECEXP (x, i, j), data);
3828 /* Try to replace all non-SET_DEST occurrences of FROM in INSN with TO.
3829 Returns nonzero if successful. */
3831 static int
3832 try_replace_reg (rtx from, rtx to, rtx insn)
3834 rtx note = find_reg_equal_equiv_note (insn);
3835 rtx src = 0;
3836 int success = 0;
3837 rtx set = single_set (insn);
3839 validate_replace_src_group (from, to, insn);
3840 if (num_changes_pending () && apply_change_group ())
3841 success = 1;
3843 /* Try to simplify SET_SRC if we have substituted a constant. */
3844 if (success && set && CONSTANT_P (to))
3846 src = simplify_rtx (SET_SRC (set));
3848 if (src)
3849 validate_change (insn, &SET_SRC (set), src, 0);
3852 if (!success && set && reg_mentioned_p (from, SET_SRC (set)))
3854 /* If the above failed and this is a single set, try to simplify the source of
3855 the set given our substitution. We could perhaps try this for multiple
3856 SETs, but it probably won't buy us anything. */
3857 src = simplify_replace_rtx (SET_SRC (set), from, to);
3859 if (!rtx_equal_p (src, SET_SRC (set))
3860 && validate_change (insn, &SET_SRC (set), src, 0))
3861 success = 1;
3863 /* If we've failed to do replacement, have a single SET, don't already
3864 have a note, and have no special SET, add a REG_EQUAL note to not
3865 lose information. */
3866 if (!success && note == 0 && set != 0
3867 && GET_CODE (XEXP (set, 0)) != ZERO_EXTRACT
3868 && GET_CODE (XEXP (set, 0)) != SIGN_EXTRACT)
3869 note = set_unique_reg_note (insn, REG_EQUAL, copy_rtx (src));
3872 /* If there is already a NOTE, update the expression in it with our
3873 replacement. */
3874 else if (note != 0)
3875 XEXP (note, 0) = simplify_replace_rtx (XEXP (note, 0), from, to);
3877 /* A REG_EQUAL note may get simplified into a plain register.
3878 We don't allow that; remove the note. This case ought
3879 not to happen, because the previous code ought to have synthesized
3880 a reg-reg move, but be on the safe side. */
3881 if (note && REG_P (XEXP (note, 0)))
3882 remove_note (insn, note);
3884 return success;
3887 /* Find a set of REGNOs that are available on entry to INSN's block. Returns
3888 NULL if no such set is found. */
3890 static struct expr *
3891 find_avail_set (int regno, rtx insn)
3893 /* SET1 contains the last set found that can be returned to the caller for
3894 use in a substitution. */
3895 struct expr *set1 = 0;
3897 /* Loops are not possible here. To get a loop we would need two sets
3898 available at the start of the block containing INSN, i.e. we would
3899 need two sets like this available at the start of the block:
3901 (set (reg X) (reg Y))
3902 (set (reg Y) (reg X))
3904 This cannot happen since the set of (reg Y) would have killed the
3905 set of (reg X) making it unavailable at the start of this block. */
3906 while (1)
3908 rtx src;
3909 struct expr *set = lookup_set (regno, &set_hash_table);
3911 /* Find a set that is available at the start of the block
3912 which contains INSN. */
3913 while (set)
3915 if (TEST_BIT (cprop_avin[BLOCK_NUM (insn)], set->bitmap_index))
3916 break;
3917 set = next_set (regno, set);
3920 /* If no available set was found we've reached the end of the
3921 (possibly empty) copy chain. */
3922 if (set == 0)
3923 break;
3925 if (GET_CODE (set->expr) != SET)
3926 abort ();
3928 src = SET_SRC (set->expr);
3930 /* We know the set is available.
3931 Now check that SRC is ANTLOC (i.e. none of the source operands
3932 have changed since the start of the block).
3934 If the source operand changed, we may still use it for the next
3935 iteration of this loop, but we may not use it for substitutions. */
3937 if (gcse_constant_p (src) || oprs_not_set_p (src, insn))
3938 set1 = set;
3940 /* If the source of the set is anything except a register, then
3941 we have reached the end of the copy chain. */
3942 if (GET_CODE (src) != REG)
3943 break;
3945 /* Follow the copy chain, i.e. start another iteration of the loop
3946 and see if we have an available copy into SRC. */
3947 regno = REGNO (src);
3950 /* SET1 holds the last set that was available and anticipatable at
3951 INSN. */
3952 return set1;
3955 /* Subroutine of cprop_insn that tries to propagate constants into
3956 JUMP_INSNS. JUMP must be a conditional jump. If SETCC is non-NULL
3957 it is the instruction that immediately precedes JUMP, and must be a
3958 single SET of a register. FROM is what we will try to replace,
3959 SRC is the constant we will try to substitute for it. Returns nonzero
3960 if a change was made. */
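/* For example (register numbers arbitrary), given the pair

     (set (reg 100) (lt (reg 101) (const_int 0)))
     (set (pc) (if_then_else (ne (reg 100) (const_int 0))
                             (label_ref L) (pc)))

   substituting the SETCC condition turns the branch into a test of
   (lt (reg 101) (const_int 0)); if FROM is (reg 101) and SRC is a known
   constant, the condition may then fold either to pc_rtx, in which case
   the jump is deleted, or to (label_ref L), in which case the jump
   becomes unconditional and a barrier is emitted after it.  */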
3962 static int
3963 cprop_jump (basic_block bb, rtx setcc, rtx jump, rtx from, rtx src)
3965 rtx new, set_src, note_src;
3966 rtx set = pc_set (jump);
3967 rtx note = find_reg_equal_equiv_note (jump);
3969 if (note)
3971 note_src = XEXP (note, 0);
3972 if (GET_CODE (note_src) == EXPR_LIST)
3973 note_src = NULL_RTX;
3975 else note_src = NULL_RTX;
3977 /* Prefer REG_EQUAL notes except those containing EXPR_LISTs. */
3978 set_src = note_src ? note_src : SET_SRC (set);
3980 /* First substitute the SETCC condition into the JUMP instruction,
3981 then substitute the given values into this expanded JUMP. */
3982 if (setcc != NULL_RTX
3983 && !modified_between_p (from, setcc, jump)
3984 && !modified_between_p (src, setcc, jump))
3986 rtx setcc_src;
3987 rtx setcc_set = single_set (setcc);
3988 rtx setcc_note = find_reg_equal_equiv_note (setcc);
3989 setcc_src = (setcc_note && GET_CODE (XEXP (setcc_note, 0)) != EXPR_LIST)
3990 ? XEXP (setcc_note, 0) : SET_SRC (setcc_set);
3991 set_src = simplify_replace_rtx (set_src, SET_DEST (setcc_set),
3992 setcc_src);
3994 else
3995 setcc = NULL_RTX;
3997 new = simplify_replace_rtx (set_src, from, src);
3999 /* If no simplification can be made, then try the next register. */
4000 if (rtx_equal_p (new, SET_SRC (set)))
4001 return 0;
4003 /* If this is now a no-op delete it, otherwise this must be a valid insn. */
4004 if (new == pc_rtx)
4005 delete_insn (jump);
4006 else
4008 /* Ensure the value computed inside the jump insn is equivalent
4009 to the one computed by setcc. */
4010 if (setcc && modified_in_p (new, setcc))
4011 return 0;
4012 if (! validate_change (jump, &SET_SRC (set), new, 0))
4014 /* When (some) constants are not valid in a comparison, and there
4015 are two registers to be replaced by constants before the entire
4016 comparison can be folded into a constant, we need to keep
4017 intermediate information in REG_EQUAL notes. For targets with
4018 separate compare insns, such notes are added by try_replace_reg.
4019 When we have a combined compare-and-branch instruction, however,
4020 we need to attach a note to the branch itself to make this
4021 optimization work. */
4023 if (!rtx_equal_p (new, note_src))
4024 set_unique_reg_note (jump, REG_EQUAL, copy_rtx (new));
4025 return 0;
4028 /* Remove REG_EQUAL note after simplification. */
4029 if (note_src)
4030 remove_note (jump, note);
4032 /* If this has turned into an unconditional jump,
4033 then put a barrier after it so that the unreachable
4034 code will be deleted. */
4035 if (GET_CODE (SET_SRC (set)) == LABEL_REF)
4036 emit_barrier_after (jump);
4039 #ifdef HAVE_cc0
4040 /* Delete the cc0 setter. */
4041 if (setcc != NULL && CC0_P (SET_DEST (single_set (setcc))))
4042 delete_insn (setcc);
4043 #endif
4045 run_jump_opt_after_gcse = 1;
4047 const_prop_count++;
4048 if (gcse_file != NULL)
4050 fprintf (gcse_file,
4051 "CONST-PROP: Replacing reg %d in jump_insn %d with constant ",
4052 REGNO (from), INSN_UID (jump));
4053 print_rtl (gcse_file, src);
4054 fprintf (gcse_file, "\n");
4056 purge_dead_edges (bb);
4058 return 1;
4061 static bool
4062 constprop_register (rtx insn, rtx from, rtx to, int alter_jumps)
4064 rtx sset;
4066 /* Check for reg or cc0 setting instructions followed by
4067 conditional branch instructions first. */
4068 if (alter_jumps
4069 && (sset = single_set (insn)) != NULL
4070 && NEXT_INSN (insn)
4071 && any_condjump_p (NEXT_INSN (insn)) && onlyjump_p (NEXT_INSN (insn)))
4073 rtx dest = SET_DEST (sset);
4074 if ((REG_P (dest) || CC0_P (dest))
4075 && cprop_jump (BLOCK_FOR_INSN (insn), insn, NEXT_INSN (insn), from, to))
4076 return 1;
4079 /* Handle normal insns next. */
4080 if (GET_CODE (insn) == INSN
4081 && try_replace_reg (from, to, insn))
4082 return 1;
4084 /* Try to propagate a CONST_INT into a conditional jump.
4085 We're pretty specific about what we will handle in this
4086 code; we can extend this as necessary over time.
4088 Right now the insn in question must look like
4089 (set (pc) (if_then_else ...)) */
4090 else if (alter_jumps && any_condjump_p (insn) && onlyjump_p (insn))
4091 return cprop_jump (BLOCK_FOR_INSN (insn), NULL, insn, from, to);
4092 return 0;
4095 /* Perform constant and copy propagation on INSN.
4096 The result is nonzero if a change was made. */
4098 static int
4099 cprop_insn (rtx insn, int alter_jumps)
4101 struct reg_use *reg_used;
4102 int changed = 0;
4103 rtx note;
4105 if (!INSN_P (insn))
4106 return 0;
4108 reg_use_count = 0;
4109 note_uses (&PATTERN (insn), find_used_regs, NULL);
4111 note = find_reg_equal_equiv_note (insn);
4113 /* We may win even when propagating constants into notes. */
4114 if (note)
4115 find_used_regs (&XEXP (note, 0), NULL);
4117 for (reg_used = &reg_use_table[0]; reg_use_count > 0;
4118 reg_used++, reg_use_count--)
4120 unsigned int regno = REGNO (reg_used->reg_rtx);
4121 rtx pat, src;
4122 struct expr *set;
4124 /* Ignore registers created by GCSE.
4125 We do this because ... */
4126 if (regno >= max_gcse_regno)
4127 continue;
4129 /* If the register has already been set in this block, there's
4130 nothing we can do. */
4131 if (! oprs_not_set_p (reg_used->reg_rtx, insn))
4132 continue;
4134 /* Find an assignment that sets reg_used and is available
4135 at the start of the block. */
4136 set = find_avail_set (regno, insn);
4137 if (! set)
4138 continue;
4140 pat = set->expr;
4141 /* ??? We might be able to handle PARALLELs. Later. */
4142 if (GET_CODE (pat) != SET)
4143 abort ();
4145 src = SET_SRC (pat);
4147 /* Constant propagation. */
4148 if (gcse_constant_p (src))
4150 if (constprop_register (insn, reg_used->reg_rtx, src, alter_jumps))
4152 changed = 1;
4153 const_prop_count++;
4154 if (gcse_file != NULL)
4156 fprintf (gcse_file, "GLOBAL CONST-PROP: Replacing reg %d in ", regno);
4157 fprintf (gcse_file, "insn %d with constant ", INSN_UID (insn));
4158 print_rtl (gcse_file, src);
4159 fprintf (gcse_file, "\n");
4161 if (INSN_DELETED_P (insn))
4162 return 1;
4165 else if (GET_CODE (src) == REG
4166 && REGNO (src) >= FIRST_PSEUDO_REGISTER
4167 && REGNO (src) != regno)
4169 if (try_replace_reg (reg_used->reg_rtx, src, insn))
4171 changed = 1;
4172 copy_prop_count++;
4173 if (gcse_file != NULL)
4175 fprintf (gcse_file, "GLOBAL COPY-PROP: Replacing reg %d in insn %d",
4176 regno, INSN_UID (insn));
4177 fprintf (gcse_file, " with reg %d\n", REGNO (src));
4180 /* The original insn setting reg_used may or may not now be
4181 deletable. We leave the deletion to flow. */
4182 /* FIXME: If it turns out that the insn isn't deletable,
4183 then we may have unnecessarily extended register lifetimes
4184 and made things worse. */
4189 return changed;
4192 /* Like find_used_regs, but avoid recording uses that appear in
4193 input-output contexts such as zero_extract or pre_dec. This
4194 restricts the cases we consider to those for which local cprop
4195 can legitimately make replacements. */
4197 static void
4198 local_cprop_find_used_regs (rtx *xptr, void *data)
4200 rtx x = *xptr;
4202 if (x == 0)
4203 return;
4205 switch (GET_CODE (x))
4207 case ZERO_EXTRACT:
4208 case SIGN_EXTRACT:
4209 case STRICT_LOW_PART:
4210 return;
4212 case PRE_DEC:
4213 case PRE_INC:
4214 case POST_DEC:
4215 case POST_INC:
4216 case PRE_MODIFY:
4217 case POST_MODIFY:
4218 /* Can only legitimately appear this early in the context of
4219 stack pushes for function arguments, but handle all of the
4220 codes nonetheless. */
4221 return;
4223 case SUBREG:
4224 /* Setting a subreg of a register larger than word_mode leaves
4225 the non-written words unchanged. */
4226 if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) > BITS_PER_WORD)
4227 return;
4228 break;
4230 default:
4231 break;
4234 find_used_regs (xptr, data);
4237 /* LIBCALL_SP is a zero-terminated array of insns at the end of a libcall;
4238 their REG_EQUAL notes need updating. */
4240 static bool
4241 do_local_cprop (rtx x, rtx insn, int alter_jumps, rtx *libcall_sp)
4243 rtx newreg = NULL, newcnst = NULL;
4245 /* Rule out USE instructions and ASM statements as we don't want to
4246 change the hard registers mentioned. */
4247 if (GET_CODE (x) == REG
4248 && (REGNO (x) >= FIRST_PSEUDO_REGISTER
4249 || (GET_CODE (PATTERN (insn)) != USE
4250 && asm_noperands (PATTERN (insn)) < 0)))
4252 cselib_val *val = cselib_lookup (x, GET_MODE (x), 0);
4253 struct elt_loc_list *l;
4255 if (!val)
4256 return false;
4257 for (l = val->locs; l; l = l->next)
4259 rtx this_rtx = l->loc;
4260 rtx note;
4262 if (l->in_libcall)
4263 continue;
4265 if (gcse_constant_p (this_rtx))
4266 newcnst = this_rtx;
4267 if (REG_P (this_rtx) && REGNO (this_rtx) >= FIRST_PSEUDO_REGISTER
4268 /* Don't copy propagate if it has an attached REG_EQUIV note.
4269 At this point only function parameters should have
4270 REG_EQUIV notes, and if the argument slot is used somewhere
4271 explicitly, it means the address of the parameter has been taken,
4272 so we should not extend the lifetime of the pseudo. */
4273 && (!(note = find_reg_note (l->setting_insn, REG_EQUIV, NULL_RTX))
4274 || GET_CODE (XEXP (note, 0)) != MEM))
4275 newreg = this_rtx;
4277 if (newcnst && constprop_register (insn, x, newcnst, alter_jumps))
4279 /* If we find a case where we can't fix the retval REG_EQUAL notes
4280 to match the new register, we either have to abandon this replacement
4281 or fix delete_trivially_dead_insns to preserve the setting insn,
4282 or make it delete the REG_EQUAL note, and fix up all passes that
4283 require the REG_EQUAL note there. */
4284 if (!adjust_libcall_notes (x, newcnst, insn, libcall_sp))
4285 abort ();
4286 if (gcse_file != NULL)
4288 fprintf (gcse_file, "LOCAL CONST-PROP: Replacing reg %d in ",
4289 REGNO (x));
4290 fprintf (gcse_file, "insn %d with constant ",
4291 INSN_UID (insn));
4292 print_rtl (gcse_file, newcnst);
4293 fprintf (gcse_file, "\n");
4295 const_prop_count++;
4296 return true;
4298 else if (newreg && newreg != x && try_replace_reg (x, newreg, insn))
4300 adjust_libcall_notes (x, newreg, insn, libcall_sp);
4301 if (gcse_file != NULL)
4303 fprintf (gcse_file,
4304 "LOCAL COPY-PROP: Replacing reg %d in insn %d",
4305 REGNO (x), INSN_UID (insn));
4306 fprintf (gcse_file, " with reg %d\n", REGNO (newreg));
4308 copy_prop_count++;
4309 return true;
4312 return false;
4315 /* LIBCALL_SP is a zero-terminated array of insns at the end of a libcall;
4316 their REG_EQUAL notes need updating to reflect that OLDREG has been
4317 replaced with NEWVAL in INSN. Return true if all substitutions could
4318 be made. */
4319 static bool
4320 adjust_libcall_notes (rtx oldreg, rtx newval, rtx insn, rtx *libcall_sp)
4322 rtx end;
4324 while ((end = *libcall_sp++))
4326 rtx note = find_reg_equal_equiv_note (end);
4328 if (! note)
4329 continue;
4331 if (REG_P (newval))
4333 if (reg_set_between_p (newval, PREV_INSN (insn), end))
4337 note = find_reg_equal_equiv_note (end);
4338 if (! note)
4339 continue;
4340 if (reg_mentioned_p (newval, XEXP (note, 0)))
4341 return false;
4343 while ((end = *libcall_sp++));
4344 return true;
4347 XEXP (note, 0) = replace_rtx (XEXP (note, 0), oldreg, newval);
4348 insn = end;
4350 return true;
4353 #define MAX_NESTED_LIBCALLS 9
4355 static void
4356 local_cprop_pass (int alter_jumps)
4358 rtx insn;
4359 struct reg_use *reg_used;
4360 rtx libcall_stack[MAX_NESTED_LIBCALLS + 1], *libcall_sp;
4361 bool changed = false;
4363 cselib_init ();
4364 libcall_sp = &libcall_stack[MAX_NESTED_LIBCALLS];
4365 *libcall_sp = 0;
4366 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4368 if (INSN_P (insn))
4370 rtx note = find_reg_note (insn, REG_LIBCALL, NULL_RTX);
4372 if (note)
4374 if (libcall_sp == libcall_stack)
4375 abort ();
4376 *--libcall_sp = XEXP (note, 0);
4378 note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
4379 if (note)
4380 libcall_sp++;
4381 note = find_reg_equal_equiv_note (insn);
4384 reg_use_count = 0;
4385 note_uses (&PATTERN (insn), local_cprop_find_used_regs, NULL);
4386 if (note)
4387 local_cprop_find_used_regs (&XEXP (note, 0), NULL);
4389 for (reg_used = &reg_use_table[0]; reg_use_count > 0;
4390 reg_used++, reg_use_count--)
4391 if (do_local_cprop (reg_used->reg_rtx, insn, alter_jumps,
4392 libcall_sp))
4394 changed = true;
4395 break;
4397 if (INSN_DELETED_P (insn))
4398 break;
4400 while (reg_use_count);
4402 cselib_process_insn (insn);
4404 cselib_finish ();
4405 /* Global analysis may get into infinite loops for unreachable blocks. */
4406 if (changed && alter_jumps)
4408 delete_unreachable_blocks ();
4409 free_reg_set_mem ();
4410 alloc_reg_set_mem (max_reg_num ());
4411 compute_sets (get_insns ());
4415 /* Forward propagate copies. This includes copies and constants. Return
4416 nonzero if a change was made. */
4418 static int
4419 cprop (int alter_jumps)
4421 int changed;
4422 basic_block bb;
4423 rtx insn;
4425 /* Note we start at block 1. */
4426 if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
4428 if (gcse_file != NULL)
4429 fprintf (gcse_file, "\n");
4430 return 0;
4433 changed = 0;
4434 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb->next_bb, EXIT_BLOCK_PTR, next_bb)
4436 /* Reset tables used to keep track of what's still valid [since the
4437 start of the block]. */
4438 reset_opr_set_tables ();
4440 for (insn = bb->head;
4441 insn != NULL && insn != NEXT_INSN (bb->end);
4442 insn = NEXT_INSN (insn))
4443 if (INSN_P (insn))
4445 changed |= cprop_insn (insn, alter_jumps);
4447 /* Keep track of everything modified by this insn. */
4448 /* ??? Need to be careful w.r.t. mods done to INSN. Don't
4449 call mark_oprs_set if we turned the insn into a NOTE. */
4450 if (GET_CODE (insn) != NOTE)
4451 mark_oprs_set (insn);
4455 if (gcse_file != NULL)
4456 fprintf (gcse_file, "\n");
4458 return changed;
4461 /* Similar to get_condition, only the resulting condition must be
4462 valid at JUMP, instead of at EARLIEST.
4464 This differs from noce_get_condition in ifcvt.c in that we prefer not to
4465 settle for the condition variable in the jump instruction being integral.
4466 We prefer to be able to record the value of a user variable, rather than
4467 the value of a temporary used in a condition. This could be solved by
4468 recording the value of *every* register scanned by canonicalize_condition,
4469 but this would require some code reorganization. */
4471 static rtx
4472 fis_get_condition (rtx jump)
4474 rtx cond, set, tmp, insn, earliest;
4475 bool reverse;
4477 if (! any_condjump_p (jump))
4478 return NULL_RTX;
4480 set = pc_set (jump);
4481 cond = XEXP (SET_SRC (set), 0);
4483 /* If this branches to JUMP_LABEL when the condition is false,
4484 reverse the condition. */
4485 reverse = (GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
4486 && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (jump));
4488 /* Use canonicalize_condition to do the dirty work of manipulating
4489 MODE_CC values and COMPARE rtx codes. */
4490 tmp = canonicalize_condition (jump, cond, reverse, &earliest, NULL_RTX);
4491 if (!tmp)
4492 return NULL_RTX;
4494 /* Verify that the given condition is valid at JUMP by virtue of not
4495 having been modified since EARLIEST. */
4496 for (insn = earliest; insn != jump; insn = NEXT_INSN (insn))
4497 if (INSN_P (insn) && modified_in_p (tmp, insn))
4498 break;
4499 if (insn == jump)
4500 return tmp;
4502 /* The condition was modified. See if we can get a partial result
4503 that doesn't follow all the reversals. Perhaps combine can fold
4504 them together later. */
4505 tmp = XEXP (tmp, 0);
4506 if (!REG_P (tmp) || GET_MODE_CLASS (GET_MODE (tmp)) != MODE_INT)
4507 return NULL_RTX;
4508 tmp = canonicalize_condition (jump, cond, reverse, &earliest, tmp);
4509 if (!tmp)
4510 return NULL_RTX;
4512 /* For sanity's sake, re-validate the new result. */
4513 for (insn = earliest; insn != jump; insn = NEXT_INSN (insn))
4514 if (INSN_P (insn) && modified_in_p (tmp, insn))
4515 return NULL_RTX;
4517 return tmp;
4520 /* Find the implicit sets of a function. An "implicit set" is a constraint
4521 on the value of a variable, implied by a conditional jump. For example,
4522 following "if (x == 2)", the then branch may be optimized as though the
4523 conditional performed an "explicit set", in this example, "x = 2". This
4524 function records the set patterns that are implicit at the start of each
4525 basic block. */
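/* For example (register number arbitrary), for a block ending in

     (set (pc) (if_then_else (eq (reg 100) (const_int 2))
                             (label_ref L) (pc)))

   the branch target, provided it has no other predecessor, is recorded
   as carrying the implicit set

     (set (reg 100) (const_int 2))

   which the subsequent hash table construction treats much like an
   ordinary assignment at the head of that block.  */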
4527 static void
4528 find_implicit_sets (void)
4530 basic_block bb, dest;
4531 unsigned int count;
4532 rtx cond, new;
4534 count = 0;
4535 FOR_EACH_BB (bb)
4536 /* Check for more than one successor. */
4537 if (bb->succ && bb->succ->succ_next)
4539 cond = fis_get_condition (bb->end);
4541 if (cond
4542 && (GET_CODE (cond) == EQ || GET_CODE (cond) == NE)
4543 && GET_CODE (XEXP (cond, 0)) == REG
4544 && REGNO (XEXP (cond, 0)) >= FIRST_PSEUDO_REGISTER
4545 && gcse_constant_p (XEXP (cond, 1)))
4547 dest = GET_CODE (cond) == EQ ? BRANCH_EDGE (bb)->dest
4548 : FALLTHRU_EDGE (bb)->dest;
4550 if (dest && ! dest->pred->pred_next
4551 && dest != EXIT_BLOCK_PTR)
4553 new = gen_rtx_SET (VOIDmode, XEXP (cond, 0),
4554 XEXP (cond, 1));
4555 implicit_sets[dest->index] = new;
4556 if (gcse_file)
4558 fprintf (gcse_file, "Implicit set of reg %d in ",
4559 REGNO (XEXP (cond, 0)));
4560 fprintf (gcse_file, "basic block %d\n", dest->index);
4562 count++;
4567 if (gcse_file)
4568 fprintf (gcse_file, "Found %d implicit sets\n", count);
4571 /* Perform one copy/constant propagation pass.
4572 PASS is the pass count. If CPROP_JUMPS is true, perform constant
4573 propagation into conditional jumps. If BYPASS_JUMPS is true,
4574 perform conditional jump bypassing optimizations. */
4576 static int
4577 one_cprop_pass (int pass, int cprop_jumps, int bypass_jumps)
4579 int changed = 0;
4581 const_prop_count = 0;
4582 copy_prop_count = 0;
4584 local_cprop_pass (cprop_jumps);
4586 /* Determine implicit sets. */
4587 implicit_sets = xcalloc (last_basic_block, sizeof (rtx));
4588 find_implicit_sets ();
4590 alloc_hash_table (max_cuid, &set_hash_table, 1);
4591 compute_hash_table (&set_hash_table);
4593 /* Free implicit_sets before peak usage. */
4594 free (implicit_sets);
4595 implicit_sets = NULL;
4597 if (gcse_file)
4598 dump_hash_table (gcse_file, "SET", &set_hash_table);
4599 if (set_hash_table.n_elems > 0)
4601 alloc_cprop_mem (last_basic_block, set_hash_table.n_elems);
4602 compute_cprop_data ();
4603 changed = cprop (cprop_jumps);
4604 if (bypass_jumps)
4605 changed |= bypass_conditional_jumps ();
4606 free_cprop_mem ();
4609 free_hash_table (&set_hash_table);
4611 if (gcse_file)
4613 fprintf (gcse_file, "CPROP of %s, pass %d: %d bytes needed, ",
4614 current_function_name, pass, bytes_used);
4615 fprintf (gcse_file, "%d const props, %d copy props\n\n",
4616 const_prop_count, copy_prop_count);
4618 /* Global analysis may get into infinite loops for unreachable blocks. */
4619 if (changed && cprop_jumps)
4620 delete_unreachable_blocks ();
4622 return changed;
4625 /* Bypass conditional jumps. */
4627 /* The value of last_basic_block at the beginning of the jump_bypass
4628 pass. The use of redirect_edge_and_branch_force may introduce new
4629 basic blocks, but the data flow analysis is only valid for basic
4630 block indices less than bypass_last_basic_block. */
4632 static int bypass_last_basic_block;
4634 /* Find a set of REGNO to a constant that is available at the end of basic
4635 block BB. Returns NULL if no such set is found. Based heavily upon
4636 find_avail_set. */
4638 static struct expr *
4639 find_bypass_set (int regno, int bb)
4641 struct expr *result = 0;
4643 for (;;)
4645 rtx src;
4646 struct expr *set = lookup_set (regno, &set_hash_table);
4648 while (set)
4650 if (TEST_BIT (cprop_avout[bb], set->bitmap_index))
4651 break;
4652 set = next_set (regno, set);
4655 if (set == 0)
4656 break;
4658 if (GET_CODE (set->expr) != SET)
4659 abort ();
4661 src = SET_SRC (set->expr);
4662 if (gcse_constant_p (src))
4663 result = set;
4665 if (GET_CODE (src) != REG)
4666 break;
4668 regno = REGNO (src);
4670 return result;
4674 /* Subroutine of bypass_block that checks whether a pseudo is killed by
4675 any of the instructions inserted on an edge. Jump bypassing places
4676 condition code setters on CFG edges using insert_insn_on_edge. This
4677 function is required to check that our data flow analysis is still
4678 valid prior to commit_edge_insertions. */
4680 static bool
4681 reg_killed_on_edge (rtx reg, edge e)
4683 rtx insn;
4685 for (insn = e->insns; insn; insn = NEXT_INSN (insn))
4686 if (INSN_P (insn) && reg_set_p (reg, insn))
4687 return true;
4689 return false;
4692 /* Subroutine of bypass_conditional_jumps that attempts to bypass the given
4693 basic block BB which has more than one predecessor. If not NULL, SETCC
4694 is the first instruction of BB, which is immediately followed by JUMP_INSN
4695 JUMP. Otherwise, SETCC is NULL, and JUMP is the first insn of BB.
4696 Returns nonzero if a change was made.
4698 During the jump bypassing pass, we may place copies of SETCC instructions
4699 on CFG edges. The following routine must be careful to pay attention to
4700 these inserted insns when performing its transformations. */
4702 static int
4703 bypass_block (basic_block bb, rtx setcc, rtx jump)
4705 rtx insn, note;
4706 edge e, enext, edest;
4707 int i, change;
4708 int may_be_loop_header;
4710 insn = (setcc != NULL) ? setcc : jump;
4712 /* Determine set of register uses in INSN. */
4713 reg_use_count = 0;
4714 note_uses (&PATTERN (insn), find_used_regs, NULL);
4715 note = find_reg_equal_equiv_note (insn);
4716 if (note)
4717 find_used_regs (&XEXP (note, 0), NULL);
4719 may_be_loop_header = false;
4720 for (e = bb->pred; e; e = e->pred_next)
4721 if (e->flags & EDGE_DFS_BACK)
4723 may_be_loop_header = true;
4724 break;
4727 change = 0;
4728 for (e = bb->pred; e; e = enext)
4730 enext = e->pred_next;
4731 if (e->flags & EDGE_COMPLEX)
4732 continue;
4734 /* We can't redirect edges from new basic blocks. */
4735 if (e->src->index >= bypass_last_basic_block)
4736 continue;
4738 /* The irreducible loops created by redirecting edges entering the
4739 loop from outside would decrease the effectiveness of some of the following
4740 optimizations, so prevent this. */
4741 if (may_be_loop_header
4742 && !(e->flags & EDGE_DFS_BACK))
4743 continue;
4745 for (i = 0; i < reg_use_count; i++)
4747 struct reg_use *reg_used = &reg_use_table[i];
4748 unsigned int regno = REGNO (reg_used->reg_rtx);
4749 basic_block dest, old_dest;
4750 struct expr *set;
4751 rtx src, new;
4753 if (regno >= max_gcse_regno)
4754 continue;
4756 set = find_bypass_set (regno, e->src->index);
4758 if (! set)
4759 continue;
4761 /* Check the data flow is valid after edge insertions. */
4762 if (e->insns && reg_killed_on_edge (reg_used->reg_rtx, e))
4763 continue;
4765 src = SET_SRC (pc_set (jump));
4767 if (setcc != NULL)
4768 src = simplify_replace_rtx (src,
4769 SET_DEST (PATTERN (setcc)),
4770 SET_SRC (PATTERN (setcc)));
4772 new = simplify_replace_rtx (src, reg_used->reg_rtx,
4773 SET_SRC (set->expr));
4775 /* Jump bypassing may have already placed instructions on
4776 edges of the CFG. We can't bypass an outgoing edge that
4777 has instructions associated with it, as these insns won't
4778 get executed if the incoming edge is redirected. */
4780 if (new == pc_rtx)
4782 edest = FALLTHRU_EDGE (bb);
4783 dest = edest->insns ? NULL : edest->dest;
4785 else if (GET_CODE (new) == LABEL_REF)
4787 dest = BLOCK_FOR_INSN (XEXP (new, 0));
4788 /* Don't bypass edges containing instructions. */
4789 for (edest = bb->succ; edest; edest = edest->succ_next)
4790 if (edest->dest == dest && edest->insns)
4792 dest = NULL;
4793 break;
4796 else
4797 dest = NULL;
4799 old_dest = e->dest;
4800 if (dest != NULL
4801 && dest != old_dest
4802 && dest != EXIT_BLOCK_PTR)
4804 redirect_edge_and_branch_force (e, dest);
4806 /* Copy the register setter to the redirected edge.
4807 Don't copy CC0 setters, as CC0 is dead after jump. */
4808 if (setcc)
4810 rtx pat = PATTERN (setcc);
4811 if (!CC0_P (SET_DEST (pat)))
4812 insert_insn_on_edge (copy_insn (pat), e);
4815 if (gcse_file != NULL)
4817 fprintf (gcse_file, "JUMP-BYPASS: Proved reg %d in jump_insn %d equals constant ",
4818 regno, INSN_UID (jump));
4819 print_rtl (gcse_file, SET_SRC (set->expr));
4820 fprintf (gcse_file, "\nBypass edge from %d->%d to %d\n",
4821 e->src->index, old_dest->index, dest->index);
4823 change = 1;
4824 break;
4828 return change;
4831 /* Find basic blocks with more than one predecessor that only contain a
4832 single conditional jump. If the result of the comparison is known at
4833 compile-time from any incoming edge, redirect that edge to the
4834 appropriate target. Returns nonzero if a change was made.
4836 This function is now mis-named, because we also handle indirect jumps. */
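/* A rough sketch of the transformation (register number and label
   arbitrary): if a block reached by several edges contains only

     (set (pc) (if_then_else (eq (reg 100) (const_int 0))
                             (label_ref L1) (pc)))

   and find_bypass_set proves that (reg 100) is zero along one incoming
   edge, that edge is redirected straight to L1, skipping the test;
   if the condition is instead known to be false, the edge is sent to
   the fall-through destination.  */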
4838 static int
4839 bypass_conditional_jumps (void)
4841 basic_block bb;
4842 int changed;
4843 rtx setcc;
4844 rtx insn;
4845 rtx dest;
4847 /* Note we start at block 1. */
4848 if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
4849 return 0;
4851 bypass_last_basic_block = last_basic_block;
4852 mark_dfs_back_edges ();
4854 changed = 0;
4855 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb->next_bb,
4856 EXIT_BLOCK_PTR, next_bb)
4858 /* Check for more than one predecessor. */
4859 if (bb->pred && bb->pred->pred_next)
4861 setcc = NULL_RTX;
4862 for (insn = bb->head;
4863 insn != NULL && insn != NEXT_INSN (bb->end);
4864 insn = NEXT_INSN (insn))
4865 if (GET_CODE (insn) == INSN)
4867 if (setcc)
4868 break;
4869 if (GET_CODE (PATTERN (insn)) != SET)
4870 break;
4872 dest = SET_DEST (PATTERN (insn));
4873 if (REG_P (dest) || CC0_P (dest))
4874 setcc = insn;
4875 else
4876 break;
4878 else if (GET_CODE (insn) == JUMP_INSN)
4880 if ((any_condjump_p (insn) || computed_jump_p (insn))
4881 && onlyjump_p (insn))
4882 changed |= bypass_block (bb, setcc, insn);
4883 break;
4885 else if (INSN_P (insn))
4886 break;
4890 /* If we bypassed any register setting insns, we inserted a
4891 copy on the redirected edge. These need to be committed. */
4892 if (changed)
4893 commit_edge_insertions ();
4895 return changed;
4898 /* Compute PRE+LCM working variables. */
4900 /* Local properties of expressions. */
4901 /* Nonzero for expressions that are transparent in the block. */
4902 static sbitmap *transp;
4904 /* Nonzero for expressions that are transparent at the end of the block.
4905 This is only zero for expressions killed by an abnormal critical edge
4906 created by a call. */
4907 static sbitmap *transpout;
4909 /* Nonzero for expressions that are computed (available) in the block. */
4910 static sbitmap *comp;
4912 /* Nonzero for expressions that are locally anticipatable in the block. */
4913 static sbitmap *antloc;
4915 /* Nonzero for expressions where this block is an optimal computation
4916 point. */
4917 static sbitmap *pre_optimal;
4919 /* Nonzero for expressions which are redundant in a particular block. */
4920 static sbitmap *pre_redundant;
4922 /* Nonzero for expressions which should be inserted on a specific edge. */
4923 static sbitmap *pre_insert_map;
4925 /* Nonzero for expressions which should be deleted in a specific block. */
4926 static sbitmap *pre_delete_map;
4928 /* Contains the edge_list returned by pre_edge_lcm. */
4929 static struct edge_list *edge_list;
4931 /* Redundant insns. */
4932 static sbitmap pre_redundant_insns;
4934 /* Allocate vars used for PRE analysis. */
4936 static void
4937 alloc_pre_mem (int n_blocks, int n_exprs)
4939 transp = sbitmap_vector_alloc (n_blocks, n_exprs);
4940 comp = sbitmap_vector_alloc (n_blocks, n_exprs);
4941 antloc = sbitmap_vector_alloc (n_blocks, n_exprs);
4943 pre_optimal = NULL;
4944 pre_redundant = NULL;
4945 pre_insert_map = NULL;
4946 pre_delete_map = NULL;
4947 ae_in = NULL;
4948 ae_out = NULL;
4949 ae_kill = sbitmap_vector_alloc (n_blocks, n_exprs);
4951 /* pre_insert and pre_delete are allocated later. */
4954 /* Free vars used for PRE analysis. */
4956 static void
4957 free_pre_mem (void)
4959 sbitmap_vector_free (transp);
4960 sbitmap_vector_free (comp);
4962 /* ANTLOC and AE_KILL are freed just after pre_lcm finishes. */
4964 if (pre_optimal)
4965 sbitmap_vector_free (pre_optimal);
4966 if (pre_redundant)
4967 sbitmap_vector_free (pre_redundant);
4968 if (pre_insert_map)
4969 sbitmap_vector_free (pre_insert_map);
4970 if (pre_delete_map)
4971 sbitmap_vector_free (pre_delete_map);
4972 if (ae_in)
4973 sbitmap_vector_free (ae_in);
4974 if (ae_out)
4975 sbitmap_vector_free (ae_out);
4977 transp = comp = NULL;
4978 pre_optimal = pre_redundant = pre_insert_map = pre_delete_map = NULL;
4979 ae_in = ae_out = NULL;
4982 /* Top level routine to do the dataflow analysis needed by PRE. */
4984 static void
4985 compute_pre_data (void)
4987 sbitmap trapping_expr;
4988 basic_block bb;
4989 unsigned int ui;
4991 compute_local_properties (transp, comp, antloc, &expr_hash_table);
4992 sbitmap_vector_zero (ae_kill, last_basic_block);
4994 /* Collect expressions which might trap. */
4995 trapping_expr = sbitmap_alloc (expr_hash_table.n_elems);
4996 sbitmap_zero (trapping_expr);
4997 for (ui = 0; ui < expr_hash_table.size; ui++)
4999 struct expr *e;
5000 for (e = expr_hash_table.table[ui]; e != NULL; e = e->next_same_hash)
5001 if (may_trap_p (e->expr))
5002 SET_BIT (trapping_expr, e->bitmap_index);
5005 /* Compute ae_kill for each basic block using ~(TRANSP | COMP);
5007 an expression is killed at the end of a block iff it is neither
5009 transparent through the block nor computed in it. This is significantly faster than compute_ae_kill. */
5011 FOR_EACH_BB (bb)
5013 edge e;
5015 /* If the current block is the destination of an abnormal edge, we
5016 kill all trapping expressions because we won't be able to properly
5017 place the instruction on the edge. So make them neither
5018 anticipatable nor transparent. This is fairly conservative. */
5019 for (e = bb->pred; e ; e = e->pred_next)
5020 if (e->flags & EDGE_ABNORMAL)
5022 sbitmap_difference (antloc[bb->index], antloc[bb->index], trapping_expr);
5023 sbitmap_difference (transp[bb->index], transp[bb->index], trapping_expr);
5024 break;
5027 sbitmap_a_or_b (ae_kill[bb->index], transp[bb->index], comp[bb->index]);
5028 sbitmap_not (ae_kill[bb->index], ae_kill[bb->index]);
5031 edge_list = pre_edge_lcm (gcse_file, expr_hash_table.n_elems, transp, comp, antloc,
5032 ae_kill, &pre_insert_map, &pre_delete_map);
5033 sbitmap_vector_free (antloc);
5034 antloc = NULL;
5035 sbitmap_vector_free (ae_kill);
5036 ae_kill = NULL;
5037 sbitmap_free (trapping_expr);
5040 /* PRE utilities */
5042 /* Return nonzero if an occurrence of expression EXPR in OCCR_BB would reach
5043 block BB.
5045 VISITED is a pointer to a working buffer for tracking which BB's have
5046 been visited. It is NULL for the top-level call.
5048 We treat reaching expressions that go through blocks containing the same
5049 reaching expression as "not reaching". E.g. if EXPR is generated in blocks
5050 2 and 3, INSN is in block 4, and 2->3->4, we treat the expression in block
5051 2 as not reaching. The intent is to improve the probability of finding
5052 only one reaching expression and to reduce register lifetimes by picking
5053 the closest such expression. */
5055 static int
5056 pre_expr_reaches_here_p_work (basic_block occr_bb, struct expr *expr, basic_block bb, char *visited)
5058 edge pred;
5060 for (pred = bb->pred; pred != NULL; pred = pred->pred_next)
5062 basic_block pred_bb = pred->src;
5064 if (pred->src == ENTRY_BLOCK_PTR
5065 /* Has this predecessor already been visited? */
5066 || visited[pred_bb->index])
5067 ;/* Nothing to do. */
5069 /* Does this predecessor generate this expression? */
5070 else if (TEST_BIT (comp[pred_bb->index], expr->bitmap_index))
5072 /* Is this the occurrence we're looking for?
5073 Note that there's only one generating occurrence per block
5074 so we just need to check the block number. */
5075 if (occr_bb == pred_bb)
5076 return 1;
5078 visited[pred_bb->index] = 1;
5080 /* Ignore this predecessor if it kills the expression. */
5081 else if (! TEST_BIT (transp[pred_bb->index], expr->bitmap_index))
5082 visited[pred_bb->index] = 1;
5084 /* Neither gen nor kill. */
5085 else
5087 visited[pred_bb->index] = 1;
5088 if (pre_expr_reaches_here_p_work (occr_bb, expr, pred_bb, visited))
5089 return 1;
5093 /* All paths have been checked. */
5094 return 0;
5097 /* The wrapper for pre_expr_reaches_here_p_work that ensures that any
5098 memory allocated for that function is returned. */
5100 static int
5101 pre_expr_reaches_here_p (basic_block occr_bb, struct expr *expr, basic_block bb)
5103 int rval;
5104 char *visited = xcalloc (last_basic_block, 1);
5106 rval = pre_expr_reaches_here_p_work (occr_bb, expr, bb, visited);
5108 free (visited);
5109 return rval;
5113 /* Given an expr, generate RTL which we can insert at the end of a BB,
5114 or on an edge. Set the block number of any insns generated to
5115 the value of BB. */
5117 static rtx
5118 process_insert_insn (struct expr *expr)
5120 rtx reg = expr->reaching_reg;
5121 rtx exp = copy_rtx (expr->expr);
5122 rtx pat;
5124 start_sequence ();
5126 /* If the expression is something that's an operand, like a constant,
5127 just copy it to a register. */
5128 if (general_operand (exp, GET_MODE (reg)))
5129 emit_move_insn (reg, exp);
5131 /* Otherwise, make a new insn to compute this expression and make sure the
5132 insn will be recognized (this also adds any needed CLOBBERs). Copy the
5133 expression to make sure we don't have any sharing issues. */
5134 else if (insn_invalid_p (emit_insn (gen_rtx_SET (VOIDmode, reg, exp))))
5135 abort ();
5137 pat = get_insns ();
5138 end_sequence ();
5140 return pat;
5143 /* Add EXPR to the end of basic block BB.
5145 This is used by both PRE and code hoisting.
5147 For PRE, we want to verify that the expr is either transparent
5148 or locally anticipatable in the target block. This check makes
5149 no sense for code hoisting. */
5151 static void
5152 insert_insn_end_bb (struct expr *expr, basic_block bb, int pre)
5154 rtx insn = bb->end;
5155 rtx new_insn;
5156 rtx reg = expr->reaching_reg;
5157 int regno = REGNO (reg);
5158 rtx pat, pat_end;
5160 pat = process_insert_insn (expr);
5161 if (pat == NULL_RTX || ! INSN_P (pat))
5162 abort ();
5164 pat_end = pat;
5165 while (NEXT_INSN (pat_end) != NULL_RTX)
5166 pat_end = NEXT_INSN (pat_end);
5168 /* If the last insn is a jump, insert EXPR in front [taking care to
5169 handle cc0, etc. properly]. Similarly we need to take care of trapping
5170 instructions in the presence of non-call exceptions. */
5172 if (GET_CODE (insn) == JUMP_INSN
5173 || (GET_CODE (insn) == INSN
5174 && (bb->succ->succ_next || (bb->succ->flags & EDGE_ABNORMAL))))
5176 #ifdef HAVE_cc0
5177 rtx note;
5178 #endif
5179 /* It should always be the case that we can put these instructions
5180 anywhere in the basic block when performing PRE optimizations.
5181 Check this. */
5182 if (GET_CODE (insn) == INSN && pre
5183 && !TEST_BIT (antloc[bb->index], expr->bitmap_index)
5184 && !TEST_BIT (transp[bb->index], expr->bitmap_index))
5185 abort ();
5187 /* If this is a jump table, then we can't insert stuff here. Since
5188 we know the previous real insn must be the tablejump, we insert
5189 the new instruction just before the tablejump. */
5190 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
5191 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
5192 insn = prev_real_insn (insn);
5194 #ifdef HAVE_cc0
5195 /* FIXME: 'twould be nice to call prev_cc0_setter here but it aborts
5196 if cc0 isn't set. */
5197 note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
5198 if (note)
5199 insn = XEXP (note, 0);
5200 else
5202 rtx maybe_cc0_setter = prev_nonnote_insn (insn);
5203 if (maybe_cc0_setter
5204 && INSN_P (maybe_cc0_setter)
5205 && sets_cc0_p (PATTERN (maybe_cc0_setter)))
5206 insn = maybe_cc0_setter;
5208 #endif
5209 /* FIXME: What if something in cc0/jump uses value set in new insn? */
5210 new_insn = emit_insn_before (pat, insn);
5213 /* Likewise if the last insn is a call, as will happen in the presence
5214 of exception handling. */
5215 else if (GET_CODE (insn) == CALL_INSN
5216 && (bb->succ->succ_next || (bb->succ->flags & EDGE_ABNORMAL)))
5218 /* Keeping in mind SMALL_REGISTER_CLASSES and parameters in registers,
5219 we search backward and place the instructions before the first
5220 parameter is loaded. Do this for everyone for consistency and a
5221 presumption that we'll get better code elsewhere as well.
5223 It should always be the case that we can put these instructions
5224 anywhere in the basic block when performing PRE optimizations.
5225 Check this. */
5227 if (pre
5228 && !TEST_BIT (antloc[bb->index], expr->bitmap_index)
5229 && !TEST_BIT (transp[bb->index], expr->bitmap_index))
5230 abort ();
5232 /* Since different machines initialize their parameter registers
5233 in different orders, assume nothing. Collect the set of all
5234 parameter registers. */
5235 insn = find_first_parameter_load (insn, bb->head);
5237 /* If we found all the parameter loads, then we want to insert
5238 before the first parameter load.
5240 If we did not find all the parameter loads, then we might have
5241 stopped on the head of the block, which could be a CODE_LABEL.
5242 If we inserted before the CODE_LABEL, then we would be putting
5243 the insn in the wrong basic block. In that case, put the insn
5244 after the CODE_LABEL. Also, respect NOTE_INSN_BASIC_BLOCK. */
5245 while (GET_CODE (insn) == CODE_LABEL
5246 || NOTE_INSN_BASIC_BLOCK_P (insn))
5247 insn = NEXT_INSN (insn);
5249 new_insn = emit_insn_before (pat, insn);
5251 else
5252 new_insn = emit_insn_after (pat, insn);
5254 while (1)
5256 if (INSN_P (pat))
5258 add_label_notes (PATTERN (pat), new_insn);
5259 note_stores (PATTERN (pat), record_set_info, pat);
5261 if (pat == pat_end)
5262 break;
5263 pat = NEXT_INSN (pat);
5266 gcse_create_count++;
5268 if (gcse_file)
5270 fprintf (gcse_file, "PRE/HOIST: end of bb %d, insn %d, ",
5271 bb->index, INSN_UID (new_insn));
5272 fprintf (gcse_file, "copying expression %d to reg %d\n",
5273 expr->bitmap_index, regno);
5277 /* Insert partially redundant expressions on edges in the CFG to make
5278 the expressions fully redundant. */
5280 static int
5281 pre_edge_insert (struct edge_list *edge_list, struct expr **index_map)
5283 int e, i, j, num_edges, set_size, did_insert = 0;
5284 sbitmap *inserted;
5286 /* Where PRE_INSERT_MAP is nonzero, we add the expression on that edge
5287 if it reaches any of the deleted expressions. */
5289 set_size = pre_insert_map[0]->size;
5290 num_edges = NUM_EDGES (edge_list);
5291 inserted = sbitmap_vector_alloc (num_edges, expr_hash_table.n_elems);
5292 sbitmap_vector_zero (inserted, num_edges);
5294 for (e = 0; e < num_edges; e++)
5296 int indx;
5297 basic_block bb = INDEX_EDGE_PRED_BB (edge_list, e);
5299 for (i = indx = 0; i < set_size; i++, indx += SBITMAP_ELT_BITS)
5301 SBITMAP_ELT_TYPE insert = pre_insert_map[e]->elms[i];
5303 for (j = indx; insert && j < (int) expr_hash_table.n_elems; j++, insert >>= 1)
5304 if ((insert & 1) != 0 && index_map[j]->reaching_reg != NULL_RTX)
5306 struct expr *expr = index_map[j];
5307 struct occr *occr;
5309 /* Now look at each deleted occurrence of this expression. */
5310 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
5312 if (! occr->deleted_p)
5313 continue;
5315 /* Insert this expression on this edge if it would
5316 reach the deleted occurrence in BB. */
5317 if (!TEST_BIT (inserted[e], j))
5319 rtx insn;
5320 edge eg = INDEX_EDGE (edge_list, e);
5322 /* We can't insert anything on an abnormal and
5323 critical edge, so we insert the insn at the end of
5324 the previous block. There are several alternatives
5325 detailed in Morgan's book, p. 277 (sec. 10.5), for
5326 handling this situation. This one is easiest for
5327 now. */
5329 if ((eg->flags & EDGE_ABNORMAL) == EDGE_ABNORMAL)
5330 insert_insn_end_bb (index_map[j], bb, 0);
5331 else
5333 insn = process_insert_insn (index_map[j]);
5334 insert_insn_on_edge (insn, eg);
5337 if (gcse_file)
5339 fprintf (gcse_file, "PRE/HOIST: edge (%d,%d), ",
5340 bb->index,
5341 INDEX_EDGE_SUCC_BB (edge_list, e)->index);
5342 fprintf (gcse_file, "copy expression %d\n",
5343 expr->bitmap_index);
5346 update_ld_motion_stores (expr);
5347 SET_BIT (inserted[e], j);
5348 did_insert = 1;
5349 gcse_create_count++;
5356 sbitmap_vector_free (inserted);
5357 return did_insert;
5360 /* Copy the result of INSN to EXPR->reaching_reg. INDX is the expression number. */
5362 static void
5363 pre_insert_copy_insn (struct expr *expr, rtx insn)
5365 rtx reg = expr->reaching_reg;
5366 int regno = REGNO (reg);
5367 int indx = expr->bitmap_index;
5368 rtx set = single_set (insn);
5369 rtx new_insn;
5371 if (!set)
5372 abort ();
5374 new_insn = emit_insn_after (gen_move_insn (reg, copy_rtx (SET_DEST (set))), insn);
5376 /* Keep register set table up to date. */
5377 record_one_set (regno, new_insn);
5379 gcse_create_count++;
5381 if (gcse_file)
5382 fprintf (gcse_file,
5383 "PRE: bb %d, insn %d, copy expression %d in insn %d to reg %d\n",
5384 BLOCK_NUM (insn), INSN_UID (new_insn), indx,
5385 INSN_UID (insn), regno);
5386 update_ld_motion_stores (expr);
5389 /* Copy available expressions that reach the redundant expression
5390 to `reaching_reg'. */
5392 static void
5393 pre_insert_copies (void)
5395 unsigned int i;
5396 struct expr *expr;
5397 struct occr *occr;
5398 struct occr *avail;
5400 /* For each available expression in the table, copy the result to
5401 `reaching_reg' if the expression reaches a deleted one.
5403 ??? The current algorithm is rather brute force.
5404 Need to do some profiling. */
5406 for (i = 0; i < expr_hash_table.size; i++)
5407 for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash)
5409 /* If the basic block isn't reachable, PPOUT will be TRUE. However,
5410 we don't want to insert a copy here because the expression may not
5411 really be redundant. So only insert an insn if the expression was
5412 deleted. This test also avoids further processing if the
5413 expression wasn't deleted anywhere. */
5414 if (expr->reaching_reg == NULL)
5415 continue;
5417 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
5419 if (! occr->deleted_p)
5420 continue;
5422 for (avail = expr->avail_occr; avail != NULL; avail = avail->next)
5424 rtx insn = avail->insn;
5426 /* No need to handle this one if handled already. */
5427 if (avail->copied_p)
5428 continue;
5430 /* Don't handle this one if it's a redundant one. */
5431 if (TEST_BIT (pre_redundant_insns, INSN_CUID (insn)))
5432 continue;
5434 /* Or if the expression doesn't reach the deleted one. */
5435 if (! pre_expr_reaches_here_p (BLOCK_FOR_INSN (avail->insn),
5436 expr,
5437 BLOCK_FOR_INSN (occr->insn)))
5438 continue;
5440 /* Copy the result of avail to reaching_reg. */
5441 pre_insert_copy_insn (expr, insn);
5442 avail->copied_p = 1;
5448 /* Emit move from SRC to DEST noting the equivalence with expression computed
5449 in INSN. */
5450 static rtx
5451 gcse_emit_move_after (rtx src, rtx dest, rtx insn)
5453 rtx new;
5454 rtx set = single_set (insn), set2;
5455 rtx note;
5456 rtx eqv;
5458 /* This should never fail since we're creating a reg->reg copy
5459 we've verified to be valid. */
5461 new = emit_insn_after (gen_move_insn (dest, src), insn);
5463 /* Note the equivalence for local CSE pass. */
5464 set2 = single_set (new);
5465 if (!set2 || !rtx_equal_p (SET_DEST (set2), dest))
5466 return new;
5467 if ((note = find_reg_equal_equiv_note (insn)))
5468 eqv = XEXP (note, 0);
5469 else
5470 eqv = SET_SRC (set);
5472 set_unique_reg_note (new, REG_EQUAL, copy_insn_1 (eqv));
5474 return new;
5477 /* Delete redundant computations.
5478 Deletion is done by changing the insn to copy the `reaching_reg' of
5479 the expression into the result of the SET. It is left to later passes
5480 (cprop, cse2, flow, combine, regmove) to propagate the copy or eliminate it.
5482 Returns nonzero if a change is made. */
5484 static int
5485 pre_delete (void)
5487 unsigned int i;
5488 int changed;
5489 struct expr *expr;
5490 struct occr *occr;
5492 changed = 0;
5493 for (i = 0; i < expr_hash_table.size; i++)
5494 for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash)
5496 int indx = expr->bitmap_index;
5498 /* We only need to search antic_occr since we require
5499 ANTLOC != 0. */
5501 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
5503 rtx insn = occr->insn;
5504 rtx set;
5505 basic_block bb = BLOCK_FOR_INSN (insn);
5507 if (TEST_BIT (pre_delete_map[bb->index], indx))
5509 set = single_set (insn);
5510 if (! set)
5511 abort ();
5513 /* Create a pseudo-reg to store the result of reaching
5514 expressions into. Get the mode for the new pseudo from
5515 the mode of the original destination pseudo. */
5516 if (expr->reaching_reg == NULL)
5517 expr->reaching_reg
5518 = gen_reg_rtx (GET_MODE (SET_DEST (set)));
5520 gcse_emit_move_after (expr->reaching_reg, SET_DEST (set), insn);
5521 delete_insn (insn);
5522 occr->deleted_p = 1;
5523 SET_BIT (pre_redundant_insns, INSN_CUID (insn));
5524 changed = 1;
5525 gcse_subst_count++;
5527 if (gcse_file)
5529 fprintf (gcse_file,
5530 "PRE: redundant insn %d (expression %d) in ",
5531 INSN_UID (insn), indx);
5532 fprintf (gcse_file, "bb %d, reaching reg is %d\n",
5533 bb->index, REGNO (expr->reaching_reg));
5539 return changed;
5542 /* Perform GCSE optimizations using PRE.
5543 This is called by one_pre_gcse_pass after all the dataflow analysis
5544 has been done.
5546 This is based on the original Morel-Renvoise paper, Fred Chow's thesis, and
5547 lazy code motion from Knoop, Ruthing and Steffen as described in Advanced
5548 Compiler Design and Implementation.
5550 ??? A new pseudo reg is created to hold the reaching expression. The nice
5551 thing about the classical approach is that it would try to use an existing
5552 reg. If the register can't be adequately optimized [i.e. we introduce
5553 reload problems], one could add a pass here to propagate the new register
5554 through the block.
5556 ??? We don't handle single sets in PARALLELs because we're [currently] not
5557 able to copy the rest of the parallel when we insert copies to create full
5558 redundancies from partial redundancies. However, there's no reason why we
5559 can't handle PARALLELs in the cases where there are no partial
5560 redundancies. */
5562 static int
5563 pre_gcse (void)
5565 unsigned int i;
5566 int did_insert, changed;
5567 struct expr **index_map;
5568 struct expr *expr;
5570 /* Compute a mapping from expression number (`bitmap_index') to
5571 hash table entry. */
5573 index_map = xcalloc (expr_hash_table.n_elems, sizeof (struct expr *));
5574 for (i = 0; i < expr_hash_table.size; i++)
5575 for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash)
5576 index_map[expr->bitmap_index] = expr;
5578 /* Reset bitmap used to track which insns are redundant. */
5579 pre_redundant_insns = sbitmap_alloc (max_cuid);
5580 sbitmap_zero (pre_redundant_insns);
5582 /* Delete the redundant insns first so that
5583 - we know what register to use for the new insns and for the other
5584 ones with reaching expressions
5585 - we know which insns are redundant when we go to create copies */
5587 changed = pre_delete ();
5589 did_insert = pre_edge_insert (edge_list, index_map);
5591 /* In other places with reaching expressions, copy the expression to the
5592 specially allocated pseudo-reg that reaches the redundant expr. */
5593 pre_insert_copies ();
5594 if (did_insert)
5596 commit_edge_insertions ();
5597 changed = 1;
5600 free (index_map);
5601 sbitmap_free (pre_redundant_insns);
5602 return changed;
5605 /* Top level routine to perform one PRE GCSE pass.
5607 Return nonzero if a change was made. */
5609 static int
5610 one_pre_gcse_pass (int pass)
5612 int changed = 0;
5614 gcse_subst_count = 0;
5615 gcse_create_count = 0;
5617 alloc_hash_table (max_cuid, &expr_hash_table, 0);
5618 add_noreturn_fake_exit_edges ();
5619 if (flag_gcse_lm)
5620 compute_ld_motion_mems ();
5622 compute_hash_table (&expr_hash_table);
5623 trim_ld_motion_mems ();
5624 if (gcse_file)
5625 dump_hash_table (gcse_file, "Expression", &expr_hash_table);
5627 if (expr_hash_table.n_elems > 0)
5629 alloc_pre_mem (last_basic_block, expr_hash_table.n_elems);
5630 compute_pre_data ();
5631 changed |= pre_gcse ();
5632 free_edge_list (edge_list);
5633 free_pre_mem ();
5636 free_ldst_mems ();
5637 remove_fake_edges ();
5638 free_hash_table (&expr_hash_table);
5640 if (gcse_file)
5642 fprintf (gcse_file, "\nPRE GCSE of %s, pass %d: %d bytes needed, ",
5643 current_function_name, pass, bytes_used);
5644 fprintf (gcse_file, "%d substs, %d insns created\n",
5645 gcse_subst_count, gcse_create_count);
5648 return changed;
5651 /* If X contains any LABEL_REF's, add REG_LABEL notes for them to INSN.
5652 If notes are added to an insn which references a CODE_LABEL, the
5653 LABEL_NUSES count is incremented. We have to add REG_LABEL notes,
5654 because the following loop optimization pass requires them. */
5656 /* ??? This is very similar to the loop.c add_label_notes function. We
5657 could probably share code here. */
5659 /* ??? If there was a jump optimization pass after gcse and before loop,
5660 then we would not need to do this here, because jump would add the
5661 necessary REG_LABEL notes. */
5663 static void
5664 add_label_notes (rtx x, rtx insn)
5666 enum rtx_code code = GET_CODE (x);
5667 int i, j;
5668 const char *fmt;
5670 if (code == LABEL_REF && !LABEL_REF_NONLOCAL_P (x))
5672 /* This code used to ignore labels that referred to dispatch tables to
5673 avoid flow generating (slightly) worse code.
5675 We no longer ignore such label references (see LABEL_REF handling in
5676 mark_jump_label for additional information). */
5678 REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_LABEL, XEXP (x, 0),
5679 REG_NOTES (insn));
5680 if (LABEL_P (XEXP (x, 0)))
5681 LABEL_NUSES (XEXP (x, 0))++;
5682 return;
5685 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
5687 if (fmt[i] == 'e')
5688 add_label_notes (XEXP (x, i), insn);
5689 else if (fmt[i] == 'E')
5690 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
5691 add_label_notes (XVECEXP (x, i, j), insn);
5695 /* Compute transparent outgoing information for each block.
5697 An expression is transparent to an edge unless it is killed by
5698 the edge itself. This can only happen with abnormal control flow,
5699 when the edge is traversed through a call. This happens with
5700 non-local labels and exceptions.
5702 This would not be necessary if we split the edge. While this is
5703 normally impossible for abnormal critical edges, with some effort
5704 it should be possible with exception handling, since we still have
5705 control over which handler should be invoked. But due to increased
5706 EH table sizes, this may not be worthwhile. */
5708 static void
5709 compute_transpout (void)
5711 basic_block bb;
5712 unsigned int i;
5713 struct expr *expr;
5715 sbitmap_vector_ones (transpout, last_basic_block);
5717 FOR_EACH_BB (bb)
5719 /* Note that flow inserted a nop at the end of basic blocks that
5720 end in call instructions for reasons other than abnormal
5721 control flow. */
5722 if (GET_CODE (bb->end) != CALL_INSN)
5723 continue;
5725 for (i = 0; i < expr_hash_table.size; i++)
5726 for (expr = expr_hash_table.table[i]; expr ; expr = expr->next_same_hash)
5727 if (GET_CODE (expr->expr) == MEM)
5729 if (GET_CODE (XEXP (expr->expr, 0)) == SYMBOL_REF
5730 && CONSTANT_POOL_ADDRESS_P (XEXP (expr->expr, 0)))
5731 continue;
5733 /* ??? Optimally, we would use interprocedural alias
5734 analysis to determine if this mem is actually killed
5735 by this call. */
5736 RESET_BIT (transpout[bb->index], expr->bitmap_index);
5741 /* Removal of useless null pointer checks */
5743 /* Called via note_stores. X is set by SETTER. If X is a register we must
5744 invalidate nonnull_local and set nonnull_killed. DATA is really a
5745 `null_pointer_info *'.
5747 We ignore hard registers. */
5749 static void
5750 invalidate_nonnull_info (rtx x, rtx setter ATTRIBUTE_UNUSED, void *data)
5752 unsigned int regno;
5753 struct null_pointer_info *npi = (struct null_pointer_info *) data;
5755 while (GET_CODE (x) == SUBREG)
5756 x = SUBREG_REG (x);
5758 /* Ignore anything that is not a register or is a hard register. */
5759 if (GET_CODE (x) != REG
5760 || REGNO (x) < npi->min_reg
5761 || REGNO (x) >= npi->max_reg)
5762 return;
5764 regno = REGNO (x) - npi->min_reg;
5766 RESET_BIT (npi->nonnull_local[npi->current_block->index], regno);
5767 SET_BIT (npi->nonnull_killed[npi->current_block->index], regno);
5770 /* Do null-pointer check elimination for the registers indicated in
5771 NPI. NONNULL_AVIN and NONNULL_AVOUT are pre-allocated sbitmaps;
5772 they are not our responsibility to free. */
5774 static int
5775 delete_null_pointer_checks_1 (unsigned int *block_reg, sbitmap *nonnull_avin,
5776 sbitmap *nonnull_avout,
5777 struct null_pointer_info *npi)
5779 basic_block bb, current_block;
5780 sbitmap *nonnull_local = npi->nonnull_local;
5781 sbitmap *nonnull_killed = npi->nonnull_killed;
5782 int something_changed = 0;
5784 /* Compute local properties, nonnull and killed. A register will have
5785 the nonnull property if at the end of the current block its value is
5786 known to be nonnull. The killed property indicates that somewhere in
5787 the block any information we had about the register is killed.
5789 Note that a register can have both properties in a single block. That
5790 indicates that it's killed, then later in the block a new value is
5791 computed. */
5792 sbitmap_vector_zero (nonnull_local, last_basic_block);
5793 sbitmap_vector_zero (nonnull_killed, last_basic_block);
5795 FOR_EACH_BB (current_block)
5797 rtx insn, stop_insn;
5799 /* Set the current block for invalidate_nonnull_info. */
5800 npi->current_block = current_block;
5802 /* Scan each insn in the basic block looking for memory references and
5803 register sets. */
5804 stop_insn = NEXT_INSN (current_block->end);
5805 for (insn = current_block->head;
5806 insn != stop_insn;
5807 insn = NEXT_INSN (insn))
5809 rtx set;
5810 rtx reg;
5812 /* Ignore anything that is not a normal insn. */
5813 if (! INSN_P (insn))
5814 continue;
5816 /* Basically ignore anything that is not a simple SET. We do have
5817 to make sure to invalidate nonnull_local and set nonnull_killed
5818 for such insns though. */
5819 set = single_set (insn);
5820 if (!set)
5822 note_stores (PATTERN (insn), invalidate_nonnull_info, npi);
5823 continue;
5826 /* See if we've got a usable memory load. We handle it first
5827 in case it uses its address register as a dest (which kills
5828 the nonnull property). */
5829 if (GET_CODE (SET_SRC (set)) == MEM
5830 && GET_CODE ((reg = XEXP (SET_SRC (set), 0))) == REG
5831 && REGNO (reg) >= npi->min_reg
5832 && REGNO (reg) < npi->max_reg)
5833 SET_BIT (nonnull_local[current_block->index],
5834 REGNO (reg) - npi->min_reg);
5836 /* Now invalidate stuff clobbered by this insn. */
5837 note_stores (PATTERN (insn), invalidate_nonnull_info, npi);
5839 /* And handle stores, we do these last since any sets in INSN can
5840 not kill the nonnull property if it is derived from a MEM
5841 appearing in a SET_DEST. */
5842 if (GET_CODE (SET_DEST (set)) == MEM
5843 && GET_CODE ((reg = XEXP (SET_DEST (set), 0))) == REG
5844 && REGNO (reg) >= npi->min_reg
5845 && REGNO (reg) < npi->max_reg)
5846 SET_BIT (nonnull_local[current_block->index],
5847 REGNO (reg) - npi->min_reg);
5851 /* Now compute global properties based on the local properties. This
5852 is a classic global availability algorithm. */
5853 compute_available (nonnull_local, nonnull_killed,
5854 nonnull_avout, nonnull_avin);
5856 /* Now look at each bb and see if it ends with a compare of a value
5857 against zero. */
5858 FOR_EACH_BB (bb)
5860 rtx last_insn = bb->end;
5861 rtx condition, earliest;
5862 int compare_and_branch;
5864 /* Since MIN_REG is always at least FIRST_PSEUDO_REGISTER, and
5865 since BLOCK_REG[BB] is zero if this block did not end with a
5866 comparison against zero, this condition works. */
5867 if (block_reg[bb->index] < npi->min_reg
5868 || block_reg[bb->index] >= npi->max_reg)
5869 continue;
5871 /* LAST_INSN is a conditional jump. Get its condition. */
5872 condition = get_condition (last_insn, &earliest);
5874 /* If we can't determine the condition then skip. */
5875 if (! condition)
5876 continue;
5878 /* Is the register known to have a nonzero value? */
5879 if (!TEST_BIT (nonnull_avout[bb->index], block_reg[bb->index] - npi->min_reg))
5880 continue;
5882 /* Try to compute whether the compare/branch at the loop end is one or
5883 two instructions. */
5884 if (earliest == last_insn)
5885 compare_and_branch = 1;
5886 else if (earliest == prev_nonnote_insn (last_insn))
5887 compare_and_branch = 2;
5888 else
5889 continue;
5891 /* We know the register in this comparison is nonnull at exit from
5892 this block. We can optimize this comparison. */
5893 if (GET_CODE (condition) == NE)
5895 rtx new_jump;
5897 new_jump = emit_jump_insn_after (gen_jump (JUMP_LABEL (last_insn)),
5898 last_insn);
5899 JUMP_LABEL (new_jump) = JUMP_LABEL (last_insn);
5900 LABEL_NUSES (JUMP_LABEL (new_jump))++;
5901 emit_barrier_after (new_jump);
5904 something_changed = 1;
5905 delete_insn (last_insn);
5906 if (compare_and_branch == 2)
5907 delete_insn (earliest);
5908 purge_dead_edges (bb);
5910 /* Don't check this block again. (Note that BLOCK_END is
5911 invalid here; we deleted the last instruction in the
5912 block.) */
5913 block_reg[bb->index] = 0;
5916 return something_changed;
5919 /* Find EQ/NE comparisons against zero which can be (indirectly) evaluated
5920 at compile time.
5922 This is conceptually similar to global constant/copy propagation and
5923 classic global CSE (it even uses the same dataflow equations as cprop).
5925 If a register is used as a memory address with the form (mem (reg)), then we
5926 know that REG can not be zero at that point in the program. Any instruction
5927 which sets REG "kills" this property.
5929 So, if every path leading to a conditional branch has an available memory
5930 reference of that form, then we know the register can not have the value
5931 zero at the conditional branch.
5933 So we merely need to compute the local properties and propagate that data
5934 around the cfg, then optimize where possible.
5936 We run this pass two times. Once before CSE, then again after CSE. This
5937 has proven to be the most profitable approach. It is rare for new
5938 optimization opportunities of this nature to appear after the first CSE
5939 pass.
5941 This could probably be integrated with global cprop with a little work. */
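/* A hedged example of the transformation (pseudo RTL, for
   illustration only):

     (set (reg i) (mem (reg p)))     p is dereferenced, so p must be
                                     nonnull from here on
     ...                             nothing sets p
     (if_then_else (ne (reg p) (const_int 0)) L1 ...)

   Since p is provably nonnull, the NE branch is always taken: the
   code below emits an unconditional jump to L1 and deletes the
   compare/branch (an EQ test against zero would simply be deleted,
   falling through).  */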
5943 int
5944 delete_null_pointer_checks (rtx f ATTRIBUTE_UNUSED)
5946 sbitmap *nonnull_avin, *nonnull_avout;
5947 unsigned int *block_reg;
5948 basic_block bb;
5949 int reg;
5950 int regs_per_pass;
5951 int max_reg;
5952 struct null_pointer_info npi;
5953 int something_changed = 0;
5955 /* If we have only a single block, then there's nothing to do. */
5956 if (n_basic_blocks <= 1)
5957 return 0;
5959 /* Trying to perform global optimizations on flow graphs which have
5960 a high connectivity will take a long time and is unlikely to be
5961 particularly useful.
5963 In normal circumstances a cfg should have about twice as many edges
5964 as blocks. But we do not want to punish small functions which have
5965 a couple switch statements. So we require a relatively large number
5966 of basic blocks and the ratio of edges to blocks to be high. */
5967 if (n_basic_blocks > 1000 && n_edges / n_basic_blocks >= 20)
5968 return 0;
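/* For example (illustrative numbers): a function with 1500 basic
   blocks and 45000 edges has 30 edges per block, so both tests above
   hold and the pass is skipped.  */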
5970 /* We need four bitmaps, each with a bit for each register in each
5971 basic block. */
5972 max_reg = max_reg_num ();
5973 regs_per_pass = get_bitmap_width (4, last_basic_block, max_reg);
5975 /* Allocate bitmaps to hold local and global properties. */
5976 npi.nonnull_local = sbitmap_vector_alloc (last_basic_block, regs_per_pass);
5977 npi.nonnull_killed = sbitmap_vector_alloc (last_basic_block, regs_per_pass);
5978 nonnull_avin = sbitmap_vector_alloc (last_basic_block, regs_per_pass);
5979 nonnull_avout = sbitmap_vector_alloc (last_basic_block, regs_per_pass);
5981 /* Go through the basic blocks, seeing whether or not each block
5982 ends with a conditional branch whose condition is a comparison
5983 against zero. Record the register compared in BLOCK_REG. */
5984 block_reg = xcalloc (last_basic_block, sizeof (int));
5985 FOR_EACH_BB (bb)
5987 rtx last_insn = bb->end;
5988 rtx condition, earliest, reg;
5990 /* We only want conditional branches. */
5991 if (GET_CODE (last_insn) != JUMP_INSN
5992 || !any_condjump_p (last_insn)
5993 || !onlyjump_p (last_insn))
5994 continue;
5996 /* LAST_INSN is a conditional jump. Get its condition. */
5997 condition = get_condition (last_insn, &earliest);
5999 /* If we were unable to get the condition, or it is not an equality
6000 comparison against zero then there's nothing we can do. */
6001 if (!condition
6002 || (GET_CODE (condition) != NE && GET_CODE (condition) != EQ)
6003 || GET_CODE (XEXP (condition, 1)) != CONST_INT
6004 || (XEXP (condition, 1)
6005 != CONST0_RTX (GET_MODE (XEXP (condition, 0)))))
6006 continue;
6008 /* We must be checking a register against zero. */
6009 reg = XEXP (condition, 0);
6010 if (GET_CODE (reg) != REG)
6011 continue;
6013 block_reg[bb->index] = REGNO (reg);
6016 /* Go through the algorithm for each block of registers. */
6017 for (reg = FIRST_PSEUDO_REGISTER; reg < max_reg; reg += regs_per_pass)
6019 npi.min_reg = reg;
6020 npi.max_reg = MIN (reg + regs_per_pass, max_reg);
6021 something_changed |= delete_null_pointer_checks_1 (block_reg,
6022 nonnull_avin,
6023 nonnull_avout,
6024 &npi);
6027 /* Free the table of registers compared at the end of every block. */
6028 free (block_reg);
6030 /* Free bitmaps. */
6031 sbitmap_vector_free (npi.nonnull_local);
6032 sbitmap_vector_free (npi.nonnull_killed);
6033 sbitmap_vector_free (nonnull_avin);
6034 sbitmap_vector_free (nonnull_avout);
6036 return something_changed;
6039 /* Code Hoisting variables and subroutines. */
6041 /* Very busy expressions. */
6042 static sbitmap *hoist_vbein;
6043 static sbitmap *hoist_vbeout;
6045 /* Hoistable expressions. */
6046 static sbitmap *hoist_exprs;
6048 /* Dominator bitmaps. */
6049 dominance_info dominators;
6051 /* ??? We could compute post dominators and run this algorithm in
6052 reverse to perform tail merging, doing so would probably be
6053 more effective than the tail merging code in jump.c.
6055 It's unclear if tail merging could be run in parallel with
6056 code hoisting. It would be nice. */
6058 /* Allocate vars used for code hoisting analysis. */
6060 static void
6061 alloc_code_hoist_mem (int n_blocks, int n_exprs)
6063 antloc = sbitmap_vector_alloc (n_blocks, n_exprs);
6064 transp = sbitmap_vector_alloc (n_blocks, n_exprs);
6065 comp = sbitmap_vector_alloc (n_blocks, n_exprs);
6067 hoist_vbein = sbitmap_vector_alloc (n_blocks, n_exprs);
6068 hoist_vbeout = sbitmap_vector_alloc (n_blocks, n_exprs);
6069 hoist_exprs = sbitmap_vector_alloc (n_blocks, n_exprs);
6070 transpout = sbitmap_vector_alloc (n_blocks, n_exprs);
6073 /* Free vars used for code hoisting analysis. */
6075 static void
6076 free_code_hoist_mem (void)
6078 sbitmap_vector_free (antloc);
6079 sbitmap_vector_free (transp);
6080 sbitmap_vector_free (comp);
6082 sbitmap_vector_free (hoist_vbein);
6083 sbitmap_vector_free (hoist_vbeout);
6084 sbitmap_vector_free (hoist_exprs);
6085 sbitmap_vector_free (transpout);
6087 free_dominance_info (dominators);
6090 /* Compute the very busy expressions at entry/exit from each block.
6092 An expression is very busy if all paths from a given point
6093 compute the expression. */
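/* As a sketch, the fixed point computed below satisfies the usual
   backward dataflow equations (| for union, & for intersection):

     VBEIN(bb)  = ANTLOC(bb) | (VBEOUT(bb) & TRANSP(bb))
     VBEOUT(bb) = intersection of VBEIN over the successors of bb,
                  and stays empty for the last block before the exit

   iterated until nothing changes, visiting the blocks in reverse
   order so the values converge in fewer passes.  */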
6095 static void
6096 compute_code_hoist_vbeinout (void)
6098 int changed, passes;
6099 basic_block bb;
6101 sbitmap_vector_zero (hoist_vbeout, last_basic_block);
6102 sbitmap_vector_zero (hoist_vbein, last_basic_block);
6104 passes = 0;
6105 changed = 1;
6107 while (changed)
6109 changed = 0;
6111 /* We scan the blocks in the reverse order to speed up
6112 the convergence. */
6113 FOR_EACH_BB_REVERSE (bb)
6115 changed |= sbitmap_a_or_b_and_c_cg (hoist_vbein[bb->index], antloc[bb->index],
6116 hoist_vbeout[bb->index], transp[bb->index]);
6117 if (bb->next_bb != EXIT_BLOCK_PTR)
6118 sbitmap_intersection_of_succs (hoist_vbeout[bb->index], hoist_vbein, bb->index);
6121 passes++;
6124 if (gcse_file)
6125 fprintf (gcse_file, "hoisting vbeinout computation: %d passes\n", passes);
6128 /* Top level routine to do the dataflow analysis needed by code hoisting. */
6130 static void
6131 compute_code_hoist_data (void)
6133 compute_local_properties (transp, comp, antloc, &expr_hash_table);
6134 compute_transpout ();
6135 compute_code_hoist_vbeinout ();
6136 dominators = calculate_dominance_info (CDI_DOMINATORS);
6137 if (gcse_file)
6138 fprintf (gcse_file, "\n");
6141 /* Determine if the expression identified by EXPR_INDEX would
6142 reach BB unimpaired if it was placed at the end of EXPR_BB.
6144 It's unclear exactly what Muchnick meant by "unimpaired".  It seems
6145 to me that the expression must either be computed or transparent in
6146 *every* block in the path(s) from EXPR_BB to BB. Any other definition
6147 would allow the expression to be hoisted out of loops, even if
6148 the expression wasn't a loop invariant.
6150 Contrast this to reachability for PRE where an expression is
6151 considered reachable if *any* path reaches instead of *all*
6152 paths. */
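/* A hedged illustration of the difference:

        EXPR_BB
        /     \
      B1       B2
        \     /
          BB

   If the expression is transparent in B2 but killed in B1, PRE could
   still consider it reachable at BB (one path suffices), whereas the
   walk below fails as soon as it sees the kill in B1, since hoisting
   needs *every* path from EXPR_BB to BB to preserve the value.  */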
6154 static int
6155 hoist_expr_reaches_here_p (basic_block expr_bb, int expr_index, basic_block bb, char *visited)
6157 edge pred;
6158 int visited_allocated_locally = 0;
6161 if (visited == NULL)
6163 visited_allocated_locally = 1;
6164 visited = xcalloc (last_basic_block, 1);
6167 for (pred = bb->pred; pred != NULL; pred = pred->pred_next)
6169 basic_block pred_bb = pred->src;
6171 if (pred->src == ENTRY_BLOCK_PTR)
6172 break;
6173 else if (pred_bb == expr_bb)
6174 continue;
6175 else if (visited[pred_bb->index])
6176 continue;
6178 /* Does this predecessor generate this expression? */
6179 else if (TEST_BIT (comp[pred_bb->index], expr_index))
6180 break;
6181 else if (! TEST_BIT (transp[pred_bb->index], expr_index))
6182 break;
6184 /* Not killed. */
6185 else
6187 visited[pred_bb->index] = 1;
6188 if (! hoist_expr_reaches_here_p (expr_bb, expr_index,
6189 pred_bb, visited))
6190 break;
6193 if (visited_allocated_locally)
6194 free (visited);
6196 return (pred == NULL);
6199 /* Actually perform code hoisting. */
6201 static void
6202 hoist_code (void)
6204 basic_block bb, dominated;
6205 basic_block *domby;
6206 unsigned int domby_len;
6207 unsigned int i,j;
6208 struct expr **index_map;
6209 struct expr *expr;
6211 sbitmap_vector_zero (hoist_exprs, last_basic_block);
6213 /* Compute a mapping from expression number (`bitmap_index') to
6214 hash table entry. */
6216 index_map = xcalloc (expr_hash_table.n_elems, sizeof (struct expr *));
6217 for (i = 0; i < expr_hash_table.size; i++)
6218 for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash)
6219 index_map[expr->bitmap_index] = expr;
6221 /* Walk over each basic block looking for potentially hoistable
6222 expressions; nothing gets hoisted from the entry block.  */
6223 FOR_EACH_BB (bb)
6225 int found = 0;
6226 int insn_inserted_p;
6228 domby_len = get_dominated_by (dominators, bb, &domby);
6229 /* Examine each expression that is very busy at the exit of this
6230 block. These are the potentially hoistable expressions. */
6231 for (i = 0; i < hoist_vbeout[bb->index]->n_bits; i++)
6233 int hoistable = 0;
6235 if (TEST_BIT (hoist_vbeout[bb->index], i)
6236 && TEST_BIT (transpout[bb->index], i))
6238 /* We've found a potentially hoistable expression, now
6239 we look at every block BB dominates to see if it
6240 computes the expression. */
6241 for (j = 0; j < domby_len; j++)
6243 dominated = domby[j];
6244 /* Ignore self dominance. */
6245 if (bb == dominated)
6246 continue;
6247 /* We've found a dominated block, now see if it computes
6248 the busy expression and whether or not moving that
6249 expression to the "beginning" of that block is safe. */
6250 if (!TEST_BIT (antloc[dominated->index], i))
6251 continue;
6253 /* Note if the expression would reach the dominated block
6254 unimpaired if it was placed at the end of BB.
6256 Keep track of how many times this expression is hoistable
6257 from a dominated block into BB. */
6258 if (hoist_expr_reaches_here_p (bb, i, dominated, NULL))
6259 hoistable++;
6262 /* If we found more than one hoistable occurrence of this
6263 expression, then note it in the bitmap of expressions to
6264 hoist. It makes no sense to hoist things which are computed
6265 in only one BB, and doing so tends to pessimize register
6266 allocation. One could increase this value to try harder
6267 to avoid any possible code expansion due to register
6268 allocation issues; however experiments have shown that
6269 the vast majority of hoistable expressions are only movable
6270 from two successors, so raising this threshold is likely
6271 to nullify any benefit we get from code hoisting. */
6272 if (hoistable > 1)
6274 SET_BIT (hoist_exprs[bb->index], i);
6275 found = 1;
6279 /* If we found nothing to hoist, then quit now. */
6280 if (! found)
6282 free (domby);
6283 continue;
6286 /* Loop over all the hoistable expressions. */
6287 for (i = 0; i < hoist_exprs[bb->index]->n_bits; i++)
6289 /* We want to insert the expression into BB only once, so
6290 note when we've inserted it. */
6291 insn_inserted_p = 0;
6293 /* These tests should be the same as the tests above. */
6294 if (TEST_BIT (hoist_vbeout[bb->index], i))
6296 /* We've found a potentially hoistable expression, now
6297 we look at every block BB dominates to see if it
6298 computes the expression. */
6299 for (j = 0; j < domby_len; j++)
6301 dominated = domby[j];
6302 /* Ignore self dominance. */
6303 if (bb == dominated)
6304 continue;
6306 /* We've found a dominated block, now see if it computes
6307 the busy expression and whether or not moving that
6308 expression to the "beginning" of that block is safe. */
6309 if (!TEST_BIT (antloc[dominated->index], i))
6310 continue;
6312 /* The expression is computed in the dominated block and
6313 it would be safe to compute it at the start of the
6314 dominated block. Now we have to determine if the
6315 expression would reach the dominated block if it was
6316 placed at the end of BB. */
6317 if (hoist_expr_reaches_here_p (bb, i, dominated, NULL))
6319 struct expr *expr = index_map[i];
6320 struct occr *occr = expr->antic_occr;
6321 rtx insn;
6322 rtx set;
6324 /* Find the right occurrence of this expression. */
6325 while (occr && BLOCK_FOR_INSN (occr->insn) != dominated)
6326 occr = occr->next;
6328 /* Should never happen. */
6329 if (!occr)
6330 abort ();
6332 insn = occr->insn;
6334 set = single_set (insn);
6335 if (! set)
6336 abort ();
6338 /* Create a pseudo-reg to store the result of reaching
6339 expressions into. Get the mode for the new pseudo
6340 from the mode of the original destination pseudo. */
6341 if (expr->reaching_reg == NULL)
6342 expr->reaching_reg
6343 = gen_reg_rtx (GET_MODE (SET_DEST (set)));
6345 gcse_emit_move_after (expr->reaching_reg, SET_DEST (set), insn);
6346 delete_insn (insn);
6347 occr->deleted_p = 1;
6348 if (!insn_inserted_p)
6350 insert_insn_end_bb (index_map[i], bb, 0);
6351 insn_inserted_p = 1;
6357 free (domby);
6360 free (index_map);
6363 /* Top level routine to perform one code hoisting (aka unification) pass.
6365 Return nonzero if a change was made. */
6367 static int
6368 one_code_hoisting_pass (void)
6370 int changed = 0;
6372 alloc_hash_table (max_cuid, &expr_hash_table, 0);
6373 compute_hash_table (&expr_hash_table);
6374 if (gcse_file)
6375 dump_hash_table (gcse_file, "Code Hoisting Expressions", &expr_hash_table);
6377 if (expr_hash_table.n_elems > 0)
6379 alloc_code_hoist_mem (last_basic_block, expr_hash_table.n_elems);
6380 compute_code_hoist_data ();
6381 hoist_code ();
6382 free_code_hoist_mem ();
6385 free_hash_table (&expr_hash_table);
6387 return changed;
6390 /* Here we provide the things required to do store motion towards
6391 the exit. In order for this to be effective, gcse also needed to
6392 be taught how to move a load when it is killed only by a store to itself.
6394 int i;
6395 float a[10];
6397 void foo(float scale)
6399 for (i=0; i<10; i++)
6400 a[i] *= scale;
6403 'i' is both loaded and stored to in the loop. Normally, gcse cannot move
6404 the load out since it's live around the loop, and stored at the bottom
6405 of the loop.
6407 The 'Load Motion' referred to and implemented in this file is
6408 an enhancement to gcse which, when using edge-based LCM, recognizes
6409 this situation and allows gcse to move the load out of the loop.
6411 Once gcse has hoisted the load, store motion can then push this
6412 load towards the exit, and we end up with no loads or stores of 'i'
6413 in the loop. */
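/* For illustration, the intended end result for the loop above is
   roughly (a sketch, with r standing for the new pseudo register):

     r = i;                   load of 'i' moved before the loop
     for (; r < 10; r++)
       a[r] *= scale;
     i = r;                   store of 'i' pushed past the exit

   so 'i' is read once on entry and written once on exit instead of
   being loaded and stored on every iteration.  */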
6415 /* This will search the ldst list for a matching expression. If it
6416 doesn't find one, we create one and initialize it. */
6418 static struct ls_expr *
6419 ldst_entry (rtx x)
6421 struct ls_expr * ptr;
6423 for (ptr = first_ls_expr(); ptr != NULL; ptr = next_ls_expr (ptr))
6424 if (expr_equiv_p (ptr->pattern, x))
6425 break;
6427 if (!ptr)
6429 ptr = xmalloc (sizeof (struct ls_expr));
6431 ptr->next = pre_ldst_mems;
6432 ptr->expr = NULL;
6433 ptr->pattern = x;
6434 ptr->pattern_regs = NULL_RTX;
6435 ptr->loads = NULL_RTX;
6436 ptr->stores = NULL_RTX;
6437 ptr->reaching_reg = NULL_RTX;
6438 ptr->invalid = 0;
6439 ptr->index = 0;
6440 ptr->hash_index = 0;
6441 pre_ldst_mems = ptr;
6444 return ptr;
6447 /* Free up an individual ldst entry. */
6449 static void
6450 free_ldst_entry (struct ls_expr * ptr)
6452 free_INSN_LIST_list (& ptr->loads);
6453 free_INSN_LIST_list (& ptr->stores);
6455 free (ptr);
6458 /* Free up all memory associated with the ldst list. */
6460 static void
6461 free_ldst_mems (void)
6463 while (pre_ldst_mems)
6465 struct ls_expr * tmp = pre_ldst_mems;
6467 pre_ldst_mems = pre_ldst_mems->next;
6469 free_ldst_entry (tmp);
6472 pre_ldst_mems = NULL;
6475 /* Dump debugging info about the ldst list. */
6477 static void
6478 print_ldst_list (FILE * file)
6480 struct ls_expr * ptr;
6482 fprintf (file, "LDST list: \n");
6484 for (ptr = first_ls_expr(); ptr != NULL; ptr = next_ls_expr (ptr))
6486 fprintf (file, " Pattern (%3d): ", ptr->index);
6488 print_rtl (file, ptr->pattern);
6490 fprintf (file, "\n Loads : ");
6492 if (ptr->loads)
6493 print_rtl (file, ptr->loads);
6494 else
6495 fprintf (file, "(nil)");
6497 fprintf (file, "\n Stores : ");
6499 if (ptr->stores)
6500 print_rtl (file, ptr->stores);
6501 else
6502 fprintf (file, "(nil)");
6504 fprintf (file, "\n\n");
6507 fprintf (file, "\n");
6510 /* Returns 1 if X is in the list of ldst only expressions. */
6512 static struct ls_expr *
6513 find_rtx_in_ldst (rtx x)
6515 struct ls_expr * ptr;
6517 for (ptr = pre_ldst_mems; ptr != NULL; ptr = ptr->next)
6518 if (expr_equiv_p (ptr->pattern, x) && ! ptr->invalid)
6519 return ptr;
6521 return NULL;
6524 /* Assign each element of the list of mems a monotonically increasing value. */
6526 static int
6527 enumerate_ldsts (void)
6529 struct ls_expr * ptr;
6530 int n = 0;
6532 for (ptr = pre_ldst_mems; ptr != NULL; ptr = ptr->next)
6533 ptr->index = n++;
6535 return n;
6538 /* Return first item in the list. */
6540 static inline struct ls_expr *
6541 first_ls_expr (void)
6543 return pre_ldst_mems;
6546 /* Return the next item in the list after the specified one. */
6548 static inline struct ls_expr *
6549 next_ls_expr (struct ls_expr * ptr)
6551 return ptr->next;
6554 /* Load Motion for loads which only kill themselves. */
6556 /* Return true if x is a simple MEM operation, with no registers or
6557 side effects. These are the types of loads we consider for the
6558 ld_motion list, otherwise we let the usual aliasing take care of it. */
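/* For instance (illustrative): a plain (mem (symbol_ref "x")) passes
   the tests below, while a volatile MEM, a BLKmode MEM, a MEM
   mentioning the stack pointer, or -- with -fnon-call-exceptions --
   a MEM that may trap is rejected.  */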
6560 static int
6561 simple_mem (rtx x)
6563 if (GET_CODE (x) != MEM)
6564 return 0;
6566 if (MEM_VOLATILE_P (x))
6567 return 0;
6569 if (GET_MODE (x) == BLKmode)
6570 return 0;
6572 /* If we are handling exceptions, we must be careful with memory references
6573 that may trap. If we are not, the behavior is undefined, so we may just
6574 continue. */
6575 if (flag_non_call_exceptions && may_trap_p (x))
6576 return 0;
6578 if (side_effects_p (x))
6579 return 0;
6581 /* Do not consider function arguments passed on stack. */
6582 if (reg_mentioned_p (stack_pointer_rtx, x))
6583 return 0;
6585 if (flag_float_store && FLOAT_MODE_P (GET_MODE (x)))
6586 return 0;
6588 return 1;
6591 /* Make sure there isn't a buried reference in this pattern anywhere.
6592 If there is, invalidate the entry for it since we're not capable
6593 of fixing it up just yet.  We have to be sure we know about ALL
6594 loads since the aliasing code will allow all entries in the
6595 ld_motion list to not-alias themselves.  If we miss a load, we will get
6596 the wrong value since gcse might common it and we won't know to
6597 fix it up. */
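/* For example (a sketch): in

     (set (reg r) (plus (mem (symbol_ref "x")) (const_int 1)))

   the MEM is buried inside the PLUS rather than being the whole
   SET_SRC, so compute_ld_motion_mems cannot track it as a load; the
   recursion below finds it and marks its ldst entry invalid.  */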
6599 static void
6600 invalidate_any_buried_refs (rtx x)
6602 const char * fmt;
6603 int i, j;
6604 struct ls_expr * ptr;
6606 /* Invalidate it in the list. */
6607 if (GET_CODE (x) == MEM && simple_mem (x))
6609 ptr = ldst_entry (x);
6610 ptr->invalid = 1;
6613 /* Recursively process the insn. */
6614 fmt = GET_RTX_FORMAT (GET_CODE (x));
6616 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
6618 if (fmt[i] == 'e')
6619 invalidate_any_buried_refs (XEXP (x, i));
6620 else if (fmt[i] == 'E')
6621 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
6622 invalidate_any_buried_refs (XVECEXP (x, i, j));
6626 /* Find all the 'simple' MEMs which are used in LOADs and STORES. Simple
6627 being defined as MEM loads and stores to symbols, with no side effects
6628 and no registers in the expression. For a MEM destination, we also
6629 check that the insn is still valid if we replace the destination with a
6630 REG, as is done in update_ld_motion_stores. If there are any uses/defs
6631 which don't match this criteria, they are invalidated and trimmed out
6632 later. */
6634 static void
6635 compute_ld_motion_mems (void)
6637 struct ls_expr * ptr;
6638 basic_block bb;
6639 rtx insn;
6641 pre_ldst_mems = NULL;
6643 FOR_EACH_BB (bb)
6645 for (insn = bb->head;
6646 insn && insn != NEXT_INSN (bb->end);
6647 insn = NEXT_INSN (insn))
6649 if (INSN_P (insn))
6651 if (GET_CODE (PATTERN (insn)) == SET)
6653 rtx src = SET_SRC (PATTERN (insn));
6654 rtx dest = SET_DEST (PATTERN (insn));
6656 /* Check for a simple LOAD... */
6657 if (GET_CODE (src) == MEM && simple_mem (src))
6659 ptr = ldst_entry (src);
6660 if (GET_CODE (dest) == REG)
6661 ptr->loads = alloc_INSN_LIST (insn, ptr->loads);
6662 else
6663 ptr->invalid = 1;
6665 else
6667 /* Make sure there isn't a buried load somewhere. */
6668 invalidate_any_buried_refs (src);
6671 /* Check for stores. Don't worry about aliased ones, they
6672 will block any movement we might do later. We only care
6673 about this exact pattern since those are the only
6674 circumstances in which we will ignore the aliasing info.  */
6675 if (GET_CODE (dest) == MEM && simple_mem (dest))
6677 ptr = ldst_entry (dest);
6679 if (GET_CODE (src) != MEM
6680 && GET_CODE (src) != ASM_OPERANDS
6681 /* Check for REG manually since want_to_gcse_p
6682 returns 0 for all REGs. */
6683 && (REG_P (src) || want_to_gcse_p (src)))
6684 ptr->stores = alloc_INSN_LIST (insn, ptr->stores);
6685 else
6686 ptr->invalid = 1;
6689 else
6690 invalidate_any_buried_refs (PATTERN (insn));
6696 /* Remove any references that have been either invalidated or are not in the
6697 expression list for pre gcse. */
6699 static void
6700 trim_ld_motion_mems (void)
6702 struct ls_expr * last = NULL;
6703 struct ls_expr * ptr = first_ls_expr ();
6705 while (ptr != NULL)
6707 int del = ptr->invalid;
6708 struct expr * expr = NULL;
6710 /* Delete if entry has been made invalid. */
6711 if (!del)
6713 unsigned int i;
6715 del = 1;
6716 /* Delete if we cannot find this mem in the expression list. */
6717 for (i = 0; i < expr_hash_table.size && del; i++)
6719 for (expr = expr_hash_table.table[i];
6720 expr != NULL;
6721 expr = expr->next_same_hash)
6722 if (expr_equiv_p (expr->expr, ptr->pattern))
6724 del = 0;
6725 break;
6730 if (del)
6732 if (last != NULL)
6734 last->next = ptr->next;
6735 free_ldst_entry (ptr);
6736 ptr = last->next;
6738 else
6740 pre_ldst_mems = pre_ldst_mems->next;
6741 free_ldst_entry (ptr);
6742 ptr = pre_ldst_mems;
6745 else
6747 /* Set the expression field if we are keeping it. */
6748 last = ptr;
6749 ptr->expr = expr;
6750 ptr = ptr->next;
6754 /* Show the world what we've found. */
6755 if (gcse_file && pre_ldst_mems != NULL)
6756 print_ldst_list (gcse_file);
6759 /* This routine will take an expression which we are replacing with
6760 a reaching register, and update any stores that are needed if
6761 that expression is in the ld_motion list. Stores are updated by
6762 copying their SRC to the reaching register, and then storing
6763 the reaching register into the store location.  This keeps the
6764 correct value in the reaching register for the loads. */
6766 static void
6767 update_ld_motion_stores (struct expr * expr)
6769 struct ls_expr * mem_ptr;
6771 if ((mem_ptr = find_rtx_in_ldst (expr->expr)))
6773 /* We could try to find just the REACHED stores, but it shouldn't
6774 matter if we set the reaching reg everywhere...  some might be
6775 dead and should be eliminated later. */
6777 /* We replace (set mem expr) with (set reg expr) (set mem reg)
6778 where reg is the reaching reg used in the load. We checked in
6779 compute_ld_motion_mems that we can replace (set mem expr) with
6780 (set reg expr) in that insn. */
6781 rtx list = mem_ptr->stores;
6783 for ( ; list != NULL_RTX; list = XEXP (list, 1))
6785 rtx insn = XEXP (list, 0);
6786 rtx pat = PATTERN (insn);
6787 rtx src = SET_SRC (pat);
6788 rtx reg = expr->reaching_reg;
6789 rtx copy, new;
6791 /* If we've already copied it, continue. */
6792 if (expr->reaching_reg == src)
6793 continue;
6795 if (gcse_file)
6797 fprintf (gcse_file, "PRE: store updated with reaching reg ");
6798 print_rtl (gcse_file, expr->reaching_reg);
6799 fprintf (gcse_file, ":\n ");
6800 print_inline_rtx (gcse_file, insn, 8);
6801 fprintf (gcse_file, "\n");
6804 copy = gen_move_insn ( reg, copy_rtx (SET_SRC (pat)));
6805 new = emit_insn_before (copy, insn);
6806 record_one_set (REGNO (reg), new);
6807 SET_SRC (pat) = reg;
6809 /* un-recognize this pattern since it's probably different now. */
6810 INSN_CODE (insn) = -1;
6811 gcse_create_count++;
6816 /* Store motion code. */
6818 #define ANTIC_STORE_LIST(x) ((x)->loads)
6819 #define AVAIL_STORE_LIST(x) ((x)->stores)
6820 #define LAST_AVAIL_CHECK_FAILURE(x) ((x)->reaching_reg)
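/* A note on the macros above (a reading aid, not new semantics): for
   store motion the ls_expr fields are reused, so the 'loads' chain
   holds the anticipatable stores, the 'stores' chain the available
   ones, and 'reaching_reg' temporarily records the insn that made an
   availability check fail.  */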
6822 /* This is used to communicate the target bitvector we want to use in the
6823 reg_set_info routine when called via the note_stores mechanism. */
6824 static int * regvec;
6826 /* And current insn, for the same routine. */
6827 static rtx compute_store_table_current_insn;
6829 /* Used in computing the reverse edge graph bit vectors. */
6830 static sbitmap * st_antloc;
6832 /* Global holding the number of store expressions we are dealing with. */
6833 static int num_stores;
6835 /* Checks to see if we need to mark a register set.  Called from note_stores.  */
6837 static void
6838 reg_set_info (rtx dest, rtx setter ATTRIBUTE_UNUSED,
6839 void *data ATTRIBUTE_UNUSED)
6841 if (GET_CODE (dest) == SUBREG)
6842 dest = SUBREG_REG (dest);
6844 if (GET_CODE (dest) == REG)
6845 regvec[REGNO (dest)] = INSN_UID (compute_store_table_current_insn);
6848 /* Return zero if some of the registers in list X are killed
6849 due to a set of a register recorded in the array REGS_SET.  */
6851 static bool
6852 store_ops_ok (rtx x, int *regs_set)
6854 rtx reg;
6856 for (; x; x = XEXP (x, 1))
6858 reg = XEXP (x, 0);
6859 if (regs_set[REGNO(reg)])
6860 return false;
6863 return true;
6866 /* Returns a list of registers mentioned in X. */
6867 static rtx
6868 extract_mentioned_regs (rtx x)
6870 return extract_mentioned_regs_helper (x, NULL_RTX);
6873 /* Helper for extract_mentioned_regs; ACCUM is used to accumulate used
6874 registers. */
6875 static rtx
6876 extract_mentioned_regs_helper (rtx x, rtx accum)
6878 int i;
6879 enum rtx_code code;
6880 const char * fmt;
6882 /* Repeat is used to turn tail-recursion into iteration. */
6883 repeat:
6885 if (x == 0)
6886 return accum;
6888 code = GET_CODE (x);
6889 switch (code)
6891 case REG:
6892 return alloc_EXPR_LIST (0, x, accum);
6894 case MEM:
6895 x = XEXP (x, 0);
6896 goto repeat;
6898 case PRE_DEC:
6899 case PRE_INC:
6900 case POST_DEC:
6901 case POST_INC:
6902 /* We do not run this function with arguments having side effects. */
6903 abort ();
6905 case PC:
6906 case CC0: /*FIXME*/
6907 case CONST:
6908 case CONST_INT:
6909 case CONST_DOUBLE:
6910 case CONST_VECTOR:
6911 case SYMBOL_REF:
6912 case LABEL_REF:
6913 case ADDR_VEC:
6914 case ADDR_DIFF_VEC:
6915 return accum;
6917 default:
6918 break;
6921 i = GET_RTX_LENGTH (code) - 1;
6922 fmt = GET_RTX_FORMAT (code);
6924 for (; i >= 0; i--)
6926 if (fmt[i] == 'e')
6928 rtx tem = XEXP (x, i);
6930 /* If we are about to do the last recursive call
6931 needed at this level, change it into iteration. */
6932 if (i == 0)
6934 x = tem;
6935 goto repeat;
6938 accum = extract_mentioned_regs_helper (tem, accum);
6940 else if (fmt[i] == 'E')
6942 int j;
6944 for (j = 0; j < XVECLEN (x, i); j++)
6945 accum = extract_mentioned_regs_helper (XVECEXP (x, i, j), accum);
6949 return accum;
6952 /* Determine whether INSN is a MEM store pattern that we will consider moving.
6953 REGS_SET_BEFORE is an array of registers set before (and including) the
6954 current insn, REGS_SET_AFTER is an array of registers set after (and
6955 including) the insn in this basic block. We must be passing through BB from
6956 head to end, as we are using this fact to speed things up.
6958 The results are stored this way:
6960 -- the first anticipatable expression is added into ANTIC_STORE_LIST
6961 -- if the processed expression is not anticipatable, NULL_RTX is added
6962 there instead, so that we can use it as an indicator that no further
6963 expression of this type may be anticipatable
6964 -- if the expression is available, it is added as head of AVAIL_STORE_LIST;
6965 consequently, all of them but this head are dead and may be deleted.
6966 -- if the expression is not available, the insn that causes it not to be
6967 available is stored in reaching_reg.
6969 Things are complicated a bit by the fact that there may already be stores
6970 to the same MEM from other blocks; also caller must take care of the
6971 necessary cleanup of the temporary markers after the end of the basic block.  */
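/* A small illustration (hedged, not from the sources) for one block
   and one tracked destination *p:

     *p = x;     anticipatable: nothing kills *p between the block
                 head and this store
     y = *p;     this load kills the availability of the store above
     *p = z;     not the first store, so not recorded as
                 anticipatable, but it is the store available at the
                 block end

   so ANTIC_STORE_LIST records the first insn and AVAIL_STORE_LIST
   the last one.  */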
6974 static void
6975 find_moveable_store (rtx insn, int *regs_set_before, int *regs_set_after)
6977 struct ls_expr * ptr;
6978 rtx dest, set, tmp;
6979 int check_anticipatable, check_available;
6980 basic_block bb = BLOCK_FOR_INSN (insn);
6982 set = single_set (insn);
6983 if (!set)
6984 return;
6986 dest = SET_DEST (set);
6988 if (GET_CODE (dest) != MEM || MEM_VOLATILE_P (dest)
6989 || GET_MODE (dest) == BLKmode)
6990 return;
6992 if (side_effects_p (dest))
6993 return;
6995 /* If we are handling exceptions, we must be careful with memory references
6996 that may trap. If we are not, the behavior is undefined, so we may just
6997 continue. */
6998 if (flag_non_call_exceptions && may_trap_p (dest))
6999 return;
7001 ptr = ldst_entry (dest);
7002 if (!ptr->pattern_regs)
7003 ptr->pattern_regs = extract_mentioned_regs (dest);
7005 /* Do not check for anticipatability if we either found one anticipatable
7006 store already, or tested for one and found out that it was killed. */
7007 check_anticipatable = 0;
7008 if (!ANTIC_STORE_LIST (ptr))
7009 check_anticipatable = 1;
7010 else
7012 tmp = XEXP (ANTIC_STORE_LIST (ptr), 0);
7013 if (tmp != NULL_RTX
7014 && BLOCK_FOR_INSN (tmp) != bb)
7015 check_anticipatable = 1;
7017 if (check_anticipatable)
7019 if (store_killed_before (dest, ptr->pattern_regs, insn, bb, regs_set_before))
7020 tmp = NULL_RTX;
7021 else
7022 tmp = insn;
7023 ANTIC_STORE_LIST (ptr) = alloc_INSN_LIST (tmp,
7024 ANTIC_STORE_LIST (ptr));
7027 /* It is not necessary to check whether store is available if we did
7028 it successfully before; if we failed before, do not bother to check
7029 until we reach the insn that caused us to fail. */
7030 check_available = 0;
7031 if (!AVAIL_STORE_LIST (ptr))
7032 check_available = 1;
7033 else
7035 tmp = XEXP (AVAIL_STORE_LIST (ptr), 0);
7036 if (BLOCK_FOR_INSN (tmp) != bb)
7037 check_available = 1;
7039 if (check_available)
7041 /* Check that we have already reached the insn at which the check
7042 failed last time.  */
7043 if (LAST_AVAIL_CHECK_FAILURE (ptr))
7045 for (tmp = bb->end;
7046 tmp != insn && tmp != LAST_AVAIL_CHECK_FAILURE (ptr);
7047 tmp = PREV_INSN (tmp))
7048 continue;
7049 if (tmp == insn)
7050 check_available = 0;
7052 else
7053 check_available = store_killed_after (dest, ptr->pattern_regs, insn,
7054 bb, regs_set_after,
7055 &LAST_AVAIL_CHECK_FAILURE (ptr));
7057 if (!check_available)
7058 AVAIL_STORE_LIST (ptr) = alloc_INSN_LIST (insn, AVAIL_STORE_LIST (ptr));
7061 /* Find available and anticipatable stores. */
7063 static int
7064 compute_store_table (void)
7066 int ret;
7067 basic_block bb;
7068 unsigned regno;
7069 rtx insn, pat, tmp;
7070 int *last_set_in, *already_set;
7071 struct ls_expr * ptr, **prev_next_ptr_ptr;
7073 max_gcse_regno = max_reg_num ();
7075 reg_set_in_block = sbitmap_vector_alloc (last_basic_block,
7076 max_gcse_regno);
7077 sbitmap_vector_zero (reg_set_in_block, last_basic_block);
7078 pre_ldst_mems = 0;
7079 last_set_in = xmalloc (sizeof (int) * max_gcse_regno);
7080 already_set = xmalloc (sizeof (int) * max_gcse_regno);
7082 /* Find all the stores we care about. */
7083 FOR_EACH_BB (bb)
7085 /* First compute the registers set in this block. */
7086 memset (last_set_in, 0, sizeof (int) * max_gcse_regno);
7087 regvec = last_set_in;
7089 for (insn = bb->head;
7090 insn != NEXT_INSN (bb->end);
7091 insn = NEXT_INSN (insn))
7093 if (! INSN_P (insn))
7094 continue;
7096 if (GET_CODE (insn) == CALL_INSN)
7098 bool clobbers_all = false;
7099 #ifdef NON_SAVING_SETJMP
7100 if (NON_SAVING_SETJMP
7101 && find_reg_note (insn, REG_SETJMP, NULL_RTX))
7102 clobbers_all = true;
7103 #endif
7105 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7106 if (clobbers_all
7107 || TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
7108 last_set_in[regno] = INSN_UID (insn);
7111 pat = PATTERN (insn);
7112 compute_store_table_current_insn = insn;
7113 note_stores (pat, reg_set_info, NULL);
7116 /* Record the set registers. */
7117 for (regno = 0; regno < max_gcse_regno; regno++)
7118 if (last_set_in[regno])
7119 SET_BIT (reg_set_in_block[bb->index], regno);
7121 /* Now find the stores. */
7122 memset (already_set, 0, sizeof (int) * max_gcse_regno);
7123 regvec = already_set;
7124 for (insn = bb->head;
7125 insn != NEXT_INSN (bb->end);
7126 insn = NEXT_INSN (insn))
7128 if (! INSN_P (insn))
7129 continue;
7131 if (GET_CODE (insn) == CALL_INSN)
7133 bool clobbers_all = false;
7134 #ifdef NON_SAVING_SETJMP
7135 if (NON_SAVING_SETJMP
7136 && find_reg_note (insn, REG_SETJMP, NULL_RTX))
7137 clobbers_all = true;
7138 #endif
7140 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7141 if (clobbers_all
7142 || TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
7143 already_set[regno] = 1;
7146 pat = PATTERN (insn);
7147 note_stores (pat, reg_set_info, NULL);
7149 /* Now that we've marked regs, look for stores. */
7150 find_moveable_store (insn, already_set, last_set_in);
7152 /* Unmark regs that are no longer set. */
7153 for (regno = 0; regno < max_gcse_regno; regno++)
7154 if (last_set_in[regno] == INSN_UID (insn))
7155 last_set_in[regno] = 0;
7158 /* Clear temporary marks. */
7159 for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
7161 LAST_AVAIL_CHECK_FAILURE(ptr) = NULL_RTX;
7162 if (ANTIC_STORE_LIST (ptr)
7163 && (tmp = XEXP (ANTIC_STORE_LIST (ptr), 0)) == NULL_RTX)
7164 ANTIC_STORE_LIST (ptr) = XEXP (ANTIC_STORE_LIST (ptr), 1);
7168 /* Remove the stores that are not available anywhere, as there will
7169 be no opportunity to optimize them. */
7170 for (ptr = pre_ldst_mems, prev_next_ptr_ptr = &pre_ldst_mems;
7171 ptr != NULL;
7172 ptr = *prev_next_ptr_ptr)
7174 if (!AVAIL_STORE_LIST (ptr))
7176 *prev_next_ptr_ptr = ptr->next;
7177 free_ldst_entry (ptr);
7179 else
7180 prev_next_ptr_ptr = &ptr->next;
7183 ret = enumerate_ldsts ();
7185 if (gcse_file)
7187 fprintf (gcse_file, "ST_avail and ST_antic (shown under loads..)\n");
7188 print_ldst_list (gcse_file);
7191 free (last_set_in);
7192 free (already_set);
7193 return ret;
7196 /* Check to see if the load X is aliased with STORE_PATTERN.
7197 AFTER is true if we are checking the case when STORE_PATTERN occurs
7198 after X.  */
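/* Illustrative reasoning for the two cases below: if the store comes
   after the load, moving the store past it raises an anti-dependence
   question; if the store comes first, the load may read the stored
   value, which is a true dependence.  */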
7200 static bool
7201 load_kills_store (rtx x, rtx store_pattern, int after)
7203 if (after)
7204 return anti_dependence (x, store_pattern);
7205 else
7206 return true_dependence (store_pattern, GET_MODE (store_pattern), x,
7207 rtx_addr_varies_p);
7210 /* Go through the entire insn X, looking for any loads which might alias
7211 STORE_PATTERN. Return true if found.
7212 AFTER is true if we are checking the case when STORE_PATTERN occurs
7213 after the insn X. */
7215 static bool
7216 find_loads (rtx x, rtx store_pattern, int after)
7218 const char * fmt;
7219 int i, j;
7220 int ret = false;
7222 if (!x)
7223 return false;
7225 if (GET_CODE (x) == SET)
7226 x = SET_SRC (x);
7228 if (GET_CODE (x) == MEM)
7230 if (load_kills_store (x, store_pattern, after))
7231 return true;
7234 /* Recursively process the insn. */
7235 fmt = GET_RTX_FORMAT (GET_CODE (x));
7237 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0 && !ret; i--)
7239 if (fmt[i] == 'e')
7240 ret |= find_loads (XEXP (x, i), store_pattern, after);
7241 else if (fmt[i] == 'E')
7242 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
7243 ret |= find_loads (XVECEXP (x, i, j), store_pattern, after);
7245 return ret;
7248 /* Check if INSN kills the store pattern X (is aliased with it).
7249 AFTER is true if we are checking the case when store X occurs
7250 after the insn.  Return true if it does.  */
7252 static bool
7253 store_killed_in_insn (rtx x, rtx x_regs, rtx insn, int after)
7255 rtx reg, base;
7257 if (!INSN_P (insn))
7258 return false;
7260 if (GET_CODE (insn) == CALL_INSN)
7262 /* A normal or pure call might read from pattern,
7263 but a const call will not. */
7264 if (! CONST_OR_PURE_CALL_P (insn) || pure_call_p (insn))
7265 return true;
7267 /* But even a const call reads its parameters. Check whether the
7268 base of some of the registers used in the mem is the stack pointer.  */
7269 for (reg = x_regs; reg; reg = XEXP (reg, 1))
7271 base = find_base_term (XEXP (reg, 0));
7272 if (!base
7273 || (GET_CODE (base) == ADDRESS
7274 && GET_MODE (base) == Pmode
7275 && XEXP (base, 0) == stack_pointer_rtx))
7276 return true;
7279 return false;
7282 if (GET_CODE (PATTERN (insn)) == SET)
7284 rtx pat = PATTERN (insn);
7285 rtx dest = SET_DEST (pat);
7287 if (GET_CODE (dest) == SIGN_EXTRACT
7288 || GET_CODE (dest) == ZERO_EXTRACT)
7289 dest = XEXP (dest, 0);
7291 /* Check for memory stores to aliased objects. */
7292 if (GET_CODE (dest) == MEM
7293 && !expr_equiv_p (dest, x))
7295 if (after)
7297 if (output_dependence (dest, x))
7298 return true;
7300 else
7302 if (output_dependence (x, dest))
7303 return true;
7306 return find_loads (SET_SRC (pat), x, after);
7308 else
7309 return find_loads (PATTERN (insn), x, after);
7312 /* Returns true if the expression X is loaded or clobbered on or after INSN
7313 within basic block BB. REGS_SET_AFTER is bitmap of registers set in
7314 or after the insn.  X_REGS is the list of registers mentioned in X.  If the
7315 store is killed, return in FAIL_INSN the last insn in which the kill occurs.  */
7317 static bool
7318 store_killed_after (rtx x, rtx x_regs, rtx insn, basic_block bb,
7319 int *regs_set_after, rtx *fail_insn)
7321 rtx last = bb->end, act;
7323 if (!store_ops_ok (x_regs, regs_set_after))
7325 /* We do not know where it will happen. */
7326 if (fail_insn)
7327 *fail_insn = NULL_RTX;
7328 return true;
7331 /* Scan from the end, so that fail_insn is determined correctly. */
7332 for (act = last; act != PREV_INSN (insn); act = PREV_INSN (act))
7333 if (store_killed_in_insn (x, x_regs, act, false))
7335 if (fail_insn)
7336 *fail_insn = act;
7337 return true;
7340 return false;
7343 /* Returns true if the expression X is loaded or clobbered on or before INSN
7344 within basic block BB. X_REGS is list of registers mentioned in X.
7345 REGS_SET_BEFORE is bitmap of registers set before or in this insn. */
7346 static bool
7347 store_killed_before (rtx x, rtx x_regs, rtx insn, basic_block bb,
7348 int *regs_set_before)
7350 rtx first = bb->head;
7352 if (!store_ops_ok (x_regs, regs_set_before))
7353 return true;
7355 for ( ; insn != PREV_INSN (first); insn = PREV_INSN (insn))
7356 if (store_killed_in_insn (x, x_regs, insn, true))
7357 return true;
7359 return false;
7362 /* Fill in the available, anticipatable, transparent and kill vectors for
7363 store motion, based on the lists of available and anticipatable stores.  */
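/* As a sketch of the classification below: a tracked store is
   TRANSP in a block when nothing in the block clobbers or reads its
   location (store_killed_after reports no kill from the block head),
   and KILL otherwise, except that a store which is both locally
   anticipatable and locally available there is not marked killed.  */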
7364 static void
7365 build_store_vectors (void)
7367 basic_block bb;
7368 int *regs_set_in_block;
7369 rtx insn, st;
7370 struct ls_expr * ptr;
7371 unsigned regno;
7373 /* Build the gen_vector. This is any store in the table which is not killed
7374 by aliasing later in its block. */
7375 ae_gen = sbitmap_vector_alloc (last_basic_block, num_stores);
7376 sbitmap_vector_zero (ae_gen, last_basic_block);
7378 st_antloc = sbitmap_vector_alloc (last_basic_block, num_stores);
7379 sbitmap_vector_zero (st_antloc, last_basic_block);
7381 for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
7383 for (st = AVAIL_STORE_LIST (ptr); st != NULL; st = XEXP (st, 1))
7385 insn = XEXP (st, 0);
7386 bb = BLOCK_FOR_INSN (insn);
7388 /* If we've already seen an available expression in this block,
7389 we can delete this one (it occurs earlier in the block).  We'll
7390 copy the SRC expression to an unused register in case there
7391 are any side effects. */
7392 if (TEST_BIT (ae_gen[bb->index], ptr->index))
7394 rtx r = gen_reg_rtx (GET_MODE (ptr->pattern));
7395 if (gcse_file)
7396 fprintf (gcse_file, "Removing redundant store:\n");
7397 replace_store_insn (r, XEXP (st, 0), bb);
7398 continue;
7400 SET_BIT (ae_gen[bb->index], ptr->index);
7403 for (st = ANTIC_STORE_LIST (ptr); st != NULL; st = XEXP (st, 1))
7405 insn = XEXP (st, 0);
7406 bb = BLOCK_FOR_INSN (insn);
7407 SET_BIT (st_antloc[bb->index], ptr->index);
7411 ae_kill = sbitmap_vector_alloc (last_basic_block, num_stores);
7412 sbitmap_vector_zero (ae_kill, last_basic_block);
7414 transp = sbitmap_vector_alloc (last_basic_block, num_stores);
7415 sbitmap_vector_zero (transp, last_basic_block);
7416 regs_set_in_block = xmalloc (sizeof (int) * max_gcse_regno);
7418 FOR_EACH_BB (bb)
7420 for (regno = 0; regno < max_gcse_regno; regno++)
7421 regs_set_in_block[regno] = TEST_BIT (reg_set_in_block[bb->index], regno);
7423 for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
7425 if (store_killed_after (ptr->pattern, ptr->pattern_regs, bb->head,
7426 bb, regs_set_in_block, NULL))
7428 /* It should not be necessary to consider the expression
7429 killed if it is both anticipatable and available. */
7430 if (!TEST_BIT (st_antloc[bb->index], ptr->index)
7431 || !TEST_BIT (ae_gen[bb->index], ptr->index))
7432 SET_BIT (ae_kill[bb->index], ptr->index);
7434 else
7435 SET_BIT (transp[bb->index], ptr->index);
7439 free (regs_set_in_block);
7441 if (gcse_file)
7443 dump_sbitmap_vector (gcse_file, "st_antloc", "", st_antloc, last_basic_block);
7444 dump_sbitmap_vector (gcse_file, "st_kill", "", ae_kill, last_basic_block);
7445 dump_sbitmap_vector (gcse_file, "Transpt", "", transp, last_basic_block);
7446 dump_sbitmap_vector (gcse_file, "st_avloc", "", ae_gen, last_basic_block);
7450 /* Insert an instruction at the beginning of a basic block, and update
7451 the BLOCK_HEAD if needed. */
7453 static void
7454 insert_insn_start_bb (rtx insn, basic_block bb)
7456 /* Insert at start of successor block. */
7457 rtx prev = PREV_INSN (bb->head);
7458 rtx before = bb->head;
7459 while (before != 0)
7461 if (GET_CODE (before) != CODE_LABEL
7462 && (GET_CODE (before) != NOTE
7463 || NOTE_LINE_NUMBER (before) != NOTE_INSN_BASIC_BLOCK))
7464 break;
7465 prev = before;
7466 if (prev == bb->end)
7467 break;
7468 before = NEXT_INSN (before);
7471 insn = emit_insn_after (insn, prev);
7473 if (gcse_file)
7475 fprintf (gcse_file, "STORE_MOTION insert store at start of BB %d:\n",
7476 bb->index);
7477 print_inline_rtx (gcse_file, insn, 6);
7478 fprintf (gcse_file, "\n");
7482 /* This routine will insert a store on an edge. EXPR is the ldst entry for
7483 the memory reference, and E is the edge to insert it on. Returns nonzero
7484 if an edge insertion was performed. */
7486 static int
7487 insert_store (struct ls_expr * expr, edge e)
7489 rtx reg, insn;
7490 basic_block bb;
7491 edge tmp;
7493 /* We did all the deletes before this insert, so if we didn't delete a
7494 store, then we haven't set the reaching reg yet either. */
7495 if (expr->reaching_reg == NULL_RTX)
7496 return 0;
7498 reg = expr->reaching_reg;
7499 insn = gen_move_insn (copy_rtx (expr->pattern), reg);
7501 /* If we are inserting this expression on ALL predecessor edges of a BB,
7502 insert it at the start of the BB, and reset the insert bits on the other
7503 edges so we don't try to insert it on the other edges. */
7504 bb = e->dest;
7505 for (tmp = e->dest->pred; tmp ; tmp = tmp->pred_next)
7507 int index = EDGE_INDEX (edge_list, tmp->src, tmp->dest);
7508 if (index == EDGE_INDEX_NO_EDGE)
7509 abort ();
7510 if (! TEST_BIT (pre_insert_map[index], expr->index))
7511 break;
7514 /* If tmp is NULL, we found an insertion on every edge, blank the
7515 insertion vector for these edges, and insert at the start of the BB. */
7516 if (!tmp && bb != EXIT_BLOCK_PTR)
7518 for (tmp = e->dest->pred; tmp ; tmp = tmp->pred_next)
7520 int index = EDGE_INDEX (edge_list, tmp->src, tmp->dest);
7521 RESET_BIT (pre_insert_map[index], expr->index);
7523 insert_insn_start_bb (insn, bb);
7524 return 0;
7527 /* We can't insert on this edge, so we'll insert at the head of the
7528 successor's block.  See Morgan, sec 10.5.  */
7529 if ((e->flags & EDGE_ABNORMAL) == EDGE_ABNORMAL)
7531 insert_insn_start_bb (insn, bb);
7532 return 0;
7535 insert_insn_on_edge (insn, e);
7537 if (gcse_file)
7539 fprintf (gcse_file, "STORE_MOTION insert insn on edge (%d, %d):\n",
7540 e->src->index, e->dest->index);
7541 print_inline_rtx (gcse_file, insn, 6);
7542 fprintf (gcse_file, "\n");
7545 return 1;
7548 /* This routine will replace a store with a SET to a specified register. */
7550 static void
7551 replace_store_insn (rtx reg, rtx del, basic_block bb)
7553 rtx insn;
7555 insn = gen_move_insn (reg, SET_SRC (single_set (del)));
7556 insn = emit_insn_after (insn, del);
7558 if (gcse_file)
7560 fprintf (gcse_file,
7561 "STORE_MOTION delete insn in BB %d:\n ", bb->index);
7562 print_inline_rtx (gcse_file, del, 6);
7563 fprintf (gcse_file, "\nSTORE MOTION replaced with insn:\n ");
7564 print_inline_rtx (gcse_file, insn, 6);
7565 fprintf (gcse_file, "\n");
7568 delete_insn (del);
7572 /* Delete a store, but copy the value that would have been stored into
7573 the reaching_reg for later storing. */
7575 static void
7576 delete_store (struct ls_expr * expr, basic_block bb)
7578 rtx reg, i, del;
7580 if (expr->reaching_reg == NULL_RTX)
7581 expr->reaching_reg = gen_reg_rtx (GET_MODE (expr->pattern));
7583 reg = expr->reaching_reg;
7585 for (i = AVAIL_STORE_LIST (expr); i; i = XEXP (i, 1))
7587 del = XEXP (i, 0);
7588 if (BLOCK_FOR_INSN (del) == bb)
7590 /* We know there is only one since we deleted redundant
7591 ones during the available computation. */
7592 replace_store_insn (reg, del, bb);
7593 break;
7598 /* Free memory used by store motion. */
7600 static void
7601 free_store_memory (void)
7603 free_ldst_mems ();
7605 if (ae_gen)
7606 sbitmap_vector_free (ae_gen);
7607 if (ae_kill)
7608 sbitmap_vector_free (ae_kill);
7609 if (transp)
7610 sbitmap_vector_free (transp);
7611 if (st_antloc)
7612 sbitmap_vector_free (st_antloc);
7613 if (pre_insert_map)
7614 sbitmap_vector_free (pre_insert_map);
7615 if (pre_delete_map)
7616 sbitmap_vector_free (pre_delete_map);
7617 if (reg_set_in_block)
7618 sbitmap_vector_free (reg_set_in_block);
7620 ae_gen = ae_kill = transp = st_antloc = NULL;
7621 pre_insert_map = pre_delete_map = reg_set_in_block = NULL;
7624 /* Perform store motion. Much like gcse, except we move expressions the
7625 other way by looking at the flowgraph in reverse. */
7627 static void
7628 store_motion (void)
7630 basic_block bb;
7631 int x;
7632 struct ls_expr * ptr;
7633 int update_flow = 0;
7635 if (gcse_file)
7637 fprintf (gcse_file, "before store motion\n");
7638 print_rtl (gcse_file, get_insns ());
7641 init_alias_analysis ();
7643 /* Find all the available and anticipatable stores. */
7644 num_stores = compute_store_table ();
7645 if (num_stores == 0)
7647 sbitmap_vector_free (reg_set_in_block);
7648 end_alias_analysis ();
7649 return;
7652 /* Now compute kill & transp vectors. */
7653 build_store_vectors ();
7654 add_noreturn_fake_exit_edges ();
7656 edge_list = pre_edge_rev_lcm (gcse_file, num_stores, transp, ae_gen,
7657 st_antloc, ae_kill, &pre_insert_map,
7658 &pre_delete_map);
7660 /* Now we want to insert the new stores which are going to be needed. */
7661 for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
7663 FOR_EACH_BB (bb)
7664 if (TEST_BIT (pre_delete_map[bb->index], ptr->index))
7665 delete_store (ptr, bb);
7667 for (x = 0; x < NUM_EDGES (edge_list); x++)
7668 if (TEST_BIT (pre_insert_map[x], ptr->index))
7669 update_flow |= insert_store (ptr, INDEX_EDGE (edge_list, x));
7672 if (update_flow)
7673 commit_edge_insertions ();
7675 free_store_memory ();
7676 free_edge_list (edge_list);
7677 remove_fake_edges ();
7678 end_alias_analysis ();
7682 /* Entry point for jump bypassing optimization pass. */
7684 int
7685 bypass_jumps (FILE *file)
7687 int changed;
7689 /* We do not construct an accurate cfg in functions which call
7690 setjmp, so just punt to be safe. */
7691 if (current_function_calls_setjmp)
7692 return 0;
7694 /* For calling dump_foo fns from gdb. */
7695 debug_stderr = stderr;
7696 gcse_file = file;
7698 /* Identify the basic block information for this function, including
7699 successors and predecessors. */
7700 max_gcse_regno = max_reg_num ();
7702 if (file)
7703 dump_flow_info (file);
7705 /* Return if there's nothing to do. */
7706 if (n_basic_blocks <= 1)
7707 return 0;
7709 /* Trying to perform global optimizations on flow graphs which have
7710 a high connectivity will take a long time and is unlikely to be
7711 particularly useful.
7713 In normal circumstances a cfg should have about twice as many edges
7714 as blocks. But we do not want to punish small functions which have
7715 a couple switch statements. So we require a relatively large number
7716 of basic blocks and the ratio of edges to blocks to be high. */
7717 if (n_basic_blocks > 1000 && n_edges / n_basic_blocks >= 20)
7719 if (warn_disabled_optimization)
7720 warning ("BYPASS disabled: %d > 1000 basic blocks and %d >= 20 edges/basic block",
7721 n_basic_blocks, n_edges / n_basic_blocks);
7722 return 0;
7725 /* If allocating memory for the cprop bitmap would take up too much
7726 storage it's better just to disable the optimization. */
7727 if ((n_basic_blocks
7728 * SBITMAP_SET_SIZE (max_gcse_regno)
7729 * sizeof (SBITMAP_ELT_TYPE)) > MAX_GCSE_MEMORY)
7731 if (warn_disabled_optimization)
7732 warning ("GCSE disabled: %d basic blocks and %d registers",
7733 n_basic_blocks, max_gcse_regno);
7735 return 0;
7738 gcc_obstack_init (&gcse_obstack);
7739 bytes_used = 0;
7741 /* We need alias. */
7742 init_alias_analysis ();
7744 /* Record where pseudo-registers are set. This data is kept accurate
7745 during each pass. ??? We could also record hard-reg information here
7746 [since it's unchanging], however it is currently done during hash table
7747 computation.
7749 It may be tempting to compute MEM set information here too, but MEM sets
7750 will be subject to code motion one day and thus we need to compute
7751 information about memory sets when we build the hash tables. */
7753 alloc_reg_set_mem (max_gcse_regno);
7754 compute_sets (get_insns ());
7756 max_gcse_regno = max_reg_num ();
7757 alloc_gcse_mem (get_insns ());
7758 changed = one_cprop_pass (1, 1, 1);
7759 free_gcse_mem ();
7761 if (file)
7763 fprintf (file, "BYPASS of %s: %d basic blocks, ",
7764 current_function_name, n_basic_blocks);
7765 fprintf (file, "%d bytes\n\n", bytes_used);
7768 obstack_free (&gcse_obstack, NULL);
7769 free_reg_set_mem ();
7771 /* We are finished with alias. */
7772 end_alias_analysis ();
7773 allocate_reg_info (max_reg_num (), FALSE, FALSE);
7775 return changed;
7778 #include "gt-gcse.h"