/* Global common subexpression elimination/Partial redundancy elimination
   and global constant/copy propagation for GNU compiler.
   Copyright (C) 1997, 1998, 1999, 2000, 2001, 2002, 2003
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
/* TODO
   - reordering of memory allocation and freeing to be more space efficient
   - do rough calc of how many regs are needed in each block, and a rough
     calc of how many regs are available in each class and use that to
     throttle back the code in cases where RTX_COST is minimal.
   - a store to the same address as a load does not kill the load if the
     source of the store is also the destination of the load.  Handling this
     allows more load motion, particularly out of loops.
   - ability to realloc sbitmap vectors would allow one initial computation
     of reg_set_in_block with only subsequent additions, rather than
     recomputing it for each pass
*/
/* References searched while implementing this.

   Compilers: Principles, Techniques and Tools
   Aho, Sethi, Ullman
   Addison-Wesley, 1988

   Global Optimization by Suppression of Partial Redundancies
   E. Morel, C. Renvoise
   Communications of the ACM, Vol. 22, Num. 2, Feb. 1979

   A Portable Machine-Independent Global Optimizer - Design and Measurements
   Frederick Chow
   Stanford Ph.D. thesis, Dec. 1983

   A Fast Algorithm for Code Movement Optimization
   D.M. Dhamdhere
   SIGPLAN Notices, Vol. 23, Num. 10, Oct. 1988

   A Solution to a Problem with Morel and Renvoise's
   Global Optimization by Suppression of Partial Redundancies
   K-H Drechsler, M.P. Stadel
   ACM TOPLAS, Vol. 10, Num. 4, Oct. 1988

   Practical Adaptation of the Global Optimization
   Algorithm of Morel and Renvoise
   D.M. Dhamdhere
   ACM TOPLAS, Vol. 13, Num. 2, Apr. 1991

   Efficiently Computing Static Single Assignment Form and the Control
   Dependence Graph
   R. Cytron, J. Ferrante, B.K. Rosen, M.N. Wegman, and F.K. Zadeck
   ACM TOPLAS, Vol. 13, Num. 4, Oct. 1991

   Lazy Code Motion
   J. Knoop, O. Ruthing, B. Steffen
   ACM SIGPLAN Notices Vol. 27, Num. 7, Jul. 1992, '92 Conference on PLDI

   What's In a Region? Or Computing Control Dependence Regions in Near-Linear
   Time for Reducible Control Flow
   Thomas Ball
   ACM Letters on Programming Languages and Systems,
   Vol. 2, Num. 1-4, Mar-Dec 1993

   An Efficient Representation for Sparse Sets
   Preston Briggs, Linda Torczon
   ACM Letters on Programming Languages and Systems,
   Vol. 2, Num. 1-4, Mar-Dec 1993

   A Variation of Knoop, Ruthing, and Steffen's Lazy Code Motion
   K-H Drechsler, M.P. Stadel
   ACM SIGPLAN Notices, Vol. 28, Num. 5, May 1993

   Partial Dead Code Elimination
   J. Knoop, O. Ruthing, B. Steffen
   ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994

   Effective Partial Redundancy Elimination
   P. Briggs, K.D. Cooper
   ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994

   The Program Structure Tree: Computing Control Regions in Linear Time
   R. Johnson, D. Pearson, K. Pingali
   ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994

   Optimal Code Motion: Theory and Practice
   J. Knoop, O. Ruthing, B. Steffen
   ACM TOPLAS, Vol. 16, Num. 4, Jul. 1994

   The Power of Assignment Motion
   J. Knoop, O. Ruthing, B. Steffen
   ACM SIGPLAN Notices Vol. 30, Num. 6, Jun. 1995, '95 Conference on PLDI

   Global Code Motion / Global Value Numbering
   C. Click
   ACM SIGPLAN Notices Vol. 30, Num. 6, Jun. 1995, '95 Conference on PLDI

   Value Driven Redundancy Elimination
   L.T. Simpson
   Rice University Ph.D. thesis, Apr. 1996

   Value Numbering
   L.T. Simpson
   Massively Scalar Compiler Project, Rice University, Sep. 1996

   High Performance Compilers for Parallel Computing
   Michael Wolfe
   Addison-Wesley, 1996

   Advanced Compiler Design and Implementation
   Steven Muchnick
   Morgan Kaufmann, 1997

   Building an Optimizing Compiler
   Robert Morgan
   Digital Press, 1998

   People wishing to speed up the code here should read:
     Elimination Algorithms for Data Flow Analysis
     B.G. Ryder, M.C. Paull
     ACM Computing Surveys, Vol. 18, Num. 3, Sep. 1986

     How to Analyze Large Programs Efficiently and Informatively
     D.M. Dhamdhere, B.K. Rosen, F.K. Zadeck
     ACM SIGPLAN Notices Vol. 27, Num. 7, Jul. 1992, '92 Conference on PLDI

   People wishing to do something different can find various possibilities
   in the above papers and elsewhere.
*/
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "toplev.h"

#include "rtl.h"
#include "tm_p.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "flags.h"
#include "real.h"
#include "insn-config.h"
#include "recog.h"
#include "basic-block.h"
#include "output.h"
#include "function.h"
#include "expr.h"
#include "except.h"
#include "ggc.h"
#include "params.h"
#include "cselib.h"

#include "obstack.h"
/* Propagate flow information through back edges and thus enable PRE's
   moving loop invariant calculations out of loops.

   Originally this tended to create worse overall code, but several
   improvements during the development of PRE seem to have made following
   back edges generally a win.

   Note much of the loop invariant code motion done here would normally
   be done by loop.c, which has more heuristics for when to move invariants
   out of loops.  At some point we might need to move some of those
   heuristics into gcse.c.  */
/* We support GCSE via Partial Redundancy Elimination.  PRE optimizations
   are a superset of those done by GCSE.

   We perform the following steps:

   1) Compute basic block information.

   2) Compute table of places where registers are set.

   3) Perform copy/constant propagation.

   4) Perform global cse.

   5) Perform another pass of copy/constant propagation.

   Two passes of copy/constant propagation are done because the first one
   enables more GCSE and the second one helps to clean up the copies that
   GCSE creates.  This is needed more for PRE than for Classic because Classic
   GCSE will try to use an existing register containing the common
   subexpression rather than create a new one.  This is harder to do for PRE
   because of the code motion (which Classic GCSE doesn't do).

   Expressions we are interested in GCSE-ing are of the form
   (set (pseudo-reg) (expression)).
   Function want_to_gcse_p says what these are.

   PRE handles moving invariant expressions out of loops (by treating them as
   partially redundant).

   Eventually it would be nice to replace cse.c/gcse.c with SSA (static single
   assignment) based GVN (global value numbering).  L. T. Simpson's paper
   (Rice University) on value numbering is a useful reference for this.
   **********************

   We used to support multiple passes but there are diminishing returns in
   doing so.  The first pass usually makes 90% of the changes that are doable.
   A second pass can make a few more changes made possible by the first pass.
   Experiments show any further passes don't make enough changes to justify
   the expense.

   A study of spec92 using an unlimited number of passes:
   [1 pass] = 1208 substitutions, [2] = 577, [3] = 202, [4] = 192, [5] = 83,
   [6] = 34, [7] = 17, [8] = 9, [9] = 4, [10] = 4, [11] = 2,
   [12] = 2, [13] = 1, [15] = 1, [16] = 2, [41] = 1

   It was found that doing copy propagation between each pass enables further
   substitutions.

   PRE is quite expensive in complicated functions because the DFA can take
   a while to converge.  Hence we only perform one pass.  The parameter
   max-gcse-passes can be modified if one wants to experiment.

   **********************
   The steps for PRE are:

   1) Build the hash table of expressions we wish to GCSE (expr_hash_table).

   2) Perform the data flow analysis for PRE.

   3) Delete the redundant instructions.

   4) Insert the required copies [if any] that make the partially
      redundant instructions fully redundant.

   5) For other reaching expressions, insert an instruction to copy the value
      to a newly created pseudo that will reach the redundant instruction.

   The deletion is done first so that when we do insertions we
   know which pseudo reg to use.  A small illustration follows.
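
   As an editor's illustration (hypothetical code, not from the original
   comment), at the source level PRE roughly performs:

       before:                         after:

       if (cond)                       if (cond)
         x = a + b;                      { x = a + b; t = x; }
       else                            else
         other ();                       { other (); t = a + b; }
       y = a + b;                      y = t;

   The copy of a + b inserted on the else path (steps 4/5) makes the
   computation of y fully redundant, so it is replaced by a copy from
   the new pseudo t (step 3).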

   Various papers have argued that PRE DFA is expensive (O(n^2)) and others
   argue it is not.  The number of iterations for the algorithm to converge
   is typically 2-4 so I don't view it as that expensive (relatively speaking).

   PRE GCSE depends heavily on the second CSE pass to clean up the copies
   we create.  To make an expression reach the place where it's redundant,
   the result of the expression is copied to a new register, and the redundant
   expression is deleted by replacing it with this new register.  Classic GCSE
   doesn't have this problem as much as it computes the reaching defs of
   each register in each block and thus can try to use an existing register.

   **********************

   A fair bit of simplicity is gained by creating small functions for simple
   tasks, even when the function is only called in one place.  This may
   measurably slow things down [or may not] by creating more function call
   overhead than is necessary.  The source is laid out so that it's trivial
   to make the affected functions inline so that one can measure what speed
   up, if any, can be achieved, and maybe later when things settle down,
   things can be rearranged.

   Help stamp out big monolithic functions!  */
/* GCSE global vars.  */

/* -dG dump file.  */
static FILE *gcse_file;

/* Note whether or not we should run jump optimization after gcse.  We
   want to do this for two cases.

    * If we changed any jumps via cprop.

    * If we added any labels via edge splitting.  */

static int run_jump_opt_after_gcse;

/* Bitmaps are normally not included in debugging dumps.
   However it's useful to be able to print them from GDB.
   We could create special functions for this, but it's simpler to
   just allow passing stderr to the dump_foo fns.  Since stderr can
   be a macro, we store a copy here.  */
static FILE *debug_stderr;

/* An obstack for our working variables.  */
static struct obstack gcse_obstack;

struct reg_use {rtx reg_rtx; };
/* Hash table of expressions.  */

struct expr
{
  /* The expression (SET_SRC for expressions, PATTERN for assignments).  */
  rtx expr;
  /* Index in the available expression bitmaps.  */
  int bitmap_index;
  /* Next entry with the same hash.  */
  struct expr *next_same_hash;
  /* List of anticipatable occurrences in basic blocks in the function.
     An "anticipatable occurrence" is one that is the first occurrence in the
     basic block, the operands are not modified in the basic block prior
     to the occurrence and the output is not used between the start of
     the block and the occurrence.  */
  struct occr *antic_occr;
  /* List of available occurrences in basic blocks in the function.
     An "available occurrence" is one that is the last occurrence in the
     basic block and the operands are not modified by following statements in
     the basic block [including this insn].  */
  struct occr *avail_occr;
  /* Non-null if the computation is PRE redundant.
     The value is the newly created pseudo-reg to record a copy of the
     expression in all the places that reach the redundant copy.  */
  rtx reaching_reg;
};
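
/* An editor's illustration (hypothetical, not part of the original
   sources) of the two occurrence kinds above.  If a block computes
   r2 + r3 twice and neither r2 nor r3 is set anywhere in the block:

       r1 = r2 + r3     <- anticipatable occurrence (first in the block)
       ...
       r4 = r2 + r3     <- available occurrence (last in the block)

   the first insn is recorded on antic_occr and the second on avail_occr.
   A lone occurrence satisfying both definitions appears on both lists.  */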
/* Occurrence of an expression.
   There is one per basic block.  If a pattern appears more than once the
   last appearance is used [or first for anticipatable expressions].  */

struct occr
{
  /* Next occurrence of this expression.  */
  struct occr *next;
  /* The insn that computes the expression.  */
  rtx insn;
  /* Nonzero if this [anticipatable] occurrence has been deleted.  */
  char deleted_p;
  /* Nonzero if this [available] occurrence has been copied to
     reaching_reg.  */
  /* ??? This is mutually exclusive with deleted_p, so they could share
     the same byte.  */
  char copied_p;
};
/* Expression and copy propagation hash tables.
   Each hash table is an array of buckets.
   ??? It is known that if it were an array of entries, structure elements
   `next_same_hash' and `bitmap_index' wouldn't be necessary.  However, it is
   not clear whether in the final analysis a sufficient amount of memory would
   be saved as the size of the available expression bitmaps would be larger
   [one could build a mapping table without holes afterwards though].
   Someday I'll perform the computation and figure it out.  */

struct hash_table
{
  /* The table itself.
     This is an array of `expr_hash_table_size' elements.  */
  struct expr **table;

  /* Size of the hash table, in elements.  */
  unsigned int size;

  /* Number of hash table elements.  */
  unsigned int n_elems;

  /* Whether the table is the expression hash table or the copy
     propagation one.  */
  int set_p;
};

/* Expression hash table.  */
static struct hash_table expr_hash_table;

/* Copy propagation hash table.  */
static struct hash_table set_hash_table;
/* Mapping of uids to cuids.
   Only real insns get cuids.  */
static int *uid_cuid;

/* Highest UID in UID_CUID.  */
static int max_uid;

/* Get the cuid of an insn.  */
#ifdef ENABLE_CHECKING
#define INSN_CUID(INSN) (INSN_UID (INSN) > max_uid ? (abort (), 0) : uid_cuid[INSN_UID (INSN)])
#else
#define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])
#endif

/* Number of cuids.  */
static int max_cuid;

/* Mapping of cuids to insns.  */
static rtx *cuid_insn;

/* Get insn from cuid.  */
#define CUID_INSN(CUID) (cuid_insn[CUID])
/* Maximum register number in function prior to doing gcse + 1.
   Registers created during this pass have regno >= max_gcse_regno.
   This is named with "gcse" to not collide with global of same name.  */
static unsigned int max_gcse_regno;

/* Table of registers that are modified.

   For each register, each element is a list of places where the pseudo-reg
   is set.

   For simplicity, GCSE is done on sets of pseudo-regs only.  PRE GCSE only
   requires knowledge of which blocks kill which regs [and thus could use
   a bitmap instead of the lists `reg_set_table' uses].

   `reg_set_table' could be turned into an array of bitmaps (num-bbs x
   num-regs) [however perhaps it may be useful to keep the data as is].  One
   advantage of recording things this way is that `reg_set_table' is fairly
   sparse with respect to pseudo regs but for hard regs could be fairly dense
   [relatively speaking].  And recording sets of pseudo-regs in lists speeds
   up functions like compute_transp since in the case of pseudo-regs we only
   need to iterate over the number of times a pseudo-reg is set, not over the
   number of basic blocks [clearly there is a bit of a slow down in the cases
   where a pseudo is set more than once in a block, however it is believed
   that the net effect is to speed things up].  This isn't done for hard-regs
   because recording call-clobbered hard-regs in `reg_set_table' at each
   function call can consume a fair bit of memory, and iterating over
   hard-regs stored this way in compute_transp will be more expensive.  */

typedef struct reg_set
{
  /* The next setting of this register.  */
  struct reg_set *next;
  /* The insn where it was set.  */
  rtx insn;
} reg_set;

static reg_set **reg_set_table;

/* Size of `reg_set_table'.
   The table starts out at max_gcse_regno + slop, and is enlarged as
   necessary.  */
static int reg_set_table_size;

/* Amount to grow `reg_set_table' by when it's full.  */
#define REG_SET_TABLE_SLOP 100
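
/* Editor's sketch (a hypothetical helper, compiled out) of how the
   per-register lists above are walked; it mirrors the iteration done
   by functions such as compute_transp.  The cost is proportional to
   the number of sets of REGNO, not to the number of basic blocks.  */
#if 0
static int
regno_set_in_bb_p (int regno, basic_block bb)
{
  reg_set *r;

  for (r = reg_set_table[regno]; r != NULL; r = r->next)
    if (BLOCK_NUM (r->insn) == bb->index)
      return 1;

  return 0;
}
#endif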
/* This is a list of expressions which are MEMs and will be used by load
   or store motion.
   Load motion tracks MEMs which aren't killed by
   anything except itself (i.e., loads and stores to a single location).
   We can then allow movement of these MEM refs with a little special
   allowance (all stores copy the same value to the reaching reg used
   for the loads).  This means all values used to store into memory must have
   no side effects so we can re-issue the setter value.
   Store Motion uses this structure as an expression table to track stores
   which look interesting, and might be movable towards the exit block.  */

struct ls_expr
{
  struct expr * expr;           /* Gcse expression reference for LM.  */
  rtx pattern;                  /* Pattern of this mem.  */
  rtx pattern_regs;             /* List of registers mentioned by the mem.  */
  rtx loads;                    /* INSN list of loads seen.  */
  rtx stores;                   /* INSN list of stores seen.  */
  struct ls_expr * next;        /* Next in the list.  */
  int invalid;                  /* Invalid for some reason.  */
  int index;                    /* If it maps to a bitmap index.  */
  int hash_index;               /* Index when in a hash table.  */
  rtx reaching_reg;             /* Register to use when re-writing.  */
};
/* Array of implicit set patterns indexed by basic block index.  */
static rtx *implicit_sets;

/* Head of the list of load/store memory refs.  */
static struct ls_expr * pre_ldst_mems = NULL;

/* Bitmap containing one bit for each register in the program.
   Used when performing GCSE to track which registers have been set since
   the start of the basic block.  */
static regset reg_set_bitmap;

/* For each block, a bitmap of registers set in the block.
   This is used by expr_killed_p and compute_transp.
   It is computed during hash table computation and not by compute_sets
   as it includes registers added since the last pass (or between cprop and
   gcse) and it's currently not easy to realloc sbitmap vectors.  */
static sbitmap *reg_set_in_block;

/* Array, indexed by basic block number for a list of insns which modify
   memory within that block.  */
static rtx * modify_mem_list;
bitmap modify_mem_list_set;

/* This array parallels modify_mem_list, but is kept canonicalized.  */
static rtx * canon_modify_mem_list;
bitmap canon_modify_mem_list_set;
/* Various variables for statistics gathering.  */

/* Memory used in a pass.
   This isn't intended to be absolutely precise.  Its intent is only
   to keep an eye on memory usage.  */
static int bytes_used;

/* GCSE substitutions made.  */
static int gcse_subst_count;
/* Number of copy instructions created.  */
static int gcse_create_count;
/* Number of constants propagated.  */
static int const_prop_count;
/* Number of copies propagated.  */
static int copy_prop_count;

/* These variables are used by classic GCSE.
   Normally they'd be defined a bit later, but `rd_gen' needs to
   be declared sooner.  */
/* Each block has a bitmap of each type.
   The length of each block's bitmap is:

       max_cuid  - for reaching definitions
       n_exprs   - for available expressions

   Thus we view the bitmaps as 2 dimensional arrays, i.e.,
   rd_kill[block_num][cuid_num]
   ae_kill[block_num][expr_num]  */

/* For reaching defs */
static sbitmap *rd_kill, *rd_gen, *reaching_defs, *rd_out;

/* for available exprs */
static sbitmap *ae_kill, *ae_gen, *ae_in, *ae_out;

/* Objects of this type are passed around by the null-pointer check
   removal routines.  */
struct null_pointer_info
{
  /* The basic block being processed.  */
  basic_block current_block;
  /* The first register to be handled in this pass.  */
  unsigned int min_reg;
  /* One greater than the last register to be handled in this pass.  */
  unsigned int max_reg;
  sbitmap *nonnull_local;
  sbitmap *nonnull_killed;
};
static void compute_can_copy (void);
static char *gmalloc (unsigned int);
static char *grealloc (char *, unsigned int);
static char *gcse_alloc (unsigned long);
static void alloc_gcse_mem (rtx);
static void free_gcse_mem (void);
static void alloc_reg_set_mem (int);
static void free_reg_set_mem (void);
static int get_bitmap_width (int, int, int);
static void record_one_set (int, rtx);
static void record_set_info (rtx, rtx, void *);
static void compute_sets (rtx);
static void hash_scan_insn (rtx, struct hash_table *, int);
static void hash_scan_set (rtx, rtx, struct hash_table *);
static void hash_scan_clobber (rtx, rtx, struct hash_table *);
static void hash_scan_call (rtx, rtx, struct hash_table *);
static int want_to_gcse_p (rtx);
static bool gcse_constant_p (rtx);
static int oprs_unchanged_p (rtx, rtx, int);
static int oprs_anticipatable_p (rtx, rtx);
static int oprs_available_p (rtx, rtx);
static void insert_expr_in_table (rtx, enum machine_mode, rtx, int, int,
                                  struct hash_table *);
static void insert_set_in_table (rtx, rtx, struct hash_table *);
static unsigned int hash_expr (rtx, enum machine_mode, int *, int);
static unsigned int hash_expr_1 (rtx, enum machine_mode, int *);
static unsigned int hash_string_1 (const char *);
static unsigned int hash_set (int, int);
static int expr_equiv_p (rtx, rtx);
static void record_last_reg_set_info (rtx, int);
static void record_last_mem_set_info (rtx);
static void record_last_set_info (rtx, rtx, void *);
static void compute_hash_table (struct hash_table *);
static void alloc_hash_table (int, struct hash_table *, int);
static void free_hash_table (struct hash_table *);
static void compute_hash_table_work (struct hash_table *);
static void dump_hash_table (FILE *, const char *, struct hash_table *);
static struct expr *lookup_expr (rtx, struct hash_table *);
static struct expr *lookup_set (unsigned int, struct hash_table *);
static struct expr *next_set (unsigned int, struct expr *);
static void reset_opr_set_tables (void);
static int oprs_not_set_p (rtx, rtx);
static void mark_call (rtx);
static void mark_set (rtx, rtx);
static void mark_clobber (rtx, rtx);
static void mark_oprs_set (rtx);
static void alloc_cprop_mem (int, int);
static void free_cprop_mem (void);
static void compute_transp (rtx, int, sbitmap *, int);
static void compute_transpout (void);
static void compute_local_properties (sbitmap *, sbitmap *, sbitmap *,
                                      struct hash_table *);
static void compute_cprop_data (void);
static void find_used_regs (rtx *, void *);
static int try_replace_reg (rtx, rtx, rtx);
static struct expr *find_avail_set (int, rtx);
static int cprop_jump (basic_block, rtx, rtx, rtx, rtx);
static void mems_conflict_for_gcse_p (rtx, rtx, void *);
static int load_killed_in_block_p (basic_block, int, rtx, int);
static void canon_list_insert (rtx, rtx, void *);
static int cprop_insn (rtx, int);
static int cprop (int);
static void find_implicit_sets (void);
static int one_cprop_pass (int, int, int);
static bool constprop_register (rtx, rtx, rtx, int);
static struct expr *find_bypass_set (int, int);
static bool reg_killed_on_edge (rtx, edge);
static int bypass_block (basic_block, rtx, rtx);
static int bypass_conditional_jumps (void);
static void alloc_pre_mem (int, int);
static void free_pre_mem (void);
static void compute_pre_data (void);
static int pre_expr_reaches_here_p (basic_block, struct expr *,
                                    basic_block);
static void insert_insn_end_bb (struct expr *, basic_block, int);
static void pre_insert_copy_insn (struct expr *, rtx);
static void pre_insert_copies (void);
static int pre_delete (void);
static int pre_gcse (void);
static int one_pre_gcse_pass (int);
static void add_label_notes (rtx, rtx);
static void alloc_code_hoist_mem (int, int);
static void free_code_hoist_mem (void);
static void compute_code_hoist_vbeinout (void);
static void compute_code_hoist_data (void);
static int hoist_expr_reaches_here_p (basic_block, int, basic_block, char *);
static void hoist_code (void);
static int one_code_hoisting_pass (void);
static void alloc_rd_mem (int, int);
static void free_rd_mem (void);
static void handle_rd_kill_set (rtx, int, basic_block);
static void compute_kill_rd (void);
static void compute_rd (void);
static void alloc_avail_expr_mem (int, int);
static void free_avail_expr_mem (void);
static void compute_ae_gen (struct hash_table *);
static int expr_killed_p (rtx, basic_block);
static void compute_ae_kill (sbitmap *, sbitmap *, struct hash_table *);
static int expr_reaches_here_p (struct occr *, struct expr *, basic_block,
                                int);
static rtx computing_insn (struct expr *, rtx);
static int def_reaches_here_p (rtx, rtx);
static int can_disregard_other_sets (struct reg_set **, rtx, int);
static int handle_avail_expr (rtx, struct expr *);
static int classic_gcse (void);
static int one_classic_gcse_pass (int);
static void invalidate_nonnull_info (rtx, rtx, void *);
static int delete_null_pointer_checks_1 (unsigned int *, sbitmap *, sbitmap *,
                                         struct null_pointer_info *);
static rtx process_insert_insn (struct expr *);
static int pre_edge_insert (struct edge_list *, struct expr **);
static int expr_reaches_here_p_work (struct occr *, struct expr *,
                                     basic_block, int, char *);
static int pre_expr_reaches_here_p_work (basic_block, struct expr *,
                                         basic_block, char *);
static struct ls_expr * ldst_entry (rtx);
static void free_ldst_entry (struct ls_expr *);
static void free_ldst_mems (void);
static void print_ldst_list (FILE *);
static struct ls_expr * find_rtx_in_ldst (rtx);
static int enumerate_ldsts (void);
static inline struct ls_expr * first_ls_expr (void);
static inline struct ls_expr * next_ls_expr (struct ls_expr *);
static int simple_mem (rtx);
static void invalidate_any_buried_refs (rtx);
static void compute_ld_motion_mems (void);
static void trim_ld_motion_mems (void);
static void update_ld_motion_stores (struct expr *);
static void reg_set_info (rtx, rtx, void *);
static bool store_ops_ok (rtx, int *);
static rtx extract_mentioned_regs (rtx);
static rtx extract_mentioned_regs_helper (rtx, rtx);
static void find_moveable_store (rtx, int *, int *);
static int compute_store_table (void);
static bool load_kills_store (rtx, rtx);
static bool find_loads (rtx, rtx);
static bool store_killed_in_insn (rtx, rtx, rtx);
static bool store_killed_after (rtx, rtx, rtx, basic_block, int *, rtx *);
static bool store_killed_before (rtx, rtx, rtx, basic_block, int *);
static void build_store_vectors (void);
static void insert_insn_start_bb (rtx, basic_block);
static int insert_store (struct ls_expr *, edge);
static void replace_store_insn (rtx, rtx, basic_block);
static void delete_store (struct ls_expr *, basic_block);
static void free_store_memory (void);
static void store_motion (void);
static void free_insn_expr_list_list (rtx *);
static void clear_modify_mem_tables (void);
static void free_modify_mem_tables (void);
static rtx gcse_emit_move_after (rtx, rtx, rtx);
static void local_cprop_find_used_regs (rtx *, void *);
static bool do_local_cprop (rtx, rtx, int, rtx*);
static bool adjust_libcall_notes (rtx, rtx, rtx, rtx*);
static void local_cprop_pass (int);
/* Entry point for global common subexpression elimination.
   F is the first instruction in the function.  */

int
gcse_main (rtx f, FILE *file)
{
  int changed, pass;
  /* Bytes used at start of pass.  */
  int initial_bytes_used;
  /* Maximum number of bytes used by a pass.  */
  int max_pass_bytes;
  /* Point to release obstack data from for each pass.  */
  char *gcse_obstack_bottom;

  /* We do not construct an accurate cfg in functions which call
     setjmp, so just punt to be safe.  */
  if (current_function_calls_setjmp)
    return 0;

  /* Assume that we do not need to run jump optimizations after gcse.  */
  run_jump_opt_after_gcse = 0;

  /* For calling dump_foo fns from gdb.  */
  debug_stderr = stderr;
  gcse_file = file;

  /* Identify the basic block information for this function, including
     successors and predecessors.  */
  max_gcse_regno = max_reg_num ();

  if (file)
    dump_flow_info (file);

  /* Return if there's nothing to do.  */
  if (n_basic_blocks <= 1)
    return 0;

  /* Trying to perform global optimizations on flow graphs which have
     a high connectivity will take a long time and is unlikely to be
     particularly useful.

     In normal circumstances a cfg should have about twice as many edges
     as blocks.  But we do not want to punish small functions which have
     a couple switch statements.  So we require a relatively large number
     of basic blocks and the ratio of edges to blocks to be high.  */
  if (n_basic_blocks > 1000 && n_edges / n_basic_blocks >= 20)
    {
      if (warn_disabled_optimization)
        warning ("GCSE disabled: %d > 1000 basic blocks and %d >= 20 edges/basic block",
                 n_basic_blocks, n_edges / n_basic_blocks);
      return 0;
    }

  /* If allocating memory for the cprop bitmap would take up too much
     storage it's better just to disable the optimization.  */
  if ((n_basic_blocks
       * SBITMAP_SET_SIZE (max_gcse_regno)
       * sizeof (SBITMAP_ELT_TYPE)) > MAX_GCSE_MEMORY)
    {
      if (warn_disabled_optimization)
        warning ("GCSE disabled: %d basic blocks and %d registers",
                 n_basic_blocks, max_gcse_regno);

      return 0;
    }

  gcc_obstack_init (&gcse_obstack);
  bytes_used = 0;

  /* We need alias.  */
  init_alias_analysis ();
  /* Record where pseudo-registers are set.  This data is kept accurate
     during each pass.  ??? We could also record hard-reg information here
     [since it's unchanging], however it is currently done during hash table
     computation.

     It may be tempting to compute MEM set information here too, but MEM sets
     will be subject to code motion one day and thus we need to compute
     information about memory sets when we build the hash tables.  */

  alloc_reg_set_mem (max_gcse_regno);
  compute_sets (f);

  pass = 0;
  initial_bytes_used = bytes_used;
  max_pass_bytes = 0;
  gcse_obstack_bottom = gcse_alloc (1);
  changed = 1;
  while (changed && pass < MAX_GCSE_PASSES)
    {
      changed = 0;
      if (file)
        fprintf (file, "GCSE pass %d\n\n", pass + 1);

      /* Initialize bytes_used to the space for the pred/succ lists,
         and the reg_set_table data.  */
      bytes_used = initial_bytes_used;

      /* Each pass may create new registers, so recalculate each time.  */
      max_gcse_regno = max_reg_num ();

      alloc_gcse_mem (f);

      /* Don't allow constant propagation to modify jumps
         during this pass.  */
      changed = one_cprop_pass (pass + 1, 0, 0);

      if (optimize_size)
        changed |= one_classic_gcse_pass (pass + 1);
      else
        {
          changed |= one_pre_gcse_pass (pass + 1);
          /* We may have just created new basic blocks.  Release and
             recompute various things which are sized on the number of
             basic blocks.  */
          if (changed)
            {
              free_modify_mem_tables ();
              modify_mem_list
                = (rtx *) gmalloc (last_basic_block * sizeof (rtx));
              canon_modify_mem_list
                = (rtx *) gmalloc (last_basic_block * sizeof (rtx));
              memset ((char *) modify_mem_list, 0, last_basic_block * sizeof (rtx));
              memset ((char *) canon_modify_mem_list, 0, last_basic_block * sizeof (rtx));
            }
          free_reg_set_mem ();
          alloc_reg_set_mem (max_reg_num ());
          compute_sets (f);
          run_jump_opt_after_gcse = 1;
        }

      if (max_pass_bytes < bytes_used)
        max_pass_bytes = bytes_used;

      /* Free up memory, then reallocate for code hoisting.  We can
         not re-use the existing allocated memory because the tables
         will not have info for the insns or registers created by
         partial redundancy elimination.  */
      free_gcse_mem ();

      /* It does not make sense to run code hoisting unless we are optimizing
         for code size -- it rarely makes programs faster, and can make
         them bigger if we did partial redundancy elimination (when optimizing
         for space, we use a classic gcse algorithm instead of partial
         redundancy algorithms).  */
      if (optimize_size)
        {
          max_gcse_regno = max_reg_num ();
          alloc_gcse_mem (f);
          changed |= one_code_hoisting_pass ();
          free_gcse_mem ();

          if (max_pass_bytes < bytes_used)
            max_pass_bytes = bytes_used;
        }

      if (file)
        {
          fprintf (file, "\n");
          fflush (file);
        }

      obstack_free (&gcse_obstack, gcse_obstack_bottom);
      pass++;
    }

  /* Do one last pass of copy propagation, including cprop into
     conditional jumps.  */

  max_gcse_regno = max_reg_num ();
  alloc_gcse_mem (f);
  /* This time, go ahead and allow cprop to alter jumps.  */
  one_cprop_pass (pass + 1, 1, 0);
  free_gcse_mem ();

  if (file)
    {
      fprintf (file, "GCSE of %s: %d basic blocks, ",
               current_function_name, n_basic_blocks);
      fprintf (file, "%d pass%s, %d bytes\n\n",
               pass, pass > 1 ? "es" : "", max_pass_bytes);
    }

  obstack_free (&gcse_obstack, NULL);
  free_reg_set_mem ();
  /* We are finished with alias.  */
  end_alias_analysis ();
  allocate_reg_info (max_reg_num (), FALSE, FALSE);

  if (!optimize_size && flag_gcse_sm)
    store_motion ();

  /* Record where pseudo-registers are set.  */
  return run_jump_opt_after_gcse;
}
/* Misc. utilities.  */

/* Nonzero for each mode that supports (set (reg) (reg)).
   This is trivially true for integer and floating point values.
   It may or may not be true for condition codes.  */
static char can_copy[(int) NUM_MACHINE_MODES];

/* Compute which modes support reg/reg copy operations.  */

static void
compute_can_copy (void)
{
  int i;
#ifndef AVOID_CCMODE_COPIES
  rtx reg, insn;
#endif
  memset (can_copy, 0, NUM_MACHINE_MODES);

  start_sequence ();
  for (i = 0; i < NUM_MACHINE_MODES; i++)
    if (GET_MODE_CLASS (i) == MODE_CC)
      {
#ifdef AVOID_CCMODE_COPIES
        can_copy[i] = 0;
#else
        reg = gen_rtx_REG ((enum machine_mode) i, LAST_VIRTUAL_REGISTER + 1);
        insn = emit_insn (gen_rtx_SET (VOIDmode, reg, reg));
        if (recog (PATTERN (insn), insn, NULL) >= 0)
          can_copy[i] = 1;
#endif
      }
    else
      can_copy[i] = 1;

  end_sequence ();
}
/* Returns whether the mode supports reg/reg copy operations.  */

bool
can_copy_p (enum machine_mode mode)
{
  static bool can_copy_init_p = false;

  if (! can_copy_init_p)
    {
      compute_can_copy ();
      can_copy_init_p = true;
    }

  return can_copy[mode] != 0;
}
/* Cover function to xmalloc to record bytes allocated.  */

static char *
gmalloc (unsigned int size)
{
  bytes_used += size;
  return xmalloc (size);
}

/* Cover function to xrealloc.
   We don't record the additional size since we don't know it.
   It won't affect memory usage stats much anyway.  */

static char *
grealloc (char *ptr, unsigned int size)
{
  return xrealloc (ptr, size);
}

/* Cover function to obstack_alloc.  */

static char *
gcse_alloc (unsigned long size)
{
  bytes_used += size;
  return (char *) obstack_alloc (&gcse_obstack, size);
}
/* Allocate memory for the cuid mapping array,
   and reg/memory set tracking tables.

   This is called at the start of each pass.  */

static void
alloc_gcse_mem (rtx f)
{
  int i, n;
  rtx insn;

  /* Find the largest UID and create a mapping from UIDs to CUIDs.
     CUIDs are like UIDs except they increase monotonically, have no gaps,
     and only apply to real insns.  */

  max_uid = get_max_uid ();
  n = (max_uid + 1) * sizeof (int);
  uid_cuid = (int *) gmalloc (n);
  memset ((char *) uid_cuid, 0, n);
  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    {
      if (INSN_P (insn))
        uid_cuid[INSN_UID (insn)] = i++;
      else
        uid_cuid[INSN_UID (insn)] = i;
    }

  /* Create a table mapping cuids to insns.  */

  max_cuid = i;
  n = (max_cuid + 1) * sizeof (rtx);
  cuid_insn = (rtx *) gmalloc (n);
  memset ((char *) cuid_insn, 0, n);
  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      CUID_INSN (i++) = insn;

  /* Allocate vars to track sets of regs.  */
  reg_set_bitmap = BITMAP_XMALLOC ();

  /* Allocate vars to track sets of regs, memory per block.  */
  reg_set_in_block = (sbitmap *) sbitmap_vector_alloc (last_basic_block,
                                                       max_gcse_regno);
  /* Allocate array to keep a list of insns which modify memory in each
     basic block.  */
  modify_mem_list = (rtx *) gmalloc (last_basic_block * sizeof (rtx));
  canon_modify_mem_list = (rtx *) gmalloc (last_basic_block * sizeof (rtx));
  memset ((char *) modify_mem_list, 0, last_basic_block * sizeof (rtx));
  memset ((char *) canon_modify_mem_list, 0, last_basic_block * sizeof (rtx));
  modify_mem_list_set = BITMAP_XMALLOC ();
  canon_modify_mem_list_set = BITMAP_XMALLOC ();
}
/* Free memory allocated by alloc_gcse_mem.  */

static void
free_gcse_mem (void)
{
  free (uid_cuid);
  free (cuid_insn);

  BITMAP_XFREE (reg_set_bitmap);

  sbitmap_vector_free (reg_set_in_block);
  free_modify_mem_tables ();
  BITMAP_XFREE (modify_mem_list_set);
  BITMAP_XFREE (canon_modify_mem_list_set);
}
/* Many of the global optimization algorithms work by solving dataflow
   equations for various expressions.  Initially, some local value is
   computed for each expression in each block.  Then, the values across the
   various blocks are combined (by following flow graph edges) to arrive at
   global values.  Conceptually, each set of equations is independent.  We
   may therefore solve all the equations in parallel, solve them one at a
   time, or pick any intermediate approach.

   When you're going to need N two-dimensional bitmaps, each X (say, the
   number of blocks) by Y (say, the number of expressions), call this
   function.  It's not important what X and Y represent; only that Y
   correspond to the things that can be done in parallel.  This function will
   return an appropriate chunking factor C; you should solve C sets of
   equations in parallel.  By going through this function, we can easily
   trade space against time; by solving fewer equations in parallel we use
   less space.  */

static int
get_bitmap_width (int n, int x, int y)
{
  /* It's not really worth figuring out *exactly* how much memory will
     be used by a particular choice.  The important thing is to get
     something approximately right.  */
  size_t max_bitmap_memory = 10 * 1024 * 1024;

  /* The number of bytes we'd use for a single column of minimum
     width.  */
  size_t column_size = n * x * sizeof (SBITMAP_ELT_TYPE);

  /* Often, it's reasonable just to solve all the equations in
     parallel.  */
  if (column_size * SBITMAP_SET_SIZE (y) <= max_bitmap_memory)
    return y;

  /* Otherwise, pick the largest width we can, without going over the
     limit.  */
  return SBITMAP_ELT_BITS * ((max_bitmap_memory + column_size - 1)
                             / column_size);
}
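
/* Editor's usage sketch for get_bitmap_width (a hypothetical caller
   with hypothetical names, compiled out): solve the Y equations in
   chunks of C columns so the live bitmaps stay within the memory
   budget described above.  */
#if 0
  int n_exprs = expr_hash_table.n_elems;      /* hypothetical Y */
  int c = get_bitmap_width (4, last_basic_block, n_exprs);
  int i;

  for (i = 0; i < n_exprs; i += c)
    {
      /* Allocate 4 bitmap vectors of last_basic_block x C bits and
         solve this slice of the equations before moving on.  */
    }
#endif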
/* Compute the local properties of each recorded expression.

   Local properties are those that are defined by the block, irrespective of
   other blocks.

   An expression is transparent in a block if its operands are not modified
   in the block.

   An expression is computed (locally available) in a block if it is computed
   at least once and expression would contain the same value if the
   computation was moved to the end of the block.

   An expression is locally anticipatable in a block if it is computed at
   least once and expression would contain the same value if the computation
   was moved to the beginning of the block.

   We call this routine for cprop, pre and code hoisting.  They all compute
   basically the same information and thus can easily share this code.

   TRANSP, COMP, and ANTLOC are destination sbitmaps for recording local
   properties.  If NULL, then it is not necessary to compute or record that
   particular property.

   TABLE controls which hash table to look at.  If it is the set hash table,
   TRANSP is additionally computed as ~TRANSP, since this is really cprop's
   ABSALTERED.  */

static void
compute_local_properties (sbitmap *transp, sbitmap *comp, sbitmap *antloc,
                          struct hash_table *table)
{
  unsigned int i;

  /* Initialize any bitmaps that were passed in.  */
  if (transp)
    {
      if (table->set_p)
        sbitmap_vector_zero (transp, last_basic_block);
      else
        sbitmap_vector_ones (transp, last_basic_block);
    }

  if (comp)
    sbitmap_vector_zero (comp, last_basic_block);
  if (antloc)
    sbitmap_vector_zero (antloc, last_basic_block);

  for (i = 0; i < table->size; i++)
    {
      struct expr *expr;

      for (expr = table->table[i]; expr != NULL; expr = expr->next_same_hash)
        {
          int indx = expr->bitmap_index;
          struct occr *occr;

          /* The expression is transparent in this block if it is not killed.
             We start by assuming all are transparent [none are killed], and
             then reset the bits for those that are.  */
          if (transp)
            compute_transp (expr->expr, indx, transp, table->set_p);

          /* The occurrences recorded in antic_occr are exactly those that
             we want to set to nonzero in ANTLOC.  */
          if (antloc)
            for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
              {
                SET_BIT (antloc[BLOCK_NUM (occr->insn)], indx);

                /* While we're scanning the table, this is a good place to
                   initialize this.  */
                occr->deleted_p = 0;
              }

          /* The occurrences recorded in avail_occr are exactly those that
             we want to set to nonzero in COMP.  */
          if (comp)
            for (occr = expr->avail_occr; occr != NULL; occr = occr->next)
              {
                SET_BIT (comp[BLOCK_NUM (occr->insn)], indx);

                /* While we're scanning the table, this is a good place to
                   initialize this.  */
                occr->copied_p = 0;
              }

          /* While we're scanning the table, this is a good place to
             initialize this.  */
          expr->reaching_reg = 0;
        }
    }
}
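
/* Editor's sketch of a typical caller of compute_local_properties
   (hypothetical, modeled on the PRE and cprop setup code later in
   this file; compiled out).  Each property vector is last_basic_block
   x n_elems bits, and a NULL argument skips that property.  */
#if 0
  sbitmap *transp, *comp, *antloc;

  transp = sbitmap_vector_alloc (last_basic_block, expr_hash_table.n_elems);
  comp = sbitmap_vector_alloc (last_basic_block, expr_hash_table.n_elems);
  antloc = sbitmap_vector_alloc (last_basic_block, expr_hash_table.n_elems);
  compute_local_properties (transp, comp, antloc, &expr_hash_table);
#endif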
/* Register set information.

   `reg_set_table' records where each register is set or otherwise
   modified.  */

static struct obstack reg_set_obstack;

static void
alloc_reg_set_mem (int n_regs)
{
  unsigned int n;

  reg_set_table_size = n_regs + REG_SET_TABLE_SLOP;
  n = reg_set_table_size * sizeof (struct reg_set *);
  reg_set_table = (struct reg_set **) gmalloc (n);
  memset ((char *) reg_set_table, 0, n);

  gcc_obstack_init (&reg_set_obstack);
}

static void
free_reg_set_mem (void)
{
  free (reg_set_table);
  obstack_free (&reg_set_obstack, NULL);
}
/* Record REGNO in the reg_set table.  */

static void
record_one_set (int regno, rtx insn)
{
  /* Allocate a new reg_set element and link it onto the list.  */
  struct reg_set *new_reg_info;

  /* If the table isn't big enough, enlarge it.  */
  if (regno >= reg_set_table_size)
    {
      int new_size = regno + REG_SET_TABLE_SLOP;

      reg_set_table
        = (struct reg_set **) grealloc ((char *) reg_set_table,
                                        new_size * sizeof (struct reg_set *));
      memset ((char *) (reg_set_table + reg_set_table_size), 0,
              (new_size - reg_set_table_size) * sizeof (struct reg_set *));
      reg_set_table_size = new_size;
    }

  new_reg_info = (struct reg_set *) obstack_alloc (&reg_set_obstack,
                                                   sizeof (struct reg_set));
  bytes_used += sizeof (struct reg_set);
  new_reg_info->insn = insn;
  new_reg_info->next = reg_set_table[regno];
  reg_set_table[regno] = new_reg_info;
}
/* Called from compute_sets via note_stores to handle one SET or CLOBBER in
   an insn.  The DATA is really the instruction in which the SET is
   occurring.  */

static void
record_set_info (rtx dest, rtx setter ATTRIBUTE_UNUSED, void *data)
{
  rtx record_set_insn = (rtx) data;

  if (GET_CODE (dest) == REG && REGNO (dest) >= FIRST_PSEUDO_REGISTER)
    record_one_set (REGNO (dest), record_set_insn);
}

/* Scan the function and record each set of each pseudo-register.

   This is called once, at the start of the gcse pass.  See the comments for
   `reg_set_table' for further documentation.  */

static void
compute_sets (rtx f)
{
  rtx insn;

  for (insn = f; insn != 0; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      note_stores (PATTERN (insn), record_set_info, insn);
}
/* Hash table support.  */

struct reg_avail_info
{
  basic_block last_bb;
  int first_set;
  int last_set;
};

static struct reg_avail_info *reg_avail_info;
static basic_block current_bb;
/* See whether X, the source of a set, is something we want to consider for
   GCSE.  */

static GTY(()) rtx test_insn;
static int
want_to_gcse_p (rtx x)
{
  int num_clobbers = 0;
  int icode;

  switch (GET_CODE (x))
    {
    case REG:
    case SUBREG:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CALL:
    case CONSTANT_P_RTX:
      return 0;

    default:
      break;
    }

  /* If this is a valid operand, we are OK.  If it's VOIDmode, we aren't.  */
  if (general_operand (x, GET_MODE (x)))
    return 1;
  else if (GET_MODE (x) == VOIDmode)
    return 0;

  /* Otherwise, check if we can make a valid insn from it.  First initialize
     our test insn if we haven't already.  */
  if (test_insn == 0)
    {
      test_insn
        = make_insn_raw (gen_rtx_SET (VOIDmode,
                                      gen_rtx_REG (word_mode,
                                                   FIRST_PSEUDO_REGISTER * 2),
                                      const0_rtx));
      NEXT_INSN (test_insn) = PREV_INSN (test_insn) = 0;
    }

  /* Now make an insn like the one we would make when GCSE'ing and see if
     valid.  */
  PUT_MODE (SET_DEST (PATTERN (test_insn)), GET_MODE (x));
  SET_SRC (PATTERN (test_insn)) = x;
  return ((icode = recog (PATTERN (test_insn), test_insn, &num_clobbers)) >= 0
          && (num_clobbers == 0 || ! added_clobbers_hard_reg_p (icode)));
}
/* Return nonzero if the operands of expression X are unchanged from the
   start of INSN's basic block up to but not including INSN (if AVAIL_P == 0),
   or from INSN to the end of INSN's basic block (if AVAIL_P != 0).  */

static int
oprs_unchanged_p (rtx x, rtx insn, int avail_p)
{
  int i, j;
  enum rtx_code code;
  const char *fmt;

  if (x == 0)
    return 1;

  code = GET_CODE (x);
  switch (code)
    {
    case REG:
      {
        struct reg_avail_info *info = &reg_avail_info[REGNO (x)];

        if (info->last_bb != current_bb)
          return 1;
        if (avail_p)
          return info->last_set < INSN_CUID (insn);
        else
          return info->first_set >= INSN_CUID (insn);
      }

    case MEM:
      if (load_killed_in_block_p (current_bb, INSN_CUID (insn),
                                  x, avail_p))
        return 0;
      else
        return oprs_unchanged_p (XEXP (x, 0), insn, avail_p);

    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
    case PRE_MODIFY:
    case POST_MODIFY:
      return 0;

    case PC:
    case CC0: /*FIXME*/
    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return 1;

    default:
      break;
    }

  for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          /* If we are about to do the last recursive call needed at this
             level, change it into iteration.  This function is called enough
             to be worth it.  */
          if (i == 0)
            return oprs_unchanged_p (XEXP (x, i), insn, avail_p);

          else if (! oprs_unchanged_p (XEXP (x, i), insn, avail_p))
            return 0;
        }
      else if (fmt[i] == 'E')
        for (j = 0; j < XVECLEN (x, i); j++)
          if (! oprs_unchanged_p (XVECEXP (x, i, j), insn, avail_p))
            return 0;
    }

  return 1;
}
/* Used for communication between mems_conflict_for_gcse_p and
   load_killed_in_block_p.  Nonzero if mems_conflict_for_gcse_p finds a
   conflict between two memory references.  */
static int gcse_mems_conflict_p;

/* Used for communication between mems_conflict_for_gcse_p and
   load_killed_in_block_p.  A memory reference for a load instruction;
   mems_conflict_for_gcse_p will check whether a memory store conflicts
   with this memory load.  */
static rtx gcse_mem_operand;

/* DEST is the output of an instruction.  If it is a memory reference, and
   possibly conflicts with the load found in gcse_mem_operand, then set
   gcse_mems_conflict_p to a nonzero value.  */

static void
mems_conflict_for_gcse_p (rtx dest, rtx setter ATTRIBUTE_UNUSED,
                          void *data ATTRIBUTE_UNUSED)
{
  while (GET_CODE (dest) == SUBREG
         || GET_CODE (dest) == ZERO_EXTRACT
         || GET_CODE (dest) == SIGN_EXTRACT
         || GET_CODE (dest) == STRICT_LOW_PART)
    dest = XEXP (dest, 0);

  /* If DEST is not a MEM, then it will not conflict with the load.  Note
     that function calls are assumed to clobber memory, but are handled
     elsewhere.  */
  if (GET_CODE (dest) != MEM)
    return;

  /* If we are setting a MEM in our list of specially recognized MEMs,
     don't mark as killed this time.  */

  if (expr_equiv_p (dest, gcse_mem_operand) && pre_ldst_mems != NULL)
    {
      if (!find_rtx_in_ldst (dest))
        gcse_mems_conflict_p = 1;
      return;
    }

  if (true_dependence (dest, GET_MODE (dest), gcse_mem_operand,
                       rtx_addr_varies_p))
    gcse_mems_conflict_p = 1;
}
/* Return nonzero if the expression in X (a memory reference) is killed
   in block BB before or after the insn with the CUID in UID_LIMIT.
   AVAIL_P is nonzero for kills after UID_LIMIT, and zero for kills
   before UID_LIMIT.

   To check the entire block, set UID_LIMIT to max_uid + 1 and
   AVAIL_P to 0.  */

static int
load_killed_in_block_p (basic_block bb, int uid_limit, rtx x, int avail_p)
{
  rtx list_entry = modify_mem_list[bb->index];
  while (list_entry)
    {
      rtx setter;
      /* Ignore entries in the list that do not apply.  */
      if ((avail_p
           && INSN_CUID (XEXP (list_entry, 0)) < uid_limit)
          || (! avail_p
              && INSN_CUID (XEXP (list_entry, 0)) > uid_limit))
        {
          list_entry = XEXP (list_entry, 1);
          continue;
        }

      setter = XEXP (list_entry, 0);

      /* If SETTER is a call everything is clobbered.  Note that calls
         to pure functions are never put on the list, so we need not
         worry about them.  */
      if (GET_CODE (setter) == CALL_INSN)
        return 1;

      /* SETTER must be an INSN of some kind that sets memory.  Call
         note_stores to examine each hunk of memory that is modified.

         The note_stores interface is pretty limited, so we have to
         communicate via global variables.  Yuk.  */
      gcse_mem_operand = x;
      gcse_mems_conflict_p = 0;
      note_stores (PATTERN (setter), mems_conflict_for_gcse_p, NULL);
      if (gcse_mems_conflict_p)
        return 1;
      list_entry = XEXP (list_entry, 1);
    }
  return 0;
}
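
/* Editor's sketch (hypothetical, compiled out): per the comment above,
   the whole of block BB can be tested by passing max_uid + 1 as the
   CUID limit with AVAIL_P zero.  BB and X here are hypothetical.  */
#if 0
  if (load_killed_in_block_p (bb, max_uid + 1, x, 0))
    {
      /* Some store or call somewhere in BB may clobber the memory
         read by the load X.  */
    }
#endif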
/* Return nonzero if the operands of expression X are unchanged from
   the start of INSN's basic block up to but not including INSN.  */

static int
oprs_anticipatable_p (rtx x, rtx insn)
{
  return oprs_unchanged_p (x, insn, 0);
}

/* Return nonzero if the operands of expression X are unchanged from
   INSN to the end of INSN's basic block.  */

static int
oprs_available_p (rtx x, rtx insn)
{
  return oprs_unchanged_p (x, insn, 1);
}
/* Hash expression X.

   MODE is only used if X is a CONST_INT.  DO_NOT_RECORD_P is a boolean
   indicating if a volatile operand is found or if the expression contains
   something we don't want to insert in the table.

   ??? One might want to merge this with canon_hash.  Later.  */

static unsigned int
hash_expr (rtx x, enum machine_mode mode, int *do_not_record_p,
           int hash_table_size)
{
  unsigned int hash;

  *do_not_record_p = 0;

  hash = hash_expr_1 (x, mode, do_not_record_p);
  return hash % hash_table_size;
}

/* Hash a string.  Just add its bytes up.  */

static inline unsigned
hash_string_1 (const char *ps)
{
  unsigned hash = 0;
  const unsigned char *p = (const unsigned char *) ps;

  if (p)
    while (*p)
      hash += *p++;

  return hash;
}
1560 /* Subroutine of hash_expr to do the actual work. */
1562 static unsigned int
1563 hash_expr_1 (rtx x, enum machine_mode mode, int *do_not_record_p)
1565 int i, j;
1566 unsigned hash = 0;
1567 enum rtx_code code;
1568 const char *fmt;
1570 /* Used to turn recursion into iteration. We can't rely on GCC's
1571 tail-recursion elimination since we need to keep accumulating values
1572 in HASH. */
1574 if (x == 0)
1575 return hash;
1577 repeat:
1578 code = GET_CODE (x);
1579 switch (code)
1581 case REG:
1582 hash += ((unsigned int) REG << 7) + REGNO (x);
1583 return hash;
1585 case CONST_INT:
1586 hash += (((unsigned int) CONST_INT << 7) + (unsigned int) mode
1587 + (unsigned int) INTVAL (x));
1588 return hash;
1590 case CONST_DOUBLE:
1591 /* This is like the general case, except that it only counts
1592 the integers representing the constant. */
1593 hash += (unsigned int) code + (unsigned int) GET_MODE (x);
1594 if (GET_MODE (x) != VOIDmode)
1595 for (i = 2; i < GET_RTX_LENGTH (CONST_DOUBLE); i++)
1596 hash += (unsigned int) XWINT (x, i);
1597 else
1598 hash += ((unsigned int) CONST_DOUBLE_LOW (x)
1599 + (unsigned int) CONST_DOUBLE_HIGH (x));
1600 return hash;
1602 case CONST_VECTOR:
1604 int units;
1605 rtx elt;
1607 units = CONST_VECTOR_NUNITS (x);
1609 for (i = 0; i < units; ++i)
1611 elt = CONST_VECTOR_ELT (x, i);
1612 hash += hash_expr_1 (elt, GET_MODE (elt), do_not_record_p);
1615 return hash;
1618 /* Assume there is only one rtx object for any given label. */
1619 case LABEL_REF:
1620 /* We don't hash on the address of the CODE_LABEL to avoid bootstrap
1621 differences and differences between each stage's debugging dumps. */
1622 hash += (((unsigned int) LABEL_REF << 7)
1623 + CODE_LABEL_NUMBER (XEXP (x, 0)));
1624 return hash;
1626 case SYMBOL_REF:
1628 /* Don't hash on the symbol's address to avoid bootstrap differences.
1629 Different hash values may cause expressions to be recorded in
1630 different orders and thus different registers to be used in the
1631 final assembler. This also avoids differences in the dump files
1632 between various stages. */
1633 unsigned int h = 0;
1634 const unsigned char *p = (const unsigned char *) XSTR (x, 0);
1636 while (*p)
1637 h += (h << 7) + *p++; /* ??? revisit */
1639 hash += ((unsigned int) SYMBOL_REF << 7) + h;
1640 return hash;
1643 case MEM:
1644 if (MEM_VOLATILE_P (x))
1646 *do_not_record_p = 1;
1647 return 0;
1650 hash += (unsigned int) MEM;
1651 /* We used alias set for hashing, but this is not good, since the alias
1652 set may differ in -fprofile-arcs and -fbranch-probabilities compilation
1653 causing the profiles to fail to match. */
1654 x = XEXP (x, 0);
1655 goto repeat;
1657 case PRE_DEC:
1658 case PRE_INC:
1659 case POST_DEC:
1660 case POST_INC:
1661 case PC:
1662 case CC0:
1663 case CALL:
1664 case UNSPEC_VOLATILE:
1665 *do_not_record_p = 1;
1666 return 0;
1668 case ASM_OPERANDS:
1669 if (MEM_VOLATILE_P (x))
1671 *do_not_record_p = 1;
1672 return 0;
1674 else
1676 /* We don't want to take the filename and line into account. */
1677 hash += (unsigned) code + (unsigned) GET_MODE (x)
1678 + hash_string_1 (ASM_OPERANDS_TEMPLATE (x))
1679 + hash_string_1 (ASM_OPERANDS_OUTPUT_CONSTRAINT (x))
1680 + (unsigned) ASM_OPERANDS_OUTPUT_IDX (x);
1682 if (ASM_OPERANDS_INPUT_LENGTH (x))
1684 for (i = 1; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
1686 hash += (hash_expr_1 (ASM_OPERANDS_INPUT (x, i),
1687 GET_MODE (ASM_OPERANDS_INPUT (x, i)),
1688 do_not_record_p)
1689 + hash_string_1 (ASM_OPERANDS_INPUT_CONSTRAINT
1690 (x, i)));
1693 hash += hash_string_1 (ASM_OPERANDS_INPUT_CONSTRAINT (x, 0));
1694 x = ASM_OPERANDS_INPUT (x, 0);
1695 mode = GET_MODE (x);
1696 goto repeat;
1698 return hash;
1701 default:
1702 break;
1705 hash += (unsigned) code + (unsigned) GET_MODE (x);
1706 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
1708 if (fmt[i] == 'e')
1710 /* If we are about to do the last recursive call
1711 needed at this level, change it into iteration.
1712 This function is called enough to be worth it. */
1713 if (i == 0)
1715 x = XEXP (x, i);
1716 goto repeat;
1719 hash += hash_expr_1 (XEXP (x, i), 0, do_not_record_p);
1720 if (*do_not_record_p)
1721 return 0;
1724 else if (fmt[i] == 'E')
1725 for (j = 0; j < XVECLEN (x, i); j++)
1727 hash += hash_expr_1 (XVECEXP (x, i, j), 0, do_not_record_p);
1728 if (*do_not_record_p)
1729 return 0;
1732 else if (fmt[i] == 's')
1733 hash += hash_string_1 (XSTR (x, i));
1734 else if (fmt[i] == 'i')
1735 hash += (unsigned int) XINT (x, i);
1736 else
1737 abort ();
1740 return hash;
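/* A standalone sketch (not part of the pass) of the additive string hash
   used for SYMBOL_REFs above.  It depends only on the characters of the
   name, never on pointer addresses, which is what keeps hash values --
   and hence expression ordering and register choice -- identical across
   bootstrap stages.  The function name is illustrative only.  */

static unsigned int
example_name_hash (const char *p)
{
  unsigned int h = 0;

  while (*p)
    h += (h << 7) + *p++;	/* Same mixing step as in hash_expr_1.  */

  return h;
}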
1743 /* Hash a set of register REGNO.
1745 Sets are hashed on the register that is set. This simplifies the PRE copy
1746 propagation code.
1748 ??? May need to make things more elaborate. Later, as necessary. */
1750 static unsigned int
1751 hash_set (int regno, int hash_table_size)
1753 unsigned int hash;
1755 hash = regno;
1756 return hash % hash_table_size;
1759 /* Return nonzero if X is equivalent to Y.
1760 ??? Borrowed from cse.c. Might want to remerge with cse.c. Later. */
1762 static int
1763 expr_equiv_p (rtx x, rtx y)
1765 int i, j;
1766 enum rtx_code code;
1767 const char *fmt;
1769 if (x == y)
1770 return 1;
1772 if (x == 0 || y == 0)
1773 return 0;
1775 code = GET_CODE (x);
1776 if (code != GET_CODE (y))
1777 return 0;
1779 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
1780 if (GET_MODE (x) != GET_MODE (y))
1781 return 0;
1783 switch (code)
1785 case PC:
1786 case CC0:
1787 case CONST_INT:
1788 return 0;
1790 case LABEL_REF:
1791 return XEXP (x, 0) == XEXP (y, 0);
1793 case SYMBOL_REF:
1794 return XSTR (x, 0) == XSTR (y, 0);
1796 case REG:
1797 return REGNO (x) == REGNO (y);
1799 case MEM:
1800 /* Can't merge two expressions in different alias sets, since we can
1801 decide that the expression is transparent in a block when it isn't,
1802 due to it being set via a different alias set. */
1803 if (MEM_ALIAS_SET (x) != MEM_ALIAS_SET (y))
1804 return 0;
1805 break;
1807 /* For commutative operations, check both orders. */
1808 case PLUS:
1809 case MULT:
1810 case AND:
1811 case IOR:
1812 case XOR:
1813 case NE:
1814 case EQ:
1815 return ((expr_equiv_p (XEXP (x, 0), XEXP (y, 0))
1816 && expr_equiv_p (XEXP (x, 1), XEXP (y, 1)))
1817 || (expr_equiv_p (XEXP (x, 0), XEXP (y, 1))
1818 && expr_equiv_p (XEXP (x, 1), XEXP (y, 0))));
1820 case ASM_OPERANDS:
1821 /* We don't use the generic code below because we want to
1822 disregard filename and line numbers. */
1824 /* A volatile asm isn't equivalent to any other. */
1825 if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
1826 return 0;
1828 if (GET_MODE (x) != GET_MODE (y)
1829 || strcmp (ASM_OPERANDS_TEMPLATE (x), ASM_OPERANDS_TEMPLATE (y))
1830 || strcmp (ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
1831 ASM_OPERANDS_OUTPUT_CONSTRAINT (y))
1832 || ASM_OPERANDS_OUTPUT_IDX (x) != ASM_OPERANDS_OUTPUT_IDX (y)
1833 || ASM_OPERANDS_INPUT_LENGTH (x) != ASM_OPERANDS_INPUT_LENGTH (y))
1834 return 0;
1836 if (ASM_OPERANDS_INPUT_LENGTH (x))
1838 for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
1839 if (! expr_equiv_p (ASM_OPERANDS_INPUT (x, i),
1840 ASM_OPERANDS_INPUT (y, i))
1841 || strcmp (ASM_OPERANDS_INPUT_CONSTRAINT (x, i),
1842 ASM_OPERANDS_INPUT_CONSTRAINT (y, i)))
1843 return 0;
1846 return 1;
1848 default:
1849 break;
1852 /* Compare the elements. If any pair of corresponding elements
1853 fail to match, return 0 for the whole thing. */
1855 fmt = GET_RTX_FORMAT (code);
1856 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1858 switch (fmt[i])
1860 case 'e':
1861 if (! expr_equiv_p (XEXP (x, i), XEXP (y, i)))
1862 return 0;
1863 break;
1865 case 'E':
1866 if (XVECLEN (x, i) != XVECLEN (y, i))
1867 return 0;
1868 for (j = 0; j < XVECLEN (x, i); j++)
1869 if (! expr_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j)))
1870 return 0;
1871 break;
1873 case 's':
1874 if (strcmp (XSTR (x, i), XSTR (y, i)))
1875 return 0;
1876 break;
1878 case 'i':
1879 if (XINT (x, i) != XINT (y, i))
1880 return 0;
1881 break;
1883 case 'w':
1884 if (XWINT (x, i) != XWINT (y, i))
1885 return 0;
1886 break;
1888 case '0':
1889 break;
1891 default:
1892 abort ();
1896 return 1;
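/* Illustrative sketch of the commutative-operand check in expr_equiv_p,
   reduced to plain integers: (a OP b) is considered equivalent to
   (c OP d) either directly or with the operands swapped.  Hypothetical
   helper, not used by the pass.  */

static int
example_commutative_equiv_p (int a, int b, int c, int d)
{
  return ((a == c && b == d)
	  || (a == d && b == c));
}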
1899 /* Insert expression X in INSN in the hash TABLE.
1900 If it is already present, record it as the last occurrence in INSN's
1901 basic block.
1903 MODE is the mode of the value X is being stored into.
1904 It is only used if X is a CONST_INT.
1906 ANTIC_P is nonzero if X is an anticipatable expression.
1907 AVAIL_P is nonzero if X is an available expression. */
1909 static void
1910 insert_expr_in_table (rtx x, enum machine_mode mode, rtx insn, int antic_p,
1911 int avail_p, struct hash_table *table)
1913 int found, do_not_record_p;
1914 unsigned int hash;
1915 struct expr *cur_expr, *last_expr = NULL;
1916 struct occr *antic_occr, *avail_occr;
1917 struct occr *last_occr = NULL;
1919 hash = hash_expr (x, mode, &do_not_record_p, table->size);
1921 /* Do not insert expression in table if it contains volatile operands,
1922 or if hash_expr determines the expression is something we don't want
1923 to or can't handle. */
1924 if (do_not_record_p)
1925 return;
1927 cur_expr = table->table[hash];
1928 found = 0;
1930 while (cur_expr && 0 == (found = expr_equiv_p (cur_expr->expr, x)))
1932 /* If the expression isn't found, save a pointer to the end of
1933 the list. */
1934 last_expr = cur_expr;
1935 cur_expr = cur_expr->next_same_hash;
1938 if (! found)
1940 cur_expr = (struct expr *) gcse_alloc (sizeof (struct expr));
1941 bytes_used += sizeof (struct expr);
1942 if (table->table[hash] == NULL)
1943 /* This is the first pattern that hashed to this index. */
1944 table->table[hash] = cur_expr;
1945 else
1946 /* Add EXPR to end of this hash chain. */
1947 last_expr->next_same_hash = cur_expr;
1949 /* Set the fields of the expr element. */
1950 cur_expr->expr = x;
1951 cur_expr->bitmap_index = table->n_elems++;
1952 cur_expr->next_same_hash = NULL;
1953 cur_expr->antic_occr = NULL;
1954 cur_expr->avail_occr = NULL;
1957 /* Now record the occurrence(s). */
1958 if (antic_p)
1960 antic_occr = cur_expr->antic_occr;
1962 /* Search for another occurrence in the same basic block. */
1963 while (antic_occr && BLOCK_NUM (antic_occr->insn) != BLOCK_NUM (insn))
1965 /* If an occurrence isn't found, save a pointer to the end of
1966 the list. */
1967 last_occr = antic_occr;
1968 antic_occr = antic_occr->next;
1971 if (antic_occr)
1972 /* Found another instance of the expression in the same basic block.
1973 Prefer the currently recorded one. We want the first one in the
1974 block, since the block is scanned from start to end. */
1975 ; /* nothing to do */
1976 else
1978 /* First occurrence of this expression in this basic block. */
1979 antic_occr = (struct occr *) gcse_alloc (sizeof (struct occr));
1980 bytes_used += sizeof (struct occr);
1981 /* First occurrence of this expression in any block? */
1982 if (cur_expr->antic_occr == NULL)
1983 cur_expr->antic_occr = antic_occr;
1984 else
1985 last_occr->next = antic_occr;
1987 antic_occr->insn = insn;
1988 antic_occr->next = NULL;
1992 if (avail_p)
1994 avail_occr = cur_expr->avail_occr;
1996 /* Search for another occurrence in the same basic block. */
1997 while (avail_occr && BLOCK_NUM (avail_occr->insn) != BLOCK_NUM (insn))
1999 /* If an occurrence isn't found, save a pointer to the end of
2000 the list. */
2001 last_occr = avail_occr;
2002 avail_occr = avail_occr->next;
2005 if (avail_occr)
2006 /* Found another instance of the expression in the same basic block.
2007 Prefer this occurrence to the currently recorded one. We want
2008 the last one in the block, since the block is scanned from start
2009 to end. */
2010 avail_occr->insn = insn;
2011 else
2013 /* First occurrence of this expression in this basic block. */
2014 avail_occr = (struct occr *) gcse_alloc (sizeof (struct occr));
2015 bytes_used += sizeof (struct occr);
2017 /* First occurrence of this expression in any block? */
2018 if (cur_expr->avail_occr == NULL)
2019 cur_expr->avail_occr = avail_occr;
2020 else
2021 last_occr->next = avail_occr;
2023 avail_occr->insn = insn;
2024 avail_occr->next = NULL;
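/* Sketch of the occurrence-recording policy above, on a toy slot instead
   of the real occr lists: for the anticipatable list we keep the first
   occurrence seen in a block, for the available list the last one.  POS
   is an insn position within the block; a negative slot means "empty".
   Toy code, not part of the pass.  */

static void
example_record_occurrence (int *slot, int antic_p, int pos)
{
  if (*slot < 0)
    /* First occurrence in this block: record it for either list.  */
    *slot = pos;
  else if (! antic_p)
    /* Later occurrence in the same block: only the available list
       prefers it.  */
    *slot = pos;
}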
2029 /* Insert pattern X in INSN in the hash table.
2030 X is a SET of a reg to either another reg or a constant.
2031 If it is already present, record it as the last occurrence in INSN's
2032 basic block. */
2034 static void
2035 insert_set_in_table (rtx x, rtx insn, struct hash_table *table)
2037 int found;
2038 unsigned int hash;
2039 struct expr *cur_expr, *last_expr = NULL;
2040 struct occr *cur_occr, *last_occr = NULL;
2042 if (GET_CODE (x) != SET
2043 || GET_CODE (SET_DEST (x)) != REG)
2044 abort ();
2046 hash = hash_set (REGNO (SET_DEST (x)), table->size);
2048 cur_expr = table->table[hash];
2049 found = 0;
2051 while (cur_expr && 0 == (found = expr_equiv_p (cur_expr->expr, x)))
2053 /* If the expression isn't found, save a pointer to the end of
2054 the list. */
2055 last_expr = cur_expr;
2056 cur_expr = cur_expr->next_same_hash;
2059 if (! found)
2061 cur_expr = (struct expr *) gcse_alloc (sizeof (struct expr));
2062 bytes_used += sizeof (struct expr);
2063 if (table->table[hash] == NULL)
2064 /* This is the first pattern that hashed to this index. */
2065 table->table[hash] = cur_expr;
2066 else
2067 /* Add EXPR to end of this hash chain. */
2068 last_expr->next_same_hash = cur_expr;
2070 /* Set the fields of the expr element.
2071 We must copy X because it can be modified when copy propagation is
2072 performed on its operands. */
2073 cur_expr->expr = copy_rtx (x);
2074 cur_expr->bitmap_index = table->n_elems++;
2075 cur_expr->next_same_hash = NULL;
2076 cur_expr->antic_occr = NULL;
2077 cur_expr->avail_occr = NULL;
2080 /* Now record the occurrence. */
2081 cur_occr = cur_expr->avail_occr;
2083 /* Search for another occurrence in the same basic block. */
2084 while (cur_occr && BLOCK_NUM (cur_occr->insn) != BLOCK_NUM (insn))
2086 /* If an occurrence isn't found, save a pointer to the end of
2087 the list. */
2088 last_occr = cur_occr;
2089 cur_occr = cur_occr->next;
2092 if (cur_occr)
2093 /* Found another instance of the expression in the same basic block.
2094 Prefer this occurrence to the currently recorded one. We want the
2095 last one in the block, since the block is scanned from start to end. */
2096 cur_occr->insn = insn;
2097 else
2099 /* First occurrence of this expression in this basic block. */
2100 cur_occr = (struct occr *) gcse_alloc (sizeof (struct occr));
2101 bytes_used += sizeof (struct occr);
2103 /* First occurrence of this expression in any block? */
2104 if (cur_expr->avail_occr == NULL)
2105 cur_expr->avail_occr = cur_occr;
2106 else
2107 last_occr->next = cur_occr;
2109 cur_occr->insn = insn;
2110 cur_occr->next = NULL;
2114 /* Determine whether the rtx X should be treated as a constant for
2115 the purposes of GCSE's constant propagation. */
2117 static bool
2118 gcse_constant_p (rtx x)
2120 /* Consider a COMPARE of two integers constant. */
2121 if (GET_CODE (x) == COMPARE
2122 && GET_CODE (XEXP (x, 0)) == CONST_INT
2123 && GET_CODE (XEXP (x, 1)) == CONST_INT)
2124 return true;
2126 if (GET_CODE (x) == CONSTANT_P_RTX)
2127 return false;
2129 return CONSTANT_P (x);
2132 /* Scan pattern PAT of INSN and add an entry to the hash TABLE (set or
2133 expression one). */
2135 static void
2136 hash_scan_set (rtx pat, rtx insn, struct hash_table *table)
2138 rtx src = SET_SRC (pat);
2139 rtx dest = SET_DEST (pat);
2140 rtx note;
2142 if (GET_CODE (src) == CALL)
2143 hash_scan_call (src, insn, table);
2145 else if (GET_CODE (dest) == REG)
2147 unsigned int regno = REGNO (dest);
2148 rtx tmp;
2150 /* If this is a single set and we are doing constant propagation,
2151 see if a REG_NOTE shows this equivalent to a constant. */
2152 if (table->set_p && (note = find_reg_equal_equiv_note (insn)) != 0
2153 && gcse_constant_p (XEXP (note, 0)))
2154 src = XEXP (note, 0), pat = gen_rtx_SET (VOIDmode, dest, src);
2156 /* Only record sets of pseudo-regs in the hash table. */
2157 if (! table->set_p
2158 && regno >= FIRST_PSEUDO_REGISTER
2159 /* Don't GCSE something if we can't do a reg/reg copy. */
2160 && can_copy_p (GET_MODE (dest))
2161 /* GCSE commonly inserts instructions after the insn. We can't
2162 do that easily for insns with EH_REGION notes, so disable GCSE
2163 on them for now. */
2164 && !find_reg_note (insn, REG_EH_REGION, NULL_RTX)
2165 /* Is SET_SRC something we want to gcse? */
2166 && want_to_gcse_p (src)
2167 /* Don't CSE a nop. */
2168 && ! set_noop_p (pat)
2169 /* Don't GCSE if it has an attached REG_EQUIV note.
2170 At this point only function parameters should have
2171 REG_EQUIV notes, and if the argument slot is used somewhere
2172 explicitly, it means the address of the parameter has been taken,
2173 so we should not extend the lifetime of the pseudo. */
2174 && ((note = find_reg_note (insn, REG_EQUIV, NULL_RTX)) == 0
2175 || GET_CODE (XEXP (note, 0)) != MEM))
2177 /* An expression is not anticipatable if its operands are
2178 modified before this insn or if this is not the only SET in
2179 this insn. */
2180 int antic_p = oprs_anticipatable_p (src, insn) && single_set (insn);
2181 /* An expression is not available if its operands are
2182 subsequently modified, including this insn. It's also not
2183 available if this is a branch, because we can't insert
2184 a set after the branch. */
2185 int avail_p = (oprs_available_p (src, insn)
2186 && ! JUMP_P (insn));
2188 insert_expr_in_table (src, GET_MODE (dest), insn, antic_p, avail_p, table);
2191 /* Record sets for constant/copy propagation. */
2192 else if (table->set_p
2193 && regno >= FIRST_PSEUDO_REGISTER
2194 && ((GET_CODE (src) == REG
2195 && REGNO (src) >= FIRST_PSEUDO_REGISTER
2196 && can_copy_p (GET_MODE (dest))
2197 && REGNO (src) != regno)
2198 || gcse_constant_p (src))
2199 /* A copy is not available if its src or dest is subsequently
2200 modified. Here we want to search from INSN+1 on, but
2201 oprs_available_p searches from INSN on. */
2202 && (insn == BLOCK_END (BLOCK_NUM (insn))
2203 || ((tmp = next_nonnote_insn (insn)) != NULL_RTX
2204 && oprs_available_p (pat, tmp))))
2205 insert_set_in_table (pat, insn, table);
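/* Worked example of the ANTIC_P/AVAIL_P flags computed above.  In a block

	r1 = r2 + r3	(a)
	r2 = ...
	r4 = r2 + r3	(b)

   occurrence (a) is anticipatable (its operands are unchanged from the
   start of the block) but not available (r2 is overwritten before the
   block ends), while (b) is available but not anticipatable.
   Illustrative only.  */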
2209 static void
2210 hash_scan_clobber (rtx x ATTRIBUTE_UNUSED, rtx insn ATTRIBUTE_UNUSED,
2211 struct hash_table *table ATTRIBUTE_UNUSED)
2213 /* Currently nothing to do. */
2216 static void
2217 hash_scan_call (rtx x ATTRIBUTE_UNUSED, rtx insn ATTRIBUTE_UNUSED,
2218 struct hash_table *table ATTRIBUTE_UNUSED)
2220 /* Currently nothing to do. */
2223 /* Process INSN and add hash table entries as appropriate.
2225 Only available expressions that set a single pseudo-reg are recorded.
2227 Single sets in a PARALLEL could be handled, but it's an extra complication
2228 that isn't dealt with right now. The trick is handling the CLOBBERs that
2229 are also in the PARALLEL. Later.
2231 If TABLE->set_p is nonzero, this is for the assignment hash table,
2232 otherwise it is for the expression hash table.
2233 If IN_LIBCALL_BLOCK is nonzero, we are in a libcall block and should
2234 not record any expressions. */
2236 static void
2237 hash_scan_insn (rtx insn, struct hash_table *table, int in_libcall_block)
2239 rtx pat = PATTERN (insn);
2240 int i;
2242 if (in_libcall_block)
2243 return;
2245 /* Pick out the sets of INSN and for other forms of instructions record
2246 what's been modified. */
2248 if (GET_CODE (pat) == SET)
2249 hash_scan_set (pat, insn, table);
2250 else if (GET_CODE (pat) == PARALLEL)
2251 for (i = 0; i < XVECLEN (pat, 0); i++)
2253 rtx x = XVECEXP (pat, 0, i);
2255 if (GET_CODE (x) == SET)
2256 hash_scan_set (x, insn, table);
2257 else if (GET_CODE (x) == CLOBBER)
2258 hash_scan_clobber (x, insn, table);
2259 else if (GET_CODE (x) == CALL)
2260 hash_scan_call (x, insn, table);
2263 else if (GET_CODE (pat) == CLOBBER)
2264 hash_scan_clobber (pat, insn, table);
2265 else if (GET_CODE (pat) == CALL)
2266 hash_scan_call (pat, insn, table);
2269 static void
2270 dump_hash_table (FILE *file, const char *name, struct hash_table *table)
2272 int i;
2273 /* Flattened out table, so it's printed in proper order. */
2274 struct expr **flat_table;
2275 unsigned int *hash_val;
2276 struct expr *expr;
2278 flat_table
2279 = (struct expr **) xcalloc (table->n_elems, sizeof (struct expr *));
2280 hash_val = (unsigned int *) xmalloc (table->n_elems * sizeof (unsigned int));
2282 for (i = 0; i < (int) table->size; i++)
2283 for (expr = table->table[i]; expr != NULL; expr = expr->next_same_hash)
2285 flat_table[expr->bitmap_index] = expr;
2286 hash_val[expr->bitmap_index] = i;
2289 fprintf (file, "%s hash table (%d buckets, %d entries)\n",
2290 name, table->size, table->n_elems);
2292 for (i = 0; i < (int) table->n_elems; i++)
2293 if (flat_table[i] != 0)
2295 expr = flat_table[i];
2296 fprintf (file, "Index %d (hash value %d)\n ",
2297 expr->bitmap_index, hash_val[i]);
2298 print_rtl (file, expr->expr);
2299 fprintf (file, "\n");
2302 fprintf (file, "\n");
2304 free (flat_table);
2305 free (hash_val);
2308 /* Record register first/last/block set information for REGNO in INSN.
2310 first_set records the first place in the block where the register
2311 is set and is used to compute "anticipatability".
2313 last_set records the last place in the block where the register
2314 is set and is used to compute "availability".
2316 last_bb records the block for which first_set and last_set are
2317 valid, as a quick test to invalidate them.
2319 reg_set_in_block records whether the register is set in the block
2320 and is used to compute "transparency". */
2322 static void
2323 record_last_reg_set_info (rtx insn, int regno)
2325 struct reg_avail_info *info = &reg_avail_info[regno];
2326 int cuid = INSN_CUID (insn);
2328 info->last_set = cuid;
2329 if (info->last_bb != current_bb)
2331 info->last_bb = current_bb;
2332 info->first_set = cuid;
2333 SET_BIT (reg_set_in_block[current_bb->index], regno);
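/* Toy version of the bookkeeping above: the first set in a block feeds
   the anticipatability test, the last set feeds the availability test,
   and LAST_BB lazily invalidates entries left over from other blocks.
   Hypothetical struct, not the pass's reg_avail_info.  */

struct example_avail
{
  int first_set, last_set, last_bb;
};

static void
example_record_set (struct example_avail *info, int cuid, int bb)
{
  info->last_set = cuid;	/* Always the most recent set.  */
  if (info->last_bb != bb)
    {
      /* First set of the register in this block.  */
      info->last_bb = bb;
      info->first_set = cuid;
    }
}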
2338 /* Record all of the canonicalized MEMs of record_last_mem_set_info's insn.
2339 Note we store a pair of elements in the list, so they have to be
2340 taken off pairwise. */
2342 static void
2343 canon_list_insert (rtx dest ATTRIBUTE_UNUSED, rtx unused1 ATTRIBUTE_UNUSED,
2344 void * v_insn)
2346 rtx dest_addr, insn;
2347 int bb;
2349 while (GET_CODE (dest) == SUBREG
2350 || GET_CODE (dest) == ZERO_EXTRACT
2351 || GET_CODE (dest) == SIGN_EXTRACT
2352 || GET_CODE (dest) == STRICT_LOW_PART)
2353 dest = XEXP (dest, 0);
2355 /* If DEST is not a MEM, then it will not conflict with a load. Note
2356 that function calls are assumed to clobber memory, but are handled
2357 elsewhere. */
2359 if (GET_CODE (dest) != MEM)
2360 return;
2362 dest_addr = get_addr (XEXP (dest, 0));
2363 dest_addr = canon_rtx (dest_addr);
2364 insn = (rtx) v_insn;
2365 bb = BLOCK_NUM (insn);
2367 canon_modify_mem_list[bb] =
2368 alloc_EXPR_LIST (VOIDmode, dest_addr, canon_modify_mem_list[bb]);
2369 canon_modify_mem_list[bb] =
2370 alloc_EXPR_LIST (VOIDmode, dest, canon_modify_mem_list[bb]);
2371 bitmap_set_bit (canon_modify_mem_list_set, bb);
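/* Sketch of how the pairwise list built above is consumed (compare the
   MEM case of compute_transp below): elements come off two at a time,
   the MEM first and its canonicalized address second.  Toy list type,
   not the rtx EXPR_LIST used by the pass.  */

struct example_node
{
  void *value;
  struct example_node *next;
};

static void
example_walk_pairs (struct example_node *list)
{
  void *dest, *dest_addr;

  while (list)
    {
      dest = list->value;	/* The MEM itself.  */
      list = list->next;
      dest_addr = list->value;	/* Its canonical address.  */
      list = list->next;
      /* A real consumer would test dependence against both forms.  */
      (void) dest;
      (void) dest_addr;
    }
}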
2374 /* Record memory modification information for INSN. We do not actually care
2375 about the memory location(s) that are set, or even how they are set (consider
2376 a CALL_INSN). We merely need to record which insns modify memory. */
2378 static void
2379 record_last_mem_set_info (rtx insn)
2381 int bb = BLOCK_NUM (insn);
2383 /* load_killed_in_block_p will handle the case of calls clobbering
2384 everything. */
2385 modify_mem_list[bb] = alloc_INSN_LIST (insn, modify_mem_list[bb]);
2386 bitmap_set_bit (modify_mem_list_set, bb);
2388 if (GET_CODE (insn) == CALL_INSN)
2390 /* Note that traversals of this list (other than for freeing)
2391 break off after encountering a CALL_INSN, so there's no
2392 need to insert a pair of items, as canon_list_insert does. */
2393 canon_modify_mem_list[bb] =
2394 alloc_INSN_LIST (insn, canon_modify_mem_list[bb]);
2395 bitmap_set_bit (canon_modify_mem_list_set, bb);
2397 else
2398 note_stores (PATTERN (insn), canon_list_insert, (void*) insn);
2401 /* Called from compute_hash_table via note_stores to handle one
2402 SET or CLOBBER in an insn. DATA is really the instruction in which
2403 the SET is taking place. */
2405 static void
2406 record_last_set_info (rtx dest, rtx setter ATTRIBUTE_UNUSED, void *data)
2408 rtx last_set_insn = (rtx) data;
2410 if (GET_CODE (dest) == SUBREG)
2411 dest = SUBREG_REG (dest);
2413 if (GET_CODE (dest) == REG)
2414 record_last_reg_set_info (last_set_insn, REGNO (dest));
2415 else if (GET_CODE (dest) == MEM
2416 /* Ignore pushes, they clobber nothing. */
2417 && ! push_operand (dest, GET_MODE (dest)))
2418 record_last_mem_set_info (last_set_insn);
2421 /* Top level function to create an expression or assignment hash table.
2423 Expression entries are placed in the hash table if
2424 - they are of the form (set (pseudo-reg) src),
2425 - src is something we want to perform GCSE on,
2426 - none of the operands are subsequently modified in the block
2428 Assignment entries are placed in the hash table if
2429 - they are of the form (set (pseudo-reg) src),
2430 - src is something we want to perform const/copy propagation on,
2431 - none of the operands or target are subsequently modified in the block
2433 Currently src must be a pseudo-reg or a const_int.
2435 TABLE is the table computed. */
2437 static void
2438 compute_hash_table_work (struct hash_table *table)
2440 unsigned int i;
2442 /* While we compute the hash table we also compute a bit array of which
2443 registers are set in which blocks.
2444 ??? This isn't needed during const/copy propagation, but it's cheap to
2445 compute. Later. */
2446 sbitmap_vector_zero (reg_set_in_block, last_basic_block);
2448 /* Re-cache any INSN_LIST nodes we have allocated. */
2449 clear_modify_mem_tables ();
2450 /* Some working arrays used to track first and last set in each block. */
2451 reg_avail_info = (struct reg_avail_info*)
2452 gmalloc (max_gcse_regno * sizeof (struct reg_avail_info));
2454 for (i = 0; i < max_gcse_regno; ++i)
2455 reg_avail_info[i].last_bb = NULL;
2457 FOR_EACH_BB (current_bb)
2459 rtx insn;
2460 unsigned int regno;
2461 int in_libcall_block;
2463 /* First pass over the instructions records information used to
2464 determine when registers and memory are first and last set.
2465 ??? hard-reg reg_set_in_block computation
2466 could be moved to compute_sets since they currently don't change. */
2468 for (insn = current_bb->head;
2469 insn && insn != NEXT_INSN (current_bb->end);
2470 insn = NEXT_INSN (insn))
2472 if (! INSN_P (insn))
2473 continue;
2475 if (GET_CODE (insn) == CALL_INSN)
2477 bool clobbers_all = false;
2478 #ifdef NON_SAVING_SETJMP
2479 if (NON_SAVING_SETJMP
2480 && find_reg_note (insn, REG_SETJMP, NULL_RTX))
2481 clobbers_all = true;
2482 #endif
2484 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2485 if (clobbers_all
2486 || TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
2487 record_last_reg_set_info (insn, regno);
2489 mark_call (insn);
2492 note_stores (PATTERN (insn), record_last_set_info, insn);
2495 /* Insert implicit sets in the hash table. */
2496 if (table->set_p
2497 && implicit_sets[current_bb->index] != NULL_RTX)
2498 hash_scan_set (implicit_sets[current_bb->index],
2499 current_bb->head, table);
2501 /* The next pass builds the hash table. */
2503 for (insn = current_bb->head, in_libcall_block = 0;
2504 insn && insn != NEXT_INSN (current_bb->end);
2505 insn = NEXT_INSN (insn))
2506 if (INSN_P (insn))
2508 if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
2509 in_libcall_block = 1;
2510 else if (table->set_p && find_reg_note (insn, REG_RETVAL, NULL_RTX))
2511 in_libcall_block = 0;
2512 hash_scan_insn (insn, table, in_libcall_block);
2513 if (!table->set_p && find_reg_note (insn, REG_RETVAL, NULL_RTX))
2514 in_libcall_block = 0;
2518 free (reg_avail_info);
2519 reg_avail_info = NULL;
2522 /* Allocate space for the set/expr hash TABLE.
2523 N_INSNS is the number of instructions in the function.
2524 It is used to determine the number of buckets to use.
2525 SET_P determines whether set or expression table will
2526 be created. */
2528 static void
2529 alloc_hash_table (int n_insns, struct hash_table *table, int set_p)
2531 int n;
2533 table->size = n_insns / 4;
2534 if (table->size < 11)
2535 table->size = 11;
2537 /* Attempt to maintain efficient use of hash table.
2538 Making it an odd number is simplest for now.
2539 ??? Later take some measurements. */
2540 table->size |= 1;
2541 n = table->size * sizeof (struct expr *);
2542 table->table = (struct expr **) gmalloc (n);
2543 table->set_p = set_p;
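/* Standalone restatement of the sizing rule above: one bucket per four
   insns, a floor of 11, forced odd, which tends to spread keys more
   evenly under modulo reduction.  Illustrative only.  */

static int
example_table_size (int n_insns)
{
  int size = n_insns / 4;

  if (size < 11)
    size = 11;

  return size | 1;
}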
2546 /* Free things allocated by alloc_hash_table. */
2548 static void
2549 free_hash_table (struct hash_table *table)
2551 free (table->table);
2554 /* Compute the hash TABLE for doing copy/const propagation or
2555 expression hash table. */
2557 static void
2558 compute_hash_table (struct hash_table *table)
2560 /* Initialize count of number of entries in hash table. */
2561 table->n_elems = 0;
2562 memset ((char *) table->table, 0,
2563 table->size * sizeof (struct expr *));
2565 compute_hash_table_work (table);
2568 /* Expression tracking support. */
2570 /* Lookup pattern PAT in the expression TABLE.
2571 The result is a pointer to the table entry, or NULL if not found. */
2573 static struct expr *
2574 lookup_expr (rtx pat, struct hash_table *table)
2576 int do_not_record_p;
2577 unsigned int hash = hash_expr (pat, GET_MODE (pat), &do_not_record_p,
2578 table->size);
2579 struct expr *expr;
2581 if (do_not_record_p)
2582 return NULL;
2584 expr = table->table[hash];
2586 while (expr && ! expr_equiv_p (expr->expr, pat))
2587 expr = expr->next_same_hash;
2589 return expr;
2592 /* Lookup REGNO in the set TABLE. The result is a pointer to the
2593 table entry, or NULL if not found. */
2595 static struct expr *
2596 lookup_set (unsigned int regno, struct hash_table *table)
2598 unsigned int hash = hash_set (regno, table->size);
2599 struct expr *expr;
2601 expr = table->table[hash];
2603 while (expr && REGNO (SET_DEST (expr->expr)) != regno)
2604 expr = expr->next_same_hash;
2606 return expr;
2609 /* Return the next entry for REGNO in list EXPR. */
2611 static struct expr *
2612 next_set (unsigned int regno, struct expr *expr)
2614 do
2615 expr = expr->next_same_hash;
2616 while (expr && REGNO (SET_DEST (expr->expr)) != regno);
2618 return expr;
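/* Typical iteration over every recorded set of a register (illustrative,
   hence inside #if 0): lookup_set finds the first entry for REGNO in the
   set table, next_set advances to the next entry for the same register.
   PROCESS is a hypothetical consumer.  */
#if 0
  for (expr = lookup_set (regno, &set_hash_table);
       expr != NULL;
       expr = next_set (regno, expr))
    process (expr);
#endif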
2621 /* Like free_INSN_LIST_list or free_EXPR_LIST_list, except that the node
2622 types may be mixed. */
2624 static void
2625 free_insn_expr_list_list (rtx *listp)
2627 rtx list, next;
2629 for (list = *listp; list ; list = next)
2631 next = XEXP (list, 1);
2632 if (GET_CODE (list) == EXPR_LIST)
2633 free_EXPR_LIST_node (list);
2634 else
2635 free_INSN_LIST_node (list);
2638 *listp = NULL;
2641 /* Clear canon_modify_mem_list and modify_mem_list tables. */
2642 static void
2643 clear_modify_mem_tables (void)
2645 int i;
2647 EXECUTE_IF_SET_IN_BITMAP
2648 (modify_mem_list_set, 0, i, free_INSN_LIST_list (modify_mem_list + i));
2649 bitmap_clear (modify_mem_list_set);
2651 EXECUTE_IF_SET_IN_BITMAP
2652 (canon_modify_mem_list_set, 0, i,
2653 free_insn_expr_list_list (canon_modify_mem_list + i));
2654 bitmap_clear (canon_modify_mem_list_set);
2657 /* Release memory used by modify_mem_list_set and canon_modify_mem_list_set. */
2659 static void
2660 free_modify_mem_tables (void)
2662 clear_modify_mem_tables ();
2663 free (modify_mem_list);
2664 free (canon_modify_mem_list);
2665 modify_mem_list = 0;
2666 canon_modify_mem_list = 0;
2669 /* Reset tables used to keep track of what's still available [since the
2670 start of the block]. */
2672 static void
2673 reset_opr_set_tables (void)
2675 /* Maintain a bitmap of which regs have been set since beginning of
2676 the block. */
2677 CLEAR_REG_SET (reg_set_bitmap);
2679 /* Also keep a record of the last instruction to modify memory.
2680 For now this is very trivial, we only record whether any memory
2681 location has been modified. */
2682 clear_modify_mem_tables ();
2685 /* Return nonzero if the operands of X are not set before INSN in
2686 INSN's basic block. */
2688 static int
2689 oprs_not_set_p (rtx x, rtx insn)
2691 int i, j;
2692 enum rtx_code code;
2693 const char *fmt;
2695 if (x == 0)
2696 return 1;
2698 code = GET_CODE (x);
2699 switch (code)
2701 case PC:
2702 case CC0:
2703 case CONST:
2704 case CONST_INT:
2705 case CONST_DOUBLE:
2706 case CONST_VECTOR:
2707 case SYMBOL_REF:
2708 case LABEL_REF:
2709 case ADDR_VEC:
2710 case ADDR_DIFF_VEC:
2711 return 1;
2713 case MEM:
2714 if (load_killed_in_block_p (BLOCK_FOR_INSN (insn),
2715 INSN_CUID (insn), x, 0))
2716 return 0;
2717 else
2718 return oprs_not_set_p (XEXP (x, 0), insn);
2720 case REG:
2721 return ! REGNO_REG_SET_P (reg_set_bitmap, REGNO (x));
2723 default:
2724 break;
2727 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
2729 if (fmt[i] == 'e')
2731 /* If we are about to do the last recursive call
2732 needed at this level, change it into iteration.
2733 This function is called enough to be worth it. */
2734 if (i == 0)
2735 return oprs_not_set_p (XEXP (x, i), insn);
2737 if (! oprs_not_set_p (XEXP (x, i), insn))
2738 return 0;
2740 else if (fmt[i] == 'E')
2741 for (j = 0; j < XVECLEN (x, i); j++)
2742 if (! oprs_not_set_p (XVECEXP (x, i, j), insn))
2743 return 0;
2746 return 1;
2749 /* Mark things set by a CALL. */
2751 static void
2752 mark_call (rtx insn)
2754 if (! CONST_OR_PURE_CALL_P (insn))
2755 record_last_mem_set_info (insn);
2758 /* Mark things set by a SET. */
2760 static void
2761 mark_set (rtx pat, rtx insn)
2763 rtx dest = SET_DEST (pat);
2765 while (GET_CODE (dest) == SUBREG
2766 || GET_CODE (dest) == ZERO_EXTRACT
2767 || GET_CODE (dest) == SIGN_EXTRACT
2768 || GET_CODE (dest) == STRICT_LOW_PART)
2769 dest = XEXP (dest, 0);
2771 if (GET_CODE (dest) == REG)
2772 SET_REGNO_REG_SET (reg_set_bitmap, REGNO (dest));
2773 else if (GET_CODE (dest) == MEM)
2774 record_last_mem_set_info (insn);
2776 if (GET_CODE (SET_SRC (pat)) == CALL)
2777 mark_call (insn);
2780 /* Record things set by a CLOBBER. */
2782 static void
2783 mark_clobber (rtx pat, rtx insn)
2785 rtx clob = XEXP (pat, 0);
2787 while (GET_CODE (clob) == SUBREG || GET_CODE (clob) == STRICT_LOW_PART)
2788 clob = XEXP (clob, 0);
2790 if (GET_CODE (clob) == REG)
2791 SET_REGNO_REG_SET (reg_set_bitmap, REGNO (clob));
2792 else
2793 record_last_mem_set_info (insn);
2796 /* Record things set by INSN.
2797 This data is used by oprs_not_set_p. */
2799 static void
2800 mark_oprs_set (rtx insn)
2802 rtx pat = PATTERN (insn);
2803 int i;
2805 if (GET_CODE (pat) == SET)
2806 mark_set (pat, insn);
2807 else if (GET_CODE (pat) == PARALLEL)
2808 for (i = 0; i < XVECLEN (pat, 0); i++)
2810 rtx x = XVECEXP (pat, 0, i);
2812 if (GET_CODE (x) == SET)
2813 mark_set (x, insn);
2814 else if (GET_CODE (x) == CLOBBER)
2815 mark_clobber (x, insn);
2816 else if (GET_CODE (x) == CALL)
2817 mark_call (insn);
2820 else if (GET_CODE (pat) == CLOBBER)
2821 mark_clobber (pat, insn);
2822 else if (GET_CODE (pat) == CALL)
2823 mark_call (insn);
2827 /* Classic GCSE reaching definition support. */
2829 /* Allocate reaching def variables. */
2831 static void
2832 alloc_rd_mem (int n_blocks, int n_insns)
2834 rd_kill = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_insns);
2835 sbitmap_vector_zero (rd_kill, n_blocks);
2837 rd_gen = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_insns);
2838 sbitmap_vector_zero (rd_gen, n_blocks);
2840 reaching_defs = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_insns);
2841 sbitmap_vector_zero (reaching_defs, n_blocks);
2843 rd_out = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_insns);
2844 sbitmap_vector_zero (rd_out, n_blocks);
2847 /* Free reaching def variables. */
2849 static void
2850 free_rd_mem (void)
2852 sbitmap_vector_free (rd_kill);
2853 sbitmap_vector_free (rd_gen);
2854 sbitmap_vector_free (reaching_defs);
2855 sbitmap_vector_free (rd_out);
2858 /* INSN in BB sets REGNO; mark as killed in BB every other definition of REGNO that occurs outside BB. */
2860 static void
2861 handle_rd_kill_set (rtx insn, int regno, basic_block bb)
2863 struct reg_set *this_reg;
2865 for (this_reg = reg_set_table[regno]; this_reg; this_reg = this_reg ->next)
2866 if (BLOCK_NUM (this_reg->insn) != BLOCK_NUM (insn))
2867 SET_BIT (rd_kill[bb->index], INSN_CUID (this_reg->insn));
2872 /* Compute the kill sets for reaching definitions. */
2872 static void
2873 compute_kill_rd (void)
2875 int cuid;
2876 unsigned int regno;
2877 int i;
2878 basic_block bb;
2880 /* For each block
2881 For each set bit in `gen' of the block (i.e. each insn which
2882 generates a definition in the block)
2883 Let REGX be the reg set by that insn
2884 Look at the linked list starting at reg_set_table[REGX]
2885 For each setting of REGX in the linked list which is not in
2886 this block
2887 Set the bit in `kill' corresponding to that insn. */
2888 FOR_EACH_BB (bb)
2889 for (cuid = 0; cuid < max_cuid; cuid++)
2890 if (TEST_BIT (rd_gen[bb->index], cuid))
2892 rtx insn = CUID_INSN (cuid);
2893 rtx pat = PATTERN (insn);
2895 if (GET_CODE (insn) == CALL_INSN)
2897 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2898 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
2899 handle_rd_kill_set (insn, regno, bb);
2902 if (GET_CODE (pat) == PARALLEL)
2904 for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
2906 enum rtx_code code = GET_CODE (XVECEXP (pat, 0, i));
2908 if ((code == SET || code == CLOBBER)
2909 && GET_CODE (XEXP (XVECEXP (pat, 0, i), 0)) == REG)
2910 handle_rd_kill_set (insn,
2911 REGNO (XEXP (XVECEXP (pat, 0, i), 0)),
2912 bb);
2915 else if (GET_CODE (pat) == SET && GET_CODE (SET_DEST (pat)) == REG)
2916 /* Each setting of this register outside of this block
2917 must be marked in the set of kills in this block. */
2918 handle_rd_kill_set (insn, REGNO (SET_DEST (pat)), bb);
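/* Worked example of the kill computation above (illustrative): suppose
   reg 5 is set by insn 3 in block A, insn 9 in block B and insn 14 in
   block C.  The definition in block A kills the other two, so bits 9 and
   14 are set in rd_kill[A]; symmetrically, rd_kill[B] gets 3 and 14, and
   rd_kill[C] gets 3 and 9.  */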
2922 /* Compute the reaching definitions as in
2923 Compilers Principles, Techniques, and Tools. Aho, Sethi, Ullman,
2924 Chapter 10. It is the same algorithm as used for computing available
2925 expressions but applied to the gens and kills of reaching definitions. */
2927 static void
2928 compute_rd (void)
2930 int changed, passes;
2931 basic_block bb;
2933 FOR_EACH_BB (bb)
2934 sbitmap_copy (rd_out[bb->index] /*dst*/, rd_gen[bb->index] /*src*/);
2936 passes = 0;
2937 changed = 1;
2938 while (changed)
2940 changed = 0;
2941 FOR_EACH_BB (bb)
2943 sbitmap_union_of_preds (reaching_defs[bb->index], rd_out, bb->index);
2944 changed |= sbitmap_union_of_diff_cg (rd_out[bb->index], rd_gen[bb->index],
2945 reaching_defs[bb->index], rd_kill[bb->index]);
2947 passes++;
2950 if (gcse_file)
2951 fprintf (gcse_file, "reaching def computation: %d passes\n", passes);
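/* Minimal fixed-point sketch of the equations iterated above, using one
   machine word per block instead of sbitmaps and assuming each block's
   only predecessor is the previous one:

	in[b]  = union over preds p of out[p]
	out[b] = gen[b] | (in[b] & ~kill[b])

   Hypothetical, for exposition only.  */

static void
example_compute_rd (unsigned int *gen, unsigned int *kill,
		    unsigned int *in, unsigned int *out, int n_blocks)
{
  int b, changed = 1;

  for (b = 0; b < n_blocks; b++)
    out[b] = gen[b];

  while (changed)
    {
      changed = 0;
      for (b = 1; b < n_blocks; b++)
	{
	  unsigned int new_out;

	  in[b] = out[b - 1];
	  new_out = gen[b] | (in[b] & ~kill[b]);
	  if (new_out != out[b])
	    {
	      out[b] = new_out;
	      changed = 1;
	    }
	}
    }
}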
2954 /* Classic GCSE available expression support. */
2956 /* Allocate memory for available expression computation. */
2958 static void
2959 alloc_avail_expr_mem (int n_blocks, int n_exprs)
2961 ae_kill = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_exprs);
2962 sbitmap_vector_zero (ae_kill, n_blocks);
2964 ae_gen = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_exprs);
2965 sbitmap_vector_zero (ae_gen, n_blocks);
2967 ae_in = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_exprs);
2968 sbitmap_vector_zero (ae_in, n_blocks);
2970 ae_out = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_exprs);
2971 sbitmap_vector_zero (ae_out, n_blocks);
2974 static void
2975 free_avail_expr_mem (void)
2977 sbitmap_vector_free (ae_kill);
2978 sbitmap_vector_free (ae_gen);
2979 sbitmap_vector_free (ae_in);
2980 sbitmap_vector_free (ae_out);
2983 /* Compute the set of available expressions generated in each basic block. */
2985 static void
2986 compute_ae_gen (struct hash_table *expr_hash_table)
2988 unsigned int i;
2989 struct expr *expr;
2990 struct occr *occr;
2992 /* For each recorded occurrence of each expression, set ae_gen[bb][expr].
2993 This is all we have to do because an expression is not recorded if it
2994 is not available, and the only expressions we want to work with are the
2995 ones that are recorded. */
2996 for (i = 0; i < expr_hash_table->size; i++)
2997 for (expr = expr_hash_table->table[i]; expr != 0; expr = expr->next_same_hash)
2998 for (occr = expr->avail_occr; occr != 0; occr = occr->next)
2999 SET_BIT (ae_gen[BLOCK_NUM (occr->insn)], expr->bitmap_index);
3002 /* Return nonzero if expression X is killed in BB. */
3004 static int
3005 expr_killed_p (rtx x, basic_block bb)
3007 int i, j;
3008 enum rtx_code code;
3009 const char *fmt;
3011 if (x == 0)
3012 return 1;
3014 code = GET_CODE (x);
3015 switch (code)
3017 case REG:
3018 return TEST_BIT (reg_set_in_block[bb->index], REGNO (x));
3020 case MEM:
3021 if (load_killed_in_block_p (bb, get_max_uid () + 1, x, 0))
3022 return 1;
3023 else
3024 return expr_killed_p (XEXP (x, 0), bb);
3026 case PC:
3027 case CC0: /*FIXME*/
3028 case CONST:
3029 case CONST_INT:
3030 case CONST_DOUBLE:
3031 case CONST_VECTOR:
3032 case SYMBOL_REF:
3033 case LABEL_REF:
3034 case ADDR_VEC:
3035 case ADDR_DIFF_VEC:
3036 return 0;
3038 default:
3039 break;
3042 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
3044 if (fmt[i] == 'e')
3046 /* If we are about to do the last recursive call
3047 needed at this level, change it into iteration.
3048 This function is called enough to be worth it. */
3049 if (i == 0)
3050 return expr_killed_p (XEXP (x, i), bb);
3051 else if (expr_killed_p (XEXP (x, i), bb))
3052 return 1;
3054 else if (fmt[i] == 'E')
3055 for (j = 0; j < XVECLEN (x, i); j++)
3056 if (expr_killed_p (XVECEXP (x, i, j), bb))
3057 return 1;
3060 return 0;
3063 /* Compute the set of available expressions killed in each basic block. */
3065 static void
3066 compute_ae_kill (sbitmap *ae_gen, sbitmap *ae_kill,
3067 struct hash_table *expr_hash_table)
3069 basic_block bb;
3070 unsigned int i;
3071 struct expr *expr;
3073 FOR_EACH_BB (bb)
3074 for (i = 0; i < expr_hash_table->size; i++)
3075 for (expr = expr_hash_table->table[i]; expr; expr = expr->next_same_hash)
3077 /* Skip EXPR if generated in this block. */
3078 if (TEST_BIT (ae_gen[bb->index], expr->bitmap_index))
3079 continue;
3081 if (expr_killed_p (expr->expr, bb))
3082 SET_BIT (ae_kill[bb->index], expr->bitmap_index);
3086 /* Actually perform the Classic GCSE optimizations. */
3088 /* Return nonzero if occurrence OCCR of expression EXPR reaches block BB.
3090 CHECK_SELF_LOOP is nonzero if we should consider a block reaching itself
3091 as a positive reach. We want to do this when there are two computations
3092 of the expression in the block.
3094 VISITED is a pointer to a working buffer for tracking which BB's have
3095 been visited. It is NULL for the top-level call.
3097 We treat reaching expressions that go through blocks containing the same
3098 reaching expression as "not reaching". E.g. if EXPR is generated in blocks
3099 2 and 3, INSN is in block 4, and 2->3->4, we treat the expression in block
3100 2 as not reaching. The intent is to improve the probability of finding
3101 only one reaching expression and to reduce register lifetimes by picking
3102 the closest such expression. */
3104 static int
3105 expr_reaches_here_p_work (struct occr *occr, struct expr *expr,
3106 basic_block bb, int check_self_loop, char *visited)
3108 edge pred;
3110 for (pred = bb->pred; pred != NULL; pred = pred->pred_next)
3112 basic_block pred_bb = pred->src;
3114 if (visited[pred_bb->index])
3115 /* This predecessor has already been visited. Nothing to do. */
3117 else if (pred_bb == bb)
3119 /* BB loops on itself. */
3120 if (check_self_loop
3121 && TEST_BIT (ae_gen[pred_bb->index], expr->bitmap_index)
3122 && BLOCK_NUM (occr->insn) == pred_bb->index)
3123 return 1;
3125 visited[pred_bb->index] = 1;
3128 /* Ignore this predecessor if it kills the expression. */
3129 else if (TEST_BIT (ae_kill[pred_bb->index], expr->bitmap_index))
3130 visited[pred_bb->index] = 1;
3132 /* Does this predecessor generate this expression? */
3133 else if (TEST_BIT (ae_gen[pred_bb->index], expr->bitmap_index))
3135 /* Is this the occurrence we're looking for?
3136 Note that there's only one generating occurrence per block
3137 so we just need to check the block number. */
3138 if (BLOCK_NUM (occr->insn) == pred_bb->index)
3139 return 1;
3141 visited[pred_bb->index] = 1;
3144 /* Neither gen nor kill. */
3145 else
3147 visited[pred_bb->index] = 1;
3148 if (expr_reaches_here_p_work (occr, expr, pred_bb, check_self_loop,
3149 visited))
3151 return 1;
3155 /* All paths have been checked. */
3156 return 0;
3159 /* This wrapper for expr_reaches_here_p_work() is to ensure that any
3160 memory allocated for that function is returned. */
3162 static int
3163 expr_reaches_here_p (struct occr *occr, struct expr *expr, basic_block bb,
3164 int check_self_loop)
3166 int rval;
3167 char *visited = (char *) xcalloc (last_basic_block, 1);
3169 rval = expr_reaches_here_p_work (occr, expr, bb, check_self_loop, visited);
3171 free (visited);
3172 return rval;
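/* Example of the "not reaching" convention described in the comment
   before expr_reaches_here_p_work: with blocks 2->3->4, EXPR generated
   in both 2 and 3, and INSN in block 4, the backward walk from block 4
   marks block 3 visited as soon as it sees that block 3 generates EXPR,
   and never continues into block 2.  So only block 3's occurrence is
   found, which narrows the search toward a single computation and keeps
   register lifetimes short.  */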
3175 /* Return the instruction that computes EXPR that reaches INSN's basic block.
3176 If there is more than one such instruction, return NULL.
3178 Called only by handle_avail_expr. */
3180 static rtx
3181 computing_insn (struct expr *expr, rtx insn)
3183 basic_block bb = BLOCK_FOR_INSN (insn);
3185 if (expr->avail_occr->next == NULL)
3187 if (BLOCK_FOR_INSN (expr->avail_occr->insn) == bb)
3188 /* The available expression is actually itself
3189 (i.e. a loop in the flow graph) so do nothing. */
3190 return NULL;
3192 /* (FIXME) This is the case where we found a pattern created by
3193 an earlier substitution. */
3194 return expr->avail_occr->insn;
3196 else
3198 /* Pattern is computed more than once.
3199 Search backwards from this insn to see how many of these
3200 computations actually reach this insn. */
3201 struct occr *occr;
3202 rtx insn_computes_expr = NULL;
3203 int can_reach = 0;
3205 for (occr = expr->avail_occr; occr != NULL; occr = occr->next)
3207 if (BLOCK_FOR_INSN (occr->insn) == bb)
3209 /* The expression is generated in this block.
3210 The only time we care about this is when the expression
3211 is generated later in the block [and thus there's a loop].
3212 We let the normal cse pass handle the other cases. */
3213 if (INSN_CUID (insn) < INSN_CUID (occr->insn)
3214 && expr_reaches_here_p (occr, expr, bb, 1))
3216 can_reach++;
3217 if (can_reach > 1)
3218 return NULL;
3220 insn_computes_expr = occr->insn;
3223 else if (expr_reaches_here_p (occr, expr, bb, 0))
3225 can_reach++;
3226 if (can_reach > 1)
3227 return NULL;
3229 insn_computes_expr = occr->insn;
3233 if (insn_computes_expr == NULL)
3234 abort ();
3236 return insn_computes_expr;
3240 /* Return nonzero if the definition in DEF_INSN can reach INSN.
3241 Only called by can_disregard_other_sets. */
3243 static int
3244 def_reaches_here_p (rtx insn, rtx def_insn)
3246 rtx reg;
3248 if (TEST_BIT (reaching_defs[BLOCK_NUM (insn)], INSN_CUID (def_insn)))
3249 return 1;
3251 if (BLOCK_NUM (insn) == BLOCK_NUM (def_insn))
3253 if (INSN_CUID (def_insn) < INSN_CUID (insn))
3255 if (GET_CODE (PATTERN (def_insn)) == PARALLEL)
3256 return 1;
3257 else if (GET_CODE (PATTERN (def_insn)) == CLOBBER)
3258 reg = XEXP (PATTERN (def_insn), 0);
3259 else if (GET_CODE (PATTERN (def_insn)) == SET)
3260 reg = SET_DEST (PATTERN (def_insn));
3261 else
3262 abort ();
3264 return ! reg_set_between_p (reg, NEXT_INSN (def_insn), insn);
3266 else
3267 return 0;
3270 return 0;
3273 /* Return nonzero if *ADDR_THIS_REG can only have one value at INSN. The
3274 value returned is the number of definitions that reach INSN. Returning a
3275 value of zero means that [maybe] more than one definition reaches INSN and
3276 the caller can't perform whatever optimization it is attempting; i.e. it is
3277 always safe to return zero. */
3279 static int
3280 can_disregard_other_sets (struct reg_set **addr_this_reg, rtx insn, int for_combine)
3282 int number_of_reaching_defs = 0;
3283 struct reg_set *this_reg;
3285 for (this_reg = *addr_this_reg; this_reg != 0; this_reg = this_reg->next)
3286 if (def_reaches_here_p (insn, this_reg->insn))
3288 number_of_reaching_defs++;
3289 /* Ignore parallels for now. */
3290 if (GET_CODE (PATTERN (this_reg->insn)) == PARALLEL)
3291 return 0;
3293 if (!for_combine
3294 && (GET_CODE (PATTERN (this_reg->insn)) == CLOBBER
3295 || ! rtx_equal_p (SET_SRC (PATTERN (this_reg->insn)),
3296 SET_SRC (PATTERN (insn)))))
3297 /* A setting of the reg to a different value reaches INSN. */
3298 return 0;
3300 if (number_of_reaching_defs > 1)
3302 /* We are OK if, in this setting, the value the register is being
3303 set to is equal to the previous value it was set to, and this
3304 setting reaches the insn we are trying to do the substitution
3305 on. */
3306 if (GET_CODE (PATTERN (this_reg->insn)) == CLOBBER)
3307 return 0;
3308 else if (! rtx_equal_p (SET_SRC (PATTERN (this_reg->insn)),
3309 SET_SRC (PATTERN (insn))))
3310 return 0;
3313 *addr_this_reg = this_reg;
3316 return number_of_reaching_defs;
3319 /* Expression computed by insn is available and the substitution is legal,
3320 so try to perform the substitution.
3322 The result is nonzero if any changes were made. */
3324 static int
3325 handle_avail_expr (rtx insn, struct expr *expr)
3327 rtx pat, insn_computes_expr, expr_set;
3328 rtx to;
3329 struct reg_set *this_reg;
3330 int found_setting, use_src;
3331 int changed = 0;
3333 /* We only handle the case where one computation of the expression
3334 reaches this instruction. */
3335 insn_computes_expr = computing_insn (expr, insn);
3336 if (insn_computes_expr == NULL)
3337 return 0;
3338 expr_set = single_set (insn_computes_expr);
3339 if (!expr_set)
3340 abort ();
3342 found_setting = 0;
3343 use_src = 0;
3345 /* At this point we know only one computation of EXPR outside of this
3346 block reaches this insn. Now try to find a register that the
3347 expression is computed into. */
3348 if (GET_CODE (SET_SRC (expr_set)) == REG)
3350 /* This is the case when the available expression that reaches
3351 here has already been handled as an available expression. */
3352 unsigned int regnum_for_replacing
3353 = REGNO (SET_SRC (expr_set));
3355 /* If the register was created by GCSE we can't use `reg_set_table',
3356 however we know it's set only once. */
3357 if (regnum_for_replacing >= max_gcse_regno
3358 /* If the register the expression is computed into is set only once,
3359 or only one set reaches this insn, we can use it. */
3360 || (((this_reg = reg_set_table[regnum_for_replacing]),
3361 this_reg->next == NULL)
3362 || can_disregard_other_sets (&this_reg, insn, 0)))
3364 use_src = 1;
3365 found_setting = 1;
3369 if (!found_setting)
3371 unsigned int regnum_for_replacing
3372 = REGNO (SET_DEST (expr_set));
3374 /* This shouldn't happen. */
3375 if (regnum_for_replacing >= max_gcse_regno)
3376 abort ();
3378 this_reg = reg_set_table[regnum_for_replacing];
3380 /* If the register the expression is computed into is set only once,
3381 or only one set reaches this insn, use it. */
3382 if (this_reg->next == NULL
3383 || can_disregard_other_sets (&this_reg, insn, 0))
3384 found_setting = 1;
3387 if (found_setting)
3389 pat = PATTERN (insn);
3390 if (use_src)
3391 to = SET_SRC (expr_set);
3392 else
3393 to = SET_DEST (expr_set);
3394 changed = validate_change (insn, &SET_SRC (pat), to, 0);
3396 /* We should be able to ignore the return code from validate_change but
3397 to play it safe we check. */
3398 if (changed)
3400 gcse_subst_count++;
3401 if (gcse_file != NULL)
3403 fprintf (gcse_file, "GCSE: Replacing the source in insn %d with",
3404 INSN_UID (insn));
3405 fprintf (gcse_file, " reg %d %s insn %d\n",
3406 REGNO (to), use_src ? "from" : "set in",
3407 INSN_UID (insn_computes_expr));
3412 /* The register that the expr is computed into is set more than once. */
3413 else if (1 /*expensive_op(this_pattern->op) && do_expensive_gcse)*/)
3415 /* Insert an insn after INSNX that copies the reg set in INSNX
3416 into a new pseudo register; call this new register REGN.
3417 From INSNX until the end of the basic block, or until REGB is
3418 set, replace all uses of REGB with REGN. */
3419 rtx new_insn;
3421 to = gen_reg_rtx (GET_MODE (SET_DEST (expr_set)));
3423 /* Generate the new insn. */
3424 /* ??? If the change fails, we return 0, even though we created
3425 an insn. I think this is ok. */
3426 new_insn
3427 = emit_insn_after (gen_rtx_SET (VOIDmode, to,
3428 SET_DEST (expr_set)),
3429 insn_computes_expr);
3431 /* Keep register set table up to date. */
3432 record_one_set (REGNO (to), new_insn);
3434 gcse_create_count++;
3435 if (gcse_file != NULL)
3437 fprintf (gcse_file, "GCSE: Creating insn %d to copy value of reg %d",
3438 INSN_UID (NEXT_INSN (insn_computes_expr)),
3439 REGNO (SET_SRC (PATTERN (NEXT_INSN (insn_computes_expr)))));
3440 fprintf (gcse_file, ", computed in insn %d,\n",
3441 INSN_UID (insn_computes_expr));
3442 fprintf (gcse_file, " into newly allocated reg %d\n",
3443 REGNO (to));
3446 pat = PATTERN (insn);
3448 /* Do register replacement for INSN. */
3449 changed = validate_change (insn, &SET_SRC (pat),
3450 SET_DEST (PATTERN
3451 (NEXT_INSN (insn_computes_expr))),
3452 0);
3454 /* We should be able to ignore the return code from validate_change but
3455 to play it safe we check. */
3456 if (changed)
3458 gcse_subst_count++;
3459 if (gcse_file != NULL)
3461 fprintf (gcse_file,
3462 "GCSE: Replacing the source in insn %d with reg %d ",
3463 INSN_UID (insn),
3464 REGNO (SET_DEST (PATTERN (NEXT_INSN
3465 (insn_computes_expr)))));
3466 fprintf (gcse_file, "set in insn %d\n",
3467 INSN_UID (insn_computes_expr));
3472 return changed;
3475 /* Perform classic GCSE. This is called by one_classic_gcse_pass after all
3476 the dataflow analysis has been done.
3478 The result is nonzero if a change was made. */
3480 static int
3481 classic_gcse (void)
3483 int changed;
3484 rtx insn;
3485 basic_block bb;
3487 /* Note we start at block 1. */
3489 if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
3490 return 0;
3492 changed = 0;
3493 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb->next_bb, EXIT_BLOCK_PTR, next_bb)
3495 /* Reset tables used to keep track of what's still valid [since the
3496 start of the block]. */
3497 reset_opr_set_tables ();
3499 for (insn = bb->head;
3500 insn != NULL && insn != NEXT_INSN (bb->end);
3501 insn = NEXT_INSN (insn))
3503 /* Is insn of form (set (pseudo-reg) ...)? */
3504 if (GET_CODE (insn) == INSN
3505 && GET_CODE (PATTERN (insn)) == SET
3506 && GET_CODE (SET_DEST (PATTERN (insn))) == REG
3507 && REGNO (SET_DEST (PATTERN (insn))) >= FIRST_PSEUDO_REGISTER)
3509 rtx pat = PATTERN (insn);
3510 rtx src = SET_SRC (pat);
3511 struct expr *expr;
3513 if (want_to_gcse_p (src)
3514 /* Is the expression recorded? */
3515 && ((expr = lookup_expr (src, &expr_hash_table)) != NULL)
3516 /* Is the expression available [at the start of the
3517 block]? */
3518 && TEST_BIT (ae_in[bb->index], expr->bitmap_index)
3519 /* Are the operands unchanged since the start of the
3520 block? */
3521 && oprs_not_set_p (src, insn))
3522 changed |= handle_avail_expr (insn, expr);
3525 /* Keep track of everything modified by this insn. */
3526 /* ??? Need to be careful w.r.t. mods done to INSN. */
3527 if (INSN_P (insn))
3528 mark_oprs_set (insn);
3532 return changed;
3535 /* Top level routine to perform one classic GCSE pass.
3537 Return nonzero if a change was made. */
3539 static int
3540 one_classic_gcse_pass (int pass)
3542 int changed = 0;
3544 gcse_subst_count = 0;
3545 gcse_create_count = 0;
3547 alloc_hash_table (max_cuid, &expr_hash_table, 0);
3548 alloc_rd_mem (last_basic_block, max_cuid);
3549 compute_hash_table (&expr_hash_table);
3550 if (gcse_file)
3551 dump_hash_table (gcse_file, "Expression", &expr_hash_table);
3553 if (expr_hash_table.n_elems > 0)
3555 compute_kill_rd ();
3556 compute_rd ();
3557 alloc_avail_expr_mem (last_basic_block, expr_hash_table.n_elems);
3558 compute_ae_gen (&expr_hash_table);
3559 compute_ae_kill (ae_gen, ae_kill, &expr_hash_table);
3560 compute_available (ae_gen, ae_kill, ae_out, ae_in);
3561 changed = classic_gcse ();
3562 free_avail_expr_mem ();
3565 free_rd_mem ();
3566 free_hash_table (&expr_hash_table);
3568 if (gcse_file)
3570 fprintf (gcse_file, "\n");
3571 fprintf (gcse_file, "GCSE of %s, pass %d: %d bytes needed, %d substs,",
3572 current_function_name, pass, bytes_used, gcse_subst_count);
3573 fprintf (gcse_file, "%d insns created\n", gcse_create_count);
3576 return changed;
3579 /* Compute copy/constant propagation working variables. */
3581 /* Local properties of assignments. */
3582 static sbitmap *cprop_pavloc;
3583 static sbitmap *cprop_absaltered;
3585 /* Global properties of assignments (computed from the local properties). */
3586 static sbitmap *cprop_avin;
3587 static sbitmap *cprop_avout;
3589 /* Allocate vars used for copy/const propagation. N_BLOCKS is the number of
3590 basic blocks. N_SETS is the number of sets. */
3592 static void
3593 alloc_cprop_mem (int n_blocks, int n_sets)
3595 cprop_pavloc = sbitmap_vector_alloc (n_blocks, n_sets);
3596 cprop_absaltered = sbitmap_vector_alloc (n_blocks, n_sets);
3598 cprop_avin = sbitmap_vector_alloc (n_blocks, n_sets);
3599 cprop_avout = sbitmap_vector_alloc (n_blocks, n_sets);
3602 /* Free vars used by copy/const propagation. */
3604 static void
3605 free_cprop_mem (void)
3607 sbitmap_vector_free (cprop_pavloc);
3608 sbitmap_vector_free (cprop_absaltered);
3609 sbitmap_vector_free (cprop_avin);
3610 sbitmap_vector_free (cprop_avout);
3613 /* For each block, compute whether X is transparent. X is either an
3614 expression or an assignment [though we don't care which, for this context
3615 an assignment is treated as an expression]. For each block where an
3616 element of X is modified, set (SET_P == 1) or reset (SET_P == 0) the INDX
3617 bit in BMAP. */
3619 static void
3620 compute_transp (rtx x, int indx, sbitmap *bmap, int set_p)
3622 int i, j;
3623 basic_block bb;
3624 enum rtx_code code;
3625 reg_set *r;
3626 const char *fmt;
3628 /* repeat is used to turn tail-recursion into iteration since GCC
3629 can't do it when there's no return value. */
3630 repeat:
3632 if (x == 0)
3633 return;
3635 code = GET_CODE (x);
3636 switch (code)
3638 case REG:
3639 if (set_p)
3641 if (REGNO (x) < FIRST_PSEUDO_REGISTER)
3643 FOR_EACH_BB (bb)
3644 if (TEST_BIT (reg_set_in_block[bb->index], REGNO (x)))
3645 SET_BIT (bmap[bb->index], indx);
3647 else
3649 for (r = reg_set_table[REGNO (x)]; r != NULL; r = r->next)
3650 SET_BIT (bmap[BLOCK_NUM (r->insn)], indx);
3653 else
3655 if (REGNO (x) < FIRST_PSEUDO_REGISTER)
3657 FOR_EACH_BB (bb)
3658 if (TEST_BIT (reg_set_in_block[bb->index], REGNO (x)))
3659 RESET_BIT (bmap[bb->index], indx);
3661 else
3663 for (r = reg_set_table[REGNO (x)]; r != NULL; r = r->next)
3664 RESET_BIT (bmap[BLOCK_NUM (r->insn)], indx);
3668 return;
3670 case MEM:
3671 FOR_EACH_BB (bb)
3673 rtx list_entry = canon_modify_mem_list[bb->index];
3675 while (list_entry)
3677 rtx dest, dest_addr;
3679 if (GET_CODE (XEXP (list_entry, 0)) == CALL_INSN)
3681 if (set_p)
3682 SET_BIT (bmap[bb->index], indx);
3683 else
3684 RESET_BIT (bmap[bb->index], indx);
3685 break;
3687 /* LIST_ENTRY must be an INSN of some kind that sets memory.
3688 Examine each hunk of memory that is modified. */
3690 dest = XEXP (list_entry, 0);
3691 list_entry = XEXP (list_entry, 1);
3692 dest_addr = XEXP (list_entry, 0);
3694 if (canon_true_dependence (dest, GET_MODE (dest), dest_addr,
3695 x, rtx_addr_varies_p))
3697 if (set_p)
3698 SET_BIT (bmap[bb->index], indx);
3699 else
3700 RESET_BIT (bmap[bb->index], indx);
3701 break;
3703 list_entry = XEXP (list_entry, 1);
3707 x = XEXP (x, 0);
3708 goto repeat;
3710 case PC:
3711 case CC0: /*FIXME*/
3712 case CONST:
3713 case CONST_INT:
3714 case CONST_DOUBLE:
3715 case CONST_VECTOR:
3716 case SYMBOL_REF:
3717 case LABEL_REF:
3718 case ADDR_VEC:
3719 case ADDR_DIFF_VEC:
3720 return;
3722 default:
3723 break;
3726 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
3728 if (fmt[i] == 'e')
3730 /* If we are about to do the last recursive call
3731 needed at this level, change it into iteration.
3732 This function is called enough to be worth it. */
3733 if (i == 0)
3735 x = XEXP (x, i);
3736 goto repeat;
3739 compute_transp (XEXP (x, i), indx, bmap, set_p);
3741 else if (fmt[i] == 'E')
3742 for (j = 0; j < XVECLEN (x, i); j++)
3743 compute_transp (XVECEXP (x, i, j), indx, bmap, set_p);
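/* Toy word-sized version of the REG case above, showing the two calling
   conventions: for the expression table the bitmaps start all-ones and
   bits are RESET where X is killed (SET_P == 0, transparency), while for
   the assignment table they start all-zeros and bits are SET where X is
   altered (SET_P == 1, cprop_absaltered).  Illustrative only.  */

static void
example_mark_block (unsigned int *bmap, int bb, unsigned int indx_bit,
		    int set_p)
{
  if (set_p)
    bmap[bb] |= indx_bit;	/* Record "altered in BB".  */
  else
    bmap[bb] &= ~indx_bit;	/* Clear "transparent in BB".  */
}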
3747 /* Top level routine to do the dataflow analysis needed by copy/const
3748 propagation. */
3750 static void
3751 compute_cprop_data (void)
3753 compute_local_properties (cprop_absaltered, cprop_pavloc, NULL, &set_hash_table);
3754 compute_available (cprop_pavloc, cprop_absaltered,
3755 cprop_avout, cprop_avin);
3758 /* Copy/constant propagation. */
3760 /* Maximum number of register uses in an insn that we handle. */
3761 #define MAX_USES 8
3763 /* Table of uses found in an insn.
3764 Allocated statically to avoid alloc/free complexity and overhead. */
3765 static struct reg_use reg_use_table[MAX_USES];
3767 /* Index into `reg_use_table' while building it. */
3768 static int reg_use_count;
3770 /* Set up a list of register numbers used in INSN. The found uses are stored
3771 in `reg_use_table'. `reg_use_count' is initialized to zero before entry,
3772 and contains the number of uses in the table upon exit.
3774 ??? If a register appears multiple times we will record it multiple times.
3775 This doesn't hurt anything but it will slow things down. */
3777 static void
3778 find_used_regs (rtx *xptr, void *data ATTRIBUTE_UNUSED)
3780 int i, j;
3781 enum rtx_code code;
3782 const char *fmt;
3783 rtx x = *xptr;
3785 /* repeat is used to turn tail-recursion into iteration since GCC
3786 can't do it when there's no return value. */
3787 repeat:
3788 if (x == 0)
3789 return;
3791 code = GET_CODE (x);
3792 if (REG_P (x))
3794 if (reg_use_count == MAX_USES)
3795 return;
3797 reg_use_table[reg_use_count].reg_rtx = x;
3798 reg_use_count++;
3801 /* Recursively scan the operands of this expression. */
3803 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
3805 if (fmt[i] == 'e')
3807 /* If we are about to do the last recursive call
3808 needed at this level, change it into iteration.
3809 This function is called enough to be worth it. */
3810 if (i == 0)
3812 x = XEXP (x, 0);
3813 goto repeat;
3816 find_used_regs (&XEXP (x, i), data);
3818 else if (fmt[i] == 'E')
3819 for (j = 0; j < XVECLEN (x, i); j++)
3820 find_used_regs (&XVECEXP (x, i, j), data);
3824 /* Try to replace all non-SET_DEST occurrences of FROM in INSN with TO.
3825 Returns nonzero if successful. */
3827 static int
3828 try_replace_reg (rtx from, rtx to, rtx insn)
3830 rtx note = find_reg_equal_equiv_note (insn);
3831 rtx src = 0;
3832 int success = 0;
3833 rtx set = single_set (insn);
3835 validate_replace_src_group (from, to, insn);
3836 if (num_changes_pending () && apply_change_group ())
3837 success = 1;
3839 /* Try to simplify SET_SRC if we have substituted a constant. */
3840 if (success && set && CONSTANT_P (to))
3842 src = simplify_rtx (SET_SRC (set));
3844 if (src)
3845 validate_change (insn, &SET_SRC (set), src, 0);
3848 if (!success && set && reg_mentioned_p (from, SET_SRC (set)))
3850 /* If above failed and this is a single set, try to simplify the source of
3851 the set given our substitution. We could perhaps try this for multiple
3852 SETs, but it probably won't buy us anything. */
3853 src = simplify_replace_rtx (SET_SRC (set), from, to);
3855 if (!rtx_equal_p (src, SET_SRC (set))
3856 && validate_change (insn, &SET_SRC (set), src, 0))
3857 success = 1;
3859 /* If we've failed to do replacement, have a single SET, and don't already
3860 have a note, add a REG_EQUAL note to not lose information. */
3861 if (!success && note == 0 && set != 0)
3862 note = set_unique_reg_note (insn, REG_EQUAL, copy_rtx (src));
3865 /* If there is already a NOTE, update the expression in it with our
3866 replacement. */
3867 else if (note != 0)
3868 XEXP (note, 0) = simplify_replace_rtx (XEXP (note, 0), from, to);
3870 /* The REG_EQUAL note may get simplified into a plain register.
3871 We don't allow that, so remove the note in that case. This ought
3872 not to happen, because the code above ought to have synthesized a
3873 reg-reg move, but be on the safe side. */
3874 if (note && REG_P (XEXP (note, 0)))
3875 remove_note (insn, note);
3877 return success;
3880 /* Find a SET of register REGNO that is available on entry to INSN's block.
3881 Returns NULL if no such set is found. */
3883 static struct expr *
3884 find_avail_set (int regno, rtx insn)
3886 /* SET1 contains the last set found that can be returned to the caller for
3887 use in a substitution. */
3888 struct expr *set1 = 0;
3890 /* Loops are not possible here. To get a loop we would need two sets
3891 available at the start of the block containing INSN, i.e. we would
3892 need two sets like this available at the start of the block:
3894 (set (reg X) (reg Y))
3895 (set (reg Y) (reg X))
3897 This cannot happen since the set of (reg Y) would have killed the
3898 set of (reg X) making it unavailable at the start of this block. */
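/* Hypothetical copy chain, for illustration (register numbers
   invented):

       (set (reg 70) (const_int 5))
       (set (reg 71) (reg 70))

   A lookup with REGNO == 71 finds the copy from (reg 70); since the
   source is itself a register, we iterate with REGNO = 70 and may
   reach the constant set, so that 5 can later be substituted for
   uses of (reg 71). */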
3899 while (1)
3901 rtx src;
3902 struct expr *set = lookup_set (regno, &set_hash_table);
3904 /* Find a set that is available at the start of the block
3905 which contains INSN. */
3906 while (set)
3908 if (TEST_BIT (cprop_avin[BLOCK_NUM (insn)], set->bitmap_index))
3909 break;
3910 set = next_set (regno, set);
3913 /* If no available set was found we've reached the end of the
3914 (possibly empty) copy chain. */
3915 if (set == 0)
3916 break;
3918 if (GET_CODE (set->expr) != SET)
3919 abort ();
3921 src = SET_SRC (set->expr);
3923 /* We know the set is available.
3924 Now check that SRC is ANTLOC (i.e. none of the source operands
3925 have changed since the start of the block).
3927 If the source operand changed, we may still use it for the next
3928 iteration of this loop, but we may not use it for substitutions. */
3930 if (gcse_constant_p (src) || oprs_not_set_p (src, insn))
3931 set1 = set;
3933 /* If the source of the set is anything except a register, then
3934 we have reached the end of the copy chain. */
3935 if (GET_CODE (src) != REG)
3936 break;
3938 /* Follow the copy chain, i.e. start another iteration of the loop
3939 and see if we have an available copy into SRC. */
3940 regno = REGNO (src);
3943 /* SET1 holds the last set that was available and anticipatable at
3944 INSN. */
3945 return set1;
3948 /* Subroutine of cprop_insn that tries to propagate constants into
3949 JUMP_INSNS. JUMP must be a conditional jump. If SETCC is non-NULL
3950 it is the instruction that immediately precedes JUMP, and must be a
3951 single SET of a register. FROM is what we will try to replace,
3952 SRC is the constant we will try to substitute for it. Returns nonzero
3953 if a change was made. */
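/* A sketch of the intended transformation (abbreviated RTL, not taken
   from any particular target):

       SETCC: (set (reg 58) (eq (reg 59) (const_int 0)))
       JUMP:  (set (pc) (if_then_else (ne (reg 58) (const_int 0))
                                      (label_ref L1) (pc)))

   Substituting the SETCC source into JUMP, and then SRC for FROM, may
   fold the IF_THEN_ELSE to (label_ref L1), making the jump
   unconditional, or to pc_rtx, in which case the jump is deleted
   below. */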
3955 static int
3956 cprop_jump (basic_block bb, rtx setcc, rtx jump, rtx from, rtx src)
3958 rtx new, set_src, note_src;
3959 rtx set = pc_set (jump);
3960 rtx note = find_reg_equal_equiv_note (jump);
3962 if (note)
3964 note_src = XEXP (note, 0);
3965 if (GET_CODE (note_src) == EXPR_LIST)
3966 note_src = NULL_RTX;
3968 else note_src = NULL_RTX;
3970 /* Prefer REG_EQUAL notes except those containing EXPR_LISTs. */
3971 set_src = note_src ? note_src : SET_SRC (set);
3973 /* First substitute the SETCC condition into the JUMP instruction,
3974 then substitute that given values into this expanded JUMP. */
3975 if (setcc != NULL_RTX
3976 && !modified_between_p (from, setcc, jump)
3977 && !modified_between_p (src, setcc, jump))
3979 rtx setcc_src;
3980 rtx setcc_set = single_set (setcc);
3981 rtx setcc_note = find_reg_equal_equiv_note (setcc);
3982 setcc_src = (setcc_note && GET_CODE (XEXP (setcc_note, 0)) != EXPR_LIST)
3983 ? XEXP (setcc_note, 0) : SET_SRC (setcc_set);
3984 set_src = simplify_replace_rtx (set_src, SET_DEST (setcc_set),
3985 setcc_src);
3987 else
3988 setcc = NULL_RTX;
3990 new = simplify_replace_rtx (set_src, from, src);
3992 /* If no simplification can be made, then try the next register. */
3993 if (rtx_equal_p (new, SET_SRC (set)))
3994 return 0;
3996 /* If this is now a no-op delete it, otherwise this must be a valid insn. */
3997 if (new == pc_rtx)
3998 delete_insn (jump);
3999 else
4001 /* Ensure the value computed inside the jump insn is equivalent
4002 to the one computed by setcc. */
4003 if (setcc && modified_in_p (new, setcc))
4004 return 0;
4005 if (! validate_change (jump, &SET_SRC (set), new, 0))
4007 /* When (some) constants are not valid in a comparison, and there
4008 are two registers to be replaced by constants before the entire
4009 comparison can be folded into a constant, we need to keep
4010 intermediate information in REG_EQUAL notes. For targets with
4011 separate compare insns, such notes are added by try_replace_reg.
4012 When we have a combined compare-and-branch instruction, however,
4013 we need to attach a note to the branch itself to make this
4014 optimization work. */
4016 if (!rtx_equal_p (new, note_src))
4017 set_unique_reg_note (jump, REG_EQUAL, copy_rtx (new));
4018 return 0;
4021 /* Remove REG_EQUAL note after simplification. */
4022 if (note_src)
4023 remove_note (jump, note);
4025 /* If this has turned into an unconditional jump,
4026 then put a barrier after it so that the unreachable
4027 code will be deleted. */
4028 if (GET_CODE (SET_SRC (set)) == LABEL_REF)
4029 emit_barrier_after (jump);
4032 #ifdef HAVE_cc0
4033 /* Delete the cc0 setter. */
4034 if (setcc != NULL && CC0_P (SET_DEST (single_set (setcc))))
4035 delete_insn (setcc);
4036 #endif
4038 run_jump_opt_after_gcse = 1;
4040 const_prop_count++;
4041 if (gcse_file != NULL)
4043 fprintf (gcse_file,
4044 "CONST-PROP: Replacing reg %d in jump_insn %d with constant ",
4045 REGNO (from), INSN_UID (jump));
4046 print_rtl (gcse_file, src);
4047 fprintf (gcse_file, "\n");
4049 purge_dead_edges (bb);
4051 return 1;
4054 static bool
4055 constprop_register (rtx insn, rtx from, rtx to, int alter_jumps)
4057 rtx sset;
4059 /* Check for reg or cc0 setting instructions followed by
4060 conditional branch instructions first. */
4061 if (alter_jumps
4062 && (sset = single_set (insn)) != NULL
4063 && NEXT_INSN (insn)
4064 && any_condjump_p (NEXT_INSN (insn)) && onlyjump_p (NEXT_INSN (insn)))
4066 rtx dest = SET_DEST (sset);
4067 if ((REG_P (dest) || CC0_P (dest))
4068 && cprop_jump (BLOCK_FOR_INSN (insn), insn, NEXT_INSN (insn), from, to))
4069 return 1;
4072 /* Handle normal insns next. */
4073 if (GET_CODE (insn) == INSN
4074 && try_replace_reg (from, to, insn))
4075 return 1;
4077 /* Try to propagate a CONST_INT into a conditional jump.
4078 We're pretty specific about what we will handle in this
4079 code; we can extend this as necessary over time.
4081 Right now the insn in question must look like
4082 (set (pc) (if_then_else ...)) */
4083 else if (alter_jumps && any_condjump_p (insn) && onlyjump_p (insn))
4084 return cprop_jump (BLOCK_FOR_INSN (insn), NULL, insn, from, to);
4085 return 0;
4088 /* Perform constant and copy propagation on INSN.
4089 The result is nonzero if a change was made. */
4091 static int
4092 cprop_insn (rtx insn, int alter_jumps)
4094 struct reg_use *reg_used;
4095 int changed = 0;
4096 rtx note;
4098 if (!INSN_P (insn))
4099 return 0;
4101 reg_use_count = 0;
4102 note_uses (&PATTERN (insn), find_used_regs, NULL);
4104 note = find_reg_equal_equiv_note (insn);
4106 /* We may win even when propagating constants into notes. */
4107 if (note)
4108 find_used_regs (&XEXP (note, 0), NULL);
4110 for (reg_used = &reg_use_table[0]; reg_use_count > 0;
4111 reg_used++, reg_use_count--)
4113 unsigned int regno = REGNO (reg_used->reg_rtx);
4114 rtx pat, src;
4115 struct expr *set;
4117 /* Ignore registers created by GCSE.
4118 We do this because the dataflow tables only cover registers numbered below max_gcse_regno. */
4119 if (regno >= max_gcse_regno)
4120 continue;
4122 /* If the register has already been set in this block, there's
4123 nothing we can do. */
4124 if (! oprs_not_set_p (reg_used->reg_rtx, insn))
4125 continue;
4127 /* Find an assignment that sets reg_used and is available
4128 at the start of the block. */
4129 set = find_avail_set (regno, insn);
4130 if (! set)
4131 continue;
4133 pat = set->expr;
4134 /* ??? We might be able to handle PARALLELs. Later. */
4135 if (GET_CODE (pat) != SET)
4136 abort ();
4138 src = SET_SRC (pat);
4140 /* Constant propagation. */
4141 if (gcse_constant_p (src))
4143 if (constprop_register (insn, reg_used->reg_rtx, src, alter_jumps))
4145 changed = 1;
4146 const_prop_count++;
4147 if (gcse_file != NULL)
4149 fprintf (gcse_file, "GLOBAL CONST-PROP: Replacing reg %d in ", regno);
4150 fprintf (gcse_file, "insn %d with constant ", INSN_UID (insn));
4151 print_rtl (gcse_file, src);
4152 fprintf (gcse_file, "\n");
4154 if (INSN_DELETED_P (insn))
4155 return 1;
4158 else if (GET_CODE (src) == REG
4159 && REGNO (src) >= FIRST_PSEUDO_REGISTER
4160 && REGNO (src) != regno)
4162 if (try_replace_reg (reg_used->reg_rtx, src, insn))
4164 changed = 1;
4165 copy_prop_count++;
4166 if (gcse_file != NULL)
4168 fprintf (gcse_file, "GLOBAL COPY-PROP: Replacing reg %d in insn %d",
4169 regno, INSN_UID (insn));
4170 fprintf (gcse_file, " with reg %d\n", REGNO (src));
4173 /* The original insn setting reg_used may or may not now be
4174 deletable. We leave the deletion to flow. */
4175 /* FIXME: If it turns out that the insn isn't deletable,
4176 then we may have unnecessarily extended register lifetimes
4177 and made things worse. */
4182 return changed;
4185 /* Like find_used_regs, but avoid recording uses that appear in
4186 input-output contexts such as zero_extract or pre_dec. This
4187 restricts the cases we consider to those for which local cprop
4188 can legitimately make replacements. */
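/* For example (illustrative only, not from the original pass),
   in (set (mem (pre_dec (reg sp))) (reg 60)) the PRE_DEC both uses
   and sets the stack pointer, so rewriting that use would lose the
   side effect; such contexts are rejected below. */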
4190 static void
4191 local_cprop_find_used_regs (rtx *xptr, void *data)
4193 rtx x = *xptr;
4195 if (x == 0)
4196 return;
4198 switch (GET_CODE (x))
4200 case ZERO_EXTRACT:
4201 case SIGN_EXTRACT:
4202 case STRICT_LOW_PART:
4203 return;
4205 case PRE_DEC:
4206 case PRE_INC:
4207 case POST_DEC:
4208 case POST_INC:
4209 case PRE_MODIFY:
4210 case POST_MODIFY:
4211 /* Can only legitimately appear this early in the context of
4212 stack pushes for function arguments, but handle all of the
4213 codes nonetheless. */
4214 return;
4216 case SUBREG:
4217 /* Setting a subreg of a register larger than word_mode leaves
4218 the non-written words unchanged. */
4219 if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) > BITS_PER_WORD)
4220 return;
4221 break;
4223 default:
4224 break;
4227 find_used_regs (xptr, data);
4230 /* LIBCALL_SP is a zero-terminated array of insns at the end of a libcall;
4231 their REG_EQUAL notes need updating. */
4233 static bool
4234 do_local_cprop (rtx x, rtx insn, int alter_jumps, rtx *libcall_sp)
4236 rtx newreg = NULL, newcnst = NULL;
4238 /* Rule out USE instructions and ASM statements as we don't want to
4239 change the hard registers mentioned. */
4240 if (GET_CODE (x) == REG
4241 && (REGNO (x) >= FIRST_PSEUDO_REGISTER
4242 || (GET_CODE (PATTERN (insn)) != USE
4243 && asm_noperands (PATTERN (insn)) < 0)))
4245 cselib_val *val = cselib_lookup (x, GET_MODE (x), 0);
4246 struct elt_loc_list *l;
4248 if (!val)
4249 return false;
4250 for (l = val->locs; l; l = l->next)
4252 rtx this_rtx = l->loc;
4253 rtx note;
4255 if (l->in_libcall)
4256 continue;
4258 if (gcse_constant_p (this_rtx))
4259 newcnst = this_rtx;
4260 if (REG_P (this_rtx) && REGNO (this_rtx) >= FIRST_PSEUDO_REGISTER
4261 /* Don't copy propagate if the register has a REG_EQUIV note
4262 attached. At this point only function parameters should have
4263 REG_EQUIV notes, and if the argument slot is used somewhere
4264 explicitly it means the address of the parameter has been taken,
4265 so we should not extend the lifetime of the pseudo. */
4266 && (!(note = find_reg_note (l->setting_insn, REG_EQUIV, NULL_RTX))
4267 || GET_CODE (XEXP (note, 0)) != MEM))
4268 newreg = this_rtx;
4270 if (newcnst && constprop_register (insn, x, newcnst, alter_jumps))
4272 /* If we find a case where we can't fix the retval REG_EQUAL notes to
4273 match the new register, we either have to abandon this replacement,
4274 fix delete_trivially_dead_insns to preserve the setting insn, or
4275 make it delete the REG_EQUAL note, and fix up all passes that
4276 require the REG_EQUAL note there. */
4277 if (!adjust_libcall_notes (x, newcnst, insn, libcall_sp))
4278 abort ();
4279 if (gcse_file != NULL)
4281 fprintf (gcse_file, "LOCAL CONST-PROP: Replacing reg %d in ",
4282 REGNO (x));
4283 fprintf (gcse_file, "insn %d with constant ",
4284 INSN_UID (insn));
4285 print_rtl (gcse_file, newcnst);
4286 fprintf (gcse_file, "\n");
4288 const_prop_count++;
4289 return true;
4291 else if (newreg && newreg != x && try_replace_reg (x, newreg, insn))
4293 adjust_libcall_notes (x, newreg, insn, libcall_sp);
4294 if (gcse_file != NULL)
4296 fprintf (gcse_file,
4297 "LOCAL COPY-PROP: Replacing reg %d in insn %d",
4298 REGNO (x), INSN_UID (insn));
4299 fprintf (gcse_file, " with reg %d\n", REGNO (newreg));
4301 copy_prop_count++;
4302 return true;
4305 return false;
4308 /* LIBCALL_SP is a zero-terminated array of insns at the end of a libcall;
4309 their REG_EQUAL notes need updating to reflect that OLDREG has been
4310 replaced with NEWVAL in INSN. Return true if all substitutions could
4311 be made. */
4312 static bool
4313 adjust_libcall_notes (rtx oldreg, rtx newval, rtx insn, rtx *libcall_sp)
4315 rtx end;
4317 while ((end = *libcall_sp++))
4319 rtx note = find_reg_equal_equiv_note (end);
4321 if (! note)
4322 continue;
4324 if (REG_P (newval))
4326 if (reg_set_between_p (newval, PREV_INSN (insn), end))
4330 note = find_reg_equal_equiv_note (end);
4331 if (! note)
4332 continue;
4333 if (reg_mentioned_p (newval, XEXP (note, 0)))
4334 return false;
4336 while ((end = *libcall_sp++));
4337 return true;
4340 XEXP (note, 0) = replace_rtx (XEXP (note, 0), oldreg, newval);
4341 insn = end;
4343 return true;
4346 #define MAX_NESTED_LIBCALLS 9
4348 static void
4349 local_cprop_pass (int alter_jumps)
4351 rtx insn;
4352 struct reg_use *reg_used;
4353 rtx libcall_stack[MAX_NESTED_LIBCALLS + 1], *libcall_sp;
4354 bool changed = false;
4356 cselib_init ();
4357 libcall_sp = &libcall_stack[MAX_NESTED_LIBCALLS];
4358 *libcall_sp = 0;
4359 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4361 if (INSN_P (insn))
4363 rtx note = find_reg_note (insn, REG_LIBCALL, NULL_RTX);
4365 if (note)
4367 if (libcall_sp == libcall_stack)
4368 abort ();
4369 *--libcall_sp = XEXP (note, 0);
4371 note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
4372 if (note)
4373 libcall_sp++;
4374 note = find_reg_equal_equiv_note (insn);
4377 reg_use_count = 0;
4378 note_uses (&PATTERN (insn), local_cprop_find_used_regs, NULL);
4379 if (note)
4380 local_cprop_find_used_regs (&XEXP (note, 0), NULL);
4382 for (reg_used = &reg_use_table[0]; reg_use_count > 0;
4383 reg_used++, reg_use_count--)
4384 if (do_local_cprop (reg_used->reg_rtx, insn, alter_jumps,
4385 libcall_sp))
4387 changed = true;
4388 break;
4390 if (INSN_DELETED_P (insn))
4391 break;
4393 while (reg_use_count);
4395 cselib_process_insn (insn);
4397 cselib_finish ();
4398 /* Global analysis may get into infinite loops for unreachable blocks. */
4399 if (changed && alter_jumps)
4401 delete_unreachable_blocks ();
4402 free_reg_set_mem ();
4403 alloc_reg_set_mem (max_reg_num ());
4404 compute_sets (get_insns ());
4408 /* Forward propagate copies. This includes copies and constants. Return
4409 nonzero if a change was made. */
4411 static int
4412 cprop (int alter_jumps)
4414 int changed;
4415 basic_block bb;
4416 rtx insn;
4418 /* Note we start at block 1. */
4419 if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
4421 if (gcse_file != NULL)
4422 fprintf (gcse_file, "\n");
4423 return 0;
4426 changed = 0;
4427 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb->next_bb, EXIT_BLOCK_PTR, next_bb)
4429 /* Reset tables used to keep track of what's still valid [since the
4430 start of the block]. */
4431 reset_opr_set_tables ();
4433 for (insn = bb->head;
4434 insn != NULL && insn != NEXT_INSN (bb->end);
4435 insn = NEXT_INSN (insn))
4436 if (INSN_P (insn))
4438 changed |= cprop_insn (insn, alter_jumps);
4440 /* Keep track of everything modified by this insn. */
4441 /* ??? Need to be careful w.r.t. mods done to INSN. Don't
4442 call mark_oprs_set if we turned the insn into a NOTE. */
4443 if (GET_CODE (insn) != NOTE)
4444 mark_oprs_set (insn);
4448 if (gcse_file != NULL)
4449 fprintf (gcse_file, "\n");
4451 return changed;
4454 /* Similar to get_condition, only the resulting condition must be
4455 valid at JUMP, instead of at EARLIEST.
4457 This differs from noce_get_condition in ifcvt.c in that we prefer not to
4458 settle for the condition variable in the jump instruction being integral.
4459 We prefer to be able to record the value of a user variable, rather than
4460 the value of a temporary used in a condition. This could be solved by
4461 recording the value of *every* register scanned by canonicalize_condition,
4462 but this would require some code reorganization. */
4464 static rtx
4465 fis_get_condition (rtx jump)
4467 rtx cond, set, tmp, insn, earliest;
4468 bool reverse;
4470 if (! any_condjump_p (jump))
4471 return NULL_RTX;
4473 set = pc_set (jump);
4474 cond = XEXP (SET_SRC (set), 0);
4476 /* If this branches to JUMP_LABEL when the condition is false,
4477 reverse the condition. */
4478 reverse = (GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
4479 && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (jump));
4481 /* Use canonicalize_condition to do the dirty work of manipulating
4482 MODE_CC values and COMPARE rtx codes. */
4483 tmp = canonicalize_condition (jump, cond, reverse, &earliest, NULL_RTX);
4484 if (!tmp)
4485 return NULL_RTX;
4487 /* Verify that the given condition is valid at JUMP by virtue of not
4488 having been modified since EARLIEST. */
4489 for (insn = earliest; insn != jump; insn = NEXT_INSN (insn))
4490 if (INSN_P (insn) && modified_in_p (tmp, insn))
4491 break;
4492 if (insn == jump)
4493 return tmp;
4495 /* The condition was modified. See if we can get a partial result
4496 that doesn't follow all the reversals. Perhaps combine can fold
4497 them together later. */
4498 tmp = XEXP (tmp, 0);
4499 if (!REG_P (tmp) || GET_MODE_CLASS (GET_MODE (tmp)) != MODE_INT)
4500 return NULL_RTX;
4501 tmp = canonicalize_condition (jump, cond, reverse, &earliest, tmp);
4502 if (!tmp)
4503 return NULL_RTX;
4505 /* For sanity's sake, re-validate the new result. */
4506 for (insn = earliest; insn != jump; insn = NEXT_INSN (insn))
4507 if (INSN_P (insn) && modified_in_p (tmp, insn))
4508 return NULL_RTX;
4510 return tmp;
4513 /* Find the implicit sets of a function. An "implicit set" is a constraint
4514 on the value of a variable, implied by a conditional jump. For example,
4515 following "if (x == 2)", the then branch may be optimized as though the
4516 conditional performed an "explicit set", in this example, "x = 2". This
4517 function records the set patterns that are implicit at the start of each
4518 basic block. */
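/* For example (pseudo RTL, register number invented), a block ending in

       (set (pc) (if_then_else (eq (reg 58) (const_int 2))
                               (label_ref L1) (pc)))

   lets us record (set (reg 58) (const_int 2)) as the implicit set of
   the branch target, provided the target has no other predecessor. */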
4520 static void
4521 find_implicit_sets (void)
4523 basic_block bb, dest;
4524 unsigned int count;
4525 rtx cond, new;
4527 count = 0;
4528 FOR_EACH_BB (bb)
4529 /* Check for more than one successor. */
4530 if (bb->succ && bb->succ->succ_next)
4532 cond = fis_get_condition (bb->end);
4534 if (cond
4535 && (GET_CODE (cond) == EQ || GET_CODE (cond) == NE)
4536 && GET_CODE (XEXP (cond, 0)) == REG
4537 && REGNO (XEXP (cond, 0)) >= FIRST_PSEUDO_REGISTER
4538 && gcse_constant_p (XEXP (cond, 1)))
4540 dest = GET_CODE (cond) == EQ ? BRANCH_EDGE (bb)->dest
4541 : FALLTHRU_EDGE (bb)->dest;
4543 if (dest && ! dest->pred->pred_next
4544 && dest != EXIT_BLOCK_PTR)
4546 new = gen_rtx_SET (VOIDmode, XEXP (cond, 0),
4547 XEXP (cond, 1));
4548 implicit_sets[dest->index] = new;
4549 if (gcse_file)
4551 fprintf (gcse_file, "Implicit set of reg %d in ",
4552 REGNO (XEXP (cond, 0)));
4553 fprintf (gcse_file, "basic block %d\n", dest->index);
4555 count++;
4560 if (gcse_file)
4561 fprintf (gcse_file, "Found %d implicit sets\n", count);
4564 /* Perform one copy/constant propagation pass.
4565 PASS is the pass count. If CPROP_JUMPS is true, perform constant
4566 propagation into conditional jumps. If BYPASS_JUMPS is true,
4567 perform conditional jump bypassing optimizations. */
4569 static int
4570 one_cprop_pass (int pass, int cprop_jumps, int bypass_jumps)
4572 int changed = 0;
4574 const_prop_count = 0;
4575 copy_prop_count = 0;
4577 local_cprop_pass (cprop_jumps);
4579 /* Determine implicit sets. */
4580 implicit_sets = (rtx *) xcalloc (last_basic_block, sizeof (rtx));
4581 find_implicit_sets ();
4583 alloc_hash_table (max_cuid, &set_hash_table, 1);
4584 compute_hash_table (&set_hash_table);
4586 /* Free implicit_sets before peak usage. */
4587 free (implicit_sets);
4588 implicit_sets = NULL;
4590 if (gcse_file)
4591 dump_hash_table (gcse_file, "SET", &set_hash_table);
4592 if (set_hash_table.n_elems > 0)
4594 alloc_cprop_mem (last_basic_block, set_hash_table.n_elems);
4595 compute_cprop_data ();
4596 changed = cprop (cprop_jumps);
4597 if (bypass_jumps)
4598 changed |= bypass_conditional_jumps ();
4599 free_cprop_mem ();
4602 free_hash_table (&set_hash_table);
4604 if (gcse_file)
4606 fprintf (gcse_file, "CPROP of %s, pass %d: %d bytes needed, ",
4607 current_function_name, pass, bytes_used);
4608 fprintf (gcse_file, "%d const props, %d copy props\n\n",
4609 const_prop_count, copy_prop_count);
4611 /* Global analysis may get into infinite loops for unreachable blocks. */
4612 if (changed && cprop_jumps)
4613 delete_unreachable_blocks ();
4615 return changed;
4618 /* Bypass conditional jumps. */
4620 /* The value of last_basic_block at the beginning of the jump_bypass
4621 pass. The use of redirect_edge_and_branch_force may introduce new
4622 basic blocks, but the data flow analysis is only valid for basic
4623 block indices less than bypass_last_basic_block. */
4625 static int bypass_last_basic_block;
4627 /* Find a set of REGNO to a constant that is available at the end of basic
4628 block BB. Returns NULL if no such set is found. Based heavily upon
4629 find_avail_set. */
4631 static struct expr *
4632 find_bypass_set (int regno, int bb)
4634 struct expr *result = 0;
4636 for (;;)
4638 rtx src;
4639 struct expr *set = lookup_set (regno, &set_hash_table);
4641 while (set)
4643 if (TEST_BIT (cprop_avout[bb], set->bitmap_index))
4644 break;
4645 set = next_set (regno, set);
4648 if (set == 0)
4649 break;
4651 if (GET_CODE (set->expr) != SET)
4652 abort ();
4654 src = SET_SRC (set->expr);
4655 if (gcse_constant_p (src))
4656 result = set;
4658 if (GET_CODE (src) != REG)
4659 break;
4661 regno = REGNO (src);
4663 return result;
4667 /* Subroutine of bypass_block that checks whether a pseudo is killed by
4668 any of the instructions inserted on an edge. Jump bypassing places
4669 condition code setters on CFG edges using insert_insn_on_edge. This
4670 function is required to check that our data flow analysis is still
4671 valid prior to commit_edge_insertions. */
4673 static bool
4674 reg_killed_on_edge (rtx reg, edge e)
4676 rtx insn;
4678 for (insn = e->insns; insn; insn = NEXT_INSN (insn))
4679 if (INSN_P (insn) && reg_set_p (reg, insn))
4680 return true;
4682 return false;
4685 /* Subroutine of bypass_conditional_jumps that attempts to bypass the given
4686 basic block BB which has more than one predecessor. If not NULL, SETCC
4687 is the first instruction of BB, which is immediately followed by JUMP_INSN
4688 JUMP. Otherwise, SETCC is NULL, and JUMP is the first insn of BB.
4689 Returns nonzero if a change was made.
4691 During the jump bypassing pass, we may place copies of SETCC instructions
4692 on CFG edges. The following routine must be careful to pay attention to
4693 these inserted insns when performing its transformations. */
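/* Sketch of the transformation (register numbers invented): if the
   source block of an incoming edge makes

       (set (reg 58) (const_int 7))

   available at its end, and BB consists only of a conditional jump on
   (eq (reg 58) (const_int 7)), then the jump's outcome is known on
   that edge, so the edge can be redirected straight to the jump's
   target, bypassing BB entirely. */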
4695 static int
4696 bypass_block (basic_block bb, rtx setcc, rtx jump)
4698 rtx insn, note;
4699 edge e, enext, edest;
4700 int i, change;
4701 int may_be_loop_header;
4703 insn = (setcc != NULL) ? setcc : jump;
4705 /* Determine set of register uses in INSN. */
4706 reg_use_count = 0;
4707 note_uses (&PATTERN (insn), find_used_regs, NULL);
4708 note = find_reg_equal_equiv_note (insn);
4709 if (note)
4710 find_used_regs (&XEXP (note, 0), NULL);
4712 may_be_loop_header = false;
4713 for (e = bb->pred; e; e = e->pred_next)
4714 if (e->flags & EDGE_DFS_BACK)
4716 may_be_loop_header = true;
4717 break;
4720 change = 0;
4721 for (e = bb->pred; e; e = enext)
4723 enext = e->pred_next;
4724 if (e->flags & EDGE_COMPLEX)
4725 continue;
4727 /* We can't redirect edges from new basic blocks. */
4728 if (e->src->index >= bypass_last_basic_block)
4729 continue;
4731 /* The irreducible loops created by redirecting edges entering the
4732 loop from outside would decrease the effectiveness of some of the
4733 following optimizations, so prevent this. */
4734 if (may_be_loop_header
4735 && !(e->flags & EDGE_DFS_BACK))
4736 continue;
4738 for (i = 0; i < reg_use_count; i++)
4740 struct reg_use *reg_used = &reg_use_table[i];
4741 unsigned int regno = REGNO (reg_used->reg_rtx);
4742 basic_block dest, old_dest;
4743 struct expr *set;
4744 rtx src, new;
4746 if (regno >= max_gcse_regno)
4747 continue;
4749 set = find_bypass_set (regno, e->src->index);
4751 if (! set)
4752 continue;
4754 /* Check the data flow is valid after edge insertions. */
4755 if (e->insns && reg_killed_on_edge (reg_used->reg_rtx, e))
4756 continue;
4758 src = SET_SRC (pc_set (jump));
4760 if (setcc != NULL)
4761 src = simplify_replace_rtx (src,
4762 SET_DEST (PATTERN (setcc)),
4763 SET_SRC (PATTERN (setcc)));
4765 new = simplify_replace_rtx (src, reg_used->reg_rtx,
4766 SET_SRC (set->expr));
4768 /* Jump bypassing may have already placed instructions on
4769 edges of the CFG. We can't bypass an outgoing edge that
4770 has instructions associated with it, as these insns won't
4771 get executed if the incoming edge is redirected. */
4773 if (new == pc_rtx)
4775 edest = FALLTHRU_EDGE (bb);
4776 dest = edest->insns ? NULL : edest->dest;
4778 else if (GET_CODE (new) == LABEL_REF)
4780 dest = BLOCK_FOR_INSN (XEXP (new, 0));
4781 /* Don't bypass edges containing instructions. */
4782 for (edest = bb->succ; edest; edest = edest->succ_next)
4783 if (edest->dest == dest && edest->insns)
4785 dest = NULL;
4786 break;
4789 else
4790 dest = NULL;
4792 old_dest = e->dest;
4793 if (dest != NULL
4794 && dest != old_dest
4795 && dest != EXIT_BLOCK_PTR)
4797 redirect_edge_and_branch_force (e, dest);
4799 /* Copy the register setter to the redirected edge.
4800 Don't copy CC0 setters, as CC0 is dead after jump. */
4801 if (setcc)
4803 rtx pat = PATTERN (setcc);
4804 if (!CC0_P (SET_DEST (pat)))
4805 insert_insn_on_edge (copy_insn (pat), e);
4808 if (gcse_file != NULL)
4810 fprintf (gcse_file, "JUMP-BYPASS: Proved reg %d in jump_insn %d equals constant ",
4811 regno, INSN_UID (jump));
4812 print_rtl (gcse_file, SET_SRC (set->expr));
4813 fprintf (gcse_file, "\nBypass edge from %d->%d to %d\n",
4814 e->src->index, old_dest->index, dest->index);
4816 change = 1;
4817 break;
4821 return change;
4824 /* Find basic blocks with more than one predecessor that only contain a
4825 single conditional jump. If the result of the comparison is known at
4826 compile-time from any incoming edge, redirect that edge to the
4827 appropriate target. Returns nonzero if a change was made.
4829 This function is now mis-named, because we also handle indirect jumps. */
4831 static int
4832 bypass_conditional_jumps (void)
4834 basic_block bb;
4835 int changed;
4836 rtx setcc;
4837 rtx insn;
4838 rtx dest;
4840 /* Note we start at block 1. */
4841 if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
4842 return 0;
4844 bypass_last_basic_block = last_basic_block;
4845 mark_dfs_back_edges ();
4847 changed = 0;
4848 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb->next_bb,
4849 EXIT_BLOCK_PTR, next_bb)
4851 /* Check for more than one predecessor. */
4852 if (bb->pred && bb->pred->pred_next)
4854 setcc = NULL_RTX;
4855 for (insn = bb->head;
4856 insn != NULL && insn != NEXT_INSN (bb->end);
4857 insn = NEXT_INSN (insn))
4858 if (GET_CODE (insn) == INSN)
4860 if (setcc)
4861 break;
4862 if (GET_CODE (PATTERN (insn)) != SET)
4863 break;
4865 dest = SET_DEST (PATTERN (insn));
4866 if (REG_P (dest) || CC0_P (dest))
4867 setcc = insn;
4868 else
4869 break;
4871 else if (GET_CODE (insn) == JUMP_INSN)
4873 if ((any_condjump_p (insn) || computed_jump_p (insn))
4874 && onlyjump_p (insn))
4875 changed |= bypass_block (bb, setcc, insn);
4876 break;
4878 else if (INSN_P (insn))
4879 break;
4883 /* If we bypassed any register setting insns, we inserted a
4884 copy on the redirected edge. These need to be committed. */
4885 if (changed)
4886 commit_edge_insertions ();
4888 return changed;
4891 /* Compute PRE+LCM working variables. */
4893 /* Local properties of expressions. */
4894 /* Nonzero for expressions that are transparent in the block. */
4895 static sbitmap *transp;
4897 /* Nonzero for expressions that are transparent at the end of the block.
4898 This is only zero for expressions killed by an abnormal critical
4899 edge created by a call. */
4900 static sbitmap *transpout;
4902 /* Nonzero for expressions that are computed (available) in the block. */
4903 static sbitmap *comp;
4905 /* Nonzero for expressions that are locally anticipatable in the block. */
4906 static sbitmap *antloc;
4908 /* Nonzero for expressions where this block is an optimal computation
4909 point. */
4910 static sbitmap *pre_optimal;
4912 /* Nonzero for expressions which are redundant in a particular block. */
4913 static sbitmap *pre_redundant;
4915 /* Nonzero for expressions which should be inserted on a specific edge. */
4916 static sbitmap *pre_insert_map;
4918 /* Nonzero for expressions which should be deleted in a specific block. */
4919 static sbitmap *pre_delete_map;
4921 /* Contains the edge_list returned by pre_edge_lcm. */
4922 static struct edge_list *edge_list;
4924 /* Redundant insns. */
4925 static sbitmap pre_redundant_insns;
4927 /* Allocate vars used for PRE analysis. */
4929 static void
4930 alloc_pre_mem (int n_blocks, int n_exprs)
4932 transp = sbitmap_vector_alloc (n_blocks, n_exprs);
4933 comp = sbitmap_vector_alloc (n_blocks, n_exprs);
4934 antloc = sbitmap_vector_alloc (n_blocks, n_exprs);
4936 pre_optimal = NULL;
4937 pre_redundant = NULL;
4938 pre_insert_map = NULL;
4939 pre_delete_map = NULL;
4940 ae_in = NULL;
4941 ae_out = NULL;
4942 ae_kill = sbitmap_vector_alloc (n_blocks, n_exprs);
4944 /* pre_insert and pre_delete are allocated later. */
4947 /* Free vars used for PRE analysis. */
4949 static void
4950 free_pre_mem (void)
4952 sbitmap_vector_free (transp);
4953 sbitmap_vector_free (comp);
4955 /* ANTLOC and AE_KILL are freed just after pre_lcm finishes. */
4957 if (pre_optimal)
4958 sbitmap_vector_free (pre_optimal);
4959 if (pre_redundant)
4960 sbitmap_vector_free (pre_redundant);
4961 if (pre_insert_map)
4962 sbitmap_vector_free (pre_insert_map);
4963 if (pre_delete_map)
4964 sbitmap_vector_free (pre_delete_map);
4965 if (ae_in)
4966 sbitmap_vector_free (ae_in);
4967 if (ae_out)
4968 sbitmap_vector_free (ae_out);
4970 transp = comp = NULL;
4971 pre_optimal = pre_redundant = pre_insert_map = pre_delete_map = NULL;
4972 ae_in = ae_out = NULL;
4975 /* Top level routine to do the dataflow analysis needed by PRE. */
4977 static void
4978 compute_pre_data (void)
4980 sbitmap trapping_expr;
4981 basic_block bb;
4982 unsigned int ui;
4984 compute_local_properties (transp, comp, antloc, &expr_hash_table);
4985 sbitmap_vector_zero (ae_kill, last_basic_block);
4987 /* Collect expressions which might trap. */
4988 trapping_expr = sbitmap_alloc (expr_hash_table.n_elems);
4989 sbitmap_zero (trapping_expr);
4990 for (ui = 0; ui < expr_hash_table.size; ui++)
4992 struct expr *e;
4993 for (e = expr_hash_table.table[ui]; e != NULL; e = e->next_same_hash)
4994 if (may_trap_p (e->expr))
4995 SET_BIT (trapping_expr, e->bitmap_index);
4998 /* Compute ae_kill for each basic block using:
5000 ~(TRANSP | COMP)
5002 This is significantly faster than compute_ae_kill. */
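/* In other words (a sketch of the identity, not original text): an
   expression is available at the end of a block iff it is computed
   there (COMP) or was available on entry and no operand is modified
   in the block (TRANSP); everything else counts as killed. Hence
   AE_KILL = ~(TRANSP | COMP), one OR and one NOT per block, which
   feeds AVOUT = COMP | (AVIN & ~AE_KILL) in compute_available. */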
5004 FOR_EACH_BB (bb)
5006 edge e;
5008 /* If the current block is the destination of an abnormal edge, we
5009 kill all trapping expressions because we won't be able to properly
5010 place the instruction on the edge. So make them neither
5011 anticipatable nor transparent. This is fairly conservative. */
5012 for (e = bb->pred; e ; e = e->pred_next)
5013 if (e->flags & EDGE_ABNORMAL)
5015 sbitmap_difference (antloc[bb->index], antloc[bb->index], trapping_expr);
5016 sbitmap_difference (transp[bb->index], transp[bb->index], trapping_expr);
5017 break;
5020 sbitmap_a_or_b (ae_kill[bb->index], transp[bb->index], comp[bb->index]);
5021 sbitmap_not (ae_kill[bb->index], ae_kill[bb->index]);
5024 edge_list = pre_edge_lcm (gcse_file, expr_hash_table.n_elems, transp, comp, antloc,
5025 ae_kill, &pre_insert_map, &pre_delete_map);
5026 sbitmap_vector_free (antloc);
5027 antloc = NULL;
5028 sbitmap_vector_free (ae_kill);
5029 ae_kill = NULL;
5030 sbitmap_free (trapping_expr);
5033 /* PRE utilities */
5035 /* Return nonzero if an occurrence of expression EXPR in OCCR_BB would reach
5036 block BB.
5038 VISITED is a pointer to a working buffer for tracking which BB's have
5039 been visited. It is NULL for the top-level call.
5041 We treat reaching expressions that go through blocks containing the same
5042 reaching expression as "not reaching". E.g. if EXPR is generated in blocks
5043 2 and 3, INSN is in block 4, and 2->3->4, we treat the expression in block
5044 2 as not reaching. The intent is to improve the probability of finding
5045 only one reaching expression and to reduce register lifetimes by picking
5046 the closest such expression. */
5048 static int
5049 pre_expr_reaches_here_p_work (basic_block occr_bb, struct expr *expr, basic_block bb, char *visited)
5051 edge pred;
5053 for (pred = bb->pred; pred != NULL; pred = pred->pred_next)
5055 basic_block pred_bb = pred->src;
5057 if (pred->src == ENTRY_BLOCK_PTR
5058 /* Has this predecessor already been visited? */
5059 || visited[pred_bb->index])
5060 ;/* Nothing to do. */
5062 /* Does this predecessor generate this expression? */
5063 else if (TEST_BIT (comp[pred_bb->index], expr->bitmap_index))
5065 /* Is this the occurrence we're looking for?
5066 Note that there's only one generating occurrence per block
5067 so we just need to check the block number. */
5068 if (occr_bb == pred_bb)
5069 return 1;
5071 visited[pred_bb->index] = 1;
5073 /* Ignore this predecessor if it kills the expression. */
5074 else if (! TEST_BIT (transp[pred_bb->index], expr->bitmap_index))
5075 visited[pred_bb->index] = 1;
5077 /* Neither gen nor kill. */
5078 else
5080 visited[pred_bb->index] = 1;
5081 if (pre_expr_reaches_here_p_work (occr_bb, expr, pred_bb, visited))
5082 return 1;
5086 /* All paths have been checked. */
5087 return 0;
5090 /* The wrapper for pre_expr_reaches_here_p_work that ensures that the
5091 memory allocated for that function is freed. */
5093 static int
5094 pre_expr_reaches_here_p (basic_block occr_bb, struct expr *expr, basic_block bb)
5096 int rval;
5097 char *visited = (char *) xcalloc (last_basic_block, 1);
5099 rval = pre_expr_reaches_here_p_work (occr_bb, expr, bb, visited);
5101 free (visited);
5102 return rval;
5106 /* Given an expr, generate RTL which we can insert at the end of a BB,
5107 or on an edge. Set the block number of any insns generated to
5108 the value of BB. */
5110 static rtx
5111 process_insert_insn (struct expr *expr)
5113 rtx reg = expr->reaching_reg;
5114 rtx exp = copy_rtx (expr->expr);
5115 rtx pat;
5117 start_sequence ();
5119 /* If the expression is something that's an operand, like a constant,
5120 just copy it to a register. */
5121 if (general_operand (exp, GET_MODE (reg)))
5122 emit_move_insn (reg, exp);
5124 /* Otherwise, make a new insn to compute this expression and make sure the
5125 insn will be recognized (this also adds any needed CLOBBERs). Copy the
5126 expression to make sure we don't have any sharing issues. */
5127 else if (insn_invalid_p (emit_insn (gen_rtx_SET (VOIDmode, reg, exp))))
5128 abort ();
5130 pat = get_insns ();
5131 end_sequence ();
5133 return pat;
5136 /* Add EXPR to the end of basic block BB.
5138 This is used by both the PRE and code hoisting.
5140 For PRE, we want to verify that the expr is either transparent
5141 or locally anticipatable in the target block. This check makes
5142 no sense for code hoisting. */
5144 static void
5145 insert_insn_end_bb (struct expr *expr, basic_block bb, int pre)
5147 rtx insn = bb->end;
5148 rtx new_insn;
5149 rtx reg = expr->reaching_reg;
5150 int regno = REGNO (reg);
5151 rtx pat, pat_end;
5153 pat = process_insert_insn (expr);
5154 if (pat == NULL_RTX || ! INSN_P (pat))
5155 abort ();
5157 pat_end = pat;
5158 while (NEXT_INSN (pat_end) != NULL_RTX)
5159 pat_end = NEXT_INSN (pat_end);
5161 /* If the last insn is a jump, insert EXPR in front [taking care to
5162 handle cc0, etc. properly]. Similarly we need to take care with
5163 trapping instructions in the presence of non-call exceptions. */
5165 if (GET_CODE (insn) == JUMP_INSN
5166 || (GET_CODE (insn) == INSN
5167 && (bb->succ->succ_next || (bb->succ->flags & EDGE_ABNORMAL))))
5169 #ifdef HAVE_cc0
5170 rtx note;
5171 #endif
5172 /* It should always be the case that we can put these instructions
5173 anywhere in the basic block when performing PRE optimizations.
5174 Check this. */
5175 if (GET_CODE (insn) == INSN && pre
5176 && !TEST_BIT (antloc[bb->index], expr->bitmap_index)
5177 && !TEST_BIT (transp[bb->index], expr->bitmap_index))
5178 abort ();
5180 /* If this is a jump table, then we can't insert stuff here. Since
5181 we know the previous real insn must be the tablejump, we insert
5182 the new instruction just before the tablejump. */
5183 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
5184 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
5185 insn = prev_real_insn (insn);
5187 #ifdef HAVE_cc0
5188 /* FIXME: 'twould be nice to call prev_cc0_setter here but it aborts
5189 if cc0 isn't set. */
5190 note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
5191 if (note)
5192 insn = XEXP (note, 0);
5193 else
5195 rtx maybe_cc0_setter = prev_nonnote_insn (insn);
5196 if (maybe_cc0_setter
5197 && INSN_P (maybe_cc0_setter)
5198 && sets_cc0_p (PATTERN (maybe_cc0_setter)))
5199 insn = maybe_cc0_setter;
5201 #endif
5202 /* FIXME: What if something in cc0/jump uses value set in new insn? */
5203 new_insn = emit_insn_before (pat, insn);
5206 /* Likewise if the last insn is a call, as will happen in the presence
5207 of exception handling. */
5208 else if (GET_CODE (insn) == CALL_INSN
5209 && (bb->succ->succ_next || (bb->succ->flags & EDGE_ABNORMAL)))
5211 /* Keeping in mind SMALL_REGISTER_CLASSES and parameters in registers,
5212 we search backward and place the instructions before the first
5213 parameter is loaded. Do this for everyone for consistency and a
5214 presumption that we'll get better code elsewhere as well.
5216 It should always be the case that we can put these instructions
5217 anywhere in the basic block when performing PRE optimizations.
5218 Check this. */
5220 if (pre
5221 && !TEST_BIT (antloc[bb->index], expr->bitmap_index)
5222 && !TEST_BIT (transp[bb->index], expr->bitmap_index))
5223 abort ();
5225 /* Since different machines initialize their parameter registers
5226 in different orders, assume nothing. Collect the set of all
5227 parameter registers. */
5228 insn = find_first_parameter_load (insn, bb->head);
5230 /* If we found all the parameter loads, then we want to insert
5231 before the first parameter load.
5233 If we did not find all the parameter loads, then we might have
5234 stopped on the head of the block, which could be a CODE_LABEL.
5235 If we inserted before the CODE_LABEL, then we would be putting
5236 the insn in the wrong basic block. In that case, put the insn
5237 after the CODE_LABEL. Also, respect NOTE_INSN_BASIC_BLOCK. */
5238 while (GET_CODE (insn) == CODE_LABEL
5239 || NOTE_INSN_BASIC_BLOCK_P (insn))
5240 insn = NEXT_INSN (insn);
5242 new_insn = emit_insn_before (pat, insn);
5244 else
5245 new_insn = emit_insn_after (pat, insn);
5247 while (1)
5249 if (INSN_P (pat))
5251 add_label_notes (PATTERN (pat), new_insn);
5252 note_stores (PATTERN (pat), record_set_info, pat);
5254 if (pat == pat_end)
5255 break;
5256 pat = NEXT_INSN (pat);
5259 gcse_create_count++;
5261 if (gcse_file)
5263 fprintf (gcse_file, "PRE/HOIST: end of bb %d, insn %d, ",
5264 bb->index, INSN_UID (new_insn));
5265 fprintf (gcse_file, "copying expression %d to reg %d\n",
5266 expr->bitmap_index, regno);
5270 /* Insert partially redundant expressions on edges in the CFG to make
5271 the expressions fully redundant. */
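/* Sketch (block numbers invented, illustrative only): in the diamond

             B1
            /  \
          B2    B3        expression computed in B2 only
            \  /
             B4           expression computed again in B4

   the B4 computation is partially redundant.  LCM marks edge B3->B4
   in PRE_INSERT_MAP; inserting the expression there makes the B4
   occurrence fully redundant, so pre_delete can replace it with a
   copy from reaching_reg. */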
5273 static int
5274 pre_edge_insert (struct edge_list *edge_list, struct expr **index_map)
5276 int e, i, j, num_edges, set_size, did_insert = 0;
5277 sbitmap *inserted;
5279 /* Where PRE_INSERT_MAP is nonzero, we add the expression on that edge
5280 if it reaches any of the deleted expressions. */
5282 set_size = pre_insert_map[0]->size;
5283 num_edges = NUM_EDGES (edge_list);
5284 inserted = sbitmap_vector_alloc (num_edges, expr_hash_table.n_elems);
5285 sbitmap_vector_zero (inserted, num_edges);
5287 for (e = 0; e < num_edges; e++)
5289 int indx;
5290 basic_block bb = INDEX_EDGE_PRED_BB (edge_list, e);
5292 for (i = indx = 0; i < set_size; i++, indx += SBITMAP_ELT_BITS)
5294 SBITMAP_ELT_TYPE insert = pre_insert_map[e]->elms[i];
5296 for (j = indx; insert && j < (int) expr_hash_table.n_elems; j++, insert >>= 1)
5297 if ((insert & 1) != 0 && index_map[j]->reaching_reg != NULL_RTX)
5299 struct expr *expr = index_map[j];
5300 struct occr *occr;
5302 /* Now look at each deleted occurrence of this expression. */
5303 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
5305 if (! occr->deleted_p)
5306 continue;
5308 /* Insert this expression on this edge if it would
5309 reach the deleted occurrence in BB. */
5310 if (!TEST_BIT (inserted[e], j))
5312 rtx insn;
5313 edge eg = INDEX_EDGE (edge_list, e);
5315 /* We can't insert anything on an abnormal and
5316 critical edge, so we insert the insn at the end of
5317 the previous block. There are several alternatives
5318 detailed in Morgan's book, p. 277 (sec. 10.5), for
5319 handling this situation. This one is easiest for
5320 now. */
5322 if ((eg->flags & EDGE_ABNORMAL) == EDGE_ABNORMAL)
5323 insert_insn_end_bb (index_map[j], bb, 0);
5324 else
5326 insn = process_insert_insn (index_map[j]);
5327 insert_insn_on_edge (insn, eg);
5330 if (gcse_file)
5332 fprintf (gcse_file, "PRE/HOIST: edge (%d,%d), ",
5333 bb->index,
5334 INDEX_EDGE_SUCC_BB (edge_list, e)->index);
5335 fprintf (gcse_file, "copy expression %d\n",
5336 expr->bitmap_index);
5339 update_ld_motion_stores (expr);
5340 SET_BIT (inserted[e], j);
5341 did_insert = 1;
5342 gcse_create_count++;
5349 sbitmap_vector_free (inserted);
5350 return did_insert;
5353 /* Copy the result of INSN to REG. INDX is the expression number. */
5355 static void
5356 pre_insert_copy_insn (struct expr *expr, rtx insn)
5358 rtx reg = expr->reaching_reg;
5359 int regno = REGNO (reg);
5360 int indx = expr->bitmap_index;
5361 rtx set = single_set (insn);
5362 rtx new_insn;
5364 if (!set)
5365 abort ();
5367 new_insn = emit_insn_after (gen_move_insn (reg, copy_rtx (SET_DEST (set))), insn);
5369 /* Keep register set table up to date. */
5370 record_one_set (regno, new_insn);
5372 gcse_create_count++;
5374 if (gcse_file)
5375 fprintf (gcse_file,
5376 "PRE: bb %d, insn %d, copy expression %d in insn %d to reg %d\n",
5377 BLOCK_NUM (insn), INSN_UID (new_insn), indx,
5378 INSN_UID (insn), regno);
5379 update_ld_motion_stores (expr);
5382 /* Copy available expressions that reach the redundant expression
5383 to `reaching_reg'. */
5385 static void
5386 pre_insert_copies (void)
5388 unsigned int i;
5389 struct expr *expr;
5390 struct occr *occr;
5391 struct occr *avail;
5393 /* For each available expression in the table, copy the result to
5394 `reaching_reg' if the expression reaches a deleted one.
5396 ??? The current algorithm is rather brute force.
5397 Need to do some profiling. */
5399 for (i = 0; i < expr_hash_table.size; i++)
5400 for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash)
5402 /* If the basic block isn't reachable, PPOUT will be TRUE. However,
5403 we don't want to insert a copy here because the expression may not
5404 really be redundant. So only insert an insn if the expression was
5405 deleted. This test also avoids further processing if the
5406 expression wasn't deleted anywhere. */
5407 if (expr->reaching_reg == NULL)
5408 continue;
5410 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
5412 if (! occr->deleted_p)
5413 continue;
5415 for (avail = expr->avail_occr; avail != NULL; avail = avail->next)
5417 rtx insn = avail->insn;
5419 /* No need to handle this one if handled already. */
5420 if (avail->copied_p)
5421 continue;
5423 /* Don't handle this one if it's a redundant one. */
5424 if (TEST_BIT (pre_redundant_insns, INSN_CUID (insn)))
5425 continue;
5427 /* Or if the expression doesn't reach the deleted one. */
5428 if (! pre_expr_reaches_here_p (BLOCK_FOR_INSN (avail->insn),
5429 expr,
5430 BLOCK_FOR_INSN (occr->insn)))
5431 continue;
5433 /* Copy the result of avail to reaching_reg. */
5434 pre_insert_copy_insn (expr, insn);
5435 avail->copied_p = 1;
5441 /* Emit a move from SRC to DEST, noting the equivalence with the
5442 expression computed in INSN. */
5443 static rtx
5444 gcse_emit_move_after (rtx src, rtx dest, rtx insn)
5446 rtx new;
5447 rtx set = single_set (insn), set2;
5448 rtx note;
5449 rtx eqv;
5451 /* This should never fail since we're creating a reg->reg copy
5452 we've verified to be valid. */
5454 new = emit_insn_after (gen_move_insn (dest, src), insn);
5456 /* Note the equivalence for local CSE pass. */
5457 set2 = single_set (new);
5458 if (!set2 || !rtx_equal_p (SET_DEST (set2), dest))
5459 return new;
5460 if ((note = find_reg_equal_equiv_note (insn)))
5461 eqv = XEXP (note, 0);
5462 else
5463 eqv = SET_SRC (set);
5465 set_unique_reg_note (new, REG_EQUAL, copy_insn_1 (eqv));
5467 return new;
5470 /* Delete redundant computations.
5471 Deletion is done by changing the insn to copy the `reaching_reg' of
5472 the expression into the result of the SET. It is left to later passes
5473 (cprop, cse2, flow, combine, regmove) to propagate the copy or eliminate it.
5475 Returns nonzero if a change is made. */
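/* For instance (invented pseudo-registers), a redundant occurrence

       (set (reg 65) (plus (reg 60) (reg 61)))

   becomes a copy from the expression's reaching register:

       (set (reg 65) (reg 90))

   where (reg 90) is EXPR->reaching_reg; gcse_emit_move_after emits
   the copy and the original insn is deleted. */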
5477 static int
5478 pre_delete (void)
5480 unsigned int i;
5481 int changed;
5482 struct expr *expr;
5483 struct occr *occr;
5485 changed = 0;
5486 for (i = 0; i < expr_hash_table.size; i++)
5487 for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash)
5489 int indx = expr->bitmap_index;
5491 /* We only need to search antic_occr since we require
5492 ANTLOC != 0. */
5494 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
5496 rtx insn = occr->insn;
5497 rtx set;
5498 basic_block bb = BLOCK_FOR_INSN (insn);
5500 if (TEST_BIT (pre_delete_map[bb->index], indx))
5502 set = single_set (insn);
5503 if (! set)
5504 abort ();
5506 /* Create a pseudo-reg to store the result of reaching
5507 expressions into. Get the mode for the new pseudo from
5508 the mode of the original destination pseudo. */
5509 if (expr->reaching_reg == NULL)
5510 expr->reaching_reg
5511 = gen_reg_rtx (GET_MODE (SET_DEST (set)));
5513 gcse_emit_move_after (expr->reaching_reg, SET_DEST (set), insn);
5514 delete_insn (insn);
5515 occr->deleted_p = 1;
5516 SET_BIT (pre_redundant_insns, INSN_CUID (insn));
5517 changed = 1;
5518 gcse_subst_count++;
5520 if (gcse_file)
5522 fprintf (gcse_file,
5523 "PRE: redundant insn %d (expression %d) in ",
5524 INSN_UID (insn), indx);
5525 fprintf (gcse_file, "bb %d, reaching reg is %d\n",
5526 bb->index, REGNO (expr->reaching_reg));
5532 return changed;
5535 /* Perform GCSE optimizations using PRE.
5536 This is called by one_pre_gcse_pass after all the dataflow analysis
5537 has been done.
5539 This is based on the original Morel-Renvoise paper, Fred Chow's thesis, and
5540 lazy code motion from Knoop, Ruthing and Steffen as described in Advanced
5541 Compiler Design and Implementation.
5543 ??? A new pseudo reg is created to hold the reaching expression. The nice
5544 thing about the classical approach is that it would try to use an existing
5545 reg. If the register can't be adequately optimized [i.e. we introduce
5546 reload problems], one could add a pass here to propagate the new register
5547 through the block.
5549 ??? We don't handle single sets in PARALLELs because we're [currently] not
5550 able to copy the rest of the parallel when we insert copies to create full
5551 redundancies from partial redundancies. However, there's no reason why we
5552 can't handle PARALLELs in the cases where there are no partial
5553 redundancies. */
5555 static int
5556 pre_gcse (void)
5558 unsigned int i;
5559 int did_insert, changed;
5560 struct expr **index_map;
5561 struct expr *expr;
5563 /* Compute a mapping from expression number (`bitmap_index') to
5564 hash table entry. */
5566 index_map = (struct expr **) xcalloc (expr_hash_table.n_elems, sizeof (struct expr *));
5567 for (i = 0; i < expr_hash_table.size; i++)
5568 for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash)
5569 index_map[expr->bitmap_index] = expr;
5571 /* Reset bitmap used to track which insns are redundant. */
5572 pre_redundant_insns = sbitmap_alloc (max_cuid);
5573 sbitmap_zero (pre_redundant_insns);
5575 /* Delete the redundant insns first so that
5576 - we know what register to use for the new insns and for the other
5577 ones with reaching expressions
5578 - we know which insns are redundant when we go to create copies */
5580 changed = pre_delete ();
5582 did_insert = pre_edge_insert (edge_list, index_map);
5584 /* In other places with reaching expressions, copy the expression to the
5585 specially allocated pseudo-reg that reaches the redundant expr. */
5586 pre_insert_copies ();
5587 if (did_insert)
5589 commit_edge_insertions ();
5590 changed = 1;
5593 free (index_map);
5594 sbitmap_free (pre_redundant_insns);
5595 return changed;
5598 /* Top level routine to perform one PRE GCSE pass.
5600 Return nonzero if a change was made. */
5602 static int
5603 one_pre_gcse_pass (int pass)
5605 int changed = 0;
5607 gcse_subst_count = 0;
5608 gcse_create_count = 0;
5610 alloc_hash_table (max_cuid, &expr_hash_table, 0);
5611 add_noreturn_fake_exit_edges ();
5612 if (flag_gcse_lm)
5613 compute_ld_motion_mems ();
5615 compute_hash_table (&expr_hash_table);
5616 trim_ld_motion_mems ();
5617 if (gcse_file)
5618 dump_hash_table (gcse_file, "Expression", &expr_hash_table);
5620 if (expr_hash_table.n_elems > 0)
5622 alloc_pre_mem (last_basic_block, expr_hash_table.n_elems);
5623 compute_pre_data ();
5624 changed |= pre_gcse ();
5625 free_edge_list (edge_list);
5626 free_pre_mem ();
5629 free_ldst_mems ();
5630 remove_fake_edges ();
5631 free_hash_table (&expr_hash_table);
5633 if (gcse_file)
5635 fprintf (gcse_file, "\nPRE GCSE of %s, pass %d: %d bytes needed, ",
5636 current_function_name, pass, bytes_used);
5637 fprintf (gcse_file, "%d substs, %d insns created\n",
5638 gcse_subst_count, gcse_create_count);
5641 return changed;
5644 /* If X contains any LABEL_REF's, add REG_LABEL notes for them to INSN.
5645 If notes are added to an insn which references a CODE_LABEL, the
5646 LABEL_NUSES count is incremented. We have to add REG_LABEL notes,
5647 because the following loop optimization pass requires them. */
5649 /* ??? This is very similar to the loop.c add_label_notes function. We
5650 could probably share code here. */
5652 /* ??? If there was a jump optimization pass after gcse and before loop,
5653 then we would not need to do this here, because jump would add the
5654 necessary REG_LABEL notes. */
5656 static void
5657 add_label_notes (rtx x, rtx insn)
5659 enum rtx_code code = GET_CODE (x);
5660 int i, j;
5661 const char *fmt;
5663 if (code == LABEL_REF && !LABEL_REF_NONLOCAL_P (x))
5665 /* This code used to ignore labels that referred to dispatch tables to
5666 avoid flow generating (slightly) worse code.
5668 We no longer ignore such label references (see LABEL_REF handling in
5669 mark_jump_label for additional information). */
5671 REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_LABEL, XEXP (x, 0),
5672 REG_NOTES (insn));
5673 if (LABEL_P (XEXP (x, 0)))
5674 LABEL_NUSES (XEXP (x, 0))++;
5675 return;
5678 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
5680 if (fmt[i] == 'e')
5681 add_label_notes (XEXP (x, i), insn);
5682 else if (fmt[i] == 'E')
5683 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
5684 add_label_notes (XVECEXP (x, i, j), insn);
5688 /* Compute transparent outgoing information for each block.
5690 An expression is transparent to an edge unless it is killed by
5691 the edge itself. This can only happen with abnormal control flow,
5692 when the edge is traversed through a call. This happens with
5693 non-local labels and exceptions.
5695 This would not be necessary if we split the edge. While this is
5696 normally impossible for abnormal critical edges, with some effort
5697 it should be possible with exception handling, since we still have
5698 control over which handler should be invoked. But due to increased
5699 EH table sizes, this may not be worthwhile. */
5701 static void
5702 compute_transpout (void)
5704 basic_block bb;
5705 unsigned int i;
5706 struct expr *expr;
5708 sbitmap_vector_ones (transpout, last_basic_block);
5710 FOR_EACH_BB (bb)
5712 /* Note that flow inserted a nop at the end of basic blocks that
5713 end in call instructions for reasons other than abnormal
5714 control flow. */
5715 if (GET_CODE (bb->end) != CALL_INSN)
5716 continue;
5718 for (i = 0; i < expr_hash_table.size; i++)
5719 for (expr = expr_hash_table.table[i]; expr ; expr = expr->next_same_hash)
5720 if (GET_CODE (expr->expr) == MEM)
5722 if (GET_CODE (XEXP (expr->expr, 0)) == SYMBOL_REF
5723 && CONSTANT_POOL_ADDRESS_P (XEXP (expr->expr, 0)))
5724 continue;
5726 /* ??? Optimally, we would use interprocedural alias
5727 analysis to determine if this mem is actually killed
5728 by this call. */
5729 RESET_BIT (transpout[bb->index], expr->bitmap_index);
5734 /* Removal of useless null pointer checks. */
5736 /* Called via note_stores. X is set by SETTER. If X is a register we must
5737 invalidate nonnull_local and set nonnull_killed. DATA is really a
5738 `null_pointer_info *'.
5740 We ignore hard registers. */
5742 static void
5743 invalidate_nonnull_info (rtx x, rtx setter ATTRIBUTE_UNUSED, void *data)
5745 unsigned int regno;
5746 struct null_pointer_info *npi = (struct null_pointer_info *) data;
5748 while (GET_CODE (x) == SUBREG)
5749 x = SUBREG_REG (x);
5751 /* Ignore anything that is not a register or is a hard register. */
5752 if (GET_CODE (x) != REG
5753 || REGNO (x) < npi->min_reg
5754 || REGNO (x) >= npi->max_reg)
5755 return;
5757 regno = REGNO (x) - npi->min_reg;
5759 RESET_BIT (npi->nonnull_local[npi->current_block->index], regno);
5760 SET_BIT (npi->nonnull_killed[npi->current_block->index], regno);
5763 /* Do null-pointer check elimination for the registers indicated in
5764 NPI. NONNULL_AVIN and NONNULL_AVOUT are pre-allocated sbitmaps;
5765 they are not our responsibility to free. */
5767 static int
5768 delete_null_pointer_checks_1 (unsigned int *block_reg, sbitmap *nonnull_avin,
5769 sbitmap *nonnull_avout,
5770 struct null_pointer_info *npi)
5772 basic_block bb, current_block;
5773 sbitmap *nonnull_local = npi->nonnull_local;
5774 sbitmap *nonnull_killed = npi->nonnull_killed;
5775 int something_changed = 0;
5777 /* Compute local properties, nonnull and killed. A register will have
5778 the nonnull property if at the end of the current block its value is
5779 known to be nonnull. The killed property indicates that somewhere in
5780 the block any information we had about the register is killed.
5782 Note that a register can have both properties in a single block. That
5783 indicates that it's killed, then later in the block a new value is
5784 computed. */
5785 sbitmap_vector_zero (nonnull_local, last_basic_block);
5786 sbitmap_vector_zero (nonnull_killed, last_basic_block);
5788 FOR_EACH_BB (current_block)
5790 rtx insn, stop_insn;
5792 /* Set the current block for invalidate_nonnull_info. */
5793 npi->current_block = current_block;
5795 /* Scan each insn in the basic block looking for memory references and
5796 register sets. */
5797 stop_insn = NEXT_INSN (current_block->end);
5798 for (insn = current_block->head;
5799 insn != stop_insn;
5800 insn = NEXT_INSN (insn))
5802 rtx set;
5803 rtx reg;
5805 /* Ignore anything that is not a normal insn. */
5806 if (! INSN_P (insn))
5807 continue;
5809 /* Basically ignore anything that is not a simple SET. We do have
5810 to make sure to invalidate nonnull_local and set nonnull_killed
5811 for such insns though. */
5812 set = single_set (insn);
5813 if (!set)
5815 note_stores (PATTERN (insn), invalidate_nonnull_info, npi);
5816 continue;
5819 /* See if we've got a usable memory load. We handle it first
5820 in case it uses its address register as a dest (which kills
5821 the nonnull property). */
5822 if (GET_CODE (SET_SRC (set)) == MEM
5823 && GET_CODE ((reg = XEXP (SET_SRC (set), 0))) == REG
5824 && REGNO (reg) >= npi->min_reg
5825 && REGNO (reg) < npi->max_reg)
5826 SET_BIT (nonnull_local[current_block->index],
5827 REGNO (reg) - npi->min_reg);
5829 /* Now invalidate stuff clobbered by this insn. */
5830 note_stores (PATTERN (insn), invalidate_nonnull_info, npi);
5832 /* And handle stores, we do these last since any sets in INSN can
5833 not kill the nonnull property if it is derived from a MEM
5834 appearing in a SET_DEST. */
5835 if (GET_CODE (SET_DEST (set)) == MEM
5836 && GET_CODE ((reg = XEXP (SET_DEST (set), 0))) == REG
5837 && REGNO (reg) >= npi->min_reg
5838 && REGNO (reg) < npi->max_reg)
5839 SET_BIT (nonnull_local[current_block->index],
5840 REGNO (reg) - npi->min_reg);
5844 /* Now compute global properties based on the local properties. This
5845 is a classic global availability algorithm. */
5846 compute_available (nonnull_local, nonnull_killed,
5847 nonnull_avout, nonnull_avin);
5849 /* Now look at each bb and see if it ends with a compare of a value
5850 against zero. */
5851 FOR_EACH_BB (bb)
5853 rtx last_insn = bb->end;
5854 rtx condition, earliest;
5855 int compare_and_branch;
5857 /* Since MIN_REG is always at least FIRST_PSEUDO_REGISTER, and
5858 since BLOCK_REG[BB] is zero if this block did not end with a
5859 comparison against zero, this condition works. */
5860 if (block_reg[bb->index] < npi->min_reg
5861 || block_reg[bb->index] >= npi->max_reg)
5862 continue;
5864 /* LAST_INSN is a conditional jump. Get its condition. */
5865 condition = get_condition (last_insn, &earliest);
5867 /* If we can't determine the condition then skip. */
5868 if (! condition)
5869 continue;
5871 /* Is the register known to have a nonzero value? */
5872 if (!TEST_BIT (nonnull_avout[bb->index], block_reg[bb->index] - npi->min_reg))
5873 continue;
5875 /* Try to compute whether the compare/branch at the loop end is one or
5876 two instructions. */
5877 if (earliest == last_insn)
5878 compare_and_branch = 1;
5879 else if (earliest == prev_nonnote_insn (last_insn))
5880 compare_and_branch = 2;
5881 else
5882 continue;
5884 /* We know the register in this comparison is nonnull at exit from
5885 this block. We can optimize this comparison. */
5886 if (GET_CODE (condition) == NE)
5888 rtx new_jump;
5890 new_jump = emit_jump_insn_after (gen_jump (JUMP_LABEL (last_insn)),
5891 last_insn);
5892 JUMP_LABEL (new_jump) = JUMP_LABEL (last_insn);
5893 LABEL_NUSES (JUMP_LABEL (new_jump))++;
5894 emit_barrier_after (new_jump);
5897 something_changed = 1;
5898 delete_insn (last_insn);
5899 if (compare_and_branch == 2)
5900 delete_insn (earliest);
5901 purge_dead_edges (bb);
5903 /* Don't check this block again. (Note that BLOCK_END is
5904 invalid here; we deleted the last instruction in the
5905 block.) */
5906 block_reg[bb->index] = 0;
5909 return something_changed;
5912 /* Find EQ/NE comparisons against zero which can be (indirectly) evaluated
5913 at compile time.
5915 This is conceptually similar to global constant/copy propagation and
5916 classic global CSE (it even uses the same dataflow equations as cprop).
5918 If a register is used as a memory address with the form (mem (reg)), then we
5919 know that REG cannot be zero at that point in the program. Any instruction
5920 which sets REG "kills" this property.
5922 So, if every path leading to a conditional branch has an available memory
5923 reference of that form, then we know the register cannot have the value
5924 zero at the conditional branch.
5926 So we merely need to compute the local properties and propagate that data
5927 around the cfg, then optimize where possible.
5929 We run this pass twice: once before CSE, then again after CSE. This
5930 has proven to be the most profitable approach. It is rare for new
5931 optimization opportunities of this nature to appear after the first CSE
5932 pass.
5934 This could probably be integrated with global cprop with a little work. */
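/* An illustrative source-level sketch of the transformation (the names
   are invented, and it assumes no intervening set of P):

     p->count = 0;        the (mem (reg P)) proves P is nonnull here
     if (p != 0)          always true, so the compare is deleted and
       do_something (p);  the NE branch is made unconditional

   Every path to the conditional contains the dereference, so the
   nonnull property of P is available at the branch.  */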
5936 int
5937 delete_null_pointer_checks (rtx f ATTRIBUTE_UNUSED)
5939 sbitmap *nonnull_avin, *nonnull_avout;
5940 unsigned int *block_reg;
5941 basic_block bb;
5942 int reg;
5943 int regs_per_pass;
5944 int max_reg;
5945 struct null_pointer_info npi;
5946 int something_changed = 0;
5948 /* If we have only a single block, then there's nothing to do. */
5949 if (n_basic_blocks <= 1)
5950 return 0;
5952 /* Trying to perform global optimizations on flow graphs which have
5953 a high connectivity will take a long time and is unlikely to be
5954 particularly useful.
5956 In normal circumstances a cfg should have about twice as many edges
5957 as blocks. But we do not want to punish small functions which have
5958 a couple switch statements. So we require a relatively large number
5959 of basic blocks and the ratio of edges to blocks to be high. */
5960 if (n_basic_blocks > 1000 && n_edges / n_basic_blocks >= 20)
5961 return 0;
5963 /* We need four bitmaps, each with a bit for each register in each
5964 basic block. */
5965 max_reg = max_reg_num ();
5966 regs_per_pass = get_bitmap_width (4, last_basic_block, max_reg);
5968 /* Allocate bitmaps to hold local and global properties. */
5969 npi.nonnull_local = sbitmap_vector_alloc (last_basic_block, regs_per_pass);
5970 npi.nonnull_killed = sbitmap_vector_alloc (last_basic_block, regs_per_pass);
5971 nonnull_avin = sbitmap_vector_alloc (last_basic_block, regs_per_pass);
5972 nonnull_avout = sbitmap_vector_alloc (last_basic_block, regs_per_pass);
5974 /* Go through the basic blocks, seeing whether or not each block
5975 ends with a conditional branch whose condition is a comparison
5976 against zero. Record the register compared in BLOCK_REG. */
5977 block_reg = (unsigned int *) xcalloc (last_basic_block, sizeof (int));
5978 FOR_EACH_BB (bb)
5980 rtx last_insn = bb->end;
5981 rtx condition, earliest, reg;
5983 /* We only want conditional branches. */
5984 if (GET_CODE (last_insn) != JUMP_INSN
5985 || !any_condjump_p (last_insn)
5986 || !onlyjump_p (last_insn))
5987 continue;
5989 /* LAST_INSN is a conditional jump. Get its condition. */
5990 condition = get_condition (last_insn, &earliest);
5992 /* If we were unable to get the condition, or it is not an equality
5993 comparison against zero then there's nothing we can do. */
5994 if (!condition
5995 || (GET_CODE (condition) != NE && GET_CODE (condition) != EQ)
5996 || GET_CODE (XEXP (condition, 1)) != CONST_INT
5997 || (XEXP (condition, 1)
5998 != CONST0_RTX (GET_MODE (XEXP (condition, 0)))))
5999 continue;
6001 /* We must be checking a register against zero. */
6002 reg = XEXP (condition, 0);
6003 if (GET_CODE (reg) != REG)
6004 continue;
6006 block_reg[bb->index] = REGNO (reg);
6009 /* Go through the algorithm for each block of registers. */
6010 for (reg = FIRST_PSEUDO_REGISTER; reg < max_reg; reg += regs_per_pass)
6012 npi.min_reg = reg;
6013 npi.max_reg = MIN (reg + regs_per_pass, max_reg);
6014 something_changed |= delete_null_pointer_checks_1 (block_reg,
6015 nonnull_avin,
6016 nonnull_avout,
6017 &npi);
6020 /* Free the table of registers compared at the end of every block. */
6021 free (block_reg);
6023 /* Free bitmaps. */
6024 sbitmap_vector_free (npi.nonnull_local);
6025 sbitmap_vector_free (npi.nonnull_killed);
6026 sbitmap_vector_free (nonnull_avin);
6027 sbitmap_vector_free (nonnull_avout);
6029 return something_changed;
6032 /* Code Hoisting variables and subroutines. */
6034 /* Very busy expressions. */
6035 static sbitmap *hoist_vbein;
6036 static sbitmap *hoist_vbeout;
6038 /* Hoistable expressions. */
6039 static sbitmap *hoist_exprs;
6041 /* Dominator bitmaps. */
6042 dominance_info dominators;
6044 /* ??? We could compute post dominators and run this algorithm in
6045 reverse to perform tail merging; doing so would probably be
6046 more effective than the tail merging code in jump.c.
6048 It's unclear if tail merging could be run in parallel with
6049 code hoisting. It would be nice. */
6051 /* Allocate vars used for code hoisting analysis. */
6053 static void
6054 alloc_code_hoist_mem (int n_blocks, int n_exprs)
6056 antloc = sbitmap_vector_alloc (n_blocks, n_exprs);
6057 transp = sbitmap_vector_alloc (n_blocks, n_exprs);
6058 comp = sbitmap_vector_alloc (n_blocks, n_exprs);
6060 hoist_vbein = sbitmap_vector_alloc (n_blocks, n_exprs);
6061 hoist_vbeout = sbitmap_vector_alloc (n_blocks, n_exprs);
6062 hoist_exprs = sbitmap_vector_alloc (n_blocks, n_exprs);
6063 transpout = sbitmap_vector_alloc (n_blocks, n_exprs);
6066 /* Free vars used for code hoisting analysis. */
6068 static void
6069 free_code_hoist_mem (void)
6071 sbitmap_vector_free (antloc);
6072 sbitmap_vector_free (transp);
6073 sbitmap_vector_free (comp);
6075 sbitmap_vector_free (hoist_vbein);
6076 sbitmap_vector_free (hoist_vbeout);
6077 sbitmap_vector_free (hoist_exprs);
6078 sbitmap_vector_free (transpout);
6080 free_dominance_info (dominators);
6083 /* Compute the very busy expressions at entry/exit from each block.
6085 An expression is very busy if all paths from a given point
6086 compute the expression. */
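/* Restated as dataflow equations (matching the loop below):

     VBEIN(bb)  = ANTLOC(bb) | (VBEOUT(bb) & TRANSP(bb))
     VBEOUT(bb) = intersection of VBEIN over the successors of bb

   where VBEOUT of the last block before the exit block stays empty.  */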
6088 static void
6089 compute_code_hoist_vbeinout (void)
6091 int changed, passes;
6092 basic_block bb;
6094 sbitmap_vector_zero (hoist_vbeout, last_basic_block);
6095 sbitmap_vector_zero (hoist_vbein, last_basic_block);
6097 passes = 0;
6098 changed = 1;
6100 while (changed)
6102 changed = 0;
6104 /* We scan the blocks in the reverse order to speed up
6105 the convergence. */
6106 FOR_EACH_BB_REVERSE (bb)
6108 changed |= sbitmap_a_or_b_and_c_cg (hoist_vbein[bb->index], antloc[bb->index],
6109 hoist_vbeout[bb->index], transp[bb->index]);
6110 if (bb->next_bb != EXIT_BLOCK_PTR)
6111 sbitmap_intersection_of_succs (hoist_vbeout[bb->index], hoist_vbein, bb->index);
6114 passes++;
6117 if (gcse_file)
6118 fprintf (gcse_file, "hoisting vbeinout computation: %d passes\n", passes);
6121 /* Top level routine to do the dataflow analysis needed by code hoisting. */
6123 static void
6124 compute_code_hoist_data (void)
6126 compute_local_properties (transp, comp, antloc, &expr_hash_table);
6127 compute_transpout ();
6128 compute_code_hoist_vbeinout ();
6129 dominators = calculate_dominance_info (CDI_DOMINATORS);
6130 if (gcse_file)
6131 fprintf (gcse_file, "\n");
6134 /* Determine if the expression identified by EXPR_INDEX would
6135 reach BB unimpaired if it was placed at the end of EXPR_BB.
6137 It's unclear exactly what Muchnick meant by "unimpaired". It seems
6138 to me that the expression must either be computed or transparent in
6139 *every* block in the path(s) from EXPR_BB to BB. Any other definition
6140 would allow the expression to be hoisted out of loops, even if
6141 the expression wasn't a loop invariant.
6143 Contrast this to reachability for PRE where an expression is
6144 considered reachable if *any* path reaches instead of *all*
6145 paths. */
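/* A sketch: assume BB1 dominates BB4, EXPR is anticipatable in BB4,
   and the CFG contains the paths

     BB1 -> BB2 -> BB4
     BB1 -> BB3 -> BB4

   If BB2 neither computes EXPR nor is transparent for it, then
   hoist_expr_reaches_here_p (BB1, EXPR, BB4, ...) fails even though
   the path through BB3 is clean; PRE's any-path notion of
   reachability would have accepted it.  */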
6147 static int
6148 hoist_expr_reaches_here_p (basic_block expr_bb, int expr_index, basic_block bb, char *visited)
6150 edge pred;
6151 int visited_allocated_locally = 0;
6154 if (visited == NULL)
6156 visited_allocated_locally = 1;
6157 visited = xcalloc (last_basic_block, 1);
6160 for (pred = bb->pred; pred != NULL; pred = pred->pred_next)
6162 basic_block pred_bb = pred->src;
6164 if (pred->src == ENTRY_BLOCK_PTR)
6165 break;
6166 else if (pred_bb == expr_bb)
6167 continue;
6168 else if (visited[pred_bb->index])
6169 continue;
6171 /* Does this predecessor generate this expression? */
6172 else if (TEST_BIT (comp[pred_bb->index], expr_index))
6173 break;
6174 else if (! TEST_BIT (transp[pred_bb->index], expr_index))
6175 break;
6177 /* Not killed. */
6178 else
6180 visited[pred_bb->index] = 1;
6181 if (! hoist_expr_reaches_here_p (expr_bb, expr_index,
6182 pred_bb, visited))
6183 break;
6186 if (visited_allocated_locally)
6187 free (visited);
6189 return (pred == NULL);
6192 /* Actually perform code hoisting. */
6194 static void
6195 hoist_code (void)
6197 basic_block bb, dominated;
6198 basic_block *domby;
6199 unsigned int domby_len;
6200 unsigned int i,j;
6201 struct expr **index_map;
6202 struct expr *expr;
6204 sbitmap_vector_zero (hoist_exprs, last_basic_block);
6206 /* Compute a mapping from expression number (`bitmap_index') to
6207 hash table entry. */
6209 index_map = (struct expr **) xcalloc (expr_hash_table.n_elems, sizeof (struct expr *));
6210 for (i = 0; i < expr_hash_table.size; i++)
6211 for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash)
6212 index_map[expr->bitmap_index] = expr;
6214 /* Walk over each basic block looking for potentially hoistable
6215 expressions; nothing gets hoisted from the entry block. */
6216 FOR_EACH_BB (bb)
6218 int found = 0;
6219 int insn_inserted_p;
6221 domby_len = get_dominated_by (dominators, bb, &domby);
6222 /* Examine each expression that is very busy at the exit of this
6223 block. These are the potentially hoistable expressions. */
6224 for (i = 0; i < hoist_vbeout[bb->index]->n_bits; i++)
6226 int hoistable = 0;
6228 if (TEST_BIT (hoist_vbeout[bb->index], i)
6229 && TEST_BIT (transpout[bb->index], i))
6231 /* We've found a potentially hoistable expression, now
6232 we look at every block BB dominates to see if it
6233 computes the expression. */
6234 for (j = 0; j < domby_len; j++)
6236 dominated = domby[j];
6237 /* Ignore self dominance. */
6238 if (bb == dominated)
6239 continue;
6240 /* We've found a dominated block, now see if it computes
6241 the busy expression and whether or not moving that
6242 expression to the "beginning" of that block is safe. */
6243 if (!TEST_BIT (antloc[dominated->index], i))
6244 continue;
6246 /* Note if the expression would reach the dominated block
6247 unimpaired if it was placed at the end of BB.
6249 Keep track of how many times this expression is hoistable
6250 from a dominated block into BB. */
6251 if (hoist_expr_reaches_here_p (bb, i, dominated, NULL))
6252 hoistable++;
6255 /* If we found more than one hoistable occurrence of this
6256 expression, then note it in the bitmap of expressions to
6257 hoist. It makes no sense to hoist things which are computed
6258 in only one BB, and doing so tends to pessimize register
6259 allocation. One could increase this value to try harder
6260 to avoid any possible code expansion due to register
6261 allocation issues; however experiments have shown that
6262 the vast majority of hoistable expressions are only movable
6263 from two successors, so raising this threshold is likely
6264 to nullify any benefit we get from code hoisting. */
6265 if (hoistable > 1)
6267 SET_BIT (hoist_exprs[bb->index], i);
6268 found = 1;
6272 /* If we found nothing to hoist, then quit now. */
6273 if (! found)
6275 free (domby);
6276 continue;
6279 /* Loop over all the hoistable expressions. */
6280 for (i = 0; i < hoist_exprs[bb->index]->n_bits; i++)
6282 /* We want to insert the expression into BB only once, so
6283 note when we've inserted it. */
6284 insn_inserted_p = 0;
6286 /* These tests should be the same as the tests above. */
6287 if (TEST_BIT (hoist_vbeout[bb->index], i))
6289 /* We've found a potentially hoistable expression, now
6290 we look at every block BB dominates to see if it
6291 computes the expression. */
6292 for (j = 0; j < domby_len; j++)
6294 dominated = domby[j];
6295 /* Ignore self dominance. */
6296 if (bb == dominated)
6297 continue;
6299 /* We've found a dominated block, now see if it computes
6300 the busy expression and whether or not moving that
6301 expression to the "beginning" of that block is safe. */
6302 if (!TEST_BIT (antloc[dominated->index], i))
6303 continue;
6305 /* The expression is computed in the dominated block and
6306 it would be safe to compute it at the start of the
6307 dominated block. Now we have to determine if the
6308 expression would reach the dominated block if it was
6309 placed at the end of BB. */
6310 if (hoist_expr_reaches_here_p (bb, i, dominated, NULL))
6312 struct expr *expr = index_map[i];
6313 struct occr *occr = expr->antic_occr;
6314 rtx insn;
6315 rtx set;
6317 /* Find the right occurrence of this expression. */
6318 while (occr && BLOCK_FOR_INSN (occr->insn) != dominated)
6319 occr = occr->next;
6321 /* Should never happen. */
6322 if (!occr)
6323 abort ();
6325 insn = occr->insn;
6327 set = single_set (insn);
6328 if (! set)
6329 abort ();
6331 /* Create a pseudo-reg to store the result of reaching
6332 expressions into. Get the mode for the new pseudo
6333 from the mode of the original destination pseudo. */
6334 if (expr->reaching_reg == NULL)
6335 expr->reaching_reg
6336 = gen_reg_rtx (GET_MODE (SET_DEST (set)));
6338 gcse_emit_move_after (expr->reaching_reg, SET_DEST (set), insn);
6339 delete_insn (insn);
6340 occr->deleted_p = 1;
6341 if (!insn_inserted_p)
6343 insert_insn_end_bb (index_map[i], bb, 0);
6344 insn_inserted_p = 1;
6350 free (domby);
6353 free (index_map);
6356 /* Top level routine to perform one code hoisting (aka unification) pass
6358 Return nonzero if a change was made. */
6360 static int
6361 one_code_hoisting_pass (void)
6363 int changed = 0;
6365 alloc_hash_table (max_cuid, &expr_hash_table, 0);
6366 compute_hash_table (&expr_hash_table);
6367 if (gcse_file)
6368 dump_hash_table (gcse_file, "Code Hoisting Expressions", &expr_hash_table);
6370 if (expr_hash_table.n_elems > 0)
6372 alloc_code_hoist_mem (last_basic_block, expr_hash_table.n_elems);
6373 compute_code_hoist_data ();
6374 hoist_code ();
6375 free_code_hoist_mem ();
6378 free_hash_table (&expr_hash_table);
6380 return changed;
6383 /* Here we provide the things required to do store motion towards
6384 the exit. In order for this to be effective, gcse also needed to
6385 be taught how to move a load when it is killed only by a store to itself.
6387 int i;
6388 float a[10];
6390 void foo(float scale)
6391 {
6392 for (i=0; i<10; i++)
6393 a[i] *= scale;
6394 }
6396 'i' is both loaded and stored to in the loop. Normally, gcse cannot move
6397 the load out since it's live around the loop and stored at the bottom
6398 of the loop.
6400 The 'Load Motion' referred to and implemented in this file is
6401 an enhancement to gcse which, when using edge-based LCM, recognizes
6402 this situation and allows gcse to move the load out of the loop.
6404 Once gcse has hoisted the load, store motion can then push this
6405 load towards the exit, and we end up with no loads or stores of 'i'
6406 in the loop. */
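/* A sketch of the intended result for the example above, with REG
   standing for an invented pseudo-register:

     for (REG = 0; REG < 10; REG++)
       a[REG] *= scale;
     i = REG;

   The load of 'i' no longer appears inside the loop, and the store of
   'i' has been pushed down to the loop exit.  */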
6408 /* This will search the ldst list for a matching expression. If it
6409 doesn't find one, we create one and initialize it. */
6411 static struct ls_expr *
6412 ldst_entry (rtx x)
6414 struct ls_expr * ptr;
6416 for (ptr = first_ls_expr(); ptr != NULL; ptr = next_ls_expr (ptr))
6417 if (expr_equiv_p (ptr->pattern, x))
6418 break;
6420 if (!ptr)
6422 ptr = (struct ls_expr *) xmalloc (sizeof (struct ls_expr));
6424 ptr->next = pre_ldst_mems;
6425 ptr->expr = NULL;
6426 ptr->pattern = x;
6427 ptr->pattern_regs = NULL_RTX;
6428 ptr->loads = NULL_RTX;
6429 ptr->stores = NULL_RTX;
6430 ptr->reaching_reg = NULL_RTX;
6431 ptr->invalid = 0;
6432 ptr->index = 0;
6433 ptr->hash_index = 0;
6434 pre_ldst_mems = ptr;
6437 return ptr;
6440 /* Free up an individual ldst entry. */
6442 static void
6443 free_ldst_entry (struct ls_expr * ptr)
6445 free_INSN_LIST_list (& ptr->loads);
6446 free_INSN_LIST_list (& ptr->stores);
6448 free (ptr);
6451 /* Free up all memory associated with the ldst list. */
6453 static void
6454 free_ldst_mems (void)
6456 while (pre_ldst_mems)
6458 struct ls_expr * tmp = pre_ldst_mems;
6460 pre_ldst_mems = pre_ldst_mems->next;
6462 free_ldst_entry (tmp);
6465 pre_ldst_mems = NULL;
6468 /* Dump debugging info about the ldst list. */
6470 static void
6471 print_ldst_list (FILE * file)
6473 struct ls_expr * ptr;
6475 fprintf (file, "LDST list: \n");
6477 for (ptr = first_ls_expr(); ptr != NULL; ptr = next_ls_expr (ptr))
6479 fprintf (file, " Pattern (%3d): ", ptr->index);
6481 print_rtl (file, ptr->pattern);
6483 fprintf (file, "\n Loads : ");
6485 if (ptr->loads)
6486 print_rtl (file, ptr->loads);
6487 else
6488 fprintf (file, "(nil)");
6490 fprintf (file, "\n Stores : ");
6492 if (ptr->stores)
6493 print_rtl (file, ptr->stores);
6494 else
6495 fprintf (file, "(nil)");
6497 fprintf (file, "\n\n");
6500 fprintf (file, "\n");
6503 /* Return the entry matching X in the list of ldst-only expressions, or NULL if there is none or it has been invalidated. */
6505 static struct ls_expr *
6506 find_rtx_in_ldst (rtx x)
6508 struct ls_expr * ptr;
6510 for (ptr = pre_ldst_mems; ptr != NULL; ptr = ptr->next)
6511 if (expr_equiv_p (ptr->pattern, x) && ! ptr->invalid)
6512 return ptr;
6514 return NULL;
6517 /* Assign each element of the list of mems a monotonically increasing value. */
6519 static int
6520 enumerate_ldsts (void)
6522 struct ls_expr * ptr;
6523 int n = 0;
6525 for (ptr = pre_ldst_mems; ptr != NULL; ptr = ptr->next)
6526 ptr->index = n++;
6528 return n;
6531 /* Return first item in the list. */
6533 static inline struct ls_expr *
6534 first_ls_expr (void)
6536 return pre_ldst_mems;
6539 /* Return the next item in the list after the specified one. */
6541 static inline struct ls_expr *
6542 next_ls_expr (struct ls_expr * ptr)
6544 return ptr->next;
6547 /* Load Motion for loads which only kill themselves. */
6549 /* Return true if x is a simple MEM operation, with no registers or
6550 side effects. These are the types of loads we consider for the
6551 ld_motion list, otherwise we let the usual aliasing take care of it. */
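/* E.g. a load such as (mem:SI (symbol_ref "i")) normally qualifies,
   while a volatile MEM, a BLKmode MEM, or a stack-relative reference
   like (mem:SI (plus (reg sp) (const_int 4))) does not.  */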
6553 static int
6554 simple_mem (rtx x)
6556 if (GET_CODE (x) != MEM)
6557 return 0;
6559 if (MEM_VOLATILE_P (x))
6560 return 0;
6562 if (GET_MODE (x) == BLKmode)
6563 return 0;
6565 /* If we are handling exceptions, we must be careful with memory references
6566 that may trap. If we are not, the behavior is undefined, so we may just
6567 continue. */
6568 if (flag_non_call_exceptions && may_trap_p (x))
6569 return 0;
6571 if (side_effects_p (x))
6572 return 0;
6574 /* Do not consider function arguments passed on stack. */
6575 if (reg_mentioned_p (stack_pointer_rtx, x))
6576 return 0;
6578 if (flag_float_store && FLOAT_MODE_P (GET_MODE (x)))
6579 return 0;
6581 return 1;
6584 /* Make sure there isn't a buried reference in this pattern anywhere.
6585 If there is, invalidate the entry for it since we're not capable
6586 of fixing it up just yet. We have to be sure we know about ALL
6587 loads, since the aliasing code will allow every entry in the
6588 ld_motion list to not-alias itself. If we miss a load, we will get
6589 the wrong value since gcse might common it and we won't know to
6590 fix it up. */
6592 static void
6593 invalidate_any_buried_refs (rtx x)
6595 const char * fmt;
6596 int i, j;
6597 struct ls_expr * ptr;
6599 /* Invalidate it in the list. */
6600 if (GET_CODE (x) == MEM && simple_mem (x))
6602 ptr = ldst_entry (x);
6603 ptr->invalid = 1;
6606 /* Recursively process the insn. */
6607 fmt = GET_RTX_FORMAT (GET_CODE (x));
6609 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
6611 if (fmt[i] == 'e')
6612 invalidate_any_buried_refs (XEXP (x, i));
6613 else if (fmt[i] == 'E')
6614 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
6615 invalidate_any_buried_refs (XVECEXP (x, i, j));
6619 /* Find all the 'simple' MEMs which are used in LOADs and STORES. Simple
6620 being defined as MEM loads and stores to symbols, with no side effects
6621 and no registers in the expression. For a MEM destination, we also
6622 check that the insn is still valid if we replace the destination with a
6623 REG, as is done in update_ld_motion_stores. If there are any uses/defs
6624 which don't match these criteria, they are invalidated and trimmed out
6625 later. */
6627 static void
6628 compute_ld_motion_mems (void)
6630 struct ls_expr * ptr;
6631 basic_block bb;
6632 rtx insn;
6634 pre_ldst_mems = NULL;
6636 FOR_EACH_BB (bb)
6638 for (insn = bb->head;
6639 insn && insn != NEXT_INSN (bb->end);
6640 insn = NEXT_INSN (insn))
6642 if (INSN_P (insn))
6644 if (GET_CODE (PATTERN (insn)) == SET)
6646 rtx src = SET_SRC (PATTERN (insn));
6647 rtx dest = SET_DEST (PATTERN (insn));
6649 /* Check for a simple LOAD... */
6650 if (GET_CODE (src) == MEM && simple_mem (src))
6652 ptr = ldst_entry (src);
6653 if (GET_CODE (dest) == REG)
6654 ptr->loads = alloc_INSN_LIST (insn, ptr->loads);
6655 else
6656 ptr->invalid = 1;
6658 else
6660 /* Make sure there isn't a buried load somewhere. */
6661 invalidate_any_buried_refs (src);
6664 /* Check for stores. Don't worry about aliased ones, they
6665 will block any movement we might do later. We only care
6666 about this exact pattern since those are the only
6667 circumstance that we will ignore the aliasing info. */
6668 if (GET_CODE (dest) == MEM && simple_mem (dest))
6670 ptr = ldst_entry (dest);
6672 if (GET_CODE (src) != MEM
6673 && GET_CODE (src) != ASM_OPERANDS
6674 /* Check for REG manually since want_to_gcse_p
6675 returns 0 for all REGs. */
6676 && (REG_P (src) || want_to_gcse_p (src)))
6677 ptr->stores = alloc_INSN_LIST (insn, ptr->stores);
6678 else
6679 ptr->invalid = 1;
6682 else
6683 invalidate_any_buried_refs (PATTERN (insn));
6689 /* Remove any references that have been either invalidated or are not in the
6690 expression list for pre gcse. */
6692 static void
6693 trim_ld_motion_mems (void)
6695 struct ls_expr * last = NULL;
6696 struct ls_expr * ptr = first_ls_expr ();
6698 while (ptr != NULL)
6700 int del = ptr->invalid;
6701 struct expr * expr = NULL;
6703 /* Delete if entry has been made invalid. */
6704 if (!del)
6706 unsigned int i;
6708 del = 1;
6709 /* Delete if we cannot find this mem in the expression list. */
6710 for (i = 0; i < expr_hash_table.size && del; i++)
6712 for (expr = expr_hash_table.table[i];
6713 expr != NULL;
6714 expr = expr->next_same_hash)
6715 if (expr_equiv_p (expr->expr, ptr->pattern))
6717 del = 0;
6718 break;
6723 if (del)
6725 if (last != NULL)
6727 last->next = ptr->next;
6728 free_ldst_entry (ptr);
6729 ptr = last->next;
6731 else
6733 pre_ldst_mems = pre_ldst_mems->next;
6734 free_ldst_entry (ptr);
6735 ptr = pre_ldst_mems;
6738 else
6740 /* Set the expression field if we are keeping it. */
6741 last = ptr;
6742 ptr->expr = expr;
6743 ptr = ptr->next;
6747 /* Show the world what we've found. */
6748 if (gcse_file && pre_ldst_mems != NULL)
6749 print_ldst_list (gcse_file);
6752 /* This routine will take an expression which we are replacing with
6753 a reaching register, and update any stores that are needed if
6754 that expression is in the ld_motion list. Stores are updated by
6755 copying their SRC to the reaching register, and then storing
6756 the reaching register into the store location. This keeps the
6757 correct value in the reaching register for the loads. */
6759 static void
6760 update_ld_motion_stores (struct expr * expr)
6762 struct ls_expr * mem_ptr;
6764 if ((mem_ptr = find_rtx_in_ldst (expr->expr)))
6766 /* We can try to find just the REACHED stores, but it shouldn't
6767 matter to set the reaching reg everywhere... some might be
6768 dead and should be eliminated later. */
6770 /* We replace (set mem expr) with (set reg expr) (set mem reg)
6771 where reg is the reaching reg used in the load. We checked in
6772 compute_ld_motion_mems that we can replace (set mem expr) with
6773 (set reg expr) in that insn. */
6774 rtx list = mem_ptr->stores;
6776 for ( ; list != NULL_RTX; list = XEXP (list, 1))
6778 rtx insn = XEXP (list, 0);
6779 rtx pat = PATTERN (insn);
6780 rtx src = SET_SRC (pat);
6781 rtx reg = expr->reaching_reg;
6782 rtx copy, new;
6784 /* If we've already copied it, continue. */
6785 if (expr->reaching_reg == src)
6786 continue;
6788 if (gcse_file)
6790 fprintf (gcse_file, "PRE: store updated with reaching reg ");
6791 print_rtl (gcse_file, expr->reaching_reg);
6792 fprintf (gcse_file, ":\n ");
6793 print_inline_rtx (gcse_file, insn, 8);
6794 fprintf (gcse_file, "\n");
6797 copy = gen_move_insn ( reg, copy_rtx (SET_SRC (pat)));
6798 new = emit_insn_before (copy, insn);
6799 record_one_set (REGNO (reg), new);
6800 SET_SRC (pat) = reg;
6802 /* Un-recognize this pattern since it's probably different now. */
6803 INSN_CODE (insn) = -1;
6804 gcse_create_count++;
6809 /* Store motion code. */
6811 #define ANTIC_STORE_LIST(x) ((x)->loads)
6812 #define AVAIL_STORE_LIST(x) ((x)->stores)
6813 #define LAST_AVAIL_CHECK_FAILURE(x) ((x)->reaching_reg)
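/* Note the field reuse implied by the macros above: during store
   motion an ls_expr's `loads' list holds its anticipatable stores,
   its `stores' list holds its available stores, and `reaching_reg'
   temporarily records the insn at which the last availability check
   failed; it is cleared again before being used as a real reaching
   register.  */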
6815 /* This is used to communicate the target bitvector we want to use in the
6816 reg_set_info routine when called via the note_stores mechanism. */
6817 static int * regvec;
6819 /* And current insn, for the same routine. */
6820 static rtx compute_store_table_current_insn;
6822 /* Used in computing the reverse edge graph bit vectors. */
6823 static sbitmap * st_antloc;
6825 /* Global holding the number of store expressions we are dealing with. */
6826 static int num_stores;
6828 /* Check to see if we need to mark a register set. Called from note_stores. */
6830 static void
6831 reg_set_info (rtx dest, rtx setter ATTRIBUTE_UNUSED,
6832 void *data ATTRIBUTE_UNUSED)
6834 if (GET_CODE (dest) == SUBREG)
6835 dest = SUBREG_REG (dest);
6837 if (GET_CODE (dest) == REG)
6838 regvec[REGNO (dest)] = INSN_UID (compute_store_table_current_insn);
6841 /* Return false if any of the registers in list X is killed
6842 by a register set recorded in REGS_SET. */
6844 static bool
6845 store_ops_ok (rtx x, int *regs_set)
6847 rtx reg;
6849 for (; x; x = XEXP (x, 1))
6851 reg = XEXP (x, 0);
6852 if (regs_set[REGNO(reg)])
6853 return false;
6856 return true;
6859 /* Returns a list of registers mentioned in X. */
6860 static rtx
6861 extract_mentioned_regs (rtx x)
6863 return extract_mentioned_regs_helper (x, NULL_RTX);
6866 /* Helper for extract_mentioned_regs; ACCUM is used to accumulate used
6867 registers. */
6868 static rtx
6869 extract_mentioned_regs_helper (rtx x, rtx accum)
6871 int i;
6872 enum rtx_code code;
6873 const char * fmt;
6875 /* Repeat is used to turn tail-recursion into iteration. */
6876 repeat:
6878 if (x == 0)
6879 return accum;
6881 code = GET_CODE (x);
6882 switch (code)
6884 case REG:
6885 return alloc_EXPR_LIST (0, x, accum);
6887 case MEM:
6888 x = XEXP (x, 0);
6889 goto repeat;
6891 case PRE_DEC:
6892 case PRE_INC:
6893 case POST_DEC:
6894 case POST_INC:
6895 /* We do not run this function with arguments having side effects. */
6896 abort ();
6898 case PC:
6899 case CC0: /*FIXME*/
6900 case CONST:
6901 case CONST_INT:
6902 case CONST_DOUBLE:
6903 case CONST_VECTOR:
6904 case SYMBOL_REF:
6905 case LABEL_REF:
6906 case ADDR_VEC:
6907 case ADDR_DIFF_VEC:
6908 return accum;
6910 default:
6911 break;
6914 i = GET_RTX_LENGTH (code) - 1;
6915 fmt = GET_RTX_FORMAT (code);
6917 for (; i >= 0; i--)
6919 if (fmt[i] == 'e')
6921 rtx tem = XEXP (x, i);
6923 /* If we are about to do the last recursive call
6924 needed at this level, change it into iteration. */
6925 if (i == 0)
6927 x = tem;
6928 goto repeat;
6931 accum = extract_mentioned_regs_helper (tem, accum);
6933 else if (fmt[i] == 'E')
6935 int j;
6937 for (j = 0; j < XVECLEN (x, i); j++)
6938 accum = extract_mentioned_regs_helper (XVECEXP (x, i, j), accum);
6942 return accum;
6945 /* Determine whether INSN is a MEM store pattern that we will consider moving.
6946 REGS_SET_BEFORE is a bitmap of registers set before (and including) the
6947 current insn; REGS_SET_AFTER is a bitmap of registers set after (and
6948 including) the insn in this basic block. We must be passing through BB from
6949 head to end, as we are using this fact to speed things up.
6951 The results are stored this way:
6953 -- the first anticipatable expression is added into ANTIC_STORE_LIST
6954 -- if the processed expression is not anticipatable, NULL_RTX is added
6955 there instead, so that we can use it as indicator that no further
6956 expression of this type may be anticipatable
6957 -- if the expression is available, it is added as head of AVAIL_STORE_LIST;
6958 consequently, all of them but this head are dead and may be deleted.
6959 -- if the expression is not available, the insn due to that it fails to be
6960 available is stored in reaching_reg.
6962 Things are complicated a bit by the fact that there may already be stores
6963 to the same MEM from other blocks; also, the caller must take care of the
6964 necessary cleanup of the temporary markers after the end of the basic block. */
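/* A sketch: two stores to the same MEM within one block,

     S1: *p = a;
     S2: *p = b;

   leave S1 in ANTIC_STORE_LIST (the first anticipatable occurrence)
   and S2 at the head of AVAIL_STORE_LIST; S1's availability entry is
   then recognized as redundant and replaced in build_store_vectors.  */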
6967 static void
6968 find_moveable_store (rtx insn, int *regs_set_before, int *regs_set_after)
6970 struct ls_expr * ptr;
6971 rtx dest, set, tmp;
6972 int check_anticipatable, check_available;
6973 basic_block bb = BLOCK_FOR_INSN (insn);
6975 set = single_set (insn);
6976 if (!set)
6977 return;
6979 dest = SET_DEST (set);
6981 if (GET_CODE (dest) != MEM || MEM_VOLATILE_P (dest)
6982 || GET_MODE (dest) == BLKmode)
6983 return;
6985 if (side_effects_p (dest))
6986 return;
6988 /* If we are handling exceptions, we must be careful with memory references
6989 that may trap. If we are not, the behavior is undefined, so we may just
6990 continue. */
6991 if (flag_non_call_exceptions && may_trap_p (dest))
6992 return;
6994 ptr = ldst_entry (dest);
6995 if (!ptr->pattern_regs)
6996 ptr->pattern_regs = extract_mentioned_regs (dest);
6998 /* Do not check for anticipatability if we either found one anticipatable
6999 store already, or tested for one and found out that it was killed. */
7000 check_anticipatable = 0;
7001 if (!ANTIC_STORE_LIST (ptr))
7002 check_anticipatable = 1;
7003 else
7005 tmp = XEXP (ANTIC_STORE_LIST (ptr), 0);
7006 if (tmp != NULL_RTX
7007 && BLOCK_FOR_INSN (tmp) != bb)
7008 check_anticipatable = 1;
7010 if (check_anticipatable)
7012 if (store_killed_before (dest, ptr->pattern_regs, insn, bb, regs_set_before))
7013 tmp = NULL_RTX;
7014 else
7015 tmp = insn;
7016 ANTIC_STORE_LIST (ptr) = alloc_INSN_LIST (tmp,
7017 ANTIC_STORE_LIST (ptr));
7020 /* It is not necessary to check whether the store is available if we did
7021 it successfully before; if we failed before, do not bother to check
7022 until we reach the insn that caused us to fail. */
7023 check_available = 0;
7024 if (!AVAIL_STORE_LIST (ptr))
7025 check_available = 1;
7026 else
7028 tmp = XEXP (AVAIL_STORE_LIST (ptr), 0);
7029 if (BLOCK_FOR_INSN (tmp) != bb)
7030 check_available = 1;
7032 if (check_available)
7034 /* Check whether we have already reached the insn at which the check
7035 failed last time. */
7036 if (LAST_AVAIL_CHECK_FAILURE (ptr))
7038 for (tmp = bb->end;
7039 tmp != insn && tmp != LAST_AVAIL_CHECK_FAILURE (ptr);
7040 tmp = PREV_INSN (tmp))
7041 continue;
7042 if (tmp == insn)
7043 check_available = 0;
7045 else
7046 check_available = store_killed_after (dest, ptr->pattern_regs, insn,
7047 bb, regs_set_after,
7048 &LAST_AVAIL_CHECK_FAILURE (ptr));
7050 if (!check_available)
7051 AVAIL_STORE_LIST (ptr) = alloc_INSN_LIST (insn, AVAIL_STORE_LIST (ptr));
7054 /* Find available and anticipatable stores. */
7056 static int
7057 compute_store_table (void)
7059 int ret;
7060 basic_block bb;
7061 unsigned regno;
7062 rtx insn, pat, tmp;
7063 int *last_set_in, *already_set;
7064 struct ls_expr * ptr, **prev_next_ptr_ptr;
7066 max_gcse_regno = max_reg_num ();
7068 reg_set_in_block = (sbitmap *) sbitmap_vector_alloc (last_basic_block,
7069 max_gcse_regno);
7070 sbitmap_vector_zero (reg_set_in_block, last_basic_block);
7071 pre_ldst_mems = 0;
7072 last_set_in = xmalloc (sizeof (int) * max_gcse_regno);
7073 already_set = xmalloc (sizeof (int) * max_gcse_regno);
7075 /* Find all the stores we care about. */
7076 FOR_EACH_BB (bb)
7078 /* First compute the registers set in this block. */
7079 memset (last_set_in, 0, sizeof (int) * max_gcse_regno);
7080 regvec = last_set_in;
7082 for (insn = bb->head;
7083 insn != NEXT_INSN (bb->end);
7084 insn = NEXT_INSN (insn))
7086 if (! INSN_P (insn))
7087 continue;
7089 if (GET_CODE (insn) == CALL_INSN)
7091 bool clobbers_all = false;
7092 #ifdef NON_SAVING_SETJMP
7093 if (NON_SAVING_SETJMP
7094 && find_reg_note (insn, REG_SETJMP, NULL_RTX))
7095 clobbers_all = true;
7096 #endif
7098 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7099 if (clobbers_all
7100 || TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
7101 last_set_in[regno] = INSN_UID (insn);
7104 pat = PATTERN (insn);
7105 compute_store_table_current_insn = insn;
7106 note_stores (pat, reg_set_info, NULL);
7109 /* Record the set registers. */
7110 for (regno = 0; regno < max_gcse_regno; regno++)
7111 if (last_set_in[regno])
7112 SET_BIT (reg_set_in_block[bb->index], regno);
7114 /* Now find the stores. */
7115 memset (already_set, 0, sizeof (int) * max_gcse_regno);
7116 regvec = already_set;
7117 for (insn = bb->head;
7118 insn != NEXT_INSN (bb->end);
7119 insn = NEXT_INSN (insn))
7121 if (! INSN_P (insn))
7122 continue;
7124 if (GET_CODE (insn) == CALL_INSN)
7126 bool clobbers_all = false;
7127 #ifdef NON_SAVING_SETJMP
7128 if (NON_SAVING_SETJMP
7129 && find_reg_note (insn, REG_SETJMP, NULL_RTX))
7130 clobbers_all = true;
7131 #endif
7133 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7134 if (clobbers_all
7135 || TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
7136 already_set[regno] = 1;
7139 pat = PATTERN (insn);
7140 note_stores (pat, reg_set_info, NULL);
7142 /* Now that we've marked regs, look for stores. */
7143 find_moveable_store (insn, already_set, last_set_in);
7145 /* Unmark regs that are no longer set. */
7146 for (regno = 0; regno < max_gcse_regno; regno++)
7147 if (last_set_in[regno] == INSN_UID (insn))
7148 last_set_in[regno] = 0;
7151 /* Clear temporary marks. */
7152 for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
7154 LAST_AVAIL_CHECK_FAILURE(ptr) = NULL_RTX;
7155 if (ANTIC_STORE_LIST (ptr)
7156 && (tmp = XEXP (ANTIC_STORE_LIST (ptr), 0)) == NULL_RTX)
7157 ANTIC_STORE_LIST (ptr) = XEXP (ANTIC_STORE_LIST (ptr), 1);
7161 /* Remove the stores that are not available anywhere, as there will
7162 be no opportunity to optimize them. */
7163 for (ptr = pre_ldst_mems, prev_next_ptr_ptr = &pre_ldst_mems;
7164 ptr != NULL;
7165 ptr = *prev_next_ptr_ptr)
7167 if (!AVAIL_STORE_LIST (ptr))
7169 *prev_next_ptr_ptr = ptr->next;
7170 free_ldst_entry (ptr);
7172 else
7173 prev_next_ptr_ptr = &ptr->next;
7176 ret = enumerate_ldsts ();
7178 if (gcse_file)
7180 fprintf (gcse_file, "ST_avail and ST_antic (shown under loads..)\n");
7181 print_ldst_list (gcse_file);
7184 free (last_set_in);
7185 free (already_set);
7186 return ret;
7189 /* Check to see if the load X is aliased with STORE_PATTERN. */
7191 static bool
7192 load_kills_store (rtx x, rtx store_pattern)
7194 if (true_dependence (x, GET_MODE (x), store_pattern, rtx_addr_varies_p))
7195 return true;
7196 return false;
7199 /* Go through the entire insn X, looking for any loads which might alias
7200 STORE_PATTERN. Return true if found. */
7202 static bool
7203 find_loads (rtx x, rtx store_pattern)
7205 const char * fmt;
7206 int i, j;
7207 int ret = false;
7209 if (!x)
7210 return false;
7212 if (GET_CODE (x) == SET)
7213 x = SET_SRC (x);
7215 if (GET_CODE (x) == MEM)
7217 if (load_kills_store (x, store_pattern))
7218 return true;
7221 /* Recursively process the insn. */
7222 fmt = GET_RTX_FORMAT (GET_CODE (x));
7224 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0 && !ret; i--)
7226 if (fmt[i] == 'e')
7227 ret |= find_loads (XEXP (x, i), store_pattern);
7228 else if (fmt[i] == 'E')
7229 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
7230 ret |= find_loads (XVECEXP (x, i, j), store_pattern);
7232 return ret;
7235 /* Check if INSN kills the store pattern X (is aliased with it).
7236 Return true if it does. */
7238 static bool
7239 store_killed_in_insn (rtx x, rtx x_regs, rtx insn)
7241 rtx reg, base;
7243 if (!INSN_P (insn))
7244 return false;
7246 if (GET_CODE (insn) == CALL_INSN)
7248 /* A normal or pure call might read from pattern,
7249 but a const call will not. */
7250 if (! CONST_OR_PURE_CALL_P (insn) || pure_call_p (insn))
7251 return true;
7253 /* But even a const call reads its parameters. Check whether the
7254 base of any of the registers used in the mem is the stack pointer. */
7255 for (reg = x_regs; reg; reg = XEXP (reg, 1))
7257 base = find_base_term (XEXP (reg, 0));
7258 if (!base
7259 || (GET_CODE (base) == ADDRESS
7260 && GET_MODE (base) == Pmode
7261 && XEXP (base, 0) == stack_pointer_rtx))
7262 return true;
7265 return false;
7268 if (GET_CODE (PATTERN (insn)) == SET)
7270 rtx pat = PATTERN (insn);
7271 /* Check for memory stores to aliased objects. */
7272 if (GET_CODE (SET_DEST (pat)) == MEM && !expr_equiv_p (SET_DEST (pat), x))
7273 /* Pretend it's a load and check for aliasing. */
7274 if (find_loads (SET_DEST (pat), x))
7275 return true;
7276 return find_loads (SET_SRC (pat), x);
7278 else
7279 return find_loads (PATTERN (insn), x);
7282 /* Returns true if the expression X is loaded or clobbered on or after INSN
7283 within basic block BB. REGS_SET_AFTER is bitmap of registers set in
7284 or after the insn. X_REGS is the list of registers mentioned in X. If the
7285 store is killed, return the last insn that kills it in FAIL_INSN. */
7287 static bool
7288 store_killed_after (rtx x, rtx x_regs, rtx insn, basic_block bb,
7289 int *regs_set_after, rtx *fail_insn)
7291 rtx last = bb->end, act;
7293 if (!store_ops_ok (x_regs, regs_set_after))
7295 /* We do not know where it will happen. */
7296 if (fail_insn)
7297 *fail_insn = NULL_RTX;
7298 return true;
7301 /* Scan from the end, so that fail_insn is determined correctly. */
7302 for (act = last; act != PREV_INSN (insn); act = PREV_INSN (act))
7303 if (store_killed_in_insn (x, x_regs, act))
7305 if (fail_insn)
7306 *fail_insn = act;
7307 return true;
7310 return false;
7313 /* Returns true if the expression X is loaded or clobbered on or before INSN
7314 within basic block BB. X_REGS is list of registers mentioned in X.
7315 REGS_SET_BEFORE is a bitmap of registers set before or in this insn. */
7316 static bool
7317 store_killed_before (rtx x, rtx x_regs, rtx insn, basic_block bb,
7318 int *regs_set_before)
7320 rtx first = bb->head;
7322 if (!store_ops_ok (x_regs, regs_set_before))
7323 return true;
7325 for ( ; insn != PREV_INSN (first); insn = PREV_INSN (insn))
7326 if (store_killed_in_insn (x, x_regs, insn))
7327 return true;
7329 return false;
7332 /* Fill in available, anticipatable, transparent and kill vectors in
7333 STORE_DATA, based on lists of available and anticipatable stores. */
7334 static void
7335 build_store_vectors (void)
7337 basic_block bb;
7338 int *regs_set_in_block;
7339 rtx insn, st;
7340 struct ls_expr * ptr;
7341 unsigned regno;
7343 /* Build the gen_vector. This is any store in the table which is not killed
7344 by aliasing later in its block. */
7345 ae_gen = (sbitmap *) sbitmap_vector_alloc (last_basic_block, num_stores);
7346 sbitmap_vector_zero (ae_gen, last_basic_block);
7348 st_antloc = (sbitmap *) sbitmap_vector_alloc (last_basic_block, num_stores);
7349 sbitmap_vector_zero (st_antloc, last_basic_block);
7351 for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
7353 for (st = AVAIL_STORE_LIST (ptr); st != NULL; st = XEXP (st, 1))
7355 insn = XEXP (st, 0);
7356 bb = BLOCK_FOR_INSN (insn);
7358 /* If we've already seen an available expression in this block,
7359 we can delete this one (it occurs earlier in the block). We'll
7360 copy the SRC expression to an unused register in case there
7361 are any side effects. */
7362 if (TEST_BIT (ae_gen[bb->index], ptr->index))
7364 rtx r = gen_reg_rtx (GET_MODE (ptr->pattern));
7365 if (gcse_file)
7366 fprintf (gcse_file, "Removing redundant store:\n");
7367 replace_store_insn (r, XEXP (st, 0), bb);
7368 continue;
7370 SET_BIT (ae_gen[bb->index], ptr->index);
7373 for (st = ANTIC_STORE_LIST (ptr); st != NULL; st = XEXP (st, 1))
7375 insn = XEXP (st, 0);
7376 bb = BLOCK_FOR_INSN (insn);
7377 SET_BIT (st_antloc[bb->index], ptr->index);
7381 ae_kill = (sbitmap *) sbitmap_vector_alloc (last_basic_block, num_stores);
7382 sbitmap_vector_zero (ae_kill, last_basic_block);
7384 transp = (sbitmap *) sbitmap_vector_alloc (last_basic_block, num_stores);
7385 sbitmap_vector_zero (transp, last_basic_block);
7386 regs_set_in_block = xmalloc (sizeof (int) * max_gcse_regno);
7388 FOR_EACH_BB (bb)
7390 for (regno = 0; regno < max_gcse_regno; regno++)
7391 regs_set_in_block[regno] = TEST_BIT (reg_set_in_block[bb->index], regno);
7393 for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
7395 if (store_killed_after (ptr->pattern, ptr->pattern_regs, bb->head,
7396 bb, regs_set_in_block, NULL))
7398 /* It should not be necessary to consider the expression
7399 killed if it is both anticipatable and available. */
7400 if (!TEST_BIT (st_antloc[bb->index], ptr->index)
7401 || !TEST_BIT (ae_gen[bb->index], ptr->index))
7402 SET_BIT (ae_kill[bb->index], ptr->index);
7404 else
7405 SET_BIT (transp[bb->index], ptr->index);
7409 free (regs_set_in_block);
7411 if (gcse_file)
7413 dump_sbitmap_vector (gcse_file, "st_antloc", "", st_antloc, last_basic_block);
7414 dump_sbitmap_vector (gcse_file, "st_kill", "", ae_kill, last_basic_block);
7415 dump_sbitmap_vector (gcse_file, "Transpt", "", transp, last_basic_block);
7416 dump_sbitmap_vector (gcse_file, "st_avloc", "", ae_gen, last_basic_block);
7420 /* Insert an instruction at the beginning of a basic block, and update
7421 the BLOCK_HEAD if needed. */
7423 static void
7424 insert_insn_start_bb (rtx insn, basic_block bb)
7426 /* Insert at start of successor block. */
7427 rtx prev = PREV_INSN (bb->head);
7428 rtx before = bb->head;
7429 while (before != 0)
7431 if (GET_CODE (before) != CODE_LABEL
7432 && (GET_CODE (before) != NOTE
7433 || NOTE_LINE_NUMBER (before) != NOTE_INSN_BASIC_BLOCK))
7434 break;
7435 prev = before;
7436 if (prev == bb->end)
7437 break;
7438 before = NEXT_INSN (before);
7441 insn = emit_insn_after (insn, prev);
7443 if (gcse_file)
7445 fprintf (gcse_file, "STORE_MOTION insert store at start of BB %d:\n",
7446 bb->index);
7447 print_inline_rtx (gcse_file, insn, 6);
7448 fprintf (gcse_file, "\n");
7452 /* This routine will insert a store on an edge. EXPR is the ldst entry for
7453 the memory reference, and E is the edge to insert it on. Returns nonzero
7454 if an edge insertion was performed. */
7456 static int
7457 insert_store (struct ls_expr * expr, edge e)
7459 rtx reg, insn;
7460 basic_block bb;
7461 edge tmp;
7463 /* We did all the deletes before this insert, so if we didn't delete a
7464 store, then we haven't set the reaching reg yet either. */
7465 if (expr->reaching_reg == NULL_RTX)
7466 return 0;
7468 reg = expr->reaching_reg;
7469 insn = gen_move_insn (copy_rtx (expr->pattern), reg);
7471 /* If we are inserting this expression on ALL predecessor edges of a BB,
7472 insert it at the start of the BB, and reset the insert bits on the
7473 other edges so we don't try to insert it again.
7474 bb = e->dest;
7475 for (tmp = e->dest->pred; tmp ; tmp = tmp->pred_next)
7477 int index = EDGE_INDEX (edge_list, tmp->src, tmp->dest);
7478 if (index == EDGE_INDEX_NO_EDGE)
7479 abort ();
7480 if (! TEST_BIT (pre_insert_map[index], expr->index))
7481 break;
7484 /* If tmp is NULL, we found an insertion on every edge, blank the
7485 insertion vector for these edges, and insert at the start of the BB. */
7486 if (!tmp && bb != EXIT_BLOCK_PTR)
7488 for (tmp = e->dest->pred; tmp ; tmp = tmp->pred_next)
7490 int index = EDGE_INDEX (edge_list, tmp->src, tmp->dest);
7491 RESET_BIT (pre_insert_map[index], expr->index);
7493 insert_insn_start_bb (insn, bb);
7494 return 0;
7497 /* We can't insert on this edge, so we'll insert at the head of the
7498 successor's block. See Morgan, sec 10.5. */
7499 if ((e->flags & EDGE_ABNORMAL) == EDGE_ABNORMAL)
7501 insert_insn_start_bb (insn, bb);
7502 return 0;
7505 insert_insn_on_edge (insn, e);
7507 if (gcse_file)
7509 fprintf (gcse_file, "STORE_MOTION insert insn on edge (%d, %d):\n",
7510 e->src->index, e->dest->index);
7511 print_inline_rtx (gcse_file, insn, 6);
7512 fprintf (gcse_file, "\n");
7515 return 1;
7518 /* This routine will replace a store with a SET to a specified register. */
7520 static void
7521 replace_store_insn (rtx reg, rtx del, basic_block bb)
7523 rtx insn;
7525 insn = gen_move_insn (reg, SET_SRC (single_set (del)));
7526 insn = emit_insn_after (insn, del);
7528 if (gcse_file)
7530 fprintf (gcse_file,
7531 "STORE_MOTION delete insn in BB %d:\n ", bb->index);
7532 print_inline_rtx (gcse_file, del, 6);
7533 fprintf (gcse_file, "\nSTORE MOTION replaced with insn:\n ");
7534 print_inline_rtx (gcse_file, insn, 6);
7535 fprintf (gcse_file, "\n");
7538 delete_insn (del);
7542 /* Delete a store, but copy the value that would have been stored into
7543 the reaching_reg for later storing. */
7545 static void
7546 delete_store (struct ls_expr * expr, basic_block bb)
7548 rtx reg, i, del;
7550 if (expr->reaching_reg == NULL_RTX)
7551 expr->reaching_reg = gen_reg_rtx (GET_MODE (expr->pattern));
7553 reg = expr->reaching_reg;
7555 for (i = AVAIL_STORE_LIST (expr); i; i = XEXP (i, 1))
7557 del = XEXP (i, 0);
7558 if (BLOCK_FOR_INSN (del) == bb)
7560 /* We know there is only one since we deleted redundant
7561 ones during the available computation. */
7562 replace_store_insn (reg, del, bb);
7563 break;
7568 /* Free memory used by store motion. */
7570 static void
7571 free_store_memory (void)
7573 free_ldst_mems ();
7575 if (ae_gen)
7576 sbitmap_vector_free (ae_gen);
7577 if (ae_kill)
7578 sbitmap_vector_free (ae_kill);
7579 if (transp)
7580 sbitmap_vector_free (transp);
7581 if (st_antloc)
7582 sbitmap_vector_free (st_antloc);
7583 if (pre_insert_map)
7584 sbitmap_vector_free (pre_insert_map);
7585 if (pre_delete_map)
7586 sbitmap_vector_free (pre_delete_map);
7587 if (reg_set_in_block)
7588 sbitmap_vector_free (reg_set_in_block);
7590 ae_gen = ae_kill = transp = st_antloc = NULL;
7591 pre_insert_map = pre_delete_map = reg_set_in_block = NULL;
7594 /* Perform store motion. Much like gcse, except we move expressions the
7595 other way by looking at the flowgraph in reverse. */
7597 static void
7598 store_motion (void)
7600 basic_block bb;
7601 int x;
7602 struct ls_expr * ptr;
7603 int update_flow = 0;
7605 if (gcse_file)
7607 fprintf (gcse_file, "before store motion\n");
7608 print_rtl (gcse_file, get_insns ());
7611 init_alias_analysis ();
7613 /* Find all the available and anticipatable stores. */
7614 num_stores = compute_store_table ();
7615 if (num_stores == 0)
7617 sbitmap_vector_free (reg_set_in_block);
7618 end_alias_analysis ();
7619 return;
7622 /* Now compute kill & transp vectors. */
7623 build_store_vectors ();
7624 add_noreturn_fake_exit_edges ();
7626 edge_list = pre_edge_rev_lcm (gcse_file, num_stores, transp, ae_gen,
7627 st_antloc, ae_kill, &pre_insert_map,
7628 &pre_delete_map);
7630 /* Now we want to insert the new stores which are going to be needed. */
7631 for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
7633 FOR_EACH_BB (bb)
7634 if (TEST_BIT (pre_delete_map[bb->index], ptr->index))
7635 delete_store (ptr, bb);
7637 for (x = 0; x < NUM_EDGES (edge_list); x++)
7638 if (TEST_BIT (pre_insert_map[x], ptr->index))
7639 update_flow |= insert_store (ptr, INDEX_EDGE (edge_list, x));
7642 if (update_flow)
7643 commit_edge_insertions ();
7645 free_store_memory ();
7646 free_edge_list (edge_list);
7647 remove_fake_edges ();
7648 end_alias_analysis ();
7652 /* Entry point for jump bypassing optimization pass. */
7654 int
7655 bypass_jumps (FILE *file)
7657 int changed;
7659 /* We do not construct an accurate cfg in functions which call
7660 setjmp, so just punt to be safe. */
7661 if (current_function_calls_setjmp)
7662 return 0;
7664 /* For calling dump_foo fns from gdb. */
7665 debug_stderr = stderr;
7666 gcse_file = file;
7668 /* Identify the basic block information for this function, including
7669 successors and predecessors. */
7670 max_gcse_regno = max_reg_num ();
7672 if (file)
7673 dump_flow_info (file);
7675 /* Return if there's nothing to do. */
7676 if (n_basic_blocks <= 1)
7677 return 0;
7679 /* Trying to perform global optimizations on flow graphs which have
7680 a high connectivity will take a long time and is unlikely to be
7681 particularly useful.
7683 In normal circumstances a cfg should have about twice as many edges
7684 as blocks. But we do not want to punish small functions which have
7685 a couple switch statements. So we require a relatively large number
7686 of basic blocks and the ratio of edges to blocks to be high. */
7687 if (n_basic_blocks > 1000 && n_edges / n_basic_blocks >= 20)
7689 if (warn_disabled_optimization)
7690 warning ("BYPASS disabled: %d > 1000 basic blocks and %d >= 20 edges/basic block",
7691 n_basic_blocks, n_edges / n_basic_blocks);
7692 return 0;
7695 /* If allocating memory for the cprop bitmap would take up too much
7696 storage it's better just to disable the optimization. */
7697 if ((n_basic_blocks
7698 * SBITMAP_SET_SIZE (max_gcse_regno)
7699 * sizeof (SBITMAP_ELT_TYPE)) > MAX_GCSE_MEMORY)
7701 if (warn_disabled_optimization)
7702 warning ("GCSE disabled: %d basic blocks and %d registers",
7703 n_basic_blocks, max_gcse_regno);
7705 return 0;
7708 gcc_obstack_init (&gcse_obstack);
7709 bytes_used = 0;
7711 /* We need alias. */
7712 init_alias_analysis ();
7714 /* Record where pseudo-registers are set. This data is kept accurate
7715 during each pass. ??? We could also record hard-reg information here
7716 [since it's unchanging], however it is currently done during hash table
7717 computation.
7719 It may be tempting to compute MEM set information here too, but MEM sets
7720 will be subject to code motion one day and thus we need to compute
7721 information about memory sets when we build the hash tables. */
7723 alloc_reg_set_mem (max_gcse_regno);
7724 compute_sets (get_insns ());
7726 max_gcse_regno = max_reg_num ();
7727 alloc_gcse_mem (get_insns ());
7728 changed = one_cprop_pass (1, 1, 1);
7729 free_gcse_mem ();
7731 if (file)
7733 fprintf (file, "BYPASS of %s: %d basic blocks, ",
7734 current_function_name, n_basic_blocks);
7735 fprintf (file, "%d bytes\n\n", bytes_used);
7738 obstack_free (&gcse_obstack, NULL);
7739 free_reg_set_mem ();
7741 /* We are finished with alias. */
7742 end_alias_analysis ();
7743 allocate_reg_info (max_reg_num (), FALSE, FALSE);
7745 return changed;
7748 #include "gt-gcse.h"