/* Global common subexpression elimination/Partial redundancy elimination
   and global constant/copy propagation for GNU compiler.
   Copyright (C) 1997, 1998, 1999, 2000, 2001, 2002, 2003
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

/* TODO
   - reordering of memory allocation and freeing to be more space efficient
   - do rough calc of how many regs are needed in each block, and a rough
     calc of how many regs are available in each class and use that to
     throttle back the code in cases where RTX_COST is minimal.
   - a store to the same address as a load does not kill the load if the
     source of the store is also the destination of the load.  Handling this
     allows more load motion, particularly out of loops.
   - ability to realloc sbitmap vectors would allow one initial computation
     of reg_set_in_block with only subsequent additions, rather than
     recomputing it for each pass.  */

/* References searched while implementing this.

   Compilers Principles, Techniques and Tools
   Aho, Sethi, Ullman
   Addison-Wesley, 1988

   Global Optimization by Suppression of Partial Redundancies
   E. Morel, C. Renvoise
   Communications of the ACM, Vol. 22, Num. 2, Feb. 1979

   A Portable Machine-Independent Global Optimizer - Design and Measurements
   Frederick Chow
   Stanford Ph.D. thesis, Dec. 1983

   A Fast Algorithm for Code Movement Optimization
   D.M. Dhamdhere
   SIGPLAN Notices, Vol. 23, Num. 10, Oct. 1988

   A Solution to a Problem with Morel and Renvoise's
   Global Optimization by Suppression of Partial Redundancies
   K-H Drechsler, M.P. Stadel
   ACM TOPLAS, Vol. 10, Num. 4, Oct. 1988

   Practical Adaptation of the Global Optimization
   Algorithm of Morel and Renvoise
   D.M. Dhamdhere
   ACM TOPLAS, Vol. 13, Num. 2, Apr. 1991

   Efficiently Computing Static Single Assignment Form and the Control
   Dependence Graph
   R. Cytron, J. Ferrante, B.K. Rosen, M.N. Wegman, and F.K. Zadeck
   ACM TOPLAS, Vol. 13, Num. 4, Oct. 1991

   Lazy Code Motion
   J. Knoop, O. Ruthing, B. Steffen
   ACM SIGPLAN Notices Vol. 27, Num. 7, Jul. 1992, '92 Conference on PLDI

   What's In a Region?  Or Computing Control Dependence Regions in Near-Linear
   Time for Reducible Flow Control
   Thomas Ball
   ACM Letters on Programming Languages and Systems,
   Vol. 2, Num. 1-4, Mar-Dec 1993

   An Efficient Representation for Sparse Sets
   Preston Briggs, Linda Torczon
   ACM Letters on Programming Languages and Systems,
   Vol. 2, Num. 1-4, Mar-Dec 1993

   A Variation of Knoop, Ruthing, and Steffen's Lazy Code Motion
   K-H Drechsler, M.P. Stadel
   ACM SIGPLAN Notices, Vol. 28, Num. 5, May 1993

   Partial Dead Code Elimination
   J. Knoop, O. Ruthing, B. Steffen
   ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994

   Effective Partial Redundancy Elimination
   P. Briggs, K.D. Cooper
   ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994

   The Program Structure Tree: Computing Control Regions in Linear Time
   R. Johnson, D. Pearson, K. Pingali
   ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994

   Optimal Code Motion: Theory and Practice
   J. Knoop, O. Ruthing, B. Steffen
   ACM TOPLAS, Vol. 16, Num. 4, Jul. 1994

   The power of assignment motion
   J. Knoop, O. Ruthing, B. Steffen
   ACM SIGPLAN Notices Vol. 30, Num. 6, Jun. 1995, '95 Conference on PLDI

   Global code motion / global value numbering
   C. Click
   ACM SIGPLAN Notices Vol. 30, Num. 6, Jun. 1995, '95 Conference on PLDI

   Value Driven Redundancy Elimination
   L.T. Simpson
   Rice University Ph.D. thesis, Apr. 1996

   Value Numbering
   L.T. Simpson
   Massively Scalar Compiler Project, Rice University, Sep. 1996

   High Performance Compilers for Parallel Computing
   Michael Wolfe
   Addison-Wesley, 1996

   Advanced Compiler Design and Implementation
   Steven Muchnick
   Morgan Kaufmann, 1997

   Building an Optimizing Compiler
   Robert Morgan
   Digital Press, 1998

   People wishing to speed up the code here should read:
     Elimination Algorithms for Data Flow Analysis
     B.G. Ryder, M.C. Paull
     ACM Computing Surveys, Vol. 18, Num. 3, Sep. 1986

     How to Analyze Large Programs Efficiently and Informatively
     D.M. Dhamdhere, B.K. Rosen, F.K. Zadeck
     ACM SIGPLAN Notices Vol. 27, Num. 7, Jul. 1992, '92 Conference on PLDI

   People wishing to do something different can find various possibilities
   in the above papers and elsewhere.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "toplev.h"

#include "rtl.h"
#include "tm_p.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "flags.h"
#include "real.h"
#include "insn-config.h"
#include "recog.h"
#include "basic-block.h"
#include "output.h"
#include "function.h"
#include "expr.h"
#include "except.h"
#include "ggc.h"
#include "params.h"
#include "cselib.h"
#include "intl.h"
#include "obstack.h"

/* Propagate flow information through back edges and thus enable PRE's
   moving loop invariant calculations out of loops.

   Originally this tended to create worse overall code, but several
   improvements during the development of PRE seem to have made following
   back edges generally a win.

   Note much of the loop invariant code motion done here would normally
   be done by loop.c, which has more heuristics for when to move invariants
   out of loops.  At some point we might need to move some of those
   heuristics into gcse.c.  */

/* We support GCSE via Partial Redundancy Elimination.  PRE optimizations
   are a superset of those done by GCSE.

   We perform the following steps:

   1) Compute basic block information.

   2) Compute table of places where registers are set.

   3) Perform copy/constant propagation.

   4) Perform global cse.

   5) Perform another pass of copy/constant propagation.

   Two passes of copy/constant propagation are done because the first one
   enables more GCSE and the second one helps to clean up the copies that
   GCSE creates.  This is needed more for PRE than for Classic because Classic
   GCSE will try to use an existing register containing the common
   subexpression rather than create a new one.  This is harder to do for PRE
   because of the code motion (which Classic GCSE doesn't do).

   Expressions we are interested in GCSE-ing are of the form
   (set (pseudo-reg) (expression)).
   Function want_to_gcse_p says what these are.

   PRE handles moving invariant expressions out of loops (by treating them as
   partially redundant).

   Eventually it would be nice to replace cse.c/gcse.c with SSA (static single
   assignment) based GVN (global value numbering).  L. T. Simpson's paper
   (Rice University) on value numbering is a useful reference for this.

   **********************

   We used to support multiple passes but there are diminishing returns in
   doing so.  The first pass usually makes 90% of the changes that are doable.
   A second pass can make a few more changes made possible by the first pass.
   Experiments show any further passes don't make enough changes to justify
   the expense.

   A study of spec92 using an unlimited number of passes:
   [1 pass] = 1208 substitutions, [2] = 577, [3] = 202, [4] = 192, [5] = 83,
   [6] = 34, [7] = 17, [8] = 9, [9] = 4, [10] = 4, [11] = 2,
   [12] = 2, [13] = 1, [15] = 1, [16] = 2, [41] = 1

   It was found doing copy propagation between each pass enables further
   substitutions.

   PRE is quite expensive in complicated functions because the DFA can take
   a while to converge.  Hence we only perform one pass.  The parameter
   max-gcse-passes can be modified if one wants to experiment.

   **********************

   The steps for PRE are:

   1) Build the hash table of expressions we wish to GCSE (expr_hash_table).

   2) Perform the data flow analysis for PRE.

   3) Delete the redundant instructions.

   4) Insert the required copies [if any] that make the partially
      redundant instructions fully redundant.

   5) For other reaching expressions, insert an instruction to copy the value
      to a newly created pseudo that will reach the redundant instruction.

   The deletion is done first so that when we do insertions we
   know which pseudo reg to use; a sketch of the transformation follows.
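
   For illustration only (a minimal sketch; the register and block numbers
   are made up, not taken from any real dump).  Suppose blocks 2 and 3 both
   jump to block 4:

     before:                          after:

       bb 2:  r5 = r1 + r2             bb 2:  r5 = r1 + r2
                                              r9 = r5          <- step 5 copy
       bb 3:  ...                      bb 3:  ...
                                              r9 = r1 + r2     <- step 4 insert
       bb 4:  r6 = r1 + r2             bb 4:  r6 = r9          <- step 3 delete

   The computation in bb 4 is partially redundant (redundant along the edge
   from bb 2, but not along the edge from bb 3).  Deleting it first [step 3],
   inserting the full computation into bb 3 [step 4] and copying the result
   of the remaining reaching computation in bb 2 into the new pseudo r9
   [step 5] makes the value of r1 + r2 available in r9 on every path into
   bb 4.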

   Various papers have argued that PRE DFA is expensive (O(n^2)) and others
   argue it is not.  The number of iterations for the algorithm to converge
   is typically 2-4 so I don't view it as that expensive (relatively speaking).

   PRE GCSE depends heavily on the second CSE pass to clean up the copies
   we create.  To make an expression reach the place where it's redundant,
   the result of the expression is copied to a new register, and the redundant
   expression is deleted by replacing it with this new register.  Classic GCSE
   doesn't have this problem as much, since it computes the reaching defs of
   each register in each block and thus can try to use an existing register.

   **********************

   A fair bit of simplicity is gained by creating small functions for simple
   tasks, even when the function is only called in one place.  This may
   measurably slow things down [or may not] by creating more function call
   overhead than is necessary.  The source is laid out so that it's trivial
   to make the affected functions inline so that one can measure what speed
   up, if any, can be achieved, and maybe later when things settle things can
   be rearranged.

   Help stamp out big monolithic functions!  */

/* GCSE global vars.  */

/* -dG dump file.  */
static FILE *gcse_file;

/* Note whether or not we should run jump optimization after gcse.  We
   want to do this for two cases.

    * If we changed any jumps via cprop.

    * If we added any labels via edge splitting.  */
static int run_jump_opt_after_gcse;

/* Bitmaps are normally not included in debugging dumps.
   However it's useful to be able to print them from GDB.
   We could create special functions for this, but it's simpler to
   just allow passing stderr to the dump_foo fns.  Since stderr can
   be a macro, we store a copy here.  */
static FILE *debug_stderr;

/* An obstack for our working variables.  */
static struct obstack gcse_obstack;

struct reg_use {rtx reg_rtx; };

/* Hash table of expressions.  */

struct expr
{
  /* The expression (SET_SRC for expressions, PATTERN for assignments).  */
  rtx expr;
  /* Index in the available expression bitmaps.  */
  int bitmap_index;
  /* Next entry with the same hash.  */
  struct expr *next_same_hash;
  /* List of anticipatable occurrences in basic blocks in the function.
     An "anticipatable occurrence" is one that is the first occurrence in the
     basic block, the operands are not modified in the basic block prior
     to the occurrence and the output is not used between the start of
     the block and the occurrence.  */
  struct occr *antic_occr;
  /* List of available occurrences in basic blocks in the function.
     An "available occurrence" is one that is the last occurrence in the
     basic block and the operands are not modified by following statements in
     the basic block [including this insn].  */
  struct occr *avail_occr;
  /* Non-null if the computation is PRE redundant.
     The value is the newly created pseudo-reg to record a copy of the
     expression in all the places that reach the redundant copy.  */
  rtx reaching_reg;
};

/* Occurrence of an expression.
   There is one per basic block.  If a pattern appears more than once the
   last appearance is used [or first for anticipatable expressions].  */

struct occr
{
  /* Next occurrence of this expression.  */
  struct occr *next;
  /* The insn that computes the expression.  */
  rtx insn;
  /* Nonzero if this [anticipatable] occurrence has been deleted.  */
  char deleted_p;
  /* Nonzero if this [available] occurrence has been copied to
     reaching_reg.  */
  /* ??? This is mutually exclusive with deleted_p, so they could share
     the same byte.  */
  char copied_p;
};

/* Expression and copy propagation hash tables.
   Each hash table is an array of buckets.
   ??? It is known that if it were an array of entries, structure elements
   `next_same_hash' and `bitmap_index' wouldn't be necessary.  However, it is
   not clear whether in the final analysis a sufficient amount of memory would
   be saved as the size of the available expression bitmaps would be larger
   [one could build a mapping table without holes afterwards though].
   Someday I'll perform the computation and figure it out.  */

struct hash_table
{
  /* The table itself.
     This is an array of `expr_hash_table_size' elements.  */
  struct expr **table;

  /* Size of the hash table, in elements.  */
  unsigned int size;

  /* Number of hash table elements.  */
  unsigned int n_elems;

  /* Whether this is the set (copy propagation) hash table,
     as opposed to the expression hash table.  */
  int set_p;
};

/* Expression hash table.  */
static struct hash_table expr_hash_table;

/* Copy propagation hash table.  */
static struct hash_table set_hash_table;

/* Mapping of uids to cuids.
   Only real insns get cuids.  */
static int *uid_cuid;

/* Highest UID in UID_CUID.  */
static int max_uid;

/* Get the cuid of an insn.  */
#ifdef ENABLE_CHECKING
#define INSN_CUID(INSN) (INSN_UID (INSN) > max_uid ? (abort (), 0) : uid_cuid[INSN_UID (INSN)])
#else
#define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])
#endif

/* Number of cuids.  */
static int max_cuid;

/* Mapping of cuids to insns.  */
static rtx *cuid_insn;

/* Get insn from cuid.  */
#define CUID_INSN(CUID) (cuid_insn[CUID])
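
/* For example (a hypothetical insn stream; the UIDs are made up):

     NOTE, UID 4   -> uid_cuid[4]  = 0   [notes get no cuid of their own;
     INSN, UID 7   -> uid_cuid[7]  = 0    they share the next real insn's]
     INSN, UID 12  -> uid_cuid[12] = 1
     INSN, UID 9   -> uid_cuid[9]  = 2

   UIDs may be sparse and out of order; cuids 0, 1, 2, ... are dense and
   monotonically increasing, so bitmaps and tables can be indexed by cuid,
   and "insn A precedes insn B within a block" can be tested by comparing
   their cuids.  */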

/* Maximum register number in function prior to doing gcse + 1.
   Registers created during this pass have regno >= max_gcse_regno.
   This is named with "gcse" to not collide with global of same name.  */
static unsigned int max_gcse_regno;

/* Table of registers that are modified.

   For each register, each element is a list of places where the pseudo-reg
   is set.

   For simplicity, GCSE is done on sets of pseudo-regs only.  PRE GCSE only
   requires knowledge of which blocks kill which regs [and thus could use
   a bitmap instead of the lists `reg_set_table' uses].

   `reg_set_table' could be turned into an array of bitmaps (num-bbs x
   num-regs) [however perhaps it may be useful to keep the data as is].  One
   advantage of recording things this way is that `reg_set_table' is fairly
   sparse with respect to pseudo regs but for hard regs could be fairly dense
   [relatively speaking].  And recording sets of pseudo-regs in lists speeds
   up functions like compute_transp since in the case of pseudo-regs we only
   need to iterate over the number of times a pseudo-reg is set, not over the
   number of basic blocks [clearly there is a bit of a slow down in the cases
   where a pseudo is set more than once in a block, however it is believed
   that the net effect is to speed things up].  This isn't done for hard-regs
   because recording call-clobbered hard-regs in `reg_set_table' at each
   function call can consume a fair bit of memory, and iterating over
   hard-regs stored this way in compute_transp will be more expensive.  */

typedef struct reg_set
{
  /* The next setting of this register.  */
  struct reg_set *next;
  /* The insn where it was set.  */
  rtx insn;
} reg_set;

static reg_set **reg_set_table;

/* Size of `reg_set_table'.
   The table starts out at max_gcse_regno + slop, and is enlarged as
   necessary.  */
static int reg_set_table_size;

/* Amount to grow `reg_set_table' by when it's full.  */
#define REG_SET_TABLE_SLOP 100
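
/* For example (hypothetical numbers): if pseudo 42 is set only by the
   insns with UIDs 7 and 19, then reg_set_table[42] is a two-element list
   recording those insns, and a walk like

     reg_set *r;
     for (r = reg_set_table[42]; r != NULL; r = r->next)
       note_insn (r->insn);

   visits exactly the setters of pseudo 42, independent of the number of
   basic blocks (note_insn here stands in for whatever processing the
   caller needs).  */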

/* This is a list of expressions which are MEMs and will be used by load
   or store motion.
   Load motion tracks MEMs which aren't killed by
   anything except themselves.  (i.e., loads and stores to a single location).
   We can then allow movement of these MEM refs with a little special
   allowance.  (all stores copy the same value to the reaching reg used
   for the loads).  This means all values used to store into memory must have
   no side effects so we can re-issue the setter value.
   Store Motion uses this structure as an expression table to track stores
   which look interesting, and might be moveable towards the exit block.  */

struct ls_expr
{
  struct expr * expr;           /* Gcse expression reference for LM.  */
  rtx pattern;                  /* Pattern of this mem.  */
  rtx pattern_regs;             /* List of registers mentioned by the mem.  */
  rtx loads;                    /* INSN list of loads seen.  */
  rtx stores;                   /* INSN list of stores seen.  */
  struct ls_expr * next;        /* Next in the list.  */
  int invalid;                  /* Invalid for some reason.  */
  int index;                    /* If it maps to a bitmap index.  */
  int hash_index;               /* Index when in a hash table.  */
  rtx reaching_reg;             /* Register to use when re-writing.  */
};

/* Array of implicit set patterns indexed by basic block index.  */
static rtx *implicit_sets;

/* Head of the list of load/store memory refs.  */
static struct ls_expr * pre_ldst_mems = NULL;

/* Bitmap containing one bit for each register in the program.
   Used when performing GCSE to track which registers have been set since
   the start of the basic block.  */
static regset reg_set_bitmap;

/* For each block, a bitmap of registers set in the block.
   This is used by expr_killed_p and compute_transp.
   It is computed during hash table computation and not by compute_sets
   as it includes registers added since the last pass (or between cprop and
   gcse) and it's currently not easy to realloc sbitmap vectors.  */
static sbitmap *reg_set_in_block;

/* Array, indexed by basic block number, of lists of insns which modify
   memory within that block.  */
static rtx * modify_mem_list;
bitmap modify_mem_list_set;

/* This array parallels modify_mem_list, but is kept canonicalized.  */
static rtx * canon_modify_mem_list;
bitmap canon_modify_mem_list_set;

/* Various variables for statistics gathering.  */

/* Memory used in a pass.
   This isn't intended to be absolutely precise.  Its intent is only
   to keep an eye on memory usage.  */
static int bytes_used;

/* GCSE substitutions made.  */
static int gcse_subst_count;
/* Number of copy instructions created.  */
static int gcse_create_count;
/* Number of constants propagated.  */
static int const_prop_count;
/* Number of copies propagated.  */
static int copy_prop_count;

/* These variables are used by classic GCSE.
   Normally they'd be defined a bit later, but `rd_gen' needs to
   be declared sooner.  */

/* Each block has a bitmap of each type.
   The length of each block's bitmap is:

       max_cuid  - for reaching definitions
       n_exprs   - for available expressions

   Thus we view the bitmaps as 2 dimensional arrays, i.e.
   rd_kill[block_num][cuid_num]
   ae_kill[block_num][expr_num]  */

/* For reaching defs */
static sbitmap *rd_kill, *rd_gen, *reaching_defs, *rd_out;

/* For available exprs */
static sbitmap *ae_kill, *ae_gen, *ae_in, *ae_out;

/* Objects of this type are passed around by the null-pointer check
   removal routines.  */
struct null_pointer_info
{
  /* The basic block being processed.  */
  basic_block current_block;
  /* The first register to be handled in this pass.  */
  unsigned int min_reg;
  /* One greater than the last register to be handled in this pass.  */
  unsigned int max_reg;
  sbitmap *nonnull_local;
  sbitmap *nonnull_killed;
};

static void compute_can_copy (void);
static void *gmalloc (size_t) ATTRIBUTE_MALLOC;
static void *gcalloc (size_t, size_t) ATTRIBUTE_MALLOC;
static void *grealloc (void *, size_t);
static void *gcse_alloc (unsigned long);
static void alloc_gcse_mem (rtx);
static void free_gcse_mem (void);
static void alloc_reg_set_mem (int);
static void free_reg_set_mem (void);
static int get_bitmap_width (int, int, int);
static void record_one_set (int, rtx);
static void replace_one_set (int, rtx, rtx);
static void record_set_info (rtx, rtx, void *);
static void compute_sets (rtx);
static void hash_scan_insn (rtx, struct hash_table *, int);
static void hash_scan_set (rtx, rtx, struct hash_table *);
static void hash_scan_clobber (rtx, rtx, struct hash_table *);
static void hash_scan_call (rtx, rtx, struct hash_table *);
static int want_to_gcse_p (rtx);
static bool gcse_constant_p (rtx);
static int oprs_unchanged_p (rtx, rtx, int);
static int oprs_anticipatable_p (rtx, rtx);
static int oprs_available_p (rtx, rtx);
static void insert_expr_in_table (rtx, enum machine_mode, rtx, int, int,
                                  struct hash_table *);
static void insert_set_in_table (rtx, rtx, struct hash_table *);
static unsigned int hash_expr (rtx, enum machine_mode, int *, int);
static unsigned int hash_expr_1 (rtx, enum machine_mode, int *);
static unsigned int hash_string_1 (const char *);
static unsigned int hash_set (int, int);
static int expr_equiv_p (rtx, rtx);
static void record_last_reg_set_info (rtx, int);
static void record_last_mem_set_info (rtx);
static void record_last_set_info (rtx, rtx, void *);
static void compute_hash_table (struct hash_table *);
static void alloc_hash_table (int, struct hash_table *, int);
static void free_hash_table (struct hash_table *);
static void compute_hash_table_work (struct hash_table *);
static void dump_hash_table (FILE *, const char *, struct hash_table *);
static struct expr *lookup_expr (rtx, struct hash_table *);
static struct expr *lookup_set (unsigned int, struct hash_table *);
static struct expr *next_set (unsigned int, struct expr *);
static void reset_opr_set_tables (void);
static int oprs_not_set_p (rtx, rtx);
static void mark_call (rtx);
static void mark_set (rtx, rtx);
static void mark_clobber (rtx, rtx);
static void mark_oprs_set (rtx);
static void alloc_cprop_mem (int, int);
static void free_cprop_mem (void);
static void compute_transp (rtx, int, sbitmap *, int);
static void compute_transpout (void);
static void compute_local_properties (sbitmap *, sbitmap *, sbitmap *,
                                      struct hash_table *);
static void compute_cprop_data (void);
static void find_used_regs (rtx *, void *);
static int try_replace_reg (rtx, rtx, rtx);
static struct expr *find_avail_set (int, rtx);
static int cprop_jump (basic_block, rtx, rtx, rtx, rtx);
static void mems_conflict_for_gcse_p (rtx, rtx, void *);
static int load_killed_in_block_p (basic_block, int, rtx, int);
static void canon_list_insert (rtx, rtx, void *);
static int cprop_insn (rtx, int);
static int cprop (int);
static void find_implicit_sets (void);
static int one_cprop_pass (int, int, int);
static bool constprop_register (rtx, rtx, rtx, int);
static struct expr *find_bypass_set (int, int);
static bool reg_killed_on_edge (rtx, edge);
static int bypass_block (basic_block, rtx, rtx);
static int bypass_conditional_jumps (void);
static void alloc_pre_mem (int, int);
static void free_pre_mem (void);
static void compute_pre_data (void);
static int pre_expr_reaches_here_p (basic_block, struct expr *,
                                    basic_block);
static void insert_insn_end_bb (struct expr *, basic_block, int);
static void pre_insert_copy_insn (struct expr *, rtx);
static void pre_insert_copies (void);
static int pre_delete (void);
static int pre_gcse (void);
static int one_pre_gcse_pass (int);
static void add_label_notes (rtx, rtx);
static void alloc_code_hoist_mem (int, int);
static void free_code_hoist_mem (void);
static void compute_code_hoist_vbeinout (void);
static void compute_code_hoist_data (void);
static int hoist_expr_reaches_here_p (basic_block, int, basic_block, char *);
static void hoist_code (void);
static int one_code_hoisting_pass (void);
static void alloc_rd_mem (int, int);
static void free_rd_mem (void);
static void handle_rd_kill_set (rtx, int, basic_block);
static void compute_kill_rd (void);
static void compute_rd (void);
static void alloc_avail_expr_mem (int, int);
static void free_avail_expr_mem (void);
static void compute_ae_gen (struct hash_table *);
static int expr_killed_p (rtx, basic_block);
static void compute_ae_kill (sbitmap *, sbitmap *, struct hash_table *);
static int expr_reaches_here_p (struct occr *, struct expr *, basic_block,
                                int);
static rtx computing_insn (struct expr *, rtx);
static int def_reaches_here_p (rtx, rtx);
static int can_disregard_other_sets (struct reg_set **, rtx, int);
static int handle_avail_expr (rtx, struct expr *);
static int classic_gcse (void);
static int one_classic_gcse_pass (int);
static void invalidate_nonnull_info (rtx, rtx, void *);
static int delete_null_pointer_checks_1 (unsigned int *, sbitmap *, sbitmap *,
                                         struct null_pointer_info *);
static rtx process_insert_insn (struct expr *);
static int pre_edge_insert (struct edge_list *, struct expr **);
static int expr_reaches_here_p_work (struct occr *, struct expr *,
                                     basic_block, int, char *);
static int pre_expr_reaches_here_p_work (basic_block, struct expr *,
                                         basic_block, char *);
static struct ls_expr * ldst_entry (rtx);
static void free_ldst_entry (struct ls_expr *);
static void free_ldst_mems (void);
static void print_ldst_list (FILE *);
static struct ls_expr * find_rtx_in_ldst (rtx);
static int enumerate_ldsts (void);
static inline struct ls_expr * first_ls_expr (void);
static inline struct ls_expr * next_ls_expr (struct ls_expr *);
static int simple_mem (rtx);
static void invalidate_any_buried_refs (rtx);
static void compute_ld_motion_mems (void);
static void trim_ld_motion_mems (void);
static void update_ld_motion_stores (struct expr *);
static void reg_set_info (rtx, rtx, void *);
static bool store_ops_ok (rtx, int *);
static rtx extract_mentioned_regs (rtx);
static rtx extract_mentioned_regs_helper (rtx, rtx);
static void find_moveable_store (rtx, int *, int *);
static int compute_store_table (void);
static bool load_kills_store (rtx, rtx, int);
static bool find_loads (rtx, rtx, int);
static bool store_killed_in_insn (rtx, rtx, rtx, int);
static bool store_killed_after (rtx, rtx, rtx, basic_block, int *, rtx *);
static bool store_killed_before (rtx, rtx, rtx, basic_block, int *);
static void build_store_vectors (void);
static void insert_insn_start_bb (rtx, basic_block);
static int insert_store (struct ls_expr *, edge);
static void replace_store_insn (rtx, rtx, basic_block);
static void delete_store (struct ls_expr *, basic_block);
static void free_store_memory (void);
static void store_motion (void);
static void free_insn_expr_list_list (rtx *);
static void clear_modify_mem_tables (void);
static void free_modify_mem_tables (void);
static rtx gcse_emit_move_after (rtx, rtx, rtx);
static void local_cprop_find_used_regs (rtx *, void *);
static bool do_local_cprop (rtx, rtx, int, rtx *);
static bool adjust_libcall_notes (rtx, rtx, rtx, rtx *);
static void local_cprop_pass (int);
static bool is_too_expensive (const char *);

/* Entry point for global common subexpression elimination.
   F is the first instruction in the function.  */

int
gcse_main (rtx f, FILE *file)
{
  int changed, pass;
  /* Bytes used at start of pass.  */
  int initial_bytes_used;
  /* Maximum number of bytes used by a pass.  */
  int max_pass_bytes;
  /* Point to release obstack data from for each pass.  */
  char *gcse_obstack_bottom;

  /* We do not construct an accurate cfg in functions which call
     setjmp, so just punt to be safe.  */
  if (current_function_calls_setjmp)
    return 0;

  /* Assume that we do not need to run jump optimizations after gcse.  */
  run_jump_opt_after_gcse = 0;

  /* For calling dump_foo fns from gdb.  */
  debug_stderr = stderr;
  gcse_file = file;

  /* Identify the basic block information for this function, including
     successors and predecessors.  */
  max_gcse_regno = max_reg_num ();

  if (file)
    dump_flow_info (file);

  /* Return if there's nothing to do, or it is too expensive.  */
  if (n_basic_blocks <= 1 || is_too_expensive (_("GCSE disabled")))
    return 0;

  gcc_obstack_init (&gcse_obstack);
  bytes_used = 0;

  /* We need alias.  */
  init_alias_analysis ();
  /* Record where pseudo-registers are set.  This data is kept accurate
     during each pass.  ??? We could also record hard-reg information here
     [since it's unchanging], however it is currently done during hash table
     computation.

     It may be tempting to compute MEM set information here too, but MEM sets
     will be subject to code motion one day and thus we need to compute
     information about memory sets when we build the hash tables.  */

  alloc_reg_set_mem (max_gcse_regno);
  compute_sets (f);

  pass = 0;
  initial_bytes_used = bytes_used;
  max_pass_bytes = 0;
  gcse_obstack_bottom = gcse_alloc (1);
  changed = 1;
  while (changed && pass < MAX_GCSE_PASSES)
    {
      changed = 0;
      if (file)
        fprintf (file, "GCSE pass %d\n\n", pass + 1);

      /* Initialize bytes_used to the space for the pred/succ lists,
         and the reg_set_table data.  */
      bytes_used = initial_bytes_used;

      /* Each pass may create new registers, so recalculate each time.  */
      max_gcse_regno = max_reg_num ();

      alloc_gcse_mem (f);

      /* Don't allow constant propagation to modify jumps
         during this pass.  */
      changed = one_cprop_pass (pass + 1, 0, 0);

      if (optimize_size)
        changed |= one_classic_gcse_pass (pass + 1);
      else
        {
          changed |= one_pre_gcse_pass (pass + 1);
          /* We may have just created new basic blocks.  Release and
             recompute various things which are sized on the number of
             basic blocks.  */
          if (changed)
            {
              free_modify_mem_tables ();
              modify_mem_list = gcalloc (last_basic_block, sizeof (rtx));
              canon_modify_mem_list = gcalloc (last_basic_block, sizeof (rtx));
            }
          free_reg_set_mem ();
          alloc_reg_set_mem (max_reg_num ());
          compute_sets (f);
          run_jump_opt_after_gcse = 1;
        }

      if (max_pass_bytes < bytes_used)
        max_pass_bytes = bytes_used;

      /* Free up memory, then reallocate for code hoisting.  We can
         not re-use the existing allocated memory because the tables
         will not have info for the insns or registers created by
         partial redundancy elimination.  */
      free_gcse_mem ();

      /* It does not make sense to run code hoisting unless we are optimizing
         for code size -- it rarely makes programs faster, and can make
         them bigger if we did partial redundancy elimination (when optimizing
         for space, we use a classic gcse algorithm instead of partial
         redundancy algorithms).  */
      if (optimize_size)
        {
          max_gcse_regno = max_reg_num ();
          alloc_gcse_mem (f);
          changed |= one_code_hoisting_pass ();
          free_gcse_mem ();

          if (max_pass_bytes < bytes_used)
            max_pass_bytes = bytes_used;
        }

      if (file)
        {
          fprintf (file, "\n");
          fflush (file);
        }

      obstack_free (&gcse_obstack, gcse_obstack_bottom);
      pass++;
    }

  /* Do one last pass of copy propagation, including cprop into
     conditional jumps.  */

  max_gcse_regno = max_reg_num ();
  alloc_gcse_mem (f);
  /* This time, go ahead and allow cprop to alter jumps.  */
  one_cprop_pass (pass + 1, 1, 0);
  free_gcse_mem ();

  if (file)
    {
      fprintf (file, "GCSE of %s: %d basic blocks, ",
               current_function_name, n_basic_blocks);
      fprintf (file, "%d pass%s, %d bytes\n\n",
               pass, pass > 1 ? "es" : "", max_pass_bytes);
    }

  obstack_free (&gcse_obstack, NULL);
  free_reg_set_mem ();
  /* We are finished with alias.  */
  end_alias_analysis ();
  allocate_reg_info (max_reg_num (), FALSE, FALSE);

  if (!optimize_size && flag_gcse_sm)
    store_motion ();

  /* Record where pseudo-registers are set.  */
  return run_jump_opt_after_gcse;
}

/* Misc. utilities.  */

/* Nonzero for each mode that supports (set (reg) (reg)).
   This is trivially true for integer and floating point values.
   It may or may not be true for condition codes.  */
static char can_copy[(int) NUM_MACHINE_MODES];

/* Compute which modes support reg/reg copy operations.  */

static void
compute_can_copy (void)
{
  int i;
#ifndef AVOID_CCMODE_COPIES
  rtx reg, insn;
#endif
  memset (can_copy, 0, NUM_MACHINE_MODES);

  start_sequence ();
  for (i = 0; i < NUM_MACHINE_MODES; i++)
    if (GET_MODE_CLASS (i) == MODE_CC)
      {
#ifdef AVOID_CCMODE_COPIES
        can_copy[i] = 0;
#else
        reg = gen_rtx_REG ((enum machine_mode) i, LAST_VIRTUAL_REGISTER + 1);
        insn = emit_insn (gen_rtx_SET (VOIDmode, reg, reg));
        if (recog (PATTERN (insn), insn, NULL) >= 0)
          can_copy[i] = 1;
#endif
      }
    else
      can_copy[i] = 1;

  end_sequence ();
}

/* Returns whether the mode supports reg/reg copy operations.  */

bool
can_copy_p (enum machine_mode mode)
{
  static bool can_copy_init_p = false;

  if (! can_copy_init_p)
    {
      compute_can_copy ();
      can_copy_init_p = true;
    }

  return can_copy[mode] != 0;
}

/* Cover function to xmalloc to record bytes allocated.  */

static void *
gmalloc (size_t size)
{
  bytes_used += size;
  return xmalloc (size);
}

/* Cover function to xcalloc to record bytes allocated.  */

static void *
gcalloc (size_t nelem, size_t elsize)
{
  bytes_used += nelem * elsize;
  return xcalloc (nelem, elsize);
}

/* Cover function to xrealloc.
   We don't record the additional size since we don't know it.
   It won't affect memory usage stats much anyway.  */

static void *
grealloc (void *ptr, size_t size)
{
  return xrealloc (ptr, size);
}

/* Cover function to obstack_alloc.  */

static void *
gcse_alloc (unsigned long size)
{
  bytes_used += size;
  return obstack_alloc (&gcse_obstack, size);
}

/* Allocate memory for the cuid mapping array,
   and reg/memory set tracking tables.

   This is called at the start of each pass.  */

static void
alloc_gcse_mem (rtx f)
{
  int i;
  rtx insn;

  /* Find the largest UID and create a mapping from UIDs to CUIDs.
     CUIDs are like UIDs except they increase monotonically, have no gaps,
     and only apply to real insns.  */

  max_uid = get_max_uid ();
  uid_cuid = gcalloc (max_uid + 1, sizeof (int));
  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    {
      if (INSN_P (insn))
        uid_cuid[INSN_UID (insn)] = i++;
      else
        uid_cuid[INSN_UID (insn)] = i;
    }

  /* Create a table mapping cuids to insns.  */

  max_cuid = i;
  cuid_insn = gcalloc (max_cuid + 1, sizeof (rtx));
  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      CUID_INSN (i++) = insn;

  /* Allocate vars to track sets of regs.  */
  reg_set_bitmap = BITMAP_XMALLOC ();

  /* Allocate vars to track sets of regs, memory per block.  */
  reg_set_in_block = sbitmap_vector_alloc (last_basic_block, max_gcse_regno);
  /* Allocate array to keep a list of insns which modify memory in each
     basic block.  */
  modify_mem_list = gcalloc (last_basic_block, sizeof (rtx));
  canon_modify_mem_list = gcalloc (last_basic_block, sizeof (rtx));
  modify_mem_list_set = BITMAP_XMALLOC ();
  canon_modify_mem_list_set = BITMAP_XMALLOC ();
}

/* Free memory allocated by alloc_gcse_mem.  */

static void
free_gcse_mem (void)
{
  free (uid_cuid);
  free (cuid_insn);

  BITMAP_XFREE (reg_set_bitmap);

  sbitmap_vector_free (reg_set_in_block);
  free_modify_mem_tables ();
  BITMAP_XFREE (modify_mem_list_set);
  BITMAP_XFREE (canon_modify_mem_list_set);
}

/* Many of the global optimization algorithms work by solving dataflow
   equations for various expressions.  Initially, some local value is
   computed for each expression in each block.  Then, the values across the
   various blocks are combined (by following flow graph edges) to arrive at
   global values.  Conceptually, each set of equations is independent.  We
   may therefore solve all the equations in parallel, solve them one at a
   time, or pick any intermediate approach.

   When you're going to need N two-dimensional bitmaps, each X (say, the
   number of blocks) by Y (say, the number of expressions), call this
   function.  It's not important what X and Y represent; only that Y
   correspond to the things that can be done in parallel.  This function will
   return an appropriate chunking factor C; you should solve C sets of
   equations in parallel.  By going through this function, we can easily
   trade space against time; by solving fewer equations in parallel we use
   less space.  */

static int
get_bitmap_width (int n, int x, int y)
{
  /* It's not really worth figuring out *exactly* how much memory will
     be used by a particular choice.  The important thing is to get
     something approximately right.  */
  size_t max_bitmap_memory = 10 * 1024 * 1024;

  /* The number of bytes we'd use for a single column of minimum
     width.  */
  size_t column_size = n * x * sizeof (SBITMAP_ELT_TYPE);

  /* Often, it's reasonable just to solve all the equations in
     parallel.  */
  if (column_size * SBITMAP_SET_SIZE (y) <= max_bitmap_memory)
    return y;

  /* Otherwise, pick the largest width we can, without going over the
     limit.  */
  return SBITMAP_ELT_BITS * ((max_bitmap_memory + column_size - 1)
                             / column_size);
}
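
/* A worked example (all numbers hypothetical, and assuming an 8-byte
   SBITMAP_ELT_TYPE, so SBITMAP_ELT_BITS == 64): with n == 4 bitmaps,
   x == 1000 blocks and y == 50000 expressions,

     column_size = 4 * 1000 * 8 = 32000 bytes,
     SBITMAP_SET_SIZE (50000) = ceil (50000 / 64) = 782 words,

   and 32000 * 782 = 25024000 bytes exceeds the 10MB cap, so instead of
   returning y we return 64 * ceil (10485760 / 32000) = 64 * 328 = 20992,
   i.e. the caller should solve the 50000 equations in chunks of 20992,
   each chunk using roughly the 10MB budget.  */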

/* Compute the local properties of each recorded expression.

   Local properties are those that are defined by the block, irrespective of
   other blocks.

   An expression is transparent in a block if its operands are not modified
   in the block.

   An expression is computed (locally available) in a block if it is computed
   at least once and the expression would contain the same value if the
   computation was moved to the end of the block.

   An expression is locally anticipatable in a block if it is computed at
   least once and the expression would contain the same value if the
   computation was moved to the beginning of the block.

   We call this routine for cprop, pre and code hoisting.  They all compute
   basically the same information and thus can easily share this code.

   TRANSP, COMP, and ANTLOC are destination sbitmaps for recording local
   properties.  If NULL, then it is not necessary to compute or record that
   particular property.

   TABLE controls which hash table to look at.  If it is the set hash table,
   additionally, TRANSP is computed as ~TRANSP, since this is really cprop's
   ABSALTERED.  */

static void
compute_local_properties (sbitmap *transp, sbitmap *comp, sbitmap *antloc,
                          struct hash_table *table)
{
  unsigned int i;

  /* Initialize any bitmaps that were passed in.  */
  if (transp)
    {
      if (table->set_p)
        sbitmap_vector_zero (transp, last_basic_block);
      else
        sbitmap_vector_ones (transp, last_basic_block);
    }

  if (comp)
    sbitmap_vector_zero (comp, last_basic_block);
  if (antloc)
    sbitmap_vector_zero (antloc, last_basic_block);

  for (i = 0; i < table->size; i++)
    {
      struct expr *expr;

      for (expr = table->table[i]; expr != NULL; expr = expr->next_same_hash)
        {
          int indx = expr->bitmap_index;
          struct occr *occr;

          /* The expression is transparent in this block if it is not killed.
             We start by assuming all are transparent [none are killed], and
             then reset the bits for those that are.  */
          if (transp)
            compute_transp (expr->expr, indx, transp, table->set_p);

          /* The occurrences recorded in antic_occr are exactly those that
             we want to set to nonzero in ANTLOC.  */
          if (antloc)
            for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
              {
                SET_BIT (antloc[BLOCK_NUM (occr->insn)], indx);

                /* While we're scanning the table, this is a good place to
                   initialize this.  */
                occr->deleted_p = 0;
              }

          /* The occurrences recorded in avail_occr are exactly those that
             we want to set to nonzero in COMP.  */
          if (comp)
            for (occr = expr->avail_occr; occr != NULL; occr = occr->next)
              {
                SET_BIT (comp[BLOCK_NUM (occr->insn)], indx);

                /* While we're scanning the table, this is a good place to
                   initialize this.  */
                occr->copied_p = 0;
              }

          /* While we're scanning the table, this is a good place to
             initialize this.  */
          expr->reaching_reg = 0;
        }
    }
}
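
/* For example (a hypothetical block; r1, r2, r4 and r5 are pseudo-regs):

     r4 = r1 + r2
     r1 = ...
     r5 = r1 + r2

   Here r1 + r2 is locally anticipatable (the first computation sees the
   register values live on entry to the block) and also computed/available
   (the last computation's operands are unmodified through the end of the
   block), but it is not transparent, because r1 is set within the block.
   So the block both kills the expression coming in from its predecessors
   and generates it anew for its successors.  */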

/* Register set information.

   `reg_set_table' records where each register is set or otherwise
   modified.  */

static struct obstack reg_set_obstack;

static void
alloc_reg_set_mem (int n_regs)
{
  reg_set_table_size = n_regs + REG_SET_TABLE_SLOP;
  reg_set_table = gcalloc (reg_set_table_size, sizeof (struct reg_set *));

  gcc_obstack_init (&reg_set_obstack);
}

static void
free_reg_set_mem (void)
{
  free (reg_set_table);
  obstack_free (&reg_set_obstack, NULL);
}

/* An OLD_INSN that used to set REGNO was replaced by NEW_INSN.
   Update the corresponding `reg_set_table' entry accordingly.
   We assume that NEW_INSN is not already recorded in reg_set_table[regno].  */

static void
replace_one_set (int regno, rtx old_insn, rtx new_insn)
{
  struct reg_set *reg_info;
  if (regno >= reg_set_table_size)
    return;
  for (reg_info = reg_set_table[regno]; reg_info; reg_info = reg_info->next)
    if (reg_info->insn == old_insn)
      {
        reg_info->insn = new_insn;
        break;
      }
}

/* Record REGNO in the reg_set table.  */

static void
record_one_set (int regno, rtx insn)
{
  /* Allocate a new reg_set element and link it onto the list.  */
  struct reg_set *new_reg_info;

  /* If the table isn't big enough, enlarge it.  */
  if (regno >= reg_set_table_size)
    {
      int new_size = regno + REG_SET_TABLE_SLOP;

      reg_set_table = grealloc (reg_set_table,
                                new_size * sizeof (struct reg_set *));
      memset (reg_set_table + reg_set_table_size, 0,
              (new_size - reg_set_table_size) * sizeof (struct reg_set *));
      reg_set_table_size = new_size;
    }

  new_reg_info = obstack_alloc (&reg_set_obstack, sizeof (struct reg_set));
  bytes_used += sizeof (struct reg_set);
  new_reg_info->insn = insn;
  new_reg_info->next = reg_set_table[regno];
  reg_set_table[regno] = new_reg_info;
}

/* Called from compute_sets via note_stores to handle one SET or CLOBBER in
   an insn.  The DATA is really the instruction in which the SET is
   occurring.  */

static void
record_set_info (rtx dest, rtx setter ATTRIBUTE_UNUSED, void *data)
{
  rtx record_set_insn = (rtx) data;

  if (GET_CODE (dest) == REG && REGNO (dest) >= FIRST_PSEUDO_REGISTER)
    record_one_set (REGNO (dest), record_set_insn);
}

/* Scan the function and record each set of each pseudo-register.

   This is called once, at the start of the gcse pass.  See the comments for
   `reg_set_table' for further documentation.  */

static void
compute_sets (rtx f)
{
  rtx insn;

  for (insn = f; insn != 0; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      note_stores (PATTERN (insn), record_set_info, insn);
}

/* Hash table support.  */

struct reg_avail_info
{
  basic_block last_bb;
  int first_set;
  int last_set;
};

static struct reg_avail_info *reg_avail_info;
static basic_block current_bb;

/* See whether X, the source of a set, is something we want to consider for
   GCSE.  */

static GTY(()) rtx test_insn;
static int
want_to_gcse_p (rtx x)
{
  int num_clobbers = 0;
  int icode;

  switch (GET_CODE (x))
    {
    case REG:
    case SUBREG:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CALL:
    case CONSTANT_P_RTX:
      return 0;

    default:
      break;
    }

  /* If this is a valid operand, we are OK.  If it's VOIDmode, we aren't.  */
  if (general_operand (x, GET_MODE (x)))
    return 1;
  else if (GET_MODE (x) == VOIDmode)
    return 0;

  /* Otherwise, check if we can make a valid insn from it.  First initialize
     our test insn if we haven't already.  */
  if (test_insn == 0)
    {
      test_insn
        = make_insn_raw (gen_rtx_SET (VOIDmode,
                                      gen_rtx_REG (word_mode,
                                                   FIRST_PSEUDO_REGISTER * 2),
                                      const0_rtx));
      NEXT_INSN (test_insn) = PREV_INSN (test_insn) = 0;
    }

  /* Now make an insn like the one we would make when GCSE'ing and see if
     valid.  */
  PUT_MODE (SET_DEST (PATTERN (test_insn)), GET_MODE (x));
  SET_SRC (PATTERN (test_insn)) = x;
  return ((icode = recog (PATTERN (test_insn), test_insn, &num_clobbers)) >= 0
          && (num_clobbers == 0 || ! added_clobbers_hard_reg_p (icode)));
}

/* Return nonzero if the operands of expression X are unchanged from the
   start of INSN's basic block up to but not including INSN (if AVAIL_P == 0),
   or from INSN to the end of INSN's basic block (if AVAIL_P != 0).  */

static int
oprs_unchanged_p (rtx x, rtx insn, int avail_p)
{
  int i, j;
  enum rtx_code code;
  const char *fmt;

  if (x == 0)
    return 1;

  code = GET_CODE (x);
  switch (code)
    {
    case REG:
      {
        struct reg_avail_info *info = &reg_avail_info[REGNO (x)];

        if (info->last_bb != current_bb)
          return 1;
        if (avail_p)
          return info->last_set < INSN_CUID (insn);
        else
          return info->first_set >= INSN_CUID (insn);
      }

    case MEM:
      if (load_killed_in_block_p (current_bb, INSN_CUID (insn),
                                  x, avail_p))
        return 0;
      else
        return oprs_unchanged_p (XEXP (x, 0), insn, avail_p);

    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
    case PRE_MODIFY:
    case POST_MODIFY:
      return 0;

    case PC:
    case CC0: /*FIXME*/
    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return 1;

    default:
      break;
    }

  for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          /* If we are about to do the last recursive call needed at this
             level, change it into iteration.  This function is called enough
             to be worth it.  */
          if (i == 0)
            return oprs_unchanged_p (XEXP (x, i), insn, avail_p);

          else if (! oprs_unchanged_p (XEXP (x, i), insn, avail_p))
            return 0;
        }
      else if (fmt[i] == 'E')
        for (j = 0; j < XVECLEN (x, i); j++)
          if (! oprs_unchanged_p (XVECEXP (x, i, j), insn, avail_p))
            return 0;
    }

  return 1;
}
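
/* For example (a hypothetical block; the cuids in brackets are made up):

     [10]  r7 = r1 + r2
     [11]  r1 = ...
     [12]  r8 = r1 + r2

   For the insn at cuid [10], oprs_unchanged_p (r1 + r2, insn, 0) is 1:
   r1's first set in the block is at [11] >= [10] and r2 is never set
   here, so the expression is anticipatable at [10].  But
   oprs_unchanged_p (r1 + r2, insn, 1) is 0: r1's last set [11] is not
   before [10], so the value computed at [10] does not survive to the end
   of the block.  At [12] the situation is reversed: the expression is
   available there but not anticipatable, since r1's first set [11] comes
   before [12].  */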

/* Used for communication between mems_conflict_for_gcse_p and
   load_killed_in_block_p.  Nonzero if mems_conflict_for_gcse_p finds a
   conflict between two memory references.  */
static int gcse_mems_conflict_p;

/* Used for communication between mems_conflict_for_gcse_p and
   load_killed_in_block_p.  The memory reference for a load instruction;
   mems_conflict_for_gcse_p will check whether a memory store conflicts
   with this memory load.  */
static rtx gcse_mem_operand;

/* DEST is the output of an instruction.  If it is a memory reference, and
   possibly conflicts with the load found in gcse_mem_operand, then set
   gcse_mems_conflict_p to a nonzero value.  */

static void
mems_conflict_for_gcse_p (rtx dest, rtx setter ATTRIBUTE_UNUSED,
                          void *data ATTRIBUTE_UNUSED)
{
  while (GET_CODE (dest) == SUBREG
         || GET_CODE (dest) == ZERO_EXTRACT
         || GET_CODE (dest) == SIGN_EXTRACT
         || GET_CODE (dest) == STRICT_LOW_PART)
    dest = XEXP (dest, 0);

  /* If DEST is not a MEM, then it will not conflict with the load.  Note
     that function calls are assumed to clobber memory, but are handled
     elsewhere.  */
  if (GET_CODE (dest) != MEM)
    return;

  /* If we are setting a MEM in our list of specially recognized MEMs,
     don't mark as killed this time.  */

  if (expr_equiv_p (dest, gcse_mem_operand) && pre_ldst_mems != NULL)
    {
      if (!find_rtx_in_ldst (dest))
        gcse_mems_conflict_p = 1;
      return;
    }

  if (true_dependence (dest, GET_MODE (dest), gcse_mem_operand,
                       rtx_addr_varies_p))
    gcse_mems_conflict_p = 1;
}

/* Return nonzero if the expression in X (a memory reference) is killed
   in block BB before or after the insn with the CUID in UID_LIMIT.
   AVAIL_P is nonzero for kills after UID_LIMIT, and zero for kills
   before UID_LIMIT.

   To check the entire block, set UID_LIMIT to max_uid + 1 and
   AVAIL_P to 0.  */

static int
load_killed_in_block_p (basic_block bb, int uid_limit, rtx x, int avail_p)
{
  rtx list_entry = modify_mem_list[bb->index];
  while (list_entry)
    {
      rtx setter;
      /* Ignore entries in the list that do not apply.  */
      if ((avail_p
           && INSN_CUID (XEXP (list_entry, 0)) < uid_limit)
          || (! avail_p
              && INSN_CUID (XEXP (list_entry, 0)) > uid_limit))
        {
          list_entry = XEXP (list_entry, 1);
          continue;
        }

      setter = XEXP (list_entry, 0);

      /* If SETTER is a call everything is clobbered.  Note that calls
         to pure functions are never put on the list, so we need not
         worry about them.  */
      if (GET_CODE (setter) == CALL_INSN)
        return 1;

      /* SETTER must be an INSN of some kind that sets memory.  Call
         note_stores to examine each hunk of memory that is modified.

         The note_stores interface is pretty limited, so we have to
         communicate via global variables.  Yuk.  */
      gcse_mem_operand = x;
      gcse_mems_conflict_p = 0;
      note_stores (PATTERN (setter), mems_conflict_for_gcse_p, NULL);
      if (gcse_mems_conflict_p)
        return 1;
      list_entry = XEXP (list_entry, 1);
    }
  return 0;
}

/* Return nonzero if the operands of expression X are unchanged from
   the start of INSN's basic block up to but not including INSN.  */

static int
oprs_anticipatable_p (rtx x, rtx insn)
{
  return oprs_unchanged_p (x, insn, 0);
}

/* Return nonzero if the operands of expression X are unchanged from
   INSN to the end of INSN's basic block.  */

static int
oprs_available_p (rtx x, rtx insn)
{
  return oprs_unchanged_p (x, insn, 1);
}

/* Hash expression X.

   MODE is only used if X is a CONST_INT.  DO_NOT_RECORD_P is a boolean
   indicating if a volatile operand is found or if the expression contains
   something we don't want to insert in the table.

   ??? One might want to merge this with canon_hash.  Later.  */

static unsigned int
hash_expr (rtx x, enum machine_mode mode, int *do_not_record_p,
           int hash_table_size)
{
  unsigned int hash;

  *do_not_record_p = 0;

  hash = hash_expr_1 (x, mode, do_not_record_p);
  return hash % hash_table_size;
}

/* Hash a string.  Just add its bytes up.  */

static inline unsigned
hash_string_1 (const char *ps)
{
  unsigned hash = 0;
  const unsigned char *p = (const unsigned char *) ps;

  if (p)
    while (*p)
      hash += *p++;

  return hash;
}

/* Subroutine of hash_expr to do the actual work.  */

static unsigned int
hash_expr_1 (rtx x, enum machine_mode mode, int *do_not_record_p)
{
  int i, j;
  unsigned hash = 0;
  enum rtx_code code;
  const char *fmt;

  /* Used to turn recursion into iteration.  We can't rely on GCC's
     tail-recursion elimination since we need to keep accumulating values
     in HASH.  */

  if (x == 0)
    return hash;

 repeat:
  code = GET_CODE (x);
  switch (code)
    {
    case REG:
      hash += ((unsigned int) REG << 7) + REGNO (x);
      return hash;

    case CONST_INT:
      hash += (((unsigned int) CONST_INT << 7) + (unsigned int) mode
               + (unsigned int) INTVAL (x));
      return hash;

    case CONST_DOUBLE:
      /* This is like the general case, except that it only counts
         the integers representing the constant.  */
      hash += (unsigned int) code + (unsigned int) GET_MODE (x);
      if (GET_MODE (x) != VOIDmode)
        for (i = 2; i < GET_RTX_LENGTH (CONST_DOUBLE); i++)
          hash += (unsigned int) XWINT (x, i);
      else
        hash += ((unsigned int) CONST_DOUBLE_LOW (x)
                 + (unsigned int) CONST_DOUBLE_HIGH (x));
      return hash;

    case CONST_VECTOR:
      {
        int units;
        rtx elt;

        units = CONST_VECTOR_NUNITS (x);

        for (i = 0; i < units; ++i)
          {
            elt = CONST_VECTOR_ELT (x, i);
            hash += hash_expr_1 (elt, GET_MODE (elt), do_not_record_p);
          }

        return hash;
      }

      /* Assume there is only one rtx object for any given label.  */
    case LABEL_REF:
      /* We don't hash on the address of the CODE_LABEL to avoid bootstrap
         differences and differences between each stage's debugging dumps.  */
      hash += (((unsigned int) LABEL_REF << 7)
               + CODE_LABEL_NUMBER (XEXP (x, 0)));
      return hash;

    case SYMBOL_REF:
      {
        /* Don't hash on the symbol's address to avoid bootstrap differences.
           Different hash values may cause expressions to be recorded in
           different orders and thus different registers to be used in the
           final assembler.  This also avoids differences in the dump files
           between various stages.  */
        unsigned int h = 0;
        const unsigned char *p = (const unsigned char *) XSTR (x, 0);

        while (*p)
          h += (h << 7) + *p++; /* ??? revisit */

        hash += ((unsigned int) SYMBOL_REF << 7) + h;
        return hash;
      }

    case MEM:
      if (MEM_VOLATILE_P (x))
        {
          *do_not_record_p = 1;
          return 0;
        }

      hash += (unsigned int) MEM;
      /* We used alias set for hashing, but this is not good, since the alias
         set may differ in -fprofile-arcs and -fbranch-probabilities
         compilation, causing the profiles to fail to match.  */
      x = XEXP (x, 0);
      goto repeat;

    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
    case PC:
    case CC0:
    case CALL:
    case UNSPEC_VOLATILE:
      *do_not_record_p = 1;
      return 0;

    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
        {
          *do_not_record_p = 1;
          return 0;
        }
      else
        {
          /* We don't want to take the filename and line into account.  */
          hash += (unsigned) code + (unsigned) GET_MODE (x)
            + hash_string_1 (ASM_OPERANDS_TEMPLATE (x))
            + hash_string_1 (ASM_OPERANDS_OUTPUT_CONSTRAINT (x))
            + (unsigned) ASM_OPERANDS_OUTPUT_IDX (x);

          if (ASM_OPERANDS_INPUT_LENGTH (x))
            {
              for (i = 1; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
                hash += (hash_expr_1 (ASM_OPERANDS_INPUT (x, i),
                                      GET_MODE (ASM_OPERANDS_INPUT (x, i)),
                                      do_not_record_p)
                         + hash_string_1 (ASM_OPERANDS_INPUT_CONSTRAINT
                                          (x, i)));

              hash += hash_string_1 (ASM_OPERANDS_INPUT_CONSTRAINT (x, 0));
              x = ASM_OPERANDS_INPUT (x, 0);
              mode = GET_MODE (x);
              goto repeat;
            }

          return hash;
        }

    default:
      break;
    }

  hash += (unsigned) code + (unsigned) GET_MODE (x);
  for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          /* If we are about to do the last recursive call
             needed at this level, change it into iteration.
             This function is called enough to be worth it.  */
          if (i == 0)
            {
              x = XEXP (x, i);
              goto repeat;
            }

          hash += hash_expr_1 (XEXP (x, i), 0, do_not_record_p);
          if (*do_not_record_p)
            return 0;
        }
      else if (fmt[i] == 'E')
        for (j = 0; j < XVECLEN (x, i); j++)
          {
            hash += hash_expr_1 (XVECEXP (x, i, j), 0, do_not_record_p);
            if (*do_not_record_p)
              return 0;
          }
      else if (fmt[i] == 's')
        hash += hash_string_1 (XSTR (x, i));
      else if (fmt[i] == 'i')
        hash += (unsigned int) XINT (x, i);
      else
        abort ();
    }

  return hash;
}
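
/* ??? Illustrative note: MODE matters only for CONST_INT because a
   CONST_INT carries no mode of its own (the same rtx is shared across
   modes), so e.g. the constant 7 used as an SImode operand and as a
   DImode operand would otherwise hash identically even though the
   containing expressions are not interchangeable; folding the context's
   mode into the hash keeps such entries in different buckets.  */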

/* Hash a set of register REGNO.

   Sets are hashed on the register that is set.  This simplifies the PRE copy
   propagation code.

   ??? May need to make things more elaborate.  Later, as necessary.  */

static unsigned int
hash_set (int regno, int hash_table_size)
{
  unsigned int hash;

  hash = regno;
  return hash % hash_table_size;
}
1744 /* Return nonzero if exp1 is equivalent to exp2.
1745 ??? Borrowed from cse.c. Might want to remerge with cse.c. Later. */
1747 static int
1748 expr_equiv_p (rtx x, rtx y)
1750 int i, j;
1751 enum rtx_code code;
1752 const char *fmt;
1754 if (x == y)
1755 return 1;
1757 if (x == 0 || y == 0)
1758 return 0;
1760 code = GET_CODE (x);
1761 if (code != GET_CODE (y))
1762 return 0;
1764 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
1765 if (GET_MODE (x) != GET_MODE (y))
1766 return 0;
1768 switch (code)
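/* Editorial note on the first group of cases: PC, CC0 and CONST_INT are
   unique, shared rtx objects, so the pointer comparison at the top of
   this function has already decided equality for them; reaching one of
   these cases means X and Y are known to differ.  */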
1770 case PC:
1771 case CC0:
1772 case CONST_INT:
1773 return 0;
1775 case LABEL_REF:
1776 return XEXP (x, 0) == XEXP (y, 0);
1778 case SYMBOL_REF:
1779 return XSTR (x, 0) == XSTR (y, 0);
1781 case REG:
1782 return REGNO (x) == REGNO (y);
1784 case MEM:
1785 /* Can't merge two expressions in different alias sets, since we can
1786 decide that the expression is transparent in a block when it isn't,
1787 due to it being set with a different alias set. */
1788 if (MEM_ALIAS_SET (x) != MEM_ALIAS_SET (y))
1789 return 0;
1791 /* A volatile mem should not be considered equivalent to any other. */
1792 if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
1793 return 0;
1794 break;
1796 /* For commutative operations, check both orders. */
1797 case PLUS:
1798 case MULT:
1799 case AND:
1800 case IOR:
1801 case XOR:
1802 case NE:
1803 case EQ:
1804 return ((expr_equiv_p (XEXP (x, 0), XEXP (y, 0))
1805 && expr_equiv_p (XEXP (x, 1), XEXP (y, 1)))
1806 || (expr_equiv_p (XEXP (x, 0), XEXP (y, 1))
1807 && expr_equiv_p (XEXP (x, 1), XEXP (y, 0))));
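/* For example, under the rule above (plus:SI (reg 70) (reg 71)) and
   (plus:SI (reg 71) (reg 70)) are considered equivalent.  */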
1809 case ASM_OPERANDS:
1810 /* We don't use the generic code below because we want to
1811 disregard filename and line numbers. */
1813 /* A volatile asm isn't equivalent to any other. */
1814 if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
1815 return 0;
1817 if (GET_MODE (x) != GET_MODE (y)
1818 || strcmp (ASM_OPERANDS_TEMPLATE (x), ASM_OPERANDS_TEMPLATE (y))
1819 || strcmp (ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
1820 ASM_OPERANDS_OUTPUT_CONSTRAINT (y))
1821 || ASM_OPERANDS_OUTPUT_IDX (x) != ASM_OPERANDS_OUTPUT_IDX (y)
1822 || ASM_OPERANDS_INPUT_LENGTH (x) != ASM_OPERANDS_INPUT_LENGTH (y))
1823 return 0;
1825 if (ASM_OPERANDS_INPUT_LENGTH (x))
1827 for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
1828 if (! expr_equiv_p (ASM_OPERANDS_INPUT (x, i),
1829 ASM_OPERANDS_INPUT (y, i))
1830 || strcmp (ASM_OPERANDS_INPUT_CONSTRAINT (x, i),
1831 ASM_OPERANDS_INPUT_CONSTRAINT (y, i)))
1832 return 0;
1835 return 1;
1837 default:
1838 break;
1841 /* Compare the elements. If any pair of corresponding elements
1842 fail to match, return 0 for the whole thing. */
1844 fmt = GET_RTX_FORMAT (code);
1845 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1847 switch (fmt[i])
1849 case 'e':
1850 if (! expr_equiv_p (XEXP (x, i), XEXP (y, i)))
1851 return 0;
1852 break;
1854 case 'E':
1855 if (XVECLEN (x, i) != XVECLEN (y, i))
1856 return 0;
1857 for (j = 0; j < XVECLEN (x, i); j++)
1858 if (! expr_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j)))
1859 return 0;
1860 break;
1862 case 's':
1863 if (strcmp (XSTR (x, i), XSTR (y, i)))
1864 return 0;
1865 break;
1867 case 'i':
1868 if (XINT (x, i) != XINT (y, i))
1869 return 0;
1870 break;
1872 case 'w':
1873 if (XWINT (x, i) != XWINT (y, i))
1874 return 0;
1875 break;
1877 case '0':
1878 break;
1880 default:
1881 abort ();
1885 return 1;
1888 /* Insert expression X in INSN in the hash TABLE.
1889 If it is already present, record it as the last occurrence in INSN's
1890 basic block.
1892 MODE is the mode of the value X is being stored into.
1893 It is only used if X is a CONST_INT.
1895 ANTIC_P is nonzero if X is an anticipatable expression.
1896 AVAIL_P is nonzero if X is an available expression. */
1898 static void
1899 insert_expr_in_table (rtx x, enum machine_mode mode, rtx insn, int antic_p,
1900 int avail_p, struct hash_table *table)
1902 int found, do_not_record_p;
1903 unsigned int hash;
1904 struct expr *cur_expr, *last_expr = NULL;
1905 struct occr *antic_occr, *avail_occr;
1906 struct occr *last_occr = NULL;
1908 hash = hash_expr (x, mode, &do_not_record_p, table->size);
1910 /* Do not insert the expression in the table if it contains volatile
1911 operands, or if hash_expr determines that the expression is something
1912 we don't want to or can't handle. */
1913 if (do_not_record_p)
1914 return;
1916 cur_expr = table->table[hash];
1917 found = 0;
1919 while (cur_expr && 0 == (found = expr_equiv_p (cur_expr->expr, x)))
1921 /* If the expression isn't found, save a pointer to the end of
1922 the list. */
1923 last_expr = cur_expr;
1924 cur_expr = cur_expr->next_same_hash;
1927 if (! found)
1929 cur_expr = gcse_alloc (sizeof (struct expr));
1930 bytes_used += sizeof (struct expr);
1931 if (table->table[hash] == NULL)
1932 /* This is the first pattern that hashed to this index. */
1933 table->table[hash] = cur_expr;
1934 else
1935 /* Add EXPR to end of this hash chain. */
1936 last_expr->next_same_hash = cur_expr;
1938 /* Set the fields of the expr element. */
1939 cur_expr->expr = x;
1940 cur_expr->bitmap_index = table->n_elems++;
1941 cur_expr->next_same_hash = NULL;
1942 cur_expr->antic_occr = NULL;
1943 cur_expr->avail_occr = NULL;
1946 /* Now record the occurrence(s). */
1947 if (antic_p)
1949 antic_occr = cur_expr->antic_occr;
1951 /* Search for another occurrence in the same basic block. */
1952 while (antic_occr && BLOCK_NUM (antic_occr->insn) != BLOCK_NUM (insn))
1954 /* If an occurrence isn't found, save a pointer to the end of
1955 the list. */
1956 last_occr = antic_occr;
1957 antic_occr = antic_occr->next;
1960 if (antic_occr)
1961 /* Found another instance of the expression in the same basic block.
1962 Prefer the currently recorded one. We want the first one in the
1963 block and the block is scanned from start to end. */
1964 ; /* nothing to do */
1965 else
1967 /* First occurrence of this expression in this basic block. */
1968 antic_occr = gcse_alloc (sizeof (struct occr));
1969 bytes_used += sizeof (struct occr);
1970 /* First occurrence of this expression in any block? */
1971 if (cur_expr->antic_occr == NULL)
1972 cur_expr->antic_occr = antic_occr;
1973 else
1974 last_occr->next = antic_occr;
1976 antic_occr->insn = insn;
1977 antic_occr->next = NULL;
1981 if (avail_p)
1983 avail_occr = cur_expr->avail_occr;
1985 /* Search for another occurrence in the same basic block. */
1986 while (avail_occr && BLOCK_NUM (avail_occr->insn) != BLOCK_NUM (insn))
1988 /* If an occurrence isn't found, save a pointer to the end of
1989 the list. */
1990 last_occr = avail_occr;
1991 avail_occr = avail_occr->next;
1994 if (avail_occr)
1995 /* Found another instance of the expression in the same basic block.
1996 Prefer this occurrence to the currently recorded one. We want
1997 the last one in the block and the block is scanned from start
1998 to end. */
1999 avail_occr->insn = insn;
2000 else
2002 /* First occurrence of this expression in this basic block. */
2003 avail_occr = gcse_alloc (sizeof (struct occr));
2004 bytes_used += sizeof (struct occr);
2006 /* First occurrence of this expression in any block? */
2007 if (cur_expr->avail_occr == NULL)
2008 cur_expr->avail_occr = avail_occr;
2009 else
2010 last_occr->next = avail_occr;
2012 avail_occr->insn = insn;
2013 avail_occr->next = NULL;
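/* A sketch of the structure built above (editorial, not original
   commentary): table->table[HASH] heads a chain of struct expr linked
   through next_same_hash; each expr carries two occurrence lists,
   antic_occr and avail_occr, each holding at most one struct occr per
   basic block -- the first insn of the block for antic_occr and the
   last for avail_occr.  */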
2018 /* Insert pattern X in INSN in the hash table.
2019 X is a SET of a reg to either another reg or a constant.
2020 If it is already present, record it as the last occurrence in INSN's
2021 basic block. */
2023 static void
2024 insert_set_in_table (rtx x, rtx insn, struct hash_table *table)
2026 int found;
2027 unsigned int hash;
2028 struct expr *cur_expr, *last_expr = NULL;
2029 struct occr *cur_occr, *last_occr = NULL;
2031 if (GET_CODE (x) != SET
2032 || GET_CODE (SET_DEST (x)) != REG)
2033 abort ();
2035 hash = hash_set (REGNO (SET_DEST (x)), table->size);
2037 cur_expr = table->table[hash];
2038 found = 0;
2040 while (cur_expr && 0 == (found = expr_equiv_p (cur_expr->expr, x)))
2042 /* If the expression isn't found, save a pointer to the end of
2043 the list. */
2044 last_expr = cur_expr;
2045 cur_expr = cur_expr->next_same_hash;
2048 if (! found)
2050 cur_expr = gcse_alloc (sizeof (struct expr));
2051 bytes_used += sizeof (struct expr);
2052 if (table->table[hash] == NULL)
2053 /* This is the first pattern that hashed to this index. */
2054 table->table[hash] = cur_expr;
2055 else
2056 /* Add EXPR to end of this hash chain. */
2057 last_expr->next_same_hash = cur_expr;
2059 /* Set the fields of the expr element.
2060 We must copy X because it can be modified when copy propagation is
2061 performed on its operands. */
2062 cur_expr->expr = copy_rtx (x);
2063 cur_expr->bitmap_index = table->n_elems++;
2064 cur_expr->next_same_hash = NULL;
2065 cur_expr->antic_occr = NULL;
2066 cur_expr->avail_occr = NULL;
2069 /* Now record the occurrence. */
2070 cur_occr = cur_expr->avail_occr;
2072 /* Search for another occurrence in the same basic block. */
2073 while (cur_occr && BLOCK_NUM (cur_occr->insn) != BLOCK_NUM (insn))
2075 /* If an occurrence isn't found, save a pointer to the end of
2076 the list. */
2077 last_occr = cur_occr;
2078 cur_occr = cur_occr->next;
2081 if (cur_occr)
2082 /* Found another instance of the expression in the same basic block.
2083 Prefer this occurrence to the currently recorded one. We want the
2084 last one in the block and the block is scanned from start to end. */
2085 cur_occr->insn = insn;
2086 else
2088 /* First occurrence of this expression in this basic block. */
2089 cur_occr = gcse_alloc (sizeof (struct occr));
2090 bytes_used += sizeof (struct occr);
2092 /* First occurrence of this expression in any block? */
2093 if (cur_expr->avail_occr == NULL)
2094 cur_expr->avail_occr = cur_occr;
2095 else
2096 last_occr->next = cur_occr;
2098 cur_occr->insn = insn;
2099 cur_occr->next = NULL;
2103 /* Determine whether the rtx X should be treated as a constant for
2104 the purposes of GCSE's constant propagation. */
2106 static bool
2107 gcse_constant_p (rtx x)
2109 /* Consider a COMPARE of two integer constants to be a constant. */
2110 if (GET_CODE (x) == COMPARE
2111 && GET_CODE (XEXP (x, 0)) == CONST_INT
2112 && GET_CODE (XEXP (x, 1)) == CONST_INT)
2113 return true;
2116 /* Consider a COMPARE of the same registers to be a constant
2117 if they are not floating point registers. */
2118 if (GET_CODE (x) == COMPARE
2119 && GET_CODE (XEXP (x, 0)) == REG
2120 && GET_CODE (XEXP (x, 1)) == REG
2121 && REGNO (XEXP (x, 0)) == REGNO (XEXP (x, 1))
2122 && ! FLOAT_MODE_P (GET_MODE (XEXP (x, 0)))
2123 && ! FLOAT_MODE_P (GET_MODE (XEXP (x, 1))))
2124 return true;
2126 if (GET_CODE (x) == CONSTANT_P_RTX)
2127 return false;
2129 return CONSTANT_P (x);
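/* Illustrative example (not from the original): the second rule above
   accepts (compare:CC (reg:SI 65) (reg:SI 65)) as constant, since an
   integer register always compares equal to itself; the same pattern on
   floating point registers is rejected because of NaNs, for which even
   x == x is false.  */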
2132 /* Scan pattern PAT of INSN and add an entry to the hash TABLE (set or
2133 expression one). */
2135 static void
2136 hash_scan_set (rtx pat, rtx insn, struct hash_table *table)
2138 rtx src = SET_SRC (pat);
2139 rtx dest = SET_DEST (pat);
2140 rtx note;
2142 if (GET_CODE (src) == CALL)
2143 hash_scan_call (src, insn, table);
2145 else if (GET_CODE (dest) == REG)
2147 unsigned int regno = REGNO (dest);
2148 rtx tmp;
2150 /* If this is a single set and we are doing constant propagation,
2151 see if a REG_NOTE shows this equivalent to a constant. */
2152 if (table->set_p && (note = find_reg_equal_equiv_note (insn)) != 0
2153 && gcse_constant_p (XEXP (note, 0)))
2154 src = XEXP (note, 0), pat = gen_rtx_SET (VOIDmode, dest, src);
2156 /* Only record sets of pseudo-regs in the hash table. */
2157 if (! table->set_p
2158 && regno >= FIRST_PSEUDO_REGISTER
2159 /* Don't GCSE something if we can't do a reg/reg copy. */
2160 && can_copy_p (GET_MODE (dest))
2161 /* GCSE commonly inserts instruction after the insn. We can't
2162 do that easily for EH_REGION notes so disable GCSE on these
2163 for now. */
2164 && !find_reg_note (insn, REG_EH_REGION, NULL_RTX)
2165 /* Is SET_SRC something we want to gcse? */
2166 && want_to_gcse_p (src)
2167 /* Don't CSE a nop. */
2168 && ! set_noop_p (pat)
2169 /* Don't GCSE if it has an attached REG_EQUIV note.
2170 At this point only function parameters should have
2171 REG_EQUIV notes, and if the argument slot is used somewhere
2172 explicitly, it means the address of the parameter has been taken,
2173 so we should not extend the lifetime of the pseudo. */
2174 && ((note = find_reg_note (insn, REG_EQUIV, NULL_RTX)) == 0
2175 || GET_CODE (XEXP (note, 0)) != MEM))
2177 /* An expression is not anticipatable if its operands are
2178 modified before this insn or if this is not the only SET in
2179 this insn. */
2180 int antic_p = oprs_anticipatable_p (src, insn) && single_set (insn);
2181 /* An expression is not available if its operands are
2182 subsequently modified, including this insn. It's also not
2183 available if this is a branch, because we can't insert
2184 a set after the branch. */
2185 int avail_p = (oprs_available_p (src, insn)
2186 && ! JUMP_P (insn));
2188 insert_expr_in_table (src, GET_MODE (dest), insn, antic_p, avail_p, table);
2191 /* Record sets for constant/copy propagation. */
2192 else if (table->set_p
2193 && regno >= FIRST_PSEUDO_REGISTER
2194 && ((GET_CODE (src) == REG
2195 && REGNO (src) >= FIRST_PSEUDO_REGISTER
2196 && can_copy_p (GET_MODE (dest))
2197 && REGNO (src) != regno)
2198 || gcse_constant_p (src))
2199 /* A copy is not available if its src or dest is subsequently
2200 modified. Here we want to search from INSN+1 on, but
2201 oprs_available_p searches from INSN on. */
2202 && (insn == BLOCK_END (BLOCK_NUM (insn))
2203 || ((tmp = next_nonnote_insn (insn)) != NULL_RTX
2204 && oprs_available_p (pat, tmp))))
2205 insert_set_in_table (pat, insn, table);
2209 static void
2210 hash_scan_clobber (rtx x ATTRIBUTE_UNUSED, rtx insn ATTRIBUTE_UNUSED,
2211 struct hash_table *table ATTRIBUTE_UNUSED)
2213 /* Currently nothing to do. */
2216 static void
2217 hash_scan_call (rtx x ATTRIBUTE_UNUSED, rtx insn ATTRIBUTE_UNUSED,
2218 struct hash_table *table ATTRIBUTE_UNUSED)
2220 /* Currently nothing to do. */
2223 /* Process INSN and add hash table entries as appropriate.
2225 Only available expressions that set a single pseudo-reg are recorded.
2227 Single sets in a PARALLEL could be handled, but it's an extra complication
2228 that isn't dealt with right now. The trick is handling the CLOBBERs that
2229 are also in the PARALLEL. Later.
2231 If SET_P is nonzero, this is for the assignment hash table,
2232 otherwise it is for the expression hash table.
2233 If IN_LIBCALL_BLOCK is nonzero, we are in a libcall block and should
2234 not record any expressions. */
2236 static void
2237 hash_scan_insn (rtx insn, struct hash_table *table, int in_libcall_block)
2239 rtx pat = PATTERN (insn);
2240 int i;
2242 if (in_libcall_block)
2243 return;
2245 /* Pick out the sets of INSN and for other forms of instructions record
2246 what's been modified. */
2248 if (GET_CODE (pat) == SET)
2249 hash_scan_set (pat, insn, table);
2250 else if (GET_CODE (pat) == PARALLEL)
2251 for (i = 0; i < XVECLEN (pat, 0); i++)
2253 rtx x = XVECEXP (pat, 0, i);
2255 if (GET_CODE (x) == SET)
2256 hash_scan_set (x, insn, table);
2257 else if (GET_CODE (x) == CLOBBER)
2258 hash_scan_clobber (x, insn, table);
2259 else if (GET_CODE (x) == CALL)
2260 hash_scan_call (x, insn, table);
2263 else if (GET_CODE (pat) == CLOBBER)
2264 hash_scan_clobber (pat, insn, table);
2265 else if (GET_CODE (pat) == CALL)
2266 hash_scan_call (pat, insn, table);
2269 static void
2270 dump_hash_table (FILE *file, const char *name, struct hash_table *table)
2272 int i;
2273 /* Flattened out table, so it's printed in proper order. */
2274 struct expr **flat_table;
2275 unsigned int *hash_val;
2276 struct expr *expr;
2278 flat_table = xcalloc (table->n_elems, sizeof (struct expr *));
2279 hash_val = xmalloc (table->n_elems * sizeof (unsigned int));
2281 for (i = 0; i < (int) table->size; i++)
2282 for (expr = table->table[i]; expr != NULL; expr = expr->next_same_hash)
2284 flat_table[expr->bitmap_index] = expr;
2285 hash_val[expr->bitmap_index] = i;
2288 fprintf (file, "%s hash table (%d buckets, %d entries)\n",
2289 name, table->size, table->n_elems);
2291 for (i = 0; i < (int) table->n_elems; i++)
2292 if (flat_table[i] != 0)
2294 expr = flat_table[i];
2295 fprintf (file, "Index %d (hash value %d)\n ",
2296 expr->bitmap_index, hash_val[i]);
2297 print_rtl (file, expr->expr);
2298 fprintf (file, "\n");
2301 fprintf (file, "\n");
2303 free (flat_table);
2304 free (hash_val);
2307 /* Record register first/last/block set information for REGNO in INSN.
2309 first_set records the first place in the block where the register
2310 is set and is used to compute "anticipatability".
2312 last_set records the last place in the block where the register
2313 is set and is used to compute "availability".
2315 last_bb records the block for which first_set and last_set are
2316 valid, as a quick test to invalidate them.
2318 reg_set_in_block records whether the register is set in the block
2319 and is used to compute "transparency". */
2321 static void
2322 record_last_reg_set_info (rtx insn, int regno)
2324 struct reg_avail_info *info = &reg_avail_info[regno];
2325 int cuid = INSN_CUID (insn);
2327 info->last_set = cuid;
2328 if (info->last_bb != current_bb)
2330 info->last_bb = current_bb;
2331 info->first_set = cuid;
2332 SET_BIT (reg_set_in_block[current_bb->index], regno);
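/* Editorial note: since insns are scanned in block order, once a block
   has been processed FIRST_SET holds the CUID of the first set of REGNO
   in that block and LAST_SET the CUID of the most recent one.  */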
2337 /* Record all of the canonicalized MEMs of record_last_mem_set_info's insn.
2338 Note we store a pair of elements in the list, so they have to be
2339 taken off pairwise. */
2341 static void
2342 canon_list_insert (rtx dest ATTRIBUTE_UNUSED, rtx unused1 ATTRIBUTE_UNUSED,
2343 void * v_insn)
2345 rtx dest_addr, insn;
2346 int bb;
2348 while (GET_CODE (dest) == SUBREG
2349 || GET_CODE (dest) == ZERO_EXTRACT
2350 || GET_CODE (dest) == SIGN_EXTRACT
2351 || GET_CODE (dest) == STRICT_LOW_PART)
2352 dest = XEXP (dest, 0);
2354 /* If DEST is not a MEM, then it will not conflict with a load. Note
2355 that function calls are assumed to clobber memory, but are handled
2356 elsewhere. */
2358 if (GET_CODE (dest) != MEM)
2359 return;
2361 dest_addr = get_addr (XEXP (dest, 0));
2362 dest_addr = canon_rtx (dest_addr);
2363 insn = (rtx) v_insn;
2364 bb = BLOCK_NUM (insn);
2366 canon_modify_mem_list[bb] =
2367 alloc_EXPR_LIST (VOIDmode, dest_addr, canon_modify_mem_list[bb]);
2368 canon_modify_mem_list[bb] =
2369 alloc_EXPR_LIST (VOIDmode, dest, canon_modify_mem_list[bb]);
2370 bitmap_set_bit (canon_modify_mem_list_set, bb);
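/* Editorial sketch of the resulting layout: the head of
   canon_modify_mem_list[bb] is now DEST followed by DEST_ADDR, and
   consumers such as compute_transp walk the list two nodes at a time,
   relying on exactly this pairing.  */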
2373 /* Record memory modification information for INSN. We do not actually care
2374 about the memory location(s) that are set, or even how they are set (consider
2375 a CALL_INSN). We merely need to record which insns modify memory. */
2377 static void
2378 record_last_mem_set_info (rtx insn)
2380 int bb = BLOCK_NUM (insn);
2382 /* load_killed_in_block_p will handle the case of calls clobbering
2383 everything. */
2384 modify_mem_list[bb] = alloc_INSN_LIST (insn, modify_mem_list[bb]);
2385 bitmap_set_bit (modify_mem_list_set, bb);
2387 if (GET_CODE (insn) == CALL_INSN)
2389 /* Note that traversals of this loop (other than for free-ing)
2390 will break after encountering a CALL_INSN. So, there's no
2391 need to insert a pair of items, as canon_list_insert does. */
2392 canon_modify_mem_list[bb] =
2393 alloc_INSN_LIST (insn, canon_modify_mem_list[bb]);
2394 bitmap_set_bit (canon_modify_mem_list_set, bb);
2396 else
2397 note_stores (PATTERN (insn), canon_list_insert, (void*) insn);
2400 /* Called from compute_hash_table via note_stores to handle one
2401 SET or CLOBBER in an insn. DATA is really the instruction in which
2402 the SET is taking place. */
2404 static void
2405 record_last_set_info (rtx dest, rtx setter ATTRIBUTE_UNUSED, void *data)
2407 rtx last_set_insn = (rtx) data;
2409 if (GET_CODE (dest) == SUBREG)
2410 dest = SUBREG_REG (dest);
2412 if (GET_CODE (dest) == REG)
2413 record_last_reg_set_info (last_set_insn, REGNO (dest));
2414 else if (GET_CODE (dest) == MEM
2415 /* Ignore pushes, they clobber nothing. */
2416 && ! push_operand (dest, GET_MODE (dest)))
2417 record_last_mem_set_info (last_set_insn);
2420 /* Top level function to create an expression or assignment hash table.
2422 Expression entries are placed in the hash table if
2423 - they are of the form (set (pseudo-reg) src),
2424 - src is something we want to perform GCSE on,
2425 - none of the operands are subsequently modified in the block
2427 Assignment entries are placed in the hash table if
2428 - they are of the form (set (pseudo-reg) src),
2429 - src is something we want to perform const/copy propagation on,
2430 - none of the operands or target are subsequently modified in the block
2432 Currently src must be a pseudo-reg or a const_int.
2434 TABLE is the table computed. */
2436 static void
2437 compute_hash_table_work (struct hash_table *table)
2439 unsigned int i;
2441 /* While we compute the hash table we also compute a bit array of which
2442 registers are set in which blocks.
2443 ??? This isn't needed during const/copy propagation, but it's cheap to
2444 compute. Later. */
2445 sbitmap_vector_zero (reg_set_in_block, last_basic_block);
2447 /* Return any INSN_LIST nodes we have allocated back to the node cache. */
2448 clear_modify_mem_tables ();
2449 /* Some working arrays used to track first and last set in each block. */
2450 reg_avail_info = gmalloc (max_gcse_regno * sizeof (struct reg_avail_info));
2452 for (i = 0; i < max_gcse_regno; ++i)
2453 reg_avail_info[i].last_bb = NULL;
2455 FOR_EACH_BB (current_bb)
2457 rtx insn;
2458 unsigned int regno;
2459 int in_libcall_block;
2461 /* First pass over the instructions records information used to
2462 determine when registers and memory are first and last set.
2463 ??? hard-reg reg_set_in_block computation
2464 could be moved to compute_sets since they currently don't change. */
2466 for (insn = current_bb->head;
2467 insn && insn != NEXT_INSN (current_bb->end);
2468 insn = NEXT_INSN (insn))
2470 if (! INSN_P (insn))
2471 continue;
2473 if (GET_CODE (insn) == CALL_INSN)
2475 bool clobbers_all = false;
2476 #ifdef NON_SAVING_SETJMP
2477 if (NON_SAVING_SETJMP
2478 && find_reg_note (insn, REG_SETJMP, NULL_RTX))
2479 clobbers_all = true;
2480 #endif
2482 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2483 if (clobbers_all
2484 || TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
2485 record_last_reg_set_info (insn, regno);
2487 mark_call (insn);
2490 note_stores (PATTERN (insn), record_last_set_info, insn);
2493 /* Insert implicit sets in the hash table. */
2494 if (table->set_p
2495 && implicit_sets[current_bb->index] != NULL_RTX)
2496 hash_scan_set (implicit_sets[current_bb->index],
2497 current_bb->head, table);
2499 /* The next pass builds the hash table. */
2501 for (insn = current_bb->head, in_libcall_block = 0;
2502 insn && insn != NEXT_INSN (current_bb->end);
2503 insn = NEXT_INSN (insn))
2504 if (INSN_P (insn))
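/* Editorial note on the asymmetry below: for the set table the
   REG_RETVAL insn that ends a libcall block is scanned with
   IN_LIBCALL_BLOCK already cleared, while for the expression table it
   is still treated as part of the libcall block and the flag is
   cleared only after the scan.  */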
2506 if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
2507 in_libcall_block = 1;
2508 else if (table->set_p && find_reg_note (insn, REG_RETVAL, NULL_RTX))
2509 in_libcall_block = 0;
2510 hash_scan_insn (insn, table, in_libcall_block);
2511 if (!table->set_p && find_reg_note (insn, REG_RETVAL, NULL_RTX))
2512 in_libcall_block = 0;
2516 free (reg_avail_info);
2517 reg_avail_info = NULL;
2520 /* Allocate space for the set/expr hash TABLE.
2521 N_INSNS is the number of instructions in the function.
2522 It is used to determine the number of buckets to use.
2523 SET_P determines whether set or expression table will
2524 be created. */
2526 static void
2527 alloc_hash_table (int n_insns, struct hash_table *table, int set_p)
2529 int n;
2531 table->size = n_insns / 4;
2532 if (table->size < 11)
2533 table->size = 11;
2535 /* Attempt to maintain efficient use of hash table.
2536 Making it an odd number is simplest for now.
2537 ??? Later take some measurements. */
2538 table->size |= 1;
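/* Illustrative arithmetic (not in the original): a function of 1000
   insns gets 1000 / 4 = 250 buckets, which the "|= 1" above rounds to
   the odd value 251.  */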
2539 n = table->size * sizeof (struct expr *);
2540 table->table = gmalloc (n);
2541 table->set_p = set_p;
2544 /* Free things allocated by alloc_hash_table. */
2546 static void
2547 free_hash_table (struct hash_table *table)
2549 free (table->table);
2552 /* Compute the hash TABLE for doing copy/const propagation or
2553 expression hash table. */
2555 static void
2556 compute_hash_table (struct hash_table *table)
2558 /* Initialize count of number of entries in hash table. */
2559 table->n_elems = 0;
2560 memset (table->table, 0, table->size * sizeof (struct expr *));
2562 compute_hash_table_work (table);
2565 /* Expression tracking support. */
2567 /* Lookup pattern PAT in the expression TABLE.
2568 The result is a pointer to the table entry, or NULL if not found. */
2570 static struct expr *
2571 lookup_expr (rtx pat, struct hash_table *table)
2573 int do_not_record_p;
2574 unsigned int hash = hash_expr (pat, GET_MODE (pat), &do_not_record_p,
2575 table->size);
2576 struct expr *expr;
2578 if (do_not_record_p)
2579 return NULL;
2581 expr = table->table[hash];
2583 while (expr && ! expr_equiv_p (expr->expr, pat))
2584 expr = expr->next_same_hash;
2586 return expr;
2589 /* Lookup REGNO in the set TABLE. The result is a pointer to the
2590 table entry, or NULL if not found. */
2592 static struct expr *
2593 lookup_set (unsigned int regno, struct hash_table *table)
2595 unsigned int hash = hash_set (regno, table->size);
2596 struct expr *expr;
2598 expr = table->table[hash];
2600 while (expr && REGNO (SET_DEST (expr->expr)) != regno)
2601 expr = expr->next_same_hash;
2603 return expr;
2606 /* Return the next entry for REGNO in list EXPR. */
2608 static struct expr *
2609 next_set (unsigned int regno, struct expr *expr)
2611 do
2612 expr = expr->next_same_hash;
2613 while (expr && REGNO (SET_DEST (expr->expr)) != regno);
2615 return expr;
2618 /* Like free_INSN_LIST_list or free_EXPR_LIST_list, except that the node
2619 types may be mixed. */
2621 static void
2622 free_insn_expr_list_list (rtx *listp)
2624 rtx list, next;
2626 for (list = *listp; list ; list = next)
2628 next = XEXP (list, 1);
2629 if (GET_CODE (list) == EXPR_LIST)
2630 free_EXPR_LIST_node (list);
2631 else
2632 free_INSN_LIST_node (list);
2635 *listp = NULL;
2638 /* Clear canon_modify_mem_list and modify_mem_list tables. */
2639 static void
2640 clear_modify_mem_tables (void)
2642 int i;
2644 EXECUTE_IF_SET_IN_BITMAP
2645 (modify_mem_list_set, 0, i, free_INSN_LIST_list (modify_mem_list + i));
2646 bitmap_clear (modify_mem_list_set);
2648 EXECUTE_IF_SET_IN_BITMAP
2649 (canon_modify_mem_list_set, 0, i,
2650 free_insn_expr_list_list (canon_modify_mem_list + i));
2651 bitmap_clear (canon_modify_mem_list_set);
2654 /* Release memory used by modify_mem_list_set and canon_modify_mem_list_set. */
2656 static void
2657 free_modify_mem_tables (void)
2659 clear_modify_mem_tables ();
2660 free (modify_mem_list);
2661 free (canon_modify_mem_list);
2662 modify_mem_list = 0;
2663 canon_modify_mem_list = 0;
2666 /* Reset tables used to keep track of what's still available [since the
2667 start of the block]. */
2669 static void
2670 reset_opr_set_tables (void)
2672 /* Maintain a bitmap of which regs have been set since beginning of
2673 the block. */
2674 CLEAR_REG_SET (reg_set_bitmap);
2676 /* Also keep a record of the last instruction to modify memory.
2677 For now this is very trivial, we only record whether any memory
2678 location has been modified. */
2679 clear_modify_mem_tables ();
2682 /* Return nonzero if the operands of X are not set before INSN in
2683 INSN's basic block. */
2685 static int
2686 oprs_not_set_p (rtx x, rtx insn)
2688 int i, j;
2689 enum rtx_code code;
2690 const char *fmt;
2692 if (x == 0)
2693 return 1;
2695 code = GET_CODE (x);
2696 switch (code)
2698 case PC:
2699 case CC0:
2700 case CONST:
2701 case CONST_INT:
2702 case CONST_DOUBLE:
2703 case CONST_VECTOR:
2704 case SYMBOL_REF:
2705 case LABEL_REF:
2706 case ADDR_VEC:
2707 case ADDR_DIFF_VEC:
2708 return 1;
2710 case MEM:
2711 if (load_killed_in_block_p (BLOCK_FOR_INSN (insn),
2712 INSN_CUID (insn), x, 0))
2713 return 0;
2714 else
2715 return oprs_not_set_p (XEXP (x, 0), insn);
2717 case REG:
2718 return ! REGNO_REG_SET_P (reg_set_bitmap, REGNO (x));
2720 default:
2721 break;
2724 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
2726 if (fmt[i] == 'e')
2728 /* If we are about to do the last recursive call
2729 needed at this level, change it into iteration.
2730 This function is called enough to be worth it. */
2731 if (i == 0)
2732 return oprs_not_set_p (XEXP (x, i), insn);
2734 if (! oprs_not_set_p (XEXP (x, i), insn))
2735 return 0;
2737 else if (fmt[i] == 'E')
2738 for (j = 0; j < XVECLEN (x, i); j++)
2739 if (! oprs_not_set_p (XVECEXP (x, i, j), insn))
2740 return 0;
2743 return 1;
2746 /* Mark things set by a CALL. */
2748 static void
2749 mark_call (rtx insn)
2751 if (! CONST_OR_PURE_CALL_P (insn))
2752 record_last_mem_set_info (insn);
2755 /* Mark things set by a SET. */
2757 static void
2758 mark_set (rtx pat, rtx insn)
2760 rtx dest = SET_DEST (pat);
2762 while (GET_CODE (dest) == SUBREG
2763 || GET_CODE (dest) == ZERO_EXTRACT
2764 || GET_CODE (dest) == SIGN_EXTRACT
2765 || GET_CODE (dest) == STRICT_LOW_PART)
2766 dest = XEXP (dest, 0);
2768 if (GET_CODE (dest) == REG)
2769 SET_REGNO_REG_SET (reg_set_bitmap, REGNO (dest));
2770 else if (GET_CODE (dest) == MEM)
2771 record_last_mem_set_info (insn);
2773 if (GET_CODE (SET_SRC (pat)) == CALL)
2774 mark_call (insn);
2777 /* Record things set by a CLOBBER. */
2779 static void
2780 mark_clobber (rtx pat, rtx insn)
2782 rtx clob = XEXP (pat, 0);
2784 while (GET_CODE (clob) == SUBREG || GET_CODE (clob) == STRICT_LOW_PART)
2785 clob = XEXP (clob, 0);
2787 if (GET_CODE (clob) == REG)
2788 SET_REGNO_REG_SET (reg_set_bitmap, REGNO (clob));
2789 else
2790 record_last_mem_set_info (insn);
2793 /* Record things set by INSN.
2794 This data is used by oprs_not_set_p. */
2796 static void
2797 mark_oprs_set (rtx insn)
2799 rtx pat = PATTERN (insn);
2800 int i;
2802 if (GET_CODE (pat) == SET)
2803 mark_set (pat, insn);
2804 else if (GET_CODE (pat) == PARALLEL)
2805 for (i = 0; i < XVECLEN (pat, 0); i++)
2807 rtx x = XVECEXP (pat, 0, i);
2809 if (GET_CODE (x) == SET)
2810 mark_set (x, insn);
2811 else if (GET_CODE (x) == CLOBBER)
2812 mark_clobber (x, insn);
2813 else if (GET_CODE (x) == CALL)
2814 mark_call (insn);
2817 else if (GET_CODE (pat) == CLOBBER)
2818 mark_clobber (pat, insn);
2819 else if (GET_CODE (pat) == CALL)
2820 mark_call (insn);
2824 /* Classic GCSE reaching definition support. */
2826 /* Allocate reaching def variables. */
2828 static void
2829 alloc_rd_mem (int n_blocks, int n_insns)
2831 rd_kill = sbitmap_vector_alloc (n_blocks, n_insns);
2832 sbitmap_vector_zero (rd_kill, n_blocks);
2834 rd_gen = sbitmap_vector_alloc (n_blocks, n_insns);
2835 sbitmap_vector_zero (rd_gen, n_blocks);
2837 reaching_defs = sbitmap_vector_alloc (n_blocks, n_insns);
2838 sbitmap_vector_zero (reaching_defs, n_blocks);
2840 rd_out = sbitmap_vector_alloc (n_blocks, n_insns);
2841 sbitmap_vector_zero (rd_out, n_blocks);
2844 /* Free reaching def variables. */
2846 static void
2847 free_rd_mem (void)
2849 sbitmap_vector_free (rd_kill);
2850 sbitmap_vector_free (rd_gen);
2851 sbitmap_vector_free (reaching_defs);
2852 sbitmap_vector_free (rd_out);
2855 /* Add INSN to the kills of BB. REGNO, set in BB, is killed by INSN. */
2857 static void
2858 handle_rd_kill_set (rtx insn, int regno, basic_block bb)
2860 struct reg_set *this_reg;
2862 for (this_reg = reg_set_table[regno]; this_reg; this_reg = this_reg->next)
2863 if (BLOCK_NUM (this_reg->insn) != BLOCK_NUM (insn))
2864 SET_BIT (rd_kill[bb->index], INSN_CUID (this_reg->insn));
2867 /* Compute the set of kill's for reaching definitions. */
2869 static void
2870 compute_kill_rd (void)
2872 int cuid;
2873 unsigned int regno;
2874 int i;
2875 basic_block bb;
2877 /* For each block
2878 For each set bit in `gen' of the block (i.e. each insn which
2879 generates a definition in the block)
2880 Call the reg set by the insn corresponding to that bit regx
2881 Look at the linked list starting at reg_set_table[regx]
2882 For each setting of regx in the linked list, which is not in
2883 this block
2884 Set the bit in `kill' corresponding to that insn. */
2885 FOR_EACH_BB (bb)
2886 for (cuid = 0; cuid < max_cuid; cuid++)
2887 if (TEST_BIT (rd_gen[bb->index], cuid))
2889 rtx insn = CUID_INSN (cuid);
2890 rtx pat = PATTERN (insn);
2892 if (GET_CODE (insn) == CALL_INSN)
2894 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2895 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
2896 handle_rd_kill_set (insn, regno, bb);
2899 if (GET_CODE (pat) == PARALLEL)
2901 for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
2903 enum rtx_code code = GET_CODE (XVECEXP (pat, 0, i));
2905 if ((code == SET || code == CLOBBER)
2906 && GET_CODE (XEXP (XVECEXP (pat, 0, i), 0)) == REG)
2907 handle_rd_kill_set (insn,
2908 REGNO (XEXP (XVECEXP (pat, 0, i), 0)),
2909 bb);
2912 else if (GET_CODE (pat) == SET && GET_CODE (SET_DEST (pat)) == REG)
2913 /* Each setting of this register outside of this block
2914 must be marked in the set of kills in this block. */
2915 handle_rd_kill_set (insn, REGNO (SET_DEST (pat)), bb);
2919 /* Compute the reaching definitions as in
2920 Compilers Principles, Techniques, and Tools. Aho, Sethi, Ullman,
2921 Chapter 10. It is the same algorithm as used for computing available
2922 expressions but applied to the gens and kills of reaching definitions. */
2924 static void
2925 compute_rd (void)
2927 int changed, passes;
2928 basic_block bb;
2930 FOR_EACH_BB (bb)
2931 sbitmap_copy (rd_out[bb->index] /*dst*/, rd_gen[bb->index] /*src*/);
2933 passes = 0;
2934 changed = 1;
2935 while (changed)
2937 changed = 0;
2938 FOR_EACH_BB (bb)
2940 sbitmap_union_of_preds (reaching_defs[bb->index], rd_out, bb->index);
2941 changed |= sbitmap_union_of_diff_cg (rd_out[bb->index], rd_gen[bb->index],
2942 reaching_defs[bb->index], rd_kill[bb->index]);
2944 passes++;
2947 if (gcse_file)
2948 fprintf (gcse_file, "reaching def computation: %d passes\n", passes);
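/* Restated for reference (an editorial sketch of the loop above):
   reaching_defs[b] = union of rd_out[p] over all predecessors p of b
   rd_out[b] = rd_gen[b] union (reaching_defs[b] - rd_kill[b])
   iterated until no rd_out bitmap changes.  */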
2951 /* Classic GCSE available expression support. */
2953 /* Allocate memory for available expression computation. */
2955 static void
2956 alloc_avail_expr_mem (int n_blocks, int n_exprs)
2958 ae_kill = sbitmap_vector_alloc (n_blocks, n_exprs);
2959 sbitmap_vector_zero (ae_kill, n_blocks);
2961 ae_gen = sbitmap_vector_alloc (n_blocks, n_exprs);
2962 sbitmap_vector_zero (ae_gen, n_blocks);
2964 ae_in = sbitmap_vector_alloc (n_blocks, n_exprs);
2965 sbitmap_vector_zero (ae_in, n_blocks);
2967 ae_out = sbitmap_vector_alloc (n_blocks, n_exprs);
2968 sbitmap_vector_zero (ae_out, n_blocks);
2971 static void
2972 free_avail_expr_mem (void)
2974 sbitmap_vector_free (ae_kill);
2975 sbitmap_vector_free (ae_gen);
2976 sbitmap_vector_free (ae_in);
2977 sbitmap_vector_free (ae_out);
2980 /* Compute the set of available expressions generated in each basic block. */
2982 static void
2983 compute_ae_gen (struct hash_table *expr_hash_table)
2985 unsigned int i;
2986 struct expr *expr;
2987 struct occr *occr;
2989 /* For each recorded occurrence of each expression, set ae_gen[bb][expr].
2990 This is all we have to do because an expression is not recorded if it
2991 is not available, and the only expressions we want to work with are the
2992 ones that are recorded. */
2993 for (i = 0; i < expr_hash_table->size; i++)
2994 for (expr = expr_hash_table->table[i]; expr != 0; expr = expr->next_same_hash)
2995 for (occr = expr->avail_occr; occr != 0; occr = occr->next)
2996 SET_BIT (ae_gen[BLOCK_NUM (occr->insn)], expr->bitmap_index);
2999 /* Return nonzero if expression X is killed in BB. */
3001 static int
3002 expr_killed_p (rtx x, basic_block bb)
3004 int i, j;
3005 enum rtx_code code;
3006 const char *fmt;
3008 if (x == 0)
3009 return 1;
3011 code = GET_CODE (x);
3012 switch (code)
3014 case REG:
3015 return TEST_BIT (reg_set_in_block[bb->index], REGNO (x));
3017 case MEM:
3018 if (load_killed_in_block_p (bb, get_max_uid () + 1, x, 0))
3019 return 1;
3020 else
3021 return expr_killed_p (XEXP (x, 0), bb);
3023 case PC:
3024 case CC0: /*FIXME*/
3025 case CONST:
3026 case CONST_INT:
3027 case CONST_DOUBLE:
3028 case CONST_VECTOR:
3029 case SYMBOL_REF:
3030 case LABEL_REF:
3031 case ADDR_VEC:
3032 case ADDR_DIFF_VEC:
3033 return 0;
3035 default:
3036 break;
3039 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
3041 if (fmt[i] == 'e')
3043 /* If we are about to do the last recursive call
3044 needed at this level, change it into iteration.
3045 This function is called enough to be worth it. */
3046 if (i == 0)
3047 return expr_killed_p (XEXP (x, i), bb);
3048 else if (expr_killed_p (XEXP (x, i), bb))
3049 return 1;
3051 else if (fmt[i] == 'E')
3052 for (j = 0; j < XVECLEN (x, i); j++)
3053 if (expr_killed_p (XVECEXP (x, i, j), bb))
3054 return 1;
3057 return 0;
3060 /* Compute the set of available expressions killed in each basic block. */
3062 static void
3063 compute_ae_kill (sbitmap *ae_gen, sbitmap *ae_kill,
3064 struct hash_table *expr_hash_table)
3066 basic_block bb;
3067 unsigned int i;
3068 struct expr *expr;
3070 FOR_EACH_BB (bb)
3071 for (i = 0; i < expr_hash_table->size; i++)
3072 for (expr = expr_hash_table->table[i]; expr; expr = expr->next_same_hash)
3074 /* Skip EXPR if generated in this block. */
3075 if (TEST_BIT (ae_gen[bb->index], expr->bitmap_index))
3076 continue;
3078 if (expr_killed_p (expr->expr, bb))
3079 SET_BIT (ae_kill[bb->index], expr->bitmap_index);
3083 /* Actually perform the Classic GCSE optimizations. */
3085 /* Return nonzero if occurrence OCCR of expression EXPR reaches block BB.
3087 CHECK_SELF_LOOP is nonzero if we should consider a block reaching itself
3088 as a positive reach. We want to do this when there are two computations
3089 of the expression in the block.
3091 VISITED is a pointer to a working buffer for tracking which BB's have
3092 been visited. It is NULL for the top-level call.
3094 We treat reaching expressions that go through blocks containing the same
3095 reaching expression as "not reaching". E.g. if EXPR is generated in blocks
3096 2 and 3, INSN is in block 4, and 2->3->4, we treat the expression in block
3097 2 as not reaching. The intent is to improve the probability of finding
3098 only one reaching expression and to reduce register lifetimes by picking
3099 the closest such expression. */
3101 static int
3102 expr_reaches_here_p_work (struct occr *occr, struct expr *expr,
3103 basic_block bb, int check_self_loop, char *visited)
3105 edge pred;
3107 for (pred = bb->pred; pred != NULL; pred = pred->pred_next)
3109 basic_block pred_bb = pred->src;
3111 if (visited[pred_bb->index])
3112 /* This predecessor has already been visited. Nothing to do. */
3114 else if (pred_bb == bb)
3116 /* BB loops on itself. */
3117 if (check_self_loop
3118 && TEST_BIT (ae_gen[pred_bb->index], expr->bitmap_index)
3119 && BLOCK_NUM (occr->insn) == pred_bb->index)
3120 return 1;
3122 visited[pred_bb->index] = 1;
3125 /* Ignore this predecessor if it kills the expression. */
3126 else if (TEST_BIT (ae_kill[pred_bb->index], expr->bitmap_index))
3127 visited[pred_bb->index] = 1;
3129 /* Does this predecessor generate this expression? */
3130 else if (TEST_BIT (ae_gen[pred_bb->index], expr->bitmap_index))
3132 /* Is this the occurrence we're looking for?
3133 Note that there's only one generating occurrence per block
3134 so we just need to check the block number. */
3135 if (BLOCK_NUM (occr->insn) == pred_bb->index)
3136 return 1;
3138 visited[pred_bb->index] = 1;
3141 /* Neither gen nor kill. */
3142 else
3144 visited[pred_bb->index] = 1;
3145 if (expr_reaches_here_p_work (occr, expr, pred_bb, check_self_loop,
3146 visited))
3148 return 1;
3152 /* All paths have been checked. */
3153 return 0;
3156 /* This wrapper for expr_reaches_here_p_work() is to ensure that any
3157 memory allocated for that function is returned. */
3159 static int
3160 expr_reaches_here_p (struct occr *occr, struct expr *expr, basic_block bb,
3161 int check_self_loop)
3163 int rval;
3164 char *visited = xcalloc (last_basic_block, 1);
3166 rval = expr_reaches_here_p_work (occr, expr, bb, check_self_loop, visited);
3168 free (visited);
3169 return rval;
3172 /* Return the instruction that computes EXPR that reaches INSN's basic block.
3173 If there is more than one such instruction, return NULL.
3175 Called only by handle_avail_expr. */
3177 static rtx
3178 computing_insn (struct expr *expr, rtx insn)
3180 basic_block bb = BLOCK_FOR_INSN (insn);
3182 if (expr->avail_occr->next == NULL)
3184 if (BLOCK_FOR_INSN (expr->avail_occr->insn) == bb)
3185 /* The available expression is actually itself
3186 (i.e. a loop in the flow graph) so do nothing. */
3187 return NULL;
3189 /* (FIXME) Handle the case where we found a pattern that was created
3190 by a substitution that took place. */
3191 return expr->avail_occr->insn;
3193 else
3195 /* Pattern is computed more than once.
3196 Search backwards from this insn to see how many of these
3197 computations actually reach this insn. */
3198 struct occr *occr;
3199 rtx insn_computes_expr = NULL;
3200 int can_reach = 0;
3202 for (occr = expr->avail_occr; occr != NULL; occr = occr->next)
3204 if (BLOCK_FOR_INSN (occr->insn) == bb)
3206 /* The expression is generated in this block.
3207 The only time we care about this is when the expression
3208 is generated later in the block [and thus there's a loop].
3209 We let the normal cse pass handle the other cases. */
3210 if (INSN_CUID (insn) < INSN_CUID (occr->insn)
3211 && expr_reaches_here_p (occr, expr, bb, 1))
3213 can_reach++;
3214 if (can_reach > 1)
3215 return NULL;
3217 insn_computes_expr = occr->insn;
3220 else if (expr_reaches_here_p (occr, expr, bb, 0))
3222 can_reach++;
3223 if (can_reach > 1)
3224 return NULL;
3226 insn_computes_expr = occr->insn;
3230 if (insn_computes_expr == NULL)
3231 abort ();
3233 return insn_computes_expr;
3237 /* Return nonzero if the definition in DEF_INSN can reach INSN.
3238 Only called by can_disregard_other_sets. */
3240 static int
3241 def_reaches_here_p (rtx insn, rtx def_insn)
3243 rtx reg;
3245 if (TEST_BIT (reaching_defs[BLOCK_NUM (insn)], INSN_CUID (def_insn)))
3246 return 1;
3248 if (BLOCK_NUM (insn) == BLOCK_NUM (def_insn))
3250 if (INSN_CUID (def_insn) < INSN_CUID (insn))
3252 if (GET_CODE (PATTERN (def_insn)) == PARALLEL)
3253 return 1;
3254 else if (GET_CODE (PATTERN (def_insn)) == CLOBBER)
3255 reg = XEXP (PATTERN (def_insn), 0);
3256 else if (GET_CODE (PATTERN (def_insn)) == SET)
3257 reg = SET_DEST (PATTERN (def_insn));
3258 else
3259 abort ();
3261 return ! reg_set_between_p (reg, NEXT_INSN (def_insn), insn);
3263 else
3264 return 0;
3267 return 0;
3270 /* Return nonzero if *ADDR_THIS_REG can only have one value at INSN. The
3271 value returned is the number of definitions that reach INSN. Returning a
3272 value of zero means that [maybe] more than one definition reaches INSN and
3273 the caller can't perform whatever optimization it is trying to do. I.e. it is
3274 always safe to return zero. */
3276 static int
3277 can_disregard_other_sets (struct reg_set **addr_this_reg, rtx insn, int for_combine)
3279 int number_of_reaching_defs = 0;
3280 struct reg_set *this_reg;
3282 for (this_reg = *addr_this_reg; this_reg != 0; this_reg = this_reg->next)
3283 if (def_reaches_here_p (insn, this_reg->insn))
3285 number_of_reaching_defs++;
3286 /* Ignore parallels for now. */
3287 if (GET_CODE (PATTERN (this_reg->insn)) == PARALLEL)
3288 return 0;
3290 if (!for_combine
3291 && (GET_CODE (PATTERN (this_reg->insn)) == CLOBBER
3292 || ! rtx_equal_p (SET_SRC (PATTERN (this_reg->insn)),
3293 SET_SRC (PATTERN (insn)))))
3294 /* A setting of the reg to a different value reaches INSN. */
3295 return 0;
3297 if (number_of_reaching_defs > 1)
3299 /* If in this setting the value the register is being set to is
3300 equal to the previous value the register was set to and this
3301 setting reaches the insn we are trying to do the substitution
3302 on then we are ok. */
3303 if (GET_CODE (PATTERN (this_reg->insn)) == CLOBBER)
3304 return 0;
3305 else if (! rtx_equal_p (SET_SRC (PATTERN (this_reg->insn)),
3306 SET_SRC (PATTERN (insn))))
3307 return 0;
3310 *addr_this_reg = this_reg;
3313 return number_of_reaching_defs;
3316 /* Expression computed by insn is available and the substitution is legal,
3317 so try to perform the substitution.
3319 The result is nonzero if any changes were made. */
3321 static int
3322 handle_avail_expr (rtx insn, struct expr *expr)
3324 rtx pat, insn_computes_expr, expr_set;
3325 rtx to;
3326 struct reg_set *this_reg;
3327 int found_setting, use_src;
3328 int changed = 0;
3330 /* We only handle the case where one computation of the expression
3331 reaches this instruction. */
3332 insn_computes_expr = computing_insn (expr, insn);
3333 if (insn_computes_expr == NULL)
3334 return 0;
3335 expr_set = single_set (insn_computes_expr);
3336 if (!expr_set)
3337 abort ();
3339 found_setting = 0;
3340 use_src = 0;
3342 /* At this point we know only one computation of EXPR outside of this
3343 block reaches this insn. Now try to find a register that the
3344 expression is computed into. */
3345 if (GET_CODE (SET_SRC (expr_set)) == REG)
3347 /* This is the case when the available expression that reaches
3348 here has already been handled as an available expression. */
3349 unsigned int regnum_for_replacing
3350 = REGNO (SET_SRC (expr_set));
3352 /* If the register was created by GCSE we can't use `reg_set_table',
3353 however we know it's set only once. */
3354 if (regnum_for_replacing >= max_gcse_regno
3355 /* If the register the expression is computed into is set only once,
3356 or only one set reaches this insn, we can use it. */
3357 || (((this_reg = reg_set_table[regnum_for_replacing]),
3358 this_reg->next == NULL)
3359 || can_disregard_other_sets (&this_reg, insn, 0)))
3361 use_src = 1;
3362 found_setting = 1;
3366 if (!found_setting)
3368 unsigned int regnum_for_replacing
3369 = REGNO (SET_DEST (expr_set));
3371 /* This shouldn't happen. */
3372 if (regnum_for_replacing >= max_gcse_regno)
3373 abort ();
3375 this_reg = reg_set_table[regnum_for_replacing];
3377 /* If the register the expression is computed into is set only once,
3378 or only one set reaches this insn, use it. */
3379 if (this_reg->next == NULL
3380 || can_disregard_other_sets (&this_reg, insn, 0))
3381 found_setting = 1;
3384 if (found_setting)
3386 pat = PATTERN (insn);
3387 if (use_src)
3388 to = SET_SRC (expr_set);
3389 else
3390 to = SET_DEST (expr_set);
3391 changed = validate_change (insn, &SET_SRC (pat), to, 0);
3393 /* We should be able to ignore the return code from validate_change but
3394 to play it safe we check. */
3395 if (changed)
3397 gcse_subst_count++;
3398 if (gcse_file != NULL)
3400 fprintf (gcse_file, "GCSE: Replacing the source in insn %d with",
3401 INSN_UID (insn));
3402 fprintf (gcse_file, " reg %d %s insn %d\n",
3403 REGNO (to), use_src ? "from" : "set in",
3404 INSN_UID (insn_computes_expr));
3409 /* The register that the expr is computed into is set more than once. */
3410 else if (1 /*expensive_op(this_pattrn->op) && do_expensive_gcse)*/)
3412 /* Insert an insn after INSNX that copies the reg set in INSNX
3413 into a new pseudo register; call this new register REGN.
3414 From INSNB until the end of the basic block, or until REGB is set,
3415 replace all uses of REGB with REGN. */
3416 rtx new_insn;
3418 to = gen_reg_rtx (GET_MODE (SET_DEST (expr_set)));
3420 /* Generate the new insn. */
3421 /* ??? If the change fails, we return 0, even though we created
3422 an insn. I think this is ok. */
3423 new_insn
3424 = emit_insn_after (gen_rtx_SET (VOIDmode, to,
3425 SET_DEST (expr_set)),
3426 insn_computes_expr);
3428 /* Keep register set table up to date. */
3429 record_one_set (REGNO (to), new_insn);
3431 gcse_create_count++;
3432 if (gcse_file != NULL)
3434 fprintf (gcse_file, "GCSE: Creating insn %d to copy value of reg %d",
3435 INSN_UID (NEXT_INSN (insn_computes_expr)),
3436 REGNO (SET_SRC (PATTERN (NEXT_INSN (insn_computes_expr)))));
3437 fprintf (gcse_file, ", computed in insn %d,\n",
3438 INSN_UID (insn_computes_expr));
3439 fprintf (gcse_file, " into newly allocated reg %d\n",
3440 REGNO (to));
3443 pat = PATTERN (insn);
3445 /* Do register replacement for INSN. */
3446 changed = validate_change (insn, &SET_SRC (pat),
3447 SET_DEST (PATTERN
3448 (NEXT_INSN (insn_computes_expr))),
3449 0);
3451 /* We should be able to ignore the return code from validate_change but
3452 to play it safe we check. */
3453 if (changed)
3455 gcse_subst_count++;
3456 if (gcse_file != NULL)
3458 fprintf (gcse_file,
3459 "GCSE: Replacing the source in insn %d with reg %d ",
3460 INSN_UID (insn),
3461 REGNO (SET_DEST (PATTERN (NEXT_INSN
3462 (insn_computes_expr)))));
3463 fprintf (gcse_file, "set in insn %d\n",
3464 INSN_UID (insn_computes_expr));
3469 return changed;
3472 /* Perform classic GCSE. This is called by one_classic_gcse_pass after all
3473 the dataflow analysis has been done.
3475 The result is nonzero if a change was made. */
3477 static int
3478 classic_gcse (void)
3480 int changed;
3481 rtx insn;
3482 basic_block bb;
3484 /* Note we start at block 1. */
3486 if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
3487 return 0;
3489 changed = 0;
3490 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb->next_bb, EXIT_BLOCK_PTR, next_bb)
3492 /* Reset tables used to keep track of what's still valid [since the
3493 start of the block]. */
3494 reset_opr_set_tables ();
3496 for (insn = bb->head;
3497 insn != NULL && insn != NEXT_INSN (bb->end);
3498 insn = NEXT_INSN (insn))
3500 /* Is insn of form (set (pseudo-reg) ...)? */
3501 if (GET_CODE (insn) == INSN
3502 && GET_CODE (PATTERN (insn)) == SET
3503 && GET_CODE (SET_DEST (PATTERN (insn))) == REG
3504 && REGNO (SET_DEST (PATTERN (insn))) >= FIRST_PSEUDO_REGISTER)
3506 rtx pat = PATTERN (insn);
3507 rtx src = SET_SRC (pat);
3508 struct expr *expr;
3510 if (want_to_gcse_p (src)
3511 /* Is the expression recorded? */
3512 && ((expr = lookup_expr (src, &expr_hash_table)) != NULL)
3513 /* Is the expression available [at the start of the
3514 block]? */
3515 && TEST_BIT (ae_in[bb->index], expr->bitmap_index)
3516 /* Are the operands unchanged since the start of the
3517 block? */
3518 && oprs_not_set_p (src, insn))
3519 changed |= handle_avail_expr (insn, expr);
3522 /* Keep track of everything modified by this insn. */
3523 /* ??? Need to be careful w.r.t. mods done to INSN. */
3524 if (INSN_P (insn))
3525 mark_oprs_set (insn);
3529 return changed;
3532 /* Top level routine to perform one classic GCSE pass.
3534 Return nonzero if a change was made. */
3536 static int
3537 one_classic_gcse_pass (int pass)
3539 int changed = 0;
3541 gcse_subst_count = 0;
3542 gcse_create_count = 0;
3544 alloc_hash_table (max_cuid, &expr_hash_table, 0);
3545 alloc_rd_mem (last_basic_block, max_cuid);
3546 compute_hash_table (&expr_hash_table);
3547 if (gcse_file)
3548 dump_hash_table (gcse_file, "Expression", &expr_hash_table);
3550 if (expr_hash_table.n_elems > 0)
3552 compute_kill_rd ();
3553 compute_rd ();
3554 alloc_avail_expr_mem (last_basic_block, expr_hash_table.n_elems);
3555 compute_ae_gen (&expr_hash_table);
3556 compute_ae_kill (ae_gen, ae_kill, &expr_hash_table);
3557 compute_available (ae_gen, ae_kill, ae_out, ae_in);
3558 changed = classic_gcse ();
3559 free_avail_expr_mem ();
3562 free_rd_mem ();
3563 free_hash_table (&expr_hash_table);
3565 if (gcse_file)
3567 fprintf (gcse_file, "\n");
3568 fprintf (gcse_file, "GCSE of %s, pass %d: %d bytes needed, %d substs,",
3569 current_function_name, pass, bytes_used, gcse_subst_count);
3570 fprintf (gcse_file, "%d insns created\n", gcse_create_count);
3573 return changed;
3576 /* Compute copy/constant propagation working variables. */
3578 /* Local properties of assignments. */
3579 static sbitmap *cprop_pavloc;
3580 static sbitmap *cprop_absaltered;
3582 /* Global properties of assignments (computed from the local properties). */
3583 static sbitmap *cprop_avin;
3584 static sbitmap *cprop_avout;
3586 /* Allocate vars used for copy/const propagation. N_BLOCKS is the number of
3587 basic blocks. N_SETS is the number of sets. */
3589 static void
3590 alloc_cprop_mem (int n_blocks, int n_sets)
3592 cprop_pavloc = sbitmap_vector_alloc (n_blocks, n_sets);
3593 cprop_absaltered = sbitmap_vector_alloc (n_blocks, n_sets);
3595 cprop_avin = sbitmap_vector_alloc (n_blocks, n_sets);
3596 cprop_avout = sbitmap_vector_alloc (n_blocks, n_sets);
3599 /* Free vars used by copy/const propagation. */
3601 static void
3602 free_cprop_mem (void)
3604 sbitmap_vector_free (cprop_pavloc);
3605 sbitmap_vector_free (cprop_absaltered);
3606 sbitmap_vector_free (cprop_avin);
3607 sbitmap_vector_free (cprop_avout);
3610 /* For each block, compute whether X is transparent. X is either an
3611 expression or an assignment [though we don't care which, for this context
3612 an assignment is treated as an expression]. For each block where an
3613 element of X is modified, set (SET_P == 1) or reset (SET_P == 0) the INDX
3614 bit in BMAP. */
3616 static void
3617 compute_transp (rtx x, int indx, sbitmap *bmap, int set_p)
3619 int i, j;
3620 basic_block bb;
3621 enum rtx_code code;
3622 reg_set *r;
3623 const char *fmt;
3625 /* repeat is used to turn tail-recursion into iteration since GCC
3626 can't do it when there's no return value. */
3627 repeat:
3629 if (x == 0)
3630 return;
3632 code = GET_CODE (x);
3633 switch (code)
3635 case REG:
3636 if (set_p)
3638 if (REGNO (x) < FIRST_PSEUDO_REGISTER)
3640 FOR_EACH_BB (bb)
3641 if (TEST_BIT (reg_set_in_block[bb->index], REGNO (x)))
3642 SET_BIT (bmap[bb->index], indx);
3644 else
3646 for (r = reg_set_table[REGNO (x)]; r != NULL; r = r->next)
3647 SET_BIT (bmap[BLOCK_NUM (r->insn)], indx);
3650 else
3652 if (REGNO (x) < FIRST_PSEUDO_REGISTER)
3654 FOR_EACH_BB (bb)
3655 if (TEST_BIT (reg_set_in_block[bb->index], REGNO (x)))
3656 RESET_BIT (bmap[bb->index], indx);
3658 else
3660 for (r = reg_set_table[REGNO (x)]; r != NULL; r = r->next)
3661 RESET_BIT (bmap[BLOCK_NUM (r->insn)], indx);
3665 return;
3667 case MEM:
3668 FOR_EACH_BB (bb)
3670 rtx list_entry = canon_modify_mem_list[bb->index];
3672 while (list_entry)
3674 rtx dest, dest_addr;
3676 if (GET_CODE (XEXP (list_entry, 0)) == CALL_INSN)
3678 if (set_p)
3679 SET_BIT (bmap[bb->index], indx);
3680 else
3681 RESET_BIT (bmap[bb->index], indx);
3682 break;
3684 /* LIST_ENTRY must be an INSN of some kind that sets memory.
3685 Examine each hunk of memory that is modified. */
3687 dest = XEXP (list_entry, 0);
3688 list_entry = XEXP (list_entry, 1);
3689 dest_addr = XEXP (list_entry, 0);
3691 if (canon_true_dependence (dest, GET_MODE (dest), dest_addr,
3692 x, rtx_addr_varies_p))
3694 if (set_p)
3695 SET_BIT (bmap[bb->index], indx);
3696 else
3697 RESET_BIT (bmap[bb->index], indx);
3698 break;
3700 list_entry = XEXP (list_entry, 1);
3704 x = XEXP (x, 0);
3705 goto repeat;
3707 case PC:
3708 case CC0: /*FIXME*/
3709 case CONST:
3710 case CONST_INT:
3711 case CONST_DOUBLE:
3712 case CONST_VECTOR:
3713 case SYMBOL_REF:
3714 case LABEL_REF:
3715 case ADDR_VEC:
3716 case ADDR_DIFF_VEC:
3717 return;
3719 default:
3720 break;
3723 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
3725 if (fmt[i] == 'e')
3727 /* If we are about to do the last recursive call
3728 needed at this level, change it into iteration.
3729 This function is called enough to be worth it. */
3730 if (i == 0)
3732 x = XEXP (x, i);
3733 goto repeat;
3736 compute_transp (XEXP (x, i), indx, bmap, set_p);
3738 else if (fmt[i] == 'E')
3739 for (j = 0; j < XVECLEN (x, i); j++)
3740 compute_transp (XVECEXP (x, i, j), indx, bmap, set_p);
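/* Editorial summary: "transparent" here is the usual PRE/LCM notion --
   X is transparent in a block when nothing in the block can change X's
   value, i.e. none of its registers is set there and, for a MEM, no
   potentially aliased store occurs there; those are exactly the
   conditions the REG and MEM cases above record in BMAP.  */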
3744 /* Top level routine to do the dataflow analysis needed by copy/const
3745 propagation. */
3747 static void
3748 compute_cprop_data (void)
3750 compute_local_properties (cprop_absaltered, cprop_pavloc, NULL, &set_hash_table);
3751 compute_available (cprop_pavloc, cprop_absaltered,
3752 cprop_avout, cprop_avin);
3755 /* Copy/constant propagation. */
3757 /* Maximum number of register uses in an insn that we handle. */
3758 #define MAX_USES 8
3760 /* Table of uses found in an insn.
3761 Allocated statically to avoid alloc/free complexity and overhead. */
3762 static struct reg_use reg_use_table[MAX_USES];
3764 /* Index into `reg_use_table' while building it. */
3765 static int reg_use_count;
3767 /* Set up a list of register numbers used in INSN. The found uses are stored
3768 in `reg_use_table'. `reg_use_count' is initialized to zero before entry,
3769 and contains the number of uses in the table upon exit.
3771 ??? If a register appears multiple times we will record it multiple times.
3772 This doesn't hurt anything but it will slow things down. */
3774 static void
3775 find_used_regs (rtx *xptr, void *data ATTRIBUTE_UNUSED)
3777 int i, j;
3778 enum rtx_code code;
3779 const char *fmt;
3780 rtx x = *xptr;
3782 /* repeat is used to turn tail-recursion into iteration since GCC
3783 can't do it when there's no return value. */
3784 repeat:
3785 if (x == 0)
3786 return;
3788 code = GET_CODE (x);
3789 if (REG_P (x))
3791 if (reg_use_count == MAX_USES)
3792 return;
3794 reg_use_table[reg_use_count].reg_rtx = x;
3795 reg_use_count++;
3798 /* Recursively scan the operands of this expression. */
3800 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
3802 if (fmt[i] == 'e')
3804 /* If we are about to do the last recursive call
3805 needed at this level, change it into iteration.
3806 This function is called enough to be worth it. */
3807 if (i == 0)
3809 x = XEXP (x, 0);
3810 goto repeat;
3813 find_used_regs (&XEXP (x, i), data);
3815 else if (fmt[i] == 'E')
3816 for (j = 0; j < XVECLEN (x, i); j++)
3817 find_used_regs (&XVECEXP (x, i, j), data);
3821 /* Try to replace all non-SET_DEST occurrences of FROM in INSN with TO.
3822 Returns nonzero if successful. */
3824 static int
3825 try_replace_reg (rtx from, rtx to, rtx insn)
3827 rtx note = find_reg_equal_equiv_note (insn);
3828 rtx src = 0;
3829 int success = 0;
3830 rtx set = single_set (insn);
3832 validate_replace_src_group (from, to, insn);
3833 if (num_changes_pending () && apply_change_group ())
3834 success = 1;
3836 /* Try to simplify SET_SRC if we have substituted a constant. */
3837 if (success && set && CONSTANT_P (to))
3839 src = simplify_rtx (SET_SRC (set));
3841 if (src)
3842 validate_change (insn, &SET_SRC (set), src, 0);
3845 /* If there is already a NOTE, update the expression in it with our
3846 replacement. */
3847 if (note != 0)
3848 XEXP (note, 0) = simplify_replace_rtx (XEXP (note, 0), from, to);
3850 if (!success && set && reg_mentioned_p (from, SET_SRC (set)))
3852 /* If the above failed and this is a single set, try to simplify the source of
3853 the set given our substitution. We could perhaps try this for multiple
3854 SETs, but it probably won't buy us anything. */
3855 src = simplify_replace_rtx (SET_SRC (set), from, to);
3857 if (!rtx_equal_p (src, SET_SRC (set))
3858 && validate_change (insn, &SET_SRC (set), src, 0))
3859 success = 1;
3861 /* If we've failed to do replacement, have a single SET, don't already
3862 have a note, and have no special SET, add a REG_EQUAL note to not
3863 lose information. */
3864 if (!success && note == 0 && set != 0
3865 && GET_CODE (XEXP (set, 0)) != ZERO_EXTRACT
3866 && GET_CODE (XEXP (set, 0)) != SIGN_EXTRACT)
3867 note = set_unique_reg_note (insn, REG_EQUAL, copy_rtx (src));
3870 /* A REG_EQUAL note may get simplified into a register.
3871 We don't allow that; remove such a note. This case ought
3872 not to happen, because the previous code ought to synthesize a
3873 reg-reg move, but be on the safe side. */
3874 if (note && REG_P (XEXP (note, 0)))
3875 remove_note (insn, note);
3877 return success;
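/* Illustration (not in the original source): given

     INSN: (set (reg 101) (plus:SI (reg 100) (const_int 4)))

   with FROM = (reg 100) and TO = (const_int 8), the replacement
   yields (plus:SI (const_int 8) (const_int 4)), which simplify_rtx
   then folds so the insn becomes

     (set (reg 101) (const_int 12))

   assuming the target accepts the constant; otherwise a REG_EQUAL
   note records the known value.  */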
3880 /* Find a set of REGNO that is available on entry to INSN's block.
3881 Returns NULL if no such set is found. */
3883 static struct expr *
3884 find_avail_set (int regno, rtx insn)
3886 /* SET1 contains the last set found that can be returned to the caller for
3887 use in a substitution. */
3888 struct expr *set1 = 0;
3890 /* Loops are not possible here. To get a loop we would need two sets
3891 available at the start of the block containing INSN, i.e. we would
3892 need two sets like this available at the start of the block:
3894 (set (reg X) (reg Y))
3895 (set (reg Y) (reg X))
3897 This cannot happen since the set of (reg Y) would have killed the
3898 set of (reg X) making it unavailable at the start of this block. */
3899 while (1)
3901 rtx src;
3902 struct expr *set = lookup_set (regno, &set_hash_table);
3904 /* Find a set that is available at the start of the block
3905 which contains INSN. */
3906 while (set)
3908 if (TEST_BIT (cprop_avin[BLOCK_NUM (insn)], set->bitmap_index))
3909 break;
3910 set = next_set (regno, set);
3913 /* If no available set was found we've reached the end of the
3914 (possibly empty) copy chain. */
3915 if (set == 0)
3916 break;
3918 if (GET_CODE (set->expr) != SET)
3919 abort ();
3921 src = SET_SRC (set->expr);
3923 /* We know the set is available.
3924 Now check that SRC is ANTLOC (i.e. none of the source operands
3925 have changed since the start of the block).
3927 If the source operand changed, we may still use it for the next
3928 iteration of this loop, but we may not use it for substitutions. */
3930 if (gcse_constant_p (src) || oprs_not_set_p (src, insn))
3931 set1 = set;
3933 /* If the source of the set is anything except a register, then
3934 we have reached the end of the copy chain. */
3935 if (GET_CODE (src) != REG)
3936 break;
3938 /* Follow the copy chain, i.e. start another iteration of the loop
3939 and see if we have an available copy into SRC. */
3940 regno = REGNO (src);
3943 /* SET1 holds the last set that was available and anticipatable at
3944 INSN. */
3945 return set1;
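/* Example of the copy-chain walk above (illustrative only):

     (set (reg Z) (const_int 7))   ; available at INSN's block start
     (set (reg Y) (reg Z))         ; likewise available
     INSN uses (reg Y)

   Looking up Y finds the copy from Z; following the chain to Z finds
   the constant set, so SET1 ends up being the innermost set whose
   source operands are still valid at INSN.  */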
3948 /* Subroutine of cprop_insn that tries to propagate constants into
3949 JUMP_INSNS. JUMP must be a conditional jump. If SETCC is non-NULL
3950 it is the instruction that immediately precedes JUMP, and must be a
3951 single SET of a register. FROM is what we will try to replace,
3952 SRC is the constant we will try to substitute for it. Returns nonzero
3953 if a change was made. */
3955 static int
3956 cprop_jump (basic_block bb, rtx setcc, rtx jump, rtx from, rtx src)
3958 rtx new, set_src, note_src;
3959 rtx set = pc_set (jump);
3960 rtx note = find_reg_equal_equiv_note (jump);
3962 if (note)
3964 note_src = XEXP (note, 0);
3965 if (GET_CODE (note_src) == EXPR_LIST)
3966 note_src = NULL_RTX;
3968 else note_src = NULL_RTX;
3970 /* Prefer REG_EQUAL notes except those containing EXPR_LISTs. */
3971 set_src = note_src ? note_src : SET_SRC (set);
3973 /* First substitute the SETCC condition into the JUMP instruction,
3974 then substitute the given values into this expanded JUMP. */
3975 if (setcc != NULL_RTX
3976 && !modified_between_p (from, setcc, jump)
3977 && !modified_between_p (src, setcc, jump))
3979 rtx setcc_src;
3980 rtx setcc_set = single_set (setcc);
3981 rtx setcc_note = find_reg_equal_equiv_note (setcc);
3982 setcc_src = (setcc_note && GET_CODE (XEXP (setcc_note, 0)) != EXPR_LIST)
3983 ? XEXP (setcc_note, 0) : SET_SRC (setcc_set);
3984 set_src = simplify_replace_rtx (set_src, SET_DEST (setcc_set),
3985 setcc_src);
3987 else
3988 setcc = NULL_RTX;
3990 new = simplify_replace_rtx (set_src, from, src);
3992 /* If no simplification can be made, then try the next register. */
3993 if (rtx_equal_p (new, SET_SRC (set)))
3994 return 0;
3996 /* If this is now a no-op delete it, otherwise this must be a valid insn. */
3997 if (new == pc_rtx)
3998 delete_insn (jump);
3999 else
4001 /* Ensure the value computed inside the jump insn is equivalent
4002 to the one computed by setcc. */
4003 if (setcc && modified_in_p (new, setcc))
4004 return 0;
4005 if (! validate_change (jump, &SET_SRC (set), new, 0))
4007 /* When (some) constants are not valid in a comparison, and there
4008 are two registers to be replaced by constants before the entire
4009 comparison can be folded into a constant, we need to keep
4010 intermediate information in REG_EQUAL notes. For targets with
4011 separate compare insns, such notes are added by try_replace_reg.
4012 When we have a combined compare-and-branch instruction, however,
4013 we need to attach a note to the branch itself to make this
4014 optimization work. */
4016 if (!rtx_equal_p (new, note_src))
4017 set_unique_reg_note (jump, REG_EQUAL, copy_rtx (new));
4018 return 0;
4021 /* Remove REG_EQUAL note after simplification. */
4022 if (note_src)
4023 remove_note (jump, note);
4025 /* If this has turned into an unconditional jump,
4026 then put a barrier after it so that the unreachable
4027 code will be deleted. */
4028 if (GET_CODE (SET_SRC (set)) == LABEL_REF)
4029 emit_barrier_after (jump);
4032 #ifdef HAVE_cc0
4033 /* Delete the cc0 setter. */
4034 if (setcc != NULL && CC0_P (SET_DEST (single_set (setcc))))
4035 delete_insn (setcc);
4036 #endif
4038 run_jump_opt_after_gcse = 1;
4040 const_prop_count++;
4041 if (gcse_file != NULL)
4043 fprintf (gcse_file,
4044 "CONST-PROP: Replacing reg %d in jump_insn %d with constant ",
4045 REGNO (from), INSN_UID (jump));
4046 print_rtl (gcse_file, src);
4047 fprintf (gcse_file, "\n");
4049 purge_dead_edges (bb);
4051 return 1;
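/* Illustration (not from the original source): with FROM = (reg 100)
   and SRC = (const_int 0), a jump such as

     (set (pc) (if_then_else (ne (reg 100) (const_int 0))
                             (label_ref 23) (pc)))

   has its source simplified to (pc) -- a no-op jump, which is
   deleted -- or, with an eq test instead, to (label_ref 23), making
   the jump unconditional, after which a barrier is emitted so the
   unreachable path can be removed.  */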
4054 static bool
4055 constprop_register (rtx insn, rtx from, rtx to, int alter_jumps)
4057 rtx sset;
4059 /* Check for reg or cc0 setting instructions followed by
4060 conditional branch instructions first. */
4061 if (alter_jumps
4062 && (sset = single_set (insn)) != NULL
4063 && NEXT_INSN (insn)
4064 && any_condjump_p (NEXT_INSN (insn)) && onlyjump_p (NEXT_INSN (insn)))
4066 rtx dest = SET_DEST (sset);
4067 if ((REG_P (dest) || CC0_P (dest))
4068 && cprop_jump (BLOCK_FOR_INSN (insn), insn, NEXT_INSN (insn), from, to))
4069 return 1;
4072 /* Handle normal insns next. */
4073 if (GET_CODE (insn) == INSN
4074 && try_replace_reg (from, to, insn))
4075 return 1;
4077 /* Try to propagate a CONST_INT into a conditional jump.
4078 We're pretty specific about what we will handle in this
4079 code; we can extend this as necessary over time.
4081 Right now the insn in question must look like
4082 (set (pc) (if_then_else ...)) */
4083 else if (alter_jumps && any_condjump_p (insn) && onlyjump_p (insn))
4084 return cprop_jump (BLOCK_FOR_INSN (insn), NULL, insn, from, to);
4085 return 0;
4088 /* Perform constant and copy propagation on INSN.
4089 The result is nonzero if a change was made. */
4091 static int
4092 cprop_insn (rtx insn, int alter_jumps)
4094 struct reg_use *reg_used;
4095 int changed = 0;
4096 rtx note;
4098 if (!INSN_P (insn))
4099 return 0;
4101 reg_use_count = 0;
4102 note_uses (&PATTERN (insn), find_used_regs, NULL);
4104 note = find_reg_equal_equiv_note (insn);
4106 /* We may win even when propagating constants into notes. */
4107 if (note)
4108 find_used_regs (&XEXP (note, 0), NULL);
4110 for (reg_used = &reg_use_table[0]; reg_use_count > 0;
4111 reg_used++, reg_use_count--)
4113 unsigned int regno = REGNO (reg_used->reg_rtx);
4114 rtx pat, src;
4115 struct expr *set;
4117 /* Ignore registers created by GCSE.
4118 We do this because the tables used by this pass were sized before
4119 GCSE created these registers, so no information is available for them. */
4119 if (regno >= max_gcse_regno)
4120 continue;
4122 /* If the register has already been set in this block, there's
4123 nothing we can do. */
4124 if (! oprs_not_set_p (reg_used->reg_rtx, insn))
4125 continue;
4127 /* Find an assignment that sets reg_used and is available
4128 at the start of the block. */
4129 set = find_avail_set (regno, insn);
4130 if (! set)
4131 continue;
4133 pat = set->expr;
4134 /* ??? We might be able to handle PARALLELs. Later. */
4135 if (GET_CODE (pat) != SET)
4136 abort ();
4138 src = SET_SRC (pat);
4140 /* Constant propagation. */
4141 if (gcse_constant_p (src))
4143 if (constprop_register (insn, reg_used->reg_rtx, src, alter_jumps))
4145 changed = 1;
4146 const_prop_count++;
4147 if (gcse_file != NULL)
4149 fprintf (gcse_file, "GLOBAL CONST-PROP: Replacing reg %d in ", regno);
4150 fprintf (gcse_file, "insn %d with constant ", INSN_UID (insn));
4151 print_rtl (gcse_file, src);
4152 fprintf (gcse_file, "\n");
4154 if (INSN_DELETED_P (insn))
4155 return 1;
4158 else if (GET_CODE (src) == REG
4159 && REGNO (src) >= FIRST_PSEUDO_REGISTER
4160 && REGNO (src) != regno)
4162 if (try_replace_reg (reg_used->reg_rtx, src, insn))
4164 changed = 1;
4165 copy_prop_count++;
4166 if (gcse_file != NULL)
4168 fprintf (gcse_file, "GLOBAL COPY-PROP: Replacing reg %d in insn %d",
4169 regno, INSN_UID (insn));
4170 fprintf (gcse_file, " with reg %d\n", REGNO (src));
4173 /* The original insn setting reg_used may or may not now be
4174 deletable. We leave the deletion to flow. */
4175 /* FIXME: If it turns out that the insn isn't deletable,
4176 then we may have unnecessarily extended register lifetimes
4177 and made things worse. */
4182 return changed;
4185 /* Like find_used_regs, but avoid recording uses that appear in
4186 input-output contexts such as zero_extract or pre_dec. This
4187 restricts the cases we consider to those for which local cprop
4188 can legitimately make replacements. */
4190 static void
4191 local_cprop_find_used_regs (rtx *xptr, void *data)
4193 rtx x = *xptr;
4195 if (x == 0)
4196 return;
4198 switch (GET_CODE (x))
4200 case ZERO_EXTRACT:
4201 case SIGN_EXTRACT:
4202 case STRICT_LOW_PART:
4203 return;
4205 case PRE_DEC:
4206 case PRE_INC:
4207 case POST_DEC:
4208 case POST_INC:
4209 case PRE_MODIFY:
4210 case POST_MODIFY:
4211 /* Can only legitimately appear this early in the context of
4212 stack pushes for function arguments, but handle all of the
4213 codes nonetheless. */
4214 return;
4216 case SUBREG:
4217 /* Setting a subreg of a register larger than word_mode leaves
4218 the non-written words unchanged. */
4219 if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) > BITS_PER_WORD)
4220 return;
4221 break;
4223 default:
4224 break;
4227 find_used_regs (xptr, data);
4230 /* LIBCALL_SP is a zero-terminated array of insns at the end of a libcall;
4231 their REG_EQUAL notes need updating. */
4233 static bool
4234 do_local_cprop (rtx x, rtx insn, int alter_jumps, rtx *libcall_sp)
4236 rtx newreg = NULL, newcnst = NULL;
4238 /* Rule out USE instructions and ASM statements as we don't want to
4239 change the hard registers mentioned. */
4240 if (GET_CODE (x) == REG
4241 && (REGNO (x) >= FIRST_PSEUDO_REGISTER
4242 || (GET_CODE (PATTERN (insn)) != USE
4243 && asm_noperands (PATTERN (insn)) < 0)))
4245 cselib_val *val = cselib_lookup (x, GET_MODE (x), 0);
4246 struct elt_loc_list *l;
4248 if (!val)
4249 return false;
4250 for (l = val->locs; l; l = l->next)
4252 rtx this_rtx = l->loc;
4253 rtx note;
4255 if (l->in_libcall)
4256 continue;
4258 if (gcse_constant_p (this_rtx))
4259 newcnst = this_rtx;
4260 if (REG_P (this_rtx) && REGNO (this_rtx) >= FIRST_PSEUDO_REGISTER
4261 /* Don't copy propagate if it has an attached REG_EQUIV note.
4262 At this point only function parameters should have
4263 REG_EQUIV notes, and if the argument slot is used somewhere
4264 explicitly, it means the address of the parameter has been taken,
4265 so we should not extend the lifetime of the pseudo. */
4266 && (!(note = find_reg_note (l->setting_insn, REG_EQUIV, NULL_RTX))
4267 || GET_CODE (XEXP (note, 0)) != MEM))
4268 newreg = this_rtx;
4270 if (newcnst && constprop_register (insn, x, newcnst, alter_jumps))
4272 /* If we find a case where we can't make the retval REG_EQUAL notes
4273 match the new register, we either have to abandon this replacement
4274 or fix delete_trivially_dead_insns to preserve the setting insn,
4275 or make it delete the REG_EQUAL note, and fix up all passes that
4276 require the REG_EQUAL note there. */
4277 if (!adjust_libcall_notes (x, newcnst, insn, libcall_sp))
4278 abort ();
4279 if (gcse_file != NULL)
4281 fprintf (gcse_file, "LOCAL CONST-PROP: Replacing reg %d in ",
4282 REGNO (x));
4283 fprintf (gcse_file, "insn %d with constant ",
4284 INSN_UID (insn));
4285 print_rtl (gcse_file, newcnst);
4286 fprintf (gcse_file, "\n");
4288 const_prop_count++;
4289 return true;
4291 else if (newreg && newreg != x && try_replace_reg (x, newreg, insn))
4293 adjust_libcall_notes (x, newreg, insn, libcall_sp);
4294 if (gcse_file != NULL)
4296 fprintf (gcse_file,
4297 "LOCAL COPY-PROP: Replacing reg %d in insn %d",
4298 REGNO (x), INSN_UID (insn));
4299 fprintf (gcse_file, " with reg %d\n", REGNO (newreg));
4301 copy_prop_count++;
4302 return true;
4305 return false;
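/* Illustrative flow (assumptions noted): if cselib has already seen
   (set (reg 105) (const_int 42)) earlier in the block, a later use of
   (reg 105) looks up its cselib value, finds the constant among the
   value's locations, and constprop_register rewrites the use, e.g.

     (set (reg 106) (plus:SI (reg 105) (reg 107)))
       -->  (set (reg 106) (plus:SI (const_int 42) (reg 107)))

   provided the result still matches an insn pattern.  */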
4308 /* LIBCALL_SP is a zero-terminated array of insns at the end of a libcall;
4309 their REG_EQUAL notes need updating to reflect that OLDREG has been
4310 replaced with NEWVAL in INSN. Return true if all substitutions could
4311 be made. */
4312 static bool
4313 adjust_libcall_notes (rtx oldreg, rtx newval, rtx insn, rtx *libcall_sp)
4315 rtx end;
4317 while ((end = *libcall_sp++))
4319 rtx note = find_reg_equal_equiv_note (end);
4321 if (! note)
4322 continue;
4324 if (REG_P (newval))
4326 if (reg_set_between_p (newval, PREV_INSN (insn), end))
4330 note = find_reg_equal_equiv_note (end);
4331 if (! note)
4332 continue;
4333 if (reg_mentioned_p (newval, XEXP (note, 0)))
4334 return false;
4336 while ((end = *libcall_sp++));
4337 return true;
4340 XEXP (note, 0) = replace_rtx (XEXP (note, 0), oldreg, newval);
4341 insn = end;
4343 return true;
4346 #define MAX_NESTED_LIBCALLS 9
4348 static void
4349 local_cprop_pass (int alter_jumps)
4351 rtx insn;
4352 struct reg_use *reg_used;
4353 rtx libcall_stack[MAX_NESTED_LIBCALLS + 1], *libcall_sp;
4354 bool changed = false;
4356 cselib_init ();
4357 libcall_sp = &libcall_stack[MAX_NESTED_LIBCALLS];
4358 *libcall_sp = 0;
4359 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4361 if (INSN_P (insn))
4363 rtx note = find_reg_note (insn, REG_LIBCALL, NULL_RTX);
4365 if (note)
4367 if (libcall_sp == libcall_stack)
4368 abort ();
4369 *--libcall_sp = XEXP (note, 0);
4371 note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
4372 if (note)
4373 libcall_sp++;
4374 note = find_reg_equal_equiv_note (insn);
4377 reg_use_count = 0;
4378 note_uses (&PATTERN (insn), local_cprop_find_used_regs, NULL);
4379 if (note)
4380 local_cprop_find_used_regs (&XEXP (note, 0), NULL);
4382 for (reg_used = &reg_use_table[0]; reg_use_count > 0;
4383 reg_used++, reg_use_count--)
4384 if (do_local_cprop (reg_used->reg_rtx, insn, alter_jumps,
4385 libcall_sp))
4387 changed = true;
4388 break;
4390 if (INSN_DELETED_P (insn))
4391 break;
4393 while (reg_use_count);
4395 cselib_process_insn (insn);
4397 cselib_finish ();
4398 /* Global analysis may get into infinite loops for unreachable blocks. */
4399 if (changed && alter_jumps)
4401 delete_unreachable_blocks ();
4402 free_reg_set_mem ();
4403 alloc_reg_set_mem (max_reg_num ());
4404 compute_sets (get_insns ());
4408 /* Forward propagate copies. This includes copies and constants. Return
4409 nonzero if a change was made. */
4411 static int
4412 cprop (int alter_jumps)
4414 int changed;
4415 basic_block bb;
4416 rtx insn;
4418 /* Note we start at block 1. */
4419 if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
4421 if (gcse_file != NULL)
4422 fprintf (gcse_file, "\n");
4423 return 0;
4426 changed = 0;
4427 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb->next_bb, EXIT_BLOCK_PTR, next_bb)
4429 /* Reset tables used to keep track of what's still valid [since the
4430 start of the block]. */
4431 reset_opr_set_tables ();
4433 for (insn = bb->head;
4434 insn != NULL && insn != NEXT_INSN (bb->end);
4435 insn = NEXT_INSN (insn))
4436 if (INSN_P (insn))
4438 changed |= cprop_insn (insn, alter_jumps);
4440 /* Keep track of everything modified by this insn. */
4441 /* ??? Need to be careful w.r.t. mods done to INSN. Don't
4442 call mark_oprs_set if we turned the insn into a NOTE. */
4443 if (GET_CODE (insn) != NOTE)
4444 mark_oprs_set (insn);
4448 if (gcse_file != NULL)
4449 fprintf (gcse_file, "\n");
4451 return changed;
4454 /* Similar to get_condition, only the resulting condition must be
4455 valid at JUMP, instead of at EARLIEST.
4457 This differs from noce_get_condition in ifcvt.c in that we prefer not to
4458 settle for the condition variable in the jump instruction being integral.
4459 We prefer to be able to record the value of a user variable, rather than
4460 the value of a temporary used in a condition. This could be solved by
4461 recording the value of *every* register scanned by canonicalize_condition,
4462 but this would require some code reorganization. */
4464 static rtx
4465 fis_get_condition (rtx jump)
4467 rtx cond, set, tmp, insn, earliest;
4468 bool reverse;
4470 if (! any_condjump_p (jump))
4471 return NULL_RTX;
4473 set = pc_set (jump);
4474 cond = XEXP (SET_SRC (set), 0);
4476 /* If this branches to JUMP_LABEL when the condition is false,
4477 reverse the condition. */
4478 reverse = (GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
4479 && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (jump));
4481 /* Use canonicalize_condition to do the dirty work of manipulating
4482 MODE_CC values and COMPARE rtx codes. */
4483 tmp = canonicalize_condition (jump, cond, reverse, &earliest, NULL_RTX);
4484 if (!tmp)
4485 return NULL_RTX;
4487 /* Verify that the given condition is valid at JUMP by virtue of not
4488 having been modified since EARLIEST. */
4489 for (insn = earliest; insn != jump; insn = NEXT_INSN (insn))
4490 if (INSN_P (insn) && modified_in_p (tmp, insn))
4491 break;
4492 if (insn == jump)
4493 return tmp;
4495 /* The condition was modified. See if we can get a partial result
4496 that doesn't follow all the reversals. Perhaps combine can fold
4497 them together later. */
4498 tmp = XEXP (tmp, 0);
4499 if (!REG_P (tmp) || GET_MODE_CLASS (GET_MODE (tmp)) != MODE_INT)
4500 return NULL_RTX;
4501 tmp = canonicalize_condition (jump, cond, reverse, &earliest, tmp);
4502 if (!tmp)
4503 return NULL_RTX;
4505 /* For sanity's sake, re-validate the new result. */
4506 for (insn = earliest; insn != jump; insn = NEXT_INSN (insn))
4507 if (INSN_P (insn) && modified_in_p (tmp, insn))
4508 return NULL_RTX;
4510 return tmp;
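/* Example (illustrative): on a MODE_CC target, "if (x == 2)" may be

     (set (reg:CCZ flags) (compare:CCZ (reg 100) (const_int 2)))
     (set (pc) (if_then_else (eq (reg:CCZ flags) (const_int 0)) ...))

   and canonicalize_condition recovers (eq (reg 100) (const_int 2)),
   the form that find_implicit_sets below can use.  */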
4513 /* Find the implicit sets of a function. An "implicit set" is a constraint
4514 on the value of a variable, implied by a conditional jump. For example,
4515 following "if (x == 2)", the then branch may be optimized as though the
4516 conditional performed an "explicit set", in this example, "x = 2". This
4517 function records the set patterns that are implicit at the start of each
4518 basic block. */
4520 static void
4521 find_implicit_sets (void)
4523 basic_block bb, dest;
4524 unsigned int count;
4525 rtx cond, new;
4527 count = 0;
4528 FOR_EACH_BB (bb)
4529 /* Check for more than one successor. */
4530 if (bb->succ && bb->succ->succ_next)
4532 cond = fis_get_condition (bb->end);
4534 if (cond
4535 && (GET_CODE (cond) == EQ || GET_CODE (cond) == NE)
4536 && GET_CODE (XEXP (cond, 0)) == REG
4537 && REGNO (XEXP (cond, 0)) >= FIRST_PSEUDO_REGISTER
4538 && gcse_constant_p (XEXP (cond, 1)))
4540 dest = GET_CODE (cond) == EQ ? BRANCH_EDGE (bb)->dest
4541 : FALLTHRU_EDGE (bb)->dest;
4543 if (dest && ! dest->pred->pred_next
4544 && dest != EXIT_BLOCK_PTR)
4546 new = gen_rtx_SET (VOIDmode, XEXP (cond, 0),
4547 XEXP (cond, 1));
4548 implicit_sets[dest->index] = new;
4549 if (gcse_file)
4551 fprintf (gcse_file, "Implicit set of reg %d in ",
4552 REGNO (XEXP (cond, 0)));
4553 fprintf (gcse_file, "basic block %d\n", dest->index);
4555 count++;
4560 if (gcse_file)
4561 fprintf (gcse_file, "Found %d implicit sets\n", count);
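/* Continuing the comment's example with illustrative RTL: for
   "if (x == 2)" ending block B, with X in (reg 100), the branch
   target D (provided D has no other predecessors) gets

     implicit_sets[D->index] = (set (reg 100) (const_int 2))

   which the hash table construction then treats as an ordinary set
   occurring at the start of D.  */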
4564 /* Perform one copy/constant propagation pass.
4565 PASS is the pass count. If CPROP_JUMPS is true, perform constant
4566 propagation into conditional jumps. If BYPASS_JUMPS is true,
4567 perform conditional jump bypassing optimizations. */
4569 static int
4570 one_cprop_pass (int pass, int cprop_jumps, int bypass_jumps)
4572 int changed = 0;
4574 const_prop_count = 0;
4575 copy_prop_count = 0;
4577 local_cprop_pass (cprop_jumps);
4579 /* Determine implicit sets. */
4580 implicit_sets = xcalloc (last_basic_block, sizeof (rtx));
4581 find_implicit_sets ();
4583 alloc_hash_table (max_cuid, &set_hash_table, 1);
4584 compute_hash_table (&set_hash_table);
4586 /* Free implicit_sets before peak usage. */
4587 free (implicit_sets);
4588 implicit_sets = NULL;
4590 if (gcse_file)
4591 dump_hash_table (gcse_file, "SET", &set_hash_table);
4592 if (set_hash_table.n_elems > 0)
4594 alloc_cprop_mem (last_basic_block, set_hash_table.n_elems);
4595 compute_cprop_data ();
4596 changed = cprop (cprop_jumps);
4597 if (bypass_jumps)
4598 changed |= bypass_conditional_jumps ();
4599 free_cprop_mem ();
4602 free_hash_table (&set_hash_table);
4604 if (gcse_file)
4606 fprintf (gcse_file, "CPROP of %s, pass %d: %d bytes needed, ",
4607 current_function_name, pass, bytes_used);
4608 fprintf (gcse_file, "%d const props, %d copy props\n\n",
4609 const_prop_count, copy_prop_count);
4611 /* Global analysis may get into infinite loops for unreachable blocks. */
4612 if (changed && cprop_jumps)
4613 delete_unreachable_blocks ();
4615 return changed;
4618 /* Bypass conditional jumps. */
4620 /* The value of last_basic_block at the beginning of the jump_bypass
4621 pass. The use of redirect_edge_and_branch_force may introduce new
4622 basic blocks, but the data flow analysis is only valid for basic
4623 block indices less than bypass_last_basic_block. */
4625 static int bypass_last_basic_block;
4627 /* Find a set of REGNO to a constant that is available at the end of basic
4628 block BB. Returns NULL if no such set is found. Based heavily upon
4629 find_avail_set. */
4631 static struct expr *
4632 find_bypass_set (int regno, int bb)
4634 struct expr *result = 0;
4636 for (;;)
4638 rtx src;
4639 struct expr *set = lookup_set (regno, &set_hash_table);
4641 while (set)
4643 if (TEST_BIT (cprop_avout[bb], set->bitmap_index))
4644 break;
4645 set = next_set (regno, set);
4648 if (set == 0)
4649 break;
4651 if (GET_CODE (set->expr) != SET)
4652 abort ();
4654 src = SET_SRC (set->expr);
4655 if (gcse_constant_p (src))
4656 result = set;
4658 if (GET_CODE (src) != REG)
4659 break;
4661 regno = REGNO (src);
4663 return result;
4667 /* Subroutine of bypass_block that checks whether a pseudo is killed by
4668 any of the instructions inserted on an edge. Jump bypassing places
4669 condition code setters on CFG edges using insert_insn_on_edge. This
4670 function is required to check that our data flow analysis is still
4671 valid prior to commit_edge_insertions. */
4673 static bool
4674 reg_killed_on_edge (rtx reg, edge e)
4676 rtx insn;
4678 for (insn = e->insns; insn; insn = NEXT_INSN (insn))
4679 if (INSN_P (insn) && reg_set_p (reg, insn))
4680 return true;
4682 return false;
4685 /* Subroutine of bypass_conditional_jumps that attempts to bypass the given
4686 basic block BB which has more than one predecessor. If not NULL, SETCC
4687 is the first instruction of BB, which is immediately followed by JUMP_INSN
4688 JUMP. Otherwise, SETCC is NULL, and JUMP is the first insn of BB.
4689 Returns nonzero if a change was made.
4691 During the jump bypassing pass, we may place copies of SETCC instructions
4692 on CFG edges. The following routine must be careful to pay attention to
4693 these inserted insns when performing its transformations. */
4695 static int
4696 bypass_block (basic_block bb, rtx setcc, rtx jump)
4698 rtx insn, note;
4699 edge e, enext, edest;
4700 int i, change;
4701 int may_be_loop_header;
4703 insn = (setcc != NULL) ? setcc : jump;
4705 /* Determine set of register uses in INSN. */
4706 reg_use_count = 0;
4707 note_uses (&PATTERN (insn), find_used_regs, NULL);
4708 note = find_reg_equal_equiv_note (insn);
4709 if (note)
4710 find_used_regs (&XEXP (note, 0), NULL);
4712 may_be_loop_header = false;
4713 for (e = bb->pred; e; e = e->pred_next)
4714 if (e->flags & EDGE_DFS_BACK)
4716 may_be_loop_header = true;
4717 break;
4720 change = 0;
4721 for (e = bb->pred; e; e = enext)
4723 enext = e->pred_next;
4724 if (e->flags & EDGE_COMPLEX)
4725 continue;
4727 /* We can't redirect edges from new basic blocks. */
4728 if (e->src->index >= bypass_last_basic_block)
4729 continue;
4731 /* The irreducible loops created by redirecting edges entering the
4732 loop from outside would decrease the effectiveness of some of the
4733 following optimizations, so prevent this. */
4734 if (may_be_loop_header
4735 && !(e->flags & EDGE_DFS_BACK))
4736 continue;
4738 for (i = 0; i < reg_use_count; i++)
4740 struct reg_use *reg_used = &reg_use_table[i];
4741 unsigned int regno = REGNO (reg_used->reg_rtx);
4742 basic_block dest, old_dest;
4743 struct expr *set;
4744 rtx src, new;
4746 if (regno >= max_gcse_regno)
4747 continue;
4749 set = find_bypass_set (regno, e->src->index);
4751 if (! set)
4752 continue;
4754 /* Check that the data flow is still valid after edge insertions. */
4755 if (e->insns && reg_killed_on_edge (reg_used->reg_rtx, e))
4756 continue;
4758 src = SET_SRC (pc_set (jump));
4760 if (setcc != NULL)
4761 src = simplify_replace_rtx (src,
4762 SET_DEST (PATTERN (setcc)),
4763 SET_SRC (PATTERN (setcc)));
4765 new = simplify_replace_rtx (src, reg_used->reg_rtx,
4766 SET_SRC (set->expr));
4768 /* Jump bypassing may have already placed instructions on
4769 edges of the CFG. We can't bypass an outgoing edge that
4770 has instructions associated with it, as these insns won't
4771 get executed if the incoming edge is redirected. */
4773 if (new == pc_rtx)
4775 edest = FALLTHRU_EDGE (bb);
4776 dest = edest->insns ? NULL : edest->dest;
4778 else if (GET_CODE (new) == LABEL_REF)
4780 dest = BLOCK_FOR_INSN (XEXP (new, 0));
4781 /* Don't bypass edges containing instructions. */
4782 for (edest = bb->succ; edest; edest = edest->succ_next)
4783 if (edest->dest == dest && edest->insns)
4785 dest = NULL;
4786 break;
4789 else
4790 dest = NULL;
4792 old_dest = e->dest;
4793 if (dest != NULL
4794 && dest != old_dest
4795 && dest != EXIT_BLOCK_PTR)
4797 redirect_edge_and_branch_force (e, dest);
4799 /* Copy the register setter to the redirected edge.
4800 Don't copy CC0 setters, as CC0 is dead after jump. */
4801 if (setcc)
4803 rtx pat = PATTERN (setcc);
4804 if (!CC0_P (SET_DEST (pat)))
4805 insert_insn_on_edge (copy_insn (pat), e);
4808 if (gcse_file != NULL)
4810 fprintf (gcse_file, "JUMP-BYPASS: Proved reg %d in jump_insn %d equals constant ",
4811 regno, INSN_UID (jump));
4812 print_rtl (gcse_file, SET_SRC (set->expr));
4813 fprintf (gcse_file, "\nBypass edge from %d->%d to %d\n",
4814 e->src->index, old_dest->index, dest->index);
4816 change = 1;
4817 break;
4821 return change;
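/* Worked example (not from the original source): suppose edge E into
   BB arrives from a block where (reg 100) is known to be
   (const_int 0), and BB is just

     (set (pc) (if_then_else (eq (reg 100) (const_int 0))
                             (label_ref 42) (pc)))

   Substituting the constant folds the condition to true, NEW becomes
   (label_ref 42), and the incoming edge is redirected straight to
   that label's block, skipping the test entirely.  */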
4824 /* Find basic blocks with more than one predecessor that only contain a
4825 single conditional jump. If the result of the comparison is known at
4826 compile-time from any incoming edge, redirect that edge to the
4827 appropriate target. Returns nonzero if a change was made.
4829 This function is now mis-named, because we also handle indirect jumps. */
4831 static int
4832 bypass_conditional_jumps (void)
4834 basic_block bb;
4835 int changed;
4836 rtx setcc;
4837 rtx insn;
4838 rtx dest;
4840 /* Note we start at block 1. */
4841 if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
4842 return 0;
4844 bypass_last_basic_block = last_basic_block;
4845 mark_dfs_back_edges ();
4847 changed = 0;
4848 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb->next_bb,
4849 EXIT_BLOCK_PTR, next_bb)
4851 /* Check for more than one predecessor. */
4852 if (bb->pred && bb->pred->pred_next)
4854 setcc = NULL_RTX;
4855 for (insn = bb->head;
4856 insn != NULL && insn != NEXT_INSN (bb->end);
4857 insn = NEXT_INSN (insn))
4858 if (GET_CODE (insn) == INSN)
4860 if (setcc)
4861 break;
4862 if (GET_CODE (PATTERN (insn)) != SET)
4863 break;
4865 dest = SET_DEST (PATTERN (insn));
4866 if (REG_P (dest) || CC0_P (dest))
4867 setcc = insn;
4868 else
4869 break;
4871 else if (GET_CODE (insn) == JUMP_INSN)
4873 if ((any_condjump_p (insn) || computed_jump_p (insn))
4874 && onlyjump_p (insn))
4875 changed |= bypass_block (bb, setcc, insn);
4876 break;
4878 else if (INSN_P (insn))
4879 break;
4883 /* If we bypassed any register setting insns, we inserted a
4884 copy on the redirected edge. These need to be committed. */
4885 if (changed)
4886 commit_edge_insertions ();
4888 return changed;
4891 /* Compute PRE+LCM working variables. */
4893 /* Local properties of expressions. */
4894 /* Nonzero for expressions that are transparent in the block. */
4895 static sbitmap *transp;
4897 /* Nonzero for expressions that are transparent at the end of the block.
4898 This is only zero for expressions killed by an abnormal critical edge
4899 created by a call. */
4900 static sbitmap *transpout;
4902 /* Nonzero for expressions that are computed (available) in the block. */
4903 static sbitmap *comp;
4905 /* Nonzero for expressions that are locally anticipatable in the block. */
4906 static sbitmap *antloc;
4908 /* Nonzero for expressions where this block is an optimal computation
4909 point. */
4910 static sbitmap *pre_optimal;
4912 /* Nonzero for expressions which are redundant in a particular block. */
4913 static sbitmap *pre_redundant;
4915 /* Nonzero for expressions which should be inserted on a specific edge. */
4916 static sbitmap *pre_insert_map;
4918 /* Nonzero for expressions which should be deleted in a specific block. */
4919 static sbitmap *pre_delete_map;
4921 /* Contains the edge_list returned by pre_edge_lcm. */
4922 static struct edge_list *edge_list;
4924 /* Redundant insns. */
4925 static sbitmap pre_redundant_insns;
4927 /* Allocate vars used for PRE analysis. */
4929 static void
4930 alloc_pre_mem (int n_blocks, int n_exprs)
4932 transp = sbitmap_vector_alloc (n_blocks, n_exprs);
4933 comp = sbitmap_vector_alloc (n_blocks, n_exprs);
4934 antloc = sbitmap_vector_alloc (n_blocks, n_exprs);
4936 pre_optimal = NULL;
4937 pre_redundant = NULL;
4938 pre_insert_map = NULL;
4939 pre_delete_map = NULL;
4940 ae_in = NULL;
4941 ae_out = NULL;
4942 ae_kill = sbitmap_vector_alloc (n_blocks, n_exprs);
4944 /* pre_insert and pre_delete are allocated later. */
4947 /* Free vars used for PRE analysis. */
4949 static void
4950 free_pre_mem (void)
4952 sbitmap_vector_free (transp);
4953 sbitmap_vector_free (comp);
4955 /* ANTLOC and AE_KILL are freed just after pre_lcm finishes. */
4957 if (pre_optimal)
4958 sbitmap_vector_free (pre_optimal);
4959 if (pre_redundant)
4960 sbitmap_vector_free (pre_redundant);
4961 if (pre_insert_map)
4962 sbitmap_vector_free (pre_insert_map);
4963 if (pre_delete_map)
4964 sbitmap_vector_free (pre_delete_map);
4965 if (ae_in)
4966 sbitmap_vector_free (ae_in);
4967 if (ae_out)
4968 sbitmap_vector_free (ae_out);
4970 transp = comp = NULL;
4971 pre_optimal = pre_redundant = pre_insert_map = pre_delete_map = NULL;
4972 ae_in = ae_out = NULL;
4975 /* Top level routine to do the dataflow analysis needed by PRE. */
4977 static void
4978 compute_pre_data (void)
4980 sbitmap trapping_expr;
4981 basic_block bb;
4982 unsigned int ui;
4984 compute_local_properties (transp, comp, antloc, &expr_hash_table);
4985 sbitmap_vector_zero (ae_kill, last_basic_block);
4987 /* Collect expressions which might trap. */
4988 trapping_expr = sbitmap_alloc (expr_hash_table.n_elems);
4989 sbitmap_zero (trapping_expr);
4990 for (ui = 0; ui < expr_hash_table.size; ui++)
4992 struct expr *e;
4993 for (e = expr_hash_table.table[ui]; e != NULL; e = e->next_same_hash)
4994 if (may_trap_p (e->expr))
4995 SET_BIT (trapping_expr, e->bitmap_index);
4998 /* Compute ae_kill for each basic block using:
5000 ~(TRANSP | COMP)
5002 This is significantly faster than compute_ae_kill. */
5004 FOR_EACH_BB (bb)
5006 edge e;
5008 /* If the current block is the destination of an abnormal edge, we
5009 kill all trapping expressions because we won't be able to properly
5010 place the instruction on the edge. So make them neither
5011 anticipatable nor transparent. This is fairly conservative. */
5012 for (e = bb->pred; e ; e = e->pred_next)
5013 if (e->flags & EDGE_ABNORMAL)
5015 sbitmap_difference (antloc[bb->index], antloc[bb->index], trapping_expr);
5016 sbitmap_difference (transp[bb->index], transp[bb->index], trapping_expr);
5017 break;
5020 sbitmap_a_or_b (ae_kill[bb->index], transp[bb->index], comp[bb->index]);
5021 sbitmap_not (ae_kill[bb->index], ae_kill[bb->index]);
5024 edge_list = pre_edge_lcm (gcse_file, expr_hash_table.n_elems, transp, comp, antloc,
5025 ae_kill, &pre_insert_map, &pre_delete_map);
5026 sbitmap_vector_free (antloc);
5027 antloc = NULL;
5028 sbitmap_vector_free (ae_kill);
5029 ae_kill = NULL;
5030 sbitmap_free (trapping_expr);
5033 /* PRE utilities */
5035 /* Return nonzero if an occurrence of expression EXPR in OCCR_BB would reach
5036 block BB.
5038 VISITED is a pointer to a working buffer for tracking which BB's have
5039 been visited. It is NULL for the top-level call.
5041 We treat reaching expressions that go through blocks containing the same
5042 reaching expression as "not reaching". E.g. if EXPR is generated in blocks
5043 2 and 3, INSN is in block 4, and 2->3->4, we treat the expression in block
5044 2 as not reaching. The intent is to improve the probability of finding
5045 only one reaching expression and to reduce register lifetimes by picking
5046 the closest such expression. */
5048 static int
5049 pre_expr_reaches_here_p_work (basic_block occr_bb, struct expr *expr, basic_block bb, char *visited)
5051 edge pred;
5053 for (pred = bb->pred; pred != NULL; pred = pred->pred_next)
5055 basic_block pred_bb = pred->src;
5057 if (pred->src == ENTRY_BLOCK_PTR
5058 /* Has this predecessor already been visited? */
5059 || visited[pred_bb->index])
5060 ;/* Nothing to do. */
5062 /* Does this predecessor generate this expression? */
5063 else if (TEST_BIT (comp[pred_bb->index], expr->bitmap_index))
5065 /* Is this the occurrence we're looking for?
5066 Note that there's only one generating occurrence per block
5067 so we just need to check the block number. */
5068 if (occr_bb == pred_bb)
5069 return 1;
5071 visited[pred_bb->index] = 1;
5073 /* Ignore this predecessor if it kills the expression. */
5074 else if (! TEST_BIT (transp[pred_bb->index], expr->bitmap_index))
5075 visited[pred_bb->index] = 1;
5077 /* Neither gen nor kill. */
5078 else
5080 visited[pred_bb->index] = 1;
5081 if (pre_expr_reaches_here_p_work (occr_bb, expr, pred_bb, visited))
5082 return 1;
5086 /* All paths have been checked. */
5087 return 0;
5090 /* The wrapper for pre_expr_reaches_here_p_work that ensures that any
5091 memory allocated for that function is returned. */
5093 static int
5094 pre_expr_reaches_here_p (basic_block occr_bb, struct expr *expr, basic_block bb)
5096 int rval;
5097 char *visited = xcalloc (last_basic_block, 1);
5099 rval = pre_expr_reaches_here_p_work (occr_bb, expr, bb, visited);
5101 free (visited);
5102 return rval;
5106 /* Given an expr, generate RTL which we can insert at the end of a BB,
5107 or on an edge. Set the block number of any insns generated to
5108 the value of BB. */
5110 static rtx
5111 process_insert_insn (struct expr *expr)
5113 rtx reg = expr->reaching_reg;
5114 rtx exp = copy_rtx (expr->expr);
5115 rtx pat;
5117 start_sequence ();
5119 /* If the expression is something that's an operand, like a constant,
5120 just copy it to a register. */
5121 if (general_operand (exp, GET_MODE (reg)))
5122 emit_move_insn (reg, exp);
5124 /* Otherwise, make a new insn to compute this expression and make sure the
5125 insn will be recognized (this also adds any needed CLOBBERs). Copy the
5126 expression to make sure we don't have any sharing issues. */
5127 else if (insn_invalid_p (emit_insn (gen_rtx_SET (VOIDmode, reg, exp))))
5128 abort ();
5130 pat = get_insns ();
5131 end_sequence ();
5133 return pat;
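/* Illustration (assumptions noted): if EXPR is
   (plus:SI (reg 100) (const_int 4)) and reaching_reg is (reg 200),
   the generated sequence is the single insn

     (set (reg 200) (plus:SI (reg 100) (const_int 4)))

   plus any CLOBBERs that recognition adds; for an operand-like EXPR
   such as a constant, emit_move_insn may emit several insns, which is
   why a whole sequence is returned.  */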
5136 /* Add EXPR to the end of basic block BB.
5138 This is used by both PRE and code hoisting.
5140 For PRE, we want to verify that the expr is either transparent
5141 or locally anticipatable in the target block. This check makes
5142 no sense for code hoisting. */
5144 static void
5145 insert_insn_end_bb (struct expr *expr, basic_block bb, int pre)
5147 rtx insn = bb->end;
5148 rtx new_insn;
5149 rtx reg = expr->reaching_reg;
5150 int regno = REGNO (reg);
5151 rtx pat, pat_end;
5153 pat = process_insert_insn (expr);
5154 if (pat == NULL_RTX || ! INSN_P (pat))
5155 abort ();
5157 pat_end = pat;
5158 while (NEXT_INSN (pat_end) != NULL_RTX)
5159 pat_end = NEXT_INSN (pat_end);
5161 /* If the last insn is a jump, insert EXPR in front [taking care to
5162 handle cc0, etc. properly]. Similarly we need to take care of trapping
5163 instructions in the presence of non-call exceptions. */
5165 if (GET_CODE (insn) == JUMP_INSN
5166 || (GET_CODE (insn) == INSN
5167 && (bb->succ->succ_next || (bb->succ->flags & EDGE_ABNORMAL))))
5169 #ifdef HAVE_cc0
5170 rtx note;
5171 #endif
5172 /* It should always be the case that we can put these instructions
5173 anywhere in the basic block when performing PRE optimizations.
5174 Check this. */
5175 if (GET_CODE (insn) == INSN && pre
5176 && !TEST_BIT (antloc[bb->index], expr->bitmap_index)
5177 && !TEST_BIT (transp[bb->index], expr->bitmap_index))
5178 abort ();
5180 /* If this is a jump table, then we can't insert stuff here. Since
5181 we know the previous real insn must be the tablejump, we insert
5182 the new instruction just before the tablejump. */
5183 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
5184 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
5185 insn = prev_real_insn (insn);
5187 #ifdef HAVE_cc0
5188 /* FIXME: 'twould be nice to call prev_cc0_setter here but it aborts
5189 if cc0 isn't set. */
5190 note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
5191 if (note)
5192 insn = XEXP (note, 0);
5193 else
5195 rtx maybe_cc0_setter = prev_nonnote_insn (insn);
5196 if (maybe_cc0_setter
5197 && INSN_P (maybe_cc0_setter)
5198 && sets_cc0_p (PATTERN (maybe_cc0_setter)))
5199 insn = maybe_cc0_setter;
5201 #endif
5202 /* FIXME: What if something in cc0/jump uses value set in new insn? */
5203 new_insn = emit_insn_before (pat, insn);
5206 /* Likewise if the last insn is a call, as will happen in the presence
5207 of exception handling. */
5208 else if (GET_CODE (insn) == CALL_INSN
5209 && (bb->succ->succ_next || (bb->succ->flags & EDGE_ABNORMAL)))
5211 /* Keeping in mind SMALL_REGISTER_CLASSES and parameters in registers,
5212 we search backward and place the instructions before the first
5213 parameter is loaded. Do this for everyone for consistency and a
5214 presumption that we'll get better code elsewhere as well.
5216 It should always be the case that we can put these instructions
5217 anywhere in the basic block when performing PRE optimizations.
5218 Check this. */
5220 if (pre
5221 && !TEST_BIT (antloc[bb->index], expr->bitmap_index)
5222 && !TEST_BIT (transp[bb->index], expr->bitmap_index))
5223 abort ();
5225 /* Since different machines initialize their parameter registers
5226 in different orders, assume nothing. Collect the set of all
5227 parameter registers. */
5228 insn = find_first_parameter_load (insn, bb->head);
5230 /* If we found all the parameter loads, then we want to insert
5231 before the first parameter load.
5233 If we did not find all the parameter loads, then we might have
5234 stopped on the head of the block, which could be a CODE_LABEL.
5235 If we inserted before the CODE_LABEL, then we would be putting
5236 the insn in the wrong basic block. In that case, put the insn
5237 after the CODE_LABEL. Also, respect NOTE_INSN_BASIC_BLOCK. */
5238 while (GET_CODE (insn) == CODE_LABEL
5239 || NOTE_INSN_BASIC_BLOCK_P (insn))
5240 insn = NEXT_INSN (insn);
5242 new_insn = emit_insn_before (pat, insn);
5244 else
5245 new_insn = emit_insn_after (pat, insn);
5247 while (1)
5249 if (INSN_P (pat))
5251 add_label_notes (PATTERN (pat), new_insn);
5252 note_stores (PATTERN (pat), record_set_info, pat);
5254 if (pat == pat_end)
5255 break;
5256 pat = NEXT_INSN (pat);
5259 gcse_create_count++;
5261 if (gcse_file)
5263 fprintf (gcse_file, "PRE/HOIST: end of bb %d, insn %d, ",
5264 bb->index, INSN_UID (new_insn));
5265 fprintf (gcse_file, "copying expression %d to reg %d\n",
5266 expr->bitmap_index, regno);
5270 /* Insert partially redundant expressions on edges in the CFG to make
5271 the expressions fully redundant. */
5273 static int
5274 pre_edge_insert (struct edge_list *edge_list, struct expr **index_map)
5276 int e, i, j, num_edges, set_size, did_insert = 0;
5277 sbitmap *inserted;
5279 /* Where PRE_INSERT_MAP is nonzero, we add the expression on that edge
5280 if it reaches any of the deleted expressions. */
5282 set_size = pre_insert_map[0]->size;
5283 num_edges = NUM_EDGES (edge_list);
5284 inserted = sbitmap_vector_alloc (num_edges, expr_hash_table.n_elems);
5285 sbitmap_vector_zero (inserted, num_edges);
5287 for (e = 0; e < num_edges; e++)
5289 int indx;
5290 basic_block bb = INDEX_EDGE_PRED_BB (edge_list, e);
5292 for (i = indx = 0; i < set_size; i++, indx += SBITMAP_ELT_BITS)
5294 SBITMAP_ELT_TYPE insert = pre_insert_map[e]->elms[i];
5296 for (j = indx; insert && j < (int) expr_hash_table.n_elems; j++, insert >>= 1)
5297 if ((insert & 1) != 0 && index_map[j]->reaching_reg != NULL_RTX)
5299 struct expr *expr = index_map[j];
5300 struct occr *occr;
5302 /* Now look at each deleted occurrence of this expression. */
5303 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
5305 if (! occr->deleted_p)
5306 continue;
5308 /* Insert this expression on this edge if it would
5309 reach the deleted occurrence in BB. */
5310 if (!TEST_BIT (inserted[e], j))
5312 rtx insn;
5313 edge eg = INDEX_EDGE (edge_list, e);
5315 /* We can't insert anything on an abnormal and
5316 critical edge, so we insert the insn at the end of
5317 the previous block. There are several alternatives
5318 detailed in Morgan's book, p. 277 (sec. 10.5), for
5319 handling this situation. This one is easiest for
5320 now. */
5322 if ((eg->flags & EDGE_ABNORMAL) == EDGE_ABNORMAL)
5323 insert_insn_end_bb (index_map[j], bb, 0);
5324 else
5326 insn = process_insert_insn (index_map[j]);
5327 insert_insn_on_edge (insn, eg);
5330 if (gcse_file)
5332 fprintf (gcse_file, "PRE/HOIST: edge (%d,%d), ",
5333 bb->index,
5334 INDEX_EDGE_SUCC_BB (edge_list, e)->index);
5335 fprintf (gcse_file, "copy expression %d\n",
5336 expr->bitmap_index);
5339 update_ld_motion_stores (expr);
5340 SET_BIT (inserted[e], j);
5341 did_insert = 1;
5342 gcse_create_count++;
5349 sbitmap_vector_free (inserted);
5350 return did_insert;
5353 /* Copy the result of INSN to REG. INDX is the expression number.
5354 Given "old_reg <- expr" (INSN), instead of adding after it
5355 reaching_reg <- old_reg
5356 it's better to do the following:
5357 reaching_reg <- expr
5358 old_reg <- reaching_reg
5359 because this way copy propagation can discover additional PRE
5360 opportunities. */
5362 static void
5363 pre_insert_copy_insn (struct expr *expr, rtx insn)
5365 rtx reg = expr->reaching_reg;
5366 int regno = REGNO (reg);
5367 int indx = expr->bitmap_index;
5368 rtx set = single_set (insn);
5369 rtx new_insn;
5370 rtx new_set;
5371 rtx old_reg;
5373 if (!set)
5374 abort ();
5376 old_reg = SET_DEST (set);
5377 new_insn = emit_insn_after (gen_move_insn (old_reg,
5378 reg),
5379 insn);
5380 new_set = single_set (new_insn);
5382 if (!new_set)
5383 abort ();
5384 SET_DEST (set) = reg;
5386 /* Keep register set table up to date. */
5387 replace_one_set (REGNO (old_reg), insn, new_insn);
5388 record_one_set (regno, insn);
5390 gcse_create_count++;
5392 if (gcse_file)
5393 fprintf (gcse_file,
5394 "PRE: bb %d, insn %d, copy expression %d in insn %d to reg %d\n",
5395 BLOCK_NUM (insn), INSN_UID (new_insn), indx,
5396 INSN_UID (insn), regno);
5397 update_ld_motion_stores (expr);
5400 /* Copy available expressions that reach the redundant expression
5401 to `reaching_reg'. */
5403 static void
5404 pre_insert_copies (void)
5406 unsigned int i;
5407 struct expr *expr;
5408 struct occr *occr;
5409 struct occr *avail;
5411 /* For each available expression in the table, copy the result to
5412 `reaching_reg' if the expression reaches a deleted one.
5414 ??? The current algorithm is rather brute force.
5415 Need to do some profiling. */
5417 for (i = 0; i < expr_hash_table.size; i++)
5418 for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash)
5420 /* If the basic block isn't reachable, PPOUT will be TRUE. However,
5421 we don't want to insert a copy here because the expression may not
5422 really be redundant. So only insert an insn if the expression was
5423 deleted. This test also avoids further processing if the
5424 expression wasn't deleted anywhere. */
5425 if (expr->reaching_reg == NULL)
5426 continue;
5428 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
5430 if (! occr->deleted_p)
5431 continue;
5433 for (avail = expr->avail_occr; avail != NULL; avail = avail->next)
5435 rtx insn = avail->insn;
5437 /* No need to handle this one if handled already. */
5438 if (avail->copied_p)
5439 continue;
5441 /* Don't handle this one if it's a redundant one. */
5442 if (TEST_BIT (pre_redundant_insns, INSN_CUID (insn)))
5443 continue;
5445 /* Or if the expression doesn't reach the deleted one. */
5446 if (! pre_expr_reaches_here_p (BLOCK_FOR_INSN (avail->insn),
5447 expr,
5448 BLOCK_FOR_INSN (occr->insn)))
5449 continue;
5451 /* Copy the result of avail to reaching_reg. */
5452 pre_insert_copy_insn (expr, insn);
5453 avail->copied_p = 1;
5459 /* Emit move from SRC to DEST noting the equivalence with expression computed
5460 in INSN. */
5461 static rtx
5462 gcse_emit_move_after (rtx src, rtx dest, rtx insn)
5464 rtx new;
5465 rtx set = single_set (insn), set2;
5466 rtx note;
5467 rtx eqv;
5469 /* This should never fail since we're creating a reg->reg copy
5470 we've verified to be valid. */
5472 new = emit_insn_after (gen_move_insn (dest, src), insn);
5474 /* Note the equivalence for local CSE pass. */
5475 set2 = single_set (new);
5476 if (!set2 || !rtx_equal_p (SET_DEST (set2), dest))
5477 return new;
5478 if ((note = find_reg_equal_equiv_note (insn)))
5479 eqv = XEXP (note, 0);
5480 else
5481 eqv = SET_SRC (set);
5483 set_unique_reg_note (new, REG_EQUAL, copy_insn_1 (eqv));
5485 return new;
5488 /* Delete redundant computations.
5489 Deletion is done by changing the insn to copy the `reaching_reg' of
5490 the expression into the result of the SET. It is left to later passes
5491 (cprop, cse2, flow, combine, regmove) to propagate the copy or eliminate it.
5493 Returns nonzero if a change is made. */
5495 static int
5496 pre_delete (void)
5498 unsigned int i;
5499 int changed;
5500 struct expr *expr;
5501 struct occr *occr;
5503 changed = 0;
5504 for (i = 0; i < expr_hash_table.size; i++)
5505 for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash)
5507 int indx = expr->bitmap_index;
5509 /* We only need to search antic_occr since we require
5510 ANTLOC != 0. */
5512 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
5514 rtx insn = occr->insn;
5515 rtx set;
5516 basic_block bb = BLOCK_FOR_INSN (insn);
5518 if (TEST_BIT (pre_delete_map[bb->index], indx))
5520 set = single_set (insn);
5521 if (! set)
5522 abort ();
5524 /* Create a pseudo-reg to store the result of reaching
5525 expressions into. Get the mode for the new pseudo from
5526 the mode of the original destination pseudo. */
5527 if (expr->reaching_reg == NULL)
5528 expr->reaching_reg
5529 = gen_reg_rtx (GET_MODE (SET_DEST (set)));
5531 gcse_emit_move_after (expr->reaching_reg, SET_DEST (set), insn);
5532 delete_insn (insn);
5533 occr->deleted_p = 1;
5534 SET_BIT (pre_redundant_insns, INSN_CUID (insn));
5535 changed = 1;
5536 gcse_subst_count++;
5538 if (gcse_file)
5540 fprintf (gcse_file,
5541 "PRE: redundant insn %d (expression %d) in ",
5542 INSN_UID (insn), indx);
5543 fprintf (gcse_file, "bb %d, reaching reg is %d\n",
5544 bb->index, REGNO (expr->reaching_reg));
5550 return changed;
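/* Example of the rewrite above (illustrative): a redundant

     (set (reg 103) (mult:SI (reg 100) (reg 101)))

   becomes (set (reg 103) (reg 200)), where (reg 200) is
   expr->reaching_reg; pre_edge_insert and pre_insert_copies then
   ensure (reg 200) holds the expression's value on every path that
   reaches the deleted occurrence.  */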
5553 /* Perform GCSE optimizations using PRE.
5554 This is called by one_pre_gcse_pass after all the dataflow analysis
5555 has been done.
5557 This is based on the original Morel-Renvoise paper, Fred Chow's thesis, and
5558 lazy code motion from Knoop, Ruthing and Steffen as described in Advanced
5559 Compiler Design and Implementation.
5561 ??? A new pseudo reg is created to hold the reaching expression. The nice
5562 thing about the classical approach is that it would try to use an existing
5563 reg. If the register can't be adequately optimized [i.e. we introduce
5564 reload problems], one could add a pass here to propagate the new register
5565 through the block.
5567 ??? We don't handle single sets in PARALLELs because we're [currently] not
5568 able to copy the rest of the parallel when we insert copies to create full
5569 redundancies from partial redundancies. However, there's no reason why we
5570 can't handle PARALLELs in the cases where there are no partial
5571 redundancies. */
5573 static int
5574 pre_gcse (void)
5576 unsigned int i;
5577 int did_insert, changed;
5578 struct expr **index_map;
5579 struct expr *expr;
5581 /* Compute a mapping from expression number (`bitmap_index') to
5582 hash table entry. */
5584 index_map = xcalloc (expr_hash_table.n_elems, sizeof (struct expr *));
5585 for (i = 0; i < expr_hash_table.size; i++)
5586 for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash)
5587 index_map[expr->bitmap_index] = expr;
5589 /* Reset bitmap used to track which insns are redundant. */
5590 pre_redundant_insns = sbitmap_alloc (max_cuid);
5591 sbitmap_zero (pre_redundant_insns);
5593 /* Delete the redundant insns first so that
5594 - we know what register to use for the new insns and for the other
5595 ones with reaching expressions
5596 - we know which insns are redundant when we go to create copies */
5598 changed = pre_delete ();
5600 did_insert = pre_edge_insert (edge_list, index_map);
5602 /* In other places with reaching expressions, copy the expression to the
5603 specially allocated pseudo-reg that reaches the redundant expr. */
5604 pre_insert_copies ();
5605 if (did_insert)
5607 commit_edge_insertions ();
5608 changed = 1;
5611 free (index_map);
5612 sbitmap_free (pre_redundant_insns);
5613 return changed;
5616 /* Top level routine to perform one PRE GCSE pass.
5618 Return nonzero if a change was made. */
5620 static int
5621 one_pre_gcse_pass (int pass)
5623 int changed = 0;
5625 gcse_subst_count = 0;
5626 gcse_create_count = 0;
5628 alloc_hash_table (max_cuid, &expr_hash_table, 0);
5629 add_noreturn_fake_exit_edges ();
5630 if (flag_gcse_lm)
5631 compute_ld_motion_mems ();
5633 compute_hash_table (&expr_hash_table);
5634 trim_ld_motion_mems ();
5635 if (gcse_file)
5636 dump_hash_table (gcse_file, "Expression", &expr_hash_table);
5638 if (expr_hash_table.n_elems > 0)
5640 alloc_pre_mem (last_basic_block, expr_hash_table.n_elems);
5641 compute_pre_data ();
5642 changed |= pre_gcse ();
5643 free_edge_list (edge_list);
5644 free_pre_mem ();
5647 free_ldst_mems ();
5648 remove_fake_edges ();
5649 free_hash_table (&expr_hash_table);
5651 if (gcse_file)
5653 fprintf (gcse_file, "\nPRE GCSE of %s, pass %d: %d bytes needed, ",
5654 current_function_name, pass, bytes_used);
5655 fprintf (gcse_file, "%d substs, %d insns created\n",
5656 gcse_subst_count, gcse_create_count);
5659 return changed;
5662 /* If X contains any LABEL_REF's, add REG_LABEL notes for them to INSN.
5663 If notes are added to an insn which references a CODE_LABEL, the
5664 LABEL_NUSES count is incremented. We have to add REG_LABEL notes,
5665 because the following loop optimization pass requires them. */
5667 /* ??? This is very similar to the loop.c add_label_notes function. We
5668 could probably share code here. */
5670 /* ??? If there was a jump optimization pass after gcse and before loop,
5671 then we would not need to do this here, because jump would add the
5672 necessary REG_LABEL notes. */
5674 static void
5675 add_label_notes (rtx x, rtx insn)
5677 enum rtx_code code = GET_CODE (x);
5678 int i, j;
5679 const char *fmt;
5681 if (code == LABEL_REF && !LABEL_REF_NONLOCAL_P (x))
5683 /* This code used to ignore labels that referred to dispatch tables to
5684 avoid flow generating (slightly) worse code.
5686 We no longer ignore such label references (see LABEL_REF handling in
5687 mark_jump_label for additional information). */
5689 REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_LABEL, XEXP (x, 0),
5690 REG_NOTES (insn));
5691 if (LABEL_P (XEXP (x, 0)))
5692 LABEL_NUSES (XEXP (x, 0))++;
5693 return;
5696 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
5698 if (fmt[i] == 'e')
5699 add_label_notes (XEXP (x, i), insn);
5700 else if (fmt[i] == 'E')
5701 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
5702 add_label_notes (XVECEXP (x, i, j), insn);
5706 /* Compute transparent outgoing information for each block.
5708 An expression is transparent to an edge unless it is killed by
5709 the edge itself. This can only happen with abnormal control flow,
5710 when the edge is traversed through a call. This happens with
5711 non-local labels and exceptions.
5713 This would not be necessary if we split the edge. While this is
5714 normally impossible for abnormal critical edges, with some effort
5715 it should be possible with exception handling, since we still have
5716 control over which handler should be invoked. But due to increased
5717 EH table sizes, this may not be worthwhile. */
5719 static void
5720 compute_transpout (void)
5722 basic_block bb;
5723 unsigned int i;
5724 struct expr *expr;
5726 sbitmap_vector_ones (transpout, last_basic_block);
5728 FOR_EACH_BB (bb)
5730 /* Note that flow inserted a nop at the end of basic blocks that
5731 end in call instructions for reasons other than abnormal
5732 control flow. */
5733 if (GET_CODE (bb->end) != CALL_INSN)
5734 continue;
5736 for (i = 0; i < expr_hash_table.size; i++)
5737 for (expr = expr_hash_table.table[i]; expr ; expr = expr->next_same_hash)
5738 if (GET_CODE (expr->expr) == MEM)
5740 if (GET_CODE (XEXP (expr->expr, 0)) == SYMBOL_REF
5741 && CONSTANT_POOL_ADDRESS_P (XEXP (expr->expr, 0)))
5742 continue;
5744 /* ??? Optimally, we would use interprocedural alias
5745 analysis to determine if this mem is actually killed
5746 by this call. */
5747 RESET_BIT (transpout[bb->index], expr->bitmap_index);
5752 /* Removal of useless null pointer checks. */
5754 /* Called via note_stores. X is set by SETTER. If X is a register we must
5755 invalidate nonnull_local and set nonnull_killed. DATA is really a
5756 `null_pointer_info *'.
5758 We ignore hard registers. */
5760 static void
5761 invalidate_nonnull_info (rtx x, rtx setter ATTRIBUTE_UNUSED, void *data)
5763 unsigned int regno;
5764 struct null_pointer_info *npi = (struct null_pointer_info *) data;
5766 while (GET_CODE (x) == SUBREG)
5767 x = SUBREG_REG (x);
5769 /* Ignore anything that is not a register or is a hard register. */
5770 if (GET_CODE (x) != REG
5771 || REGNO (x) < npi->min_reg
5772 || REGNO (x) >= npi->max_reg)
5773 return;
5775 regno = REGNO (x) - npi->min_reg;
5777 RESET_BIT (npi->nonnull_local[npi->current_block->index], regno);
5778 SET_BIT (npi->nonnull_killed[npi->current_block->index], regno);
5781 /* Do null-pointer check elimination for the registers indicated in
5782 NPI. NONNULL_AVIN and NONNULL_AVOUT are pre-allocated sbitmaps;
5783 they are not our responsibility to free. */
5785 static int
5786 delete_null_pointer_checks_1 (unsigned int *block_reg, sbitmap *nonnull_avin,
5787 sbitmap *nonnull_avout,
5788 struct null_pointer_info *npi)
5790 basic_block bb, current_block;
5791 sbitmap *nonnull_local = npi->nonnull_local;
5792 sbitmap *nonnull_killed = npi->nonnull_killed;
5793 int something_changed = 0;
5795 /* Compute local properties, nonnull and killed. A register will have
5796 the nonnull property if at the end of the current block its value is
5797 known to be nonnull. The killed property indicates that somewhere in
5798 the block any information we had about the register is killed.
5800 Note that a register can have both properties in a single block. That
5801 indicates that it's killed, then later in the block a new value is
5802 computed. */
5803 sbitmap_vector_zero (nonnull_local, last_basic_block);
5804 sbitmap_vector_zero (nonnull_killed, last_basic_block);
5806 FOR_EACH_BB (current_block)
5808 rtx insn, stop_insn;
5810 /* Set the current block for invalidate_nonnull_info. */
5811 npi->current_block = current_block;
5813 /* Scan each insn in the basic block looking for memory references and
5814 register sets. */
5815 stop_insn = NEXT_INSN (current_block->end);
5816 for (insn = current_block->head;
5817 insn != stop_insn;
5818 insn = NEXT_INSN (insn))
5820 rtx set;
5821 rtx reg;
5823 /* Ignore anything that is not a normal insn. */
5824 if (! INSN_P (insn))
5825 continue;
5827 /* Basically ignore anything that is not a simple SET. We do have
5828 to make sure to invalidate nonnull_local and set nonnull_killed
5829 for such insns though. */
5830 set = single_set (insn);
5831 if (!set)
5833 note_stores (PATTERN (insn), invalidate_nonnull_info, npi);
5834 continue;
5837 /* See if we've got a usable memory load. We handle it first
5838 in case it uses its address register as a dest (which kills
5839 the nonnull property). */
5840 if (GET_CODE (SET_SRC (set)) == MEM
5841 && GET_CODE ((reg = XEXP (SET_SRC (set), 0))) == REG
5842 && REGNO (reg) >= npi->min_reg
5843 && REGNO (reg) < npi->max_reg)
5844 SET_BIT (nonnull_local[current_block->index],
5845 REGNO (reg) - npi->min_reg);
5847 /* Now invalidate stuff clobbered by this insn. */
5848 note_stores (PATTERN (insn), invalidate_nonnull_info, npi);
5850 /* And handle stores, we do these last since any sets in INSN can
5851 not kill the nonnull property if it is derived from a MEM
5852 appearing in a SET_DEST. */
5853 if (GET_CODE (SET_DEST (set)) == MEM
5854 && GET_CODE ((reg = XEXP (SET_DEST (set), 0))) == REG
5855 && REGNO (reg) >= npi->min_reg
5856 && REGNO (reg) < npi->max_reg)
5857 SET_BIT (nonnull_local[current_block->index],
5858 REGNO (reg) - npi->min_reg);
5862 /* Now compute global properties based on the local properties. This
5863 is a classic global availability algorithm. */
5864 compute_available (nonnull_local, nonnull_killed,
5865 nonnull_avout, nonnull_avin);
5867 /* Now look at each bb and see if it ends with a compare of a value
5868 against zero. */
5869 FOR_EACH_BB (bb)
5871 rtx last_insn = bb->end;
5872 rtx condition, earliest;
5873 int compare_and_branch;
5875 /* Since MIN_REG is always at least FIRST_PSEUDO_REGISTER, and
5876 since BLOCK_REG[BB] is zero if this block did not end with a
5877 comparison against zero, this condition works. */
5878 if (block_reg[bb->index] < npi->min_reg
5879 || block_reg[bb->index] >= npi->max_reg)
5880 continue;
5882 /* LAST_INSN is a conditional jump. Get its condition. */
5883 condition = get_condition (last_insn, &earliest);
5885 /* If we can't determine the condition then skip. */
5886 if (! condition)
5887 continue;
5889 /* Is the register known to have a nonzero value? */
5890 if (!TEST_BIT (nonnull_avout[bb->index], block_reg[bb->index] - npi->min_reg))
5891 continue;
5893 /* Try to compute whether the compare/branch at the block end is one or
5894 two instructions. */
5895 if (earliest == last_insn)
5896 compare_and_branch = 1;
5897 else if (earliest == prev_nonnote_insn (last_insn))
5898 compare_and_branch = 2;
5899 else
5900 continue;
5902 /* We know the register in this comparison is nonnull at exit from
5903 this block. We can optimize this comparison. */
5904 if (GET_CODE (condition) == NE)
5906 rtx new_jump;
5908 new_jump = emit_jump_insn_after (gen_jump (JUMP_LABEL (last_insn)),
5909 last_insn);
5910 JUMP_LABEL (new_jump) = JUMP_LABEL (last_insn);
5911 LABEL_NUSES (JUMP_LABEL (new_jump))++;
5912 emit_barrier_after (new_jump);
5915 something_changed = 1;
5916 delete_insn (last_insn);
5917 if (compare_and_branch == 2)
5918 delete_insn (earliest);
5919 purge_dead_edges (bb);
5921 /* Don't check this block again. (Note that BLOCK_END is
5922 invalid here; we deleted the last instruction in the
5923 block.) */
5924 block_reg[bb->index] = 0;
5927 return something_changed;
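/* A minimal sketch (hypothetical code, not from this file) of what the
   NE case above buys us at the source level.  The load through P makes P
   nonnull on every path to the test, so the comparison is decided at
   compile time and the conditional branch is replaced or deleted.  */
#if 0
static int
npc_example (int *p)
{
  int v = *p;		/* (mem (reg P)) => P is nonnull from here on.  */
  if (p != 0)		/* Always true: becomes an unconditional jump.  */
    return v;
  return -1;
}
#endif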
5930 /* Find EQ/NE comparisons against zero which can be (indirectly) evaluated
5931 at compile time.
5933 This is conceptually similar to global constant/copy propagation and
5934 classic global CSE (it even uses the same dataflow equations as cprop).
5936 If a register is used as a memory address with the form (mem (reg)), then we
5937 know that REG can not be zero at that point in the program. Any instruction
5938 which sets REG "kills" this property.
5940 So, if every path leading to a conditional branch has an available memory
5941 reference of that form, then we know the register can not have the value
5942 zero at the conditional branch.
5944 So we merely need to compute the local properties and propagate that data
5945 around the cfg, then optimize where possible.
5947 We run this pass two times. Once before CSE, then again after CSE. This
5948 has proven to be the most profitable approach. It is rare for new
5949 optimization opportunities of this nature to appear after the first CSE
5950 pass.
5952 This could probably be integrated with global cprop with a little work. */
5954 int
5955 delete_null_pointer_checks (rtx f ATTRIBUTE_UNUSED)
5957 sbitmap *nonnull_avin, *nonnull_avout;
5958 unsigned int *block_reg;
5959 basic_block bb;
5960 int reg;
5961 int regs_per_pass;
5962 int max_reg = max_reg_num ();
5963 struct null_pointer_info npi;
5964 int something_changed = 0;
5966 /* If we have only a single block, or it is too expensive, give up. */
5967 if (n_basic_blocks <= 1
5968 || is_too_expensive (_ ("NULL pointer checks disabled")))
5969 return 0;
5971 /* We need four bitmaps, each with a bit for each register in each
5972 basic block. */
5973 regs_per_pass = get_bitmap_width (4, last_basic_block, max_reg);
5975 /* Allocate bitmaps to hold local and global properties. */
5976 npi.nonnull_local = sbitmap_vector_alloc (last_basic_block, regs_per_pass);
5977 npi.nonnull_killed = sbitmap_vector_alloc (last_basic_block, regs_per_pass);
5978 nonnull_avin = sbitmap_vector_alloc (last_basic_block, regs_per_pass);
5979 nonnull_avout = sbitmap_vector_alloc (last_basic_block, regs_per_pass);
5981 /* Go through the basic blocks, seeing whether or not each block
5982 ends with a conditional branch whose condition is a comparison
5983 against zero. Record the register compared in BLOCK_REG. */
5984 block_reg = xcalloc (last_basic_block, sizeof (int));
5985 FOR_EACH_BB (bb)
5987 rtx last_insn = bb->end;
5988 rtx condition, earliest, reg;
5990 /* We only want conditional branches. */
5991 if (GET_CODE (last_insn) != JUMP_INSN
5992 || !any_condjump_p (last_insn)
5993 || !onlyjump_p (last_insn))
5994 continue;
5996 /* LAST_INSN is a conditional jump. Get its condition. */
5997 condition = get_condition (last_insn, &earliest);
5999 /* If we were unable to get the condition, or it is not an equality
6000 comparison against zero then there's nothing we can do. */
6001 if (!condition
6002 || (GET_CODE (condition) != NE && GET_CODE (condition) != EQ)
6003 || GET_CODE (XEXP (condition, 1)) != CONST_INT
6004 || (XEXP (condition, 1)
6005 != CONST0_RTX (GET_MODE (XEXP (condition, 0)))))
6006 continue;
6008 /* We must be checking a register against zero. */
6009 reg = XEXP (condition, 0);
6010 if (GET_CODE (reg) != REG)
6011 continue;
6013 block_reg[bb->index] = REGNO (reg);
6016 /* Go through the algorithm for each block of registers. */
6017 for (reg = FIRST_PSEUDO_REGISTER; reg < max_reg; reg += regs_per_pass)
6019 npi.min_reg = reg;
6020 npi.max_reg = MIN (reg + regs_per_pass, max_reg);
6021 something_changed |= delete_null_pointer_checks_1 (block_reg,
6022 nonnull_avin,
6023 nonnull_avout,
6024 &npi);
6027 /* Free the table of registers compared at the end of every block. */
6028 free (block_reg);
6030 /* Free bitmaps. */
6031 sbitmap_vector_free (npi.nonnull_local);
6032 sbitmap_vector_free (npi.nonnull_killed);
6033 sbitmap_vector_free (nonnull_avin);
6034 sbitmap_vector_free (nonnull_avout);
6036 return something_changed;
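/* A worked instance of the blocking scheme above (the numbers are
   hypothetical): with max_reg == 10000 and regs_per_pass == 1000, the
   loop makes ten passes, and pass K analyzes the pseudo registers from
   FIRST_PSEUDO_REGISTER + 1000 * K up to MIN (FIRST_PSEUDO_REGISTER
   + 1000 * (K + 1), max_reg), reusing the same four bitmap vectors each
   time so the memory bound chosen by get_bitmap_width holds no matter
   how many pseudos the function has.  */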
6039 /* Code Hoisting variables and subroutines. */
6041 /* Very busy expressions. */
6042 static sbitmap *hoist_vbein;
6043 static sbitmap *hoist_vbeout;
6045 /* Hoistable expressions. */
6046 static sbitmap *hoist_exprs;
6048 /* Dominator bitmaps. */
6049 dominance_info dominators;
6051 /* ??? We could compute post dominators and run this algorithm in
6052 reverse to perform tail merging, doing so would probably be
6053 more effective than the tail merging code in jump.c.
6055 It's unclear if tail merging could be run in parallel with
6056 code hoisting. It would be nice. */
6058 /* Allocate vars used for code hoisting analysis. */
6060 static void
6061 alloc_code_hoist_mem (int n_blocks, int n_exprs)
6063 antloc = sbitmap_vector_alloc (n_blocks, n_exprs);
6064 transp = sbitmap_vector_alloc (n_blocks, n_exprs);
6065 comp = sbitmap_vector_alloc (n_blocks, n_exprs);
6067 hoist_vbein = sbitmap_vector_alloc (n_blocks, n_exprs);
6068 hoist_vbeout = sbitmap_vector_alloc (n_blocks, n_exprs);
6069 hoist_exprs = sbitmap_vector_alloc (n_blocks, n_exprs);
6070 transpout = sbitmap_vector_alloc (n_blocks, n_exprs);
6073 /* Free vars used for code hoisting analysis. */
6075 static void
6076 free_code_hoist_mem (void)
6078 sbitmap_vector_free (antloc);
6079 sbitmap_vector_free (transp);
6080 sbitmap_vector_free (comp);
6082 sbitmap_vector_free (hoist_vbein);
6083 sbitmap_vector_free (hoist_vbeout);
6084 sbitmap_vector_free (hoist_exprs);
6085 sbitmap_vector_free (transpout);
6087 free_dominance_info (dominators);
6090 /* Compute the very busy expressions at entry/exit from each block.
6092 An expression is very busy if all paths from a given point
6093 compute the expression. */
6095 static void
6096 compute_code_hoist_vbeinout (void)
6098 int changed, passes;
6099 basic_block bb;
6101 sbitmap_vector_zero (hoist_vbeout, last_basic_block);
6102 sbitmap_vector_zero (hoist_vbein, last_basic_block);
6104 passes = 0;
6105 changed = 1;
6107 while (changed)
6109 changed = 0;
6111 /* We scan the blocks in the reverse order to speed up
6112 the convergence. */
6113 FOR_EACH_BB_REVERSE (bb)
6115 changed |= sbitmap_a_or_b_and_c_cg (hoist_vbein[bb->index], antloc[bb->index],
6116 hoist_vbeout[bb->index], transp[bb->index]);
6117 if (bb->next_bb != EXIT_BLOCK_PTR)
6118 sbitmap_intersection_of_succs (hoist_vbeout[bb->index], hoist_vbein, bb->index);
6121 passes++;
6124 if (gcse_file)
6125 fprintf (gcse_file, "hoisting vbeinout computation: %d passes\n", passes);
6128 /* Top level routine to do the dataflow analysis needed by code hoisting. */
6130 static void
6131 compute_code_hoist_data (void)
6133 compute_local_properties (transp, comp, antloc, &expr_hash_table);
6134 compute_transpout ();
6135 compute_code_hoist_vbeinout ();
6136 dominators = calculate_dominance_info (CDI_DOMINATORS);
6137 if (gcse_file)
6138 fprintf (gcse_file, "\n");
6141 /* Determine if the expression identified by EXPR_INDEX would
6142 reach BB unimpaired if it was placed at the end of EXPR_BB.
6144 It's unclear exactly what Muchnick meant by "unimpaired". It seems
6145 to me that the expression must either be computed or transparent in
6146 *every* block in the path(s) from EXPR_BB to BB. Any other definition
6147 would allow the expression to be hoisted out of loops, even if
6148 the expression wasn't a loop invariant.
6150 Contrast this to reachability for PRE where an expression is
6151 considered reachable if *any* path reaches instead of *all*
6152 paths. */
6154 static int
6155 hoist_expr_reaches_here_p (basic_block expr_bb, int expr_index, basic_block bb, char *visited)
6157 edge pred;
6158 int visited_allocated_locally = 0;
6161 if (visited == NULL)
6163 visited_allocated_locally = 1;
6164 visited = xcalloc (last_basic_block, 1);
6167 for (pred = bb->pred; pred != NULL; pred = pred->pred_next)
6169 basic_block pred_bb = pred->src;
6171 if (pred->src == ENTRY_BLOCK_PTR)
6172 break;
6173 else if (pred_bb == expr_bb)
6174 continue;
6175 else if (visited[pred_bb->index])
6176 continue;
6178 /* Does this predecessor generate this expression? */
6179 else if (TEST_BIT (comp[pred_bb->index], expr_index))
6180 break;
6181 else if (! TEST_BIT (transp[pred_bb->index], expr_index))
6182 break;
6184 /* Not killed. */
6185 else
6187 visited[pred_bb->index] = 1;
6188 if (! hoist_expr_reaches_here_p (expr_bb, expr_index,
6189 pred_bb, visited))
6190 break;
6193 if (visited_allocated_locally)
6194 free (visited);
6196 return (pred == NULL);
6199 /* Actually perform code hoisting. */
6201 static void
6202 hoist_code (void)
6204 basic_block bb, dominated;
6205 basic_block *domby;
6206 unsigned int domby_len;
6207 unsigned int i,j;
6208 struct expr **index_map;
6209 struct expr *expr;
6211 sbitmap_vector_zero (hoist_exprs, last_basic_block);
6213 /* Compute a mapping from expression number (`bitmap_index') to
6214 hash table entry. */
6216 index_map = xcalloc (expr_hash_table.n_elems, sizeof (struct expr *));
6217 for (i = 0; i < expr_hash_table.size; i++)
6218 for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash)
6219 index_map[expr->bitmap_index] = expr;
6221 /* Walk over each basic block looking for potentially hoistable
6222 expressions; nothing gets hoisted from the entry block. */
6223 FOR_EACH_BB (bb)
6225 int found = 0;
6226 int insn_inserted_p;
6228 domby_len = get_dominated_by (dominators, bb, &domby);
6229 /* Examine each expression that is very busy at the exit of this
6230 block. These are the potentially hoistable expressions. */
6231 for (i = 0; i < hoist_vbeout[bb->index]->n_bits; i++)
6233 int hoistable = 0;
6235 if (TEST_BIT (hoist_vbeout[bb->index], i)
6236 && TEST_BIT (transpout[bb->index], i))
6238 /* We've found a potentially hoistable expression, now
6239 we look at every block BB dominates to see if it
6240 computes the expression. */
6241 for (j = 0; j < domby_len; j++)
6243 dominated = domby[j];
6244 /* Ignore self dominance. */
6245 if (bb == dominated)
6246 continue;
6247 /* We've found a dominated block, now see if it computes
6248 the busy expression and whether or not moving that
6249 expression to the "beginning" of that block is safe. */
6250 if (!TEST_BIT (antloc[dominated->index], i))
6251 continue;
6253 /* Note if the expression would reach the dominated block
6254 unimpaired if it was placed at the end of BB.
6256 Keep track of how many times this expression is hoistable
6257 from a dominated block into BB. */
6258 if (hoist_expr_reaches_here_p (bb, i, dominated, NULL))
6259 hoistable++;
6262 /* If we found more than one hoistable occurrence of this
6263 expression, then note it in the bitmap of expressions to
6264 hoist. It makes no sense to hoist things which are computed
6265 in only one BB, and doing so tends to pessimize register
6266 allocation. One could increase this value to try harder
6267 to avoid any possible code expansion due to register
6268 allocation issues; however experiments have shown that
6269 the vast majority of hoistable expressions are only movable
6270 from two successors, so raising this threshold is likely
6271 to nullify any benefit we get from code hoisting. */
6272 if (hoistable > 1)
6274 SET_BIT (hoist_exprs[bb->index], i);
6275 found = 1;
6279 /* If we found nothing to hoist, then quit now. */
6280 if (! found)
6282 free (domby);
6283 continue;
6286 /* Loop over all the hoistable expressions. */
6287 for (i = 0; i < hoist_exprs[bb->index]->n_bits; i++)
6289 /* We want to insert the expression into BB only once, so
6290 note when we've inserted it. */
6291 insn_inserted_p = 0;
6293 /* These tests should be the same as the tests above. */
6294 if (TEST_BIT (hoist_vbeout[bb->index], i))
6296 /* We've found a potentially hoistable expression, now
6297 we look at every block BB dominates to see if it
6298 computes the expression. */
6299 for (j = 0; j < domby_len; j++)
6301 dominated = domby[j];
6302 /* Ignore self dominance. */
6303 if (bb == dominated)
6304 continue;
6306 /* We've found a dominated block, now see if it computes
6307 the busy expression and whether or not moving that
6308 expression to the "beginning" of that block is safe. */
6309 if (!TEST_BIT (antloc[dominated->index], i))
6310 continue;
6312 /* The expression is computed in the dominated block and
6313 it would be safe to compute it at the start of the
6314 dominated block. Now we have to determine if the
6315 expression would reach the dominated block if it was
6316 placed at the end of BB. */
6317 if (hoist_expr_reaches_here_p (bb, i, dominated, NULL))
6319 struct expr *expr = index_map[i];
6320 struct occr *occr = expr->antic_occr;
6321 rtx insn;
6322 rtx set;
6324 /* Find the right occurrence of this expression. */
6325 while (occr && BLOCK_FOR_INSN (occr->insn) != dominated)
6326 occr = occr->next;
6328 /* Should never happen. */
6329 if (!occr)
6330 abort ();
6332 insn = occr->insn;
6334 set = single_set (insn);
6335 if (! set)
6336 abort ();
6338 /* Create a pseudo-reg to store the result of reaching
6339 expressions into. Get the mode for the new pseudo
6340 from the mode of the original destination pseudo. */
6341 if (expr->reaching_reg == NULL)
6342 expr->reaching_reg
6343 = gen_reg_rtx (GET_MODE (SET_DEST (set)));
6345 gcse_emit_move_after (expr->reaching_reg, SET_DEST (set), insn);
6346 delete_insn (insn);
6347 occr->deleted_p = 1;
6348 if (!insn_inserted_p)
6350 insert_insn_end_bb (index_map[i], bb, 0);
6351 insn_inserted_p = 1;
6357 free (domby);
6360 free (index_map);
6363 /* Top level routine to perform one code hoisting (aka unification) pass
6365 Return nonzero if a change was made. */
6367 static int
6368 one_code_hoisting_pass (void)
6370 int changed = 0;
6372 alloc_hash_table (max_cuid, &expr_hash_table, 0);
6373 compute_hash_table (&expr_hash_table);
6374 if (gcse_file)
6375 dump_hash_table (gcse_file, "Code Hosting Expressions", &expr_hash_table);
6377 if (expr_hash_table.n_elems > 0)
6379 alloc_code_hoist_mem (last_basic_block, expr_hash_table.n_elems);
6380 compute_code_hoist_data ();
6381 hoist_code ();
6382 free_code_hoist_mem ();
6385 free_hash_table (&expr_hash_table);
6387 return changed;
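/* A minimal sketch (hypothetical code, not from this file) of the
   source-level effect of hoisting.  A + B is very busy at the end of the
   dominating block -- every path from it computes the expression -- so
   it is evaluated once there and both dominated computations are
   rewritten to use the new pseudo.  */
#if 0
static int
hoist_example (int a, int b, int p)
{
  int x;		/* Hoisting inserts t = a + b in this block.  */
  if (p)
    x = (a + b) * 2;	/* Becomes t * 2.  */
  else
    x = (a + b) * 3;	/* Becomes t * 3.  */
  return x;
}
#endif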
6390 /* Here we provide the things required to do store motion towards
6391 the exit. In order for this to be effective, gcse also needed to
6392 be taught how to move a load when it is killed only by a store to itself.
6394 int i;
6395 float a[10];
6397 void foo(float scale)
6399 for (i=0; i<10; i++)
6400 a[i] *= scale;
6403 'i' is both loaded and stored to in the loop. Normally, gcse cannot move
6404 the load out since it's live around the loop, and stored at the bottom
6405 of the loop.
6407 The 'Load Motion' referred to and implemented in this file is
6408 an enhancement to gcse which when using edge based lcm, recognizes
6409 this situation and allows gcse to move the load out of the loop.
6411 Once gcse has hoisted the load, store motion can then push this
6412 load towards the exit, and we end up with no loads or stores of 'i'
6413 in the loop. */
6415 /* This will search the ldst list for a matching expression. If it
6416 doesn't find one, we create one and initialize it. */
6418 static struct ls_expr *
6419 ldst_entry (rtx x)
6421 struct ls_expr * ptr;
6423 for (ptr = first_ls_expr(); ptr != NULL; ptr = next_ls_expr (ptr))
6424 if (expr_equiv_p (ptr->pattern, x))
6425 break;
6427 if (!ptr)
6429 ptr = xmalloc (sizeof (struct ls_expr));
6431 ptr->next = pre_ldst_mems;
6432 ptr->expr = NULL;
6433 ptr->pattern = x;
6434 ptr->pattern_regs = NULL_RTX;
6435 ptr->loads = NULL_RTX;
6436 ptr->stores = NULL_RTX;
6437 ptr->reaching_reg = NULL_RTX;
6438 ptr->invalid = 0;
6439 ptr->index = 0;
6440 ptr->hash_index = 0;
6441 pre_ldst_mems = ptr;
6444 return ptr;
6447 /* Free up an individual ldst entry. */
6449 static void
6450 free_ldst_entry (struct ls_expr * ptr)
6452 free_INSN_LIST_list (& ptr->loads);
6453 free_INSN_LIST_list (& ptr->stores);
6455 free (ptr);
6458 /* Free up all memory associated with the ldst list. */
6460 static void
6461 free_ldst_mems (void)
6463 while (pre_ldst_mems)
6465 struct ls_expr * tmp = pre_ldst_mems;
6467 pre_ldst_mems = pre_ldst_mems->next;
6469 free_ldst_entry (tmp);
6472 pre_ldst_mems = NULL;
6475 /* Dump debugging info about the ldst list. */
6477 static void
6478 print_ldst_list (FILE * file)
6480 struct ls_expr * ptr;
6482 fprintf (file, "LDST list: \n");
6484 for (ptr = first_ls_expr(); ptr != NULL; ptr = next_ls_expr (ptr))
6486 fprintf (file, " Pattern (%3d): ", ptr->index);
6488 print_rtl (file, ptr->pattern);
6490 fprintf (file, "\n Loads : ");
6492 if (ptr->loads)
6493 print_rtl (file, ptr->loads);
6494 else
6495 fprintf (file, "(nil)");
6497 fprintf (file, "\n Stores : ");
6499 if (ptr->stores)
6500 print_rtl (file, ptr->stores);
6501 else
6502 fprintf (file, "(nil)");
6504 fprintf (file, "\n\n");
6507 fprintf (file, "\n");
6510 /* Returns 1 if X is in the list of ldst only expressions. */
6512 static struct ls_expr *
6513 find_rtx_in_ldst (rtx x)
6515 struct ls_expr * ptr;
6517 for (ptr = pre_ldst_mems; ptr != NULL; ptr = ptr->next)
6518 if (expr_equiv_p (ptr->pattern, x) && ! ptr->invalid)
6519 return ptr;
6521 return NULL;
6524 /* Assign each element of the list of mems a monotonically increasing value. */
6526 static int
6527 enumerate_ldsts (void)
6529 struct ls_expr * ptr;
6530 int n = 0;
6532 for (ptr = pre_ldst_mems; ptr != NULL; ptr = ptr->next)
6533 ptr->index = n++;
6535 return n;
6538 /* Return first item in the list. */
6540 static inline struct ls_expr *
6541 first_ls_expr (void)
6543 return pre_ldst_mems;
6546 /* Return the next item in the list after the specified one. */
6548 static inline struct ls_expr *
6549 next_ls_expr (struct ls_expr * ptr)
6551 return ptr->next;
6554 /* Load Motion for loads which only kill themselves. */
6556 /* Return true if x is a simple MEM operation, with no registers or
6557 side effects. These are the types of loads we consider for the
6558 ld_motion list, otherwise we let the usual aliasing take care of it. */
6560 static int
6561 simple_mem (rtx x)
6563 if (GET_CODE (x) != MEM)
6564 return 0;
6566 if (MEM_VOLATILE_P (x))
6567 return 0;
6569 if (GET_MODE (x) == BLKmode)
6570 return 0;
6572 /* If we are handling exceptions, we must be careful with memory references
6573 that may trap. If we are not, the behavior is undefined, so we may just
6574 continue. */
6575 if (flag_non_call_exceptions && may_trap_p (x))
6576 return 0;
6578 if (side_effects_p (x))
6579 return 0;
6581 /* Do not consider function arguments passed on stack. */
6582 if (reg_mentioned_p (stack_pointer_rtx, x))
6583 return 0;
6585 if (flag_float_store && FLOAT_MODE_P (GET_MODE (x)))
6586 return 0;
6588 return 1;
6591 /* Make sure there isn't a buried reference in this pattern anywhere.
6592 If there is, invalidate the entry for it since we're not capable
6593 of fixing it up just yet. We have to be sure we know about ALL
6594 loads since the aliasing code will allow each entry in the
6595 ld_motion list to not-alias itself. If we miss a load, we will get
6596 the wrong value since gcse might common it and we won't know to
6597 fix it up. */
6599 static void
6600 invalidate_any_buried_refs (rtx x)
6602 const char * fmt;
6603 int i, j;
6604 struct ls_expr * ptr;
6606 /* Invalidate it in the list. */
6607 if (GET_CODE (x) == MEM && simple_mem (x))
6609 ptr = ldst_entry (x);
6610 ptr->invalid = 1;
6613 /* Recursively process the insn. */
6614 fmt = GET_RTX_FORMAT (GET_CODE (x));
6616 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
6618 if (fmt[i] == 'e')
6619 invalidate_any_buried_refs (XEXP (x, i));
6620 else if (fmt[i] == 'E')
6621 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
6622 invalidate_any_buried_refs (XVECEXP (x, i, j));
6626 /* Find all the 'simple' MEMs which are used in LOADs and STORES. Simple
6627 being defined as MEM loads and stores to symbols, with no side effects
6628 and no registers in the expression. For a MEM destination, we also
6629 check that the insn is still valid if we replace the destination with a
6630 REG, as is done in update_ld_motion_stores. If there are any uses/defs
6631 which don't match these criteria, they are invalidated and trimmed out
6632 later. */
6634 static void
6635 compute_ld_motion_mems (void)
6637 struct ls_expr * ptr;
6638 basic_block bb;
6639 rtx insn;
6641 pre_ldst_mems = NULL;
6643 FOR_EACH_BB (bb)
6645 for (insn = bb->head;
6646 insn && insn != NEXT_INSN (bb->end);
6647 insn = NEXT_INSN (insn))
6649 if (INSN_P (insn))
6651 if (GET_CODE (PATTERN (insn)) == SET)
6653 rtx src = SET_SRC (PATTERN (insn));
6654 rtx dest = SET_DEST (PATTERN (insn));
6656 /* Check for a simple LOAD... */
6657 if (GET_CODE (src) == MEM && simple_mem (src))
6659 ptr = ldst_entry (src);
6660 if (GET_CODE (dest) == REG)
6661 ptr->loads = alloc_INSN_LIST (insn, ptr->loads);
6662 else
6663 ptr->invalid = 1;
6665 else
6667 /* Make sure there isn't a buried load somewhere. */
6668 invalidate_any_buried_refs (src);
6671 /* Check for stores. Don't worry about aliased ones, they
6672 will block any movement we might do later. We only care
6673 about this exact pattern since those are the only
6674 circumstance that we will ignore the aliasing info. */
6675 if (GET_CODE (dest) == MEM && simple_mem (dest))
6677 ptr = ldst_entry (dest);
6679 if (GET_CODE (src) != MEM
6680 && GET_CODE (src) != ASM_OPERANDS
6681 /* Check for REG manually since want_to_gcse_p
6682 returns 0 for all REGs. */
6683 && (REG_P (src) || want_to_gcse_p (src)))
6684 ptr->stores = alloc_INSN_LIST (insn, ptr->stores);
6685 else
6686 ptr->invalid = 1;
6689 else
6690 invalidate_any_buried_refs (PATTERN (insn));
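/* A minimal sketch (hypothetical code, not from this file) of which SETs
   the scan above records.  G is a static object, so (mem (symbol_ref G))
   mentions no registers and has no side effects and simple_mem accepts
   it; a volatile or stack-relative access would be rejected, and a MEM
   buried deeper inside a pattern invalidates its entry instead.  */
#if 0
static int G;

static int
ld_motion_example (int k)
{
  int r = G;		/* Simple load into a REG: added to ptr->loads.  */
  G = k;		/* Simple store from a REG: added to ptr->stores.  */
  return r;
}
#endif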
6696 /* Remove any references that have been either invalidated or are not in the
6697 expression list for pre gcse. */
6699 static void
6700 trim_ld_motion_mems (void)
6702 struct ls_expr * last = NULL;
6703 struct ls_expr * ptr = first_ls_expr ();
6705 while (ptr != NULL)
6707 int del = ptr->invalid;
6708 struct expr * expr = NULL;
6710 /* Delete if entry has been made invalid. */
6711 if (!del)
6713 unsigned int i;
6715 del = 1;
6716 /* Delete if we cannot find this mem in the expression list. */
6717 for (i = 0; i < expr_hash_table.size && del; i++)
6719 for (expr = expr_hash_table.table[i];
6720 expr != NULL;
6721 expr = expr->next_same_hash)
6722 if (expr_equiv_p (expr->expr, ptr->pattern))
6724 del = 0;
6725 break;
6730 if (del)
6732 if (last != NULL)
6734 last->next = ptr->next;
6735 free_ldst_entry (ptr);
6736 ptr = last->next;
6738 else
6740 pre_ldst_mems = pre_ldst_mems->next;
6741 free_ldst_entry (ptr);
6742 ptr = pre_ldst_mems;
6745 else
6747 /* Set the expression field if we are keeping it. */
6748 last = ptr;
6749 ptr->expr = expr;
6750 ptr = ptr->next;
6754 /* Show the world what we've found. */
6755 if (gcse_file && pre_ldst_mems != NULL)
6756 print_ldst_list (gcse_file);
6759 /* This routine will take an expression which we are replacing with
6760 a reaching register, and update any stores that are needed if
6761 that expression is in the ld_motion list. Stores are updated by
6762 copying their SRC to the reaching register, and then storing
6763 the reaching register into the store location. This keeps the
6764 correct value in the reaching register for the loads. */
6766 static void
6767 update_ld_motion_stores (struct expr * expr)
6769 struct ls_expr * mem_ptr;
6771 if ((mem_ptr = find_rtx_in_ldst (expr->expr)))
6773 /* We can try to find just the REACHED stores, but it shouldn't
6774 matter to set the reaching reg everywhere... some might be
6775 dead and should be eliminated later. */
6777 /* We replace (set mem expr) with (set reg expr) (set mem reg)
6778 where reg is the reaching reg used in the load. We checked in
6779 compute_ld_motion_mems that we can replace (set mem expr) with
6780 (set reg expr) in that insn. */
6781 rtx list = mem_ptr->stores;
6783 for ( ; list != NULL_RTX; list = XEXP (list, 1))
6785 rtx insn = XEXP (list, 0);
6786 rtx pat = PATTERN (insn);
6787 rtx src = SET_SRC (pat);
6788 rtx reg = expr->reaching_reg;
6789 rtx copy, new;
6791 /* If we've already copied it, continue. */
6792 if (expr->reaching_reg == src)
6793 continue;
6795 if (gcse_file)
6797 fprintf (gcse_file, "PRE: store updated with reaching reg ");
6798 print_rtl (gcse_file, expr->reaching_reg);
6799 fprintf (gcse_file, ":\n ");
6800 print_inline_rtx (gcse_file, insn, 8);
6801 fprintf (gcse_file, "\n");
6804 copy = gen_move_insn ( reg, copy_rtx (SET_SRC (pat)));
6805 new = emit_insn_before (copy, insn);
6806 record_one_set (REGNO (reg), new);
6807 SET_SRC (pat) = reg;
6809 /* un-recognize this pattern since it's probably different now. */
6810 INSN_CODE (insn) = -1;
6811 gcse_create_count++;
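/* In RTL terms, the rewrite performed above is (R being the reaching
   register already chosen for the moved load; a sketch, not output from
   a real compilation):

     before:  (set (mem X) (expr))
     after:   (set (reg R) (expr))	;; new insn emitted just before
	      (set (mem X) (reg R))	;; original insn, SET_SRC replaced

   so R carries the stored value and the moved loads read it from R.  */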
6816 /* Store motion code. */
6818 #define ANTIC_STORE_LIST(x) ((x)->loads)
6819 #define AVAIL_STORE_LIST(x) ((x)->stores)
6820 #define LAST_AVAIL_CHECK_FAILURE(x) ((x)->reaching_reg)
6822 /* This is used to communicate the target bitvector we want to use in the
6823 reg_set_info routine when called via the note_stores mechanism. */
6824 static int * regvec;
6826 /* And current insn, for the same routine. */
6827 static rtx compute_store_table_current_insn;
6829 /* Used in computing the reverse edge graph bit vectors. */
6830 static sbitmap * st_antloc;
6832 /* Global holding the number of store expressions we are dealing with. */
6833 static int num_stores;
6835 /* Checks to see if we need to mark a register set. Called from note_stores. */
6837 static void
6838 reg_set_info (rtx dest, rtx setter ATTRIBUTE_UNUSED,
6839 void *data ATTRIBUTE_UNUSED)
6841 if (GET_CODE (dest) == SUBREG)
6842 dest = SUBREG_REG (dest);
6844 if (GET_CODE (dest) == REG)
6845 regvec[REGNO (dest)] = INSN_UID (compute_store_table_current_insn);
6848 /* Return false if some of the registers in list X are killed, i.e.
6849 have a nonzero entry in the array REGS_SET. */
6851 static bool
6852 store_ops_ok (rtx x, int *regs_set)
6854 rtx reg;
6856 for (; x; x = XEXP (x, 1))
6858 reg = XEXP (x, 0);
6859 if (regs_set[REGNO(reg)])
6860 return false;
6863 return true;
6866 /* Returns a list of registers mentioned in X. */
6867 static rtx
6868 extract_mentioned_regs (rtx x)
6870 return extract_mentioned_regs_helper (x, NULL_RTX);
6873 /* Helper for extract_mentioned_regs; ACCUM is used to accumulate used
6874 registers. */
6875 static rtx
6876 extract_mentioned_regs_helper (rtx x, rtx accum)
6878 int i;
6879 enum rtx_code code;
6880 const char * fmt;
6882 /* Repeat is used to turn tail-recursion into iteration. */
6883 repeat:
6885 if (x == 0)
6886 return accum;
6888 code = GET_CODE (x);
6889 switch (code)
6891 case REG:
6892 return alloc_EXPR_LIST (0, x, accum);
6894 case MEM:
6895 x = XEXP (x, 0);
6896 goto repeat;
6898 case PRE_DEC:
6899 case PRE_INC:
6900 case POST_DEC:
6901 case POST_INC:
6902 /* We do not run this function with arguments having side effects. */
6903 abort ();
6905 case PC:
6906 case CC0: /*FIXME*/
6907 case CONST:
6908 case CONST_INT:
6909 case CONST_DOUBLE:
6910 case CONST_VECTOR:
6911 case SYMBOL_REF:
6912 case LABEL_REF:
6913 case ADDR_VEC:
6914 case ADDR_DIFF_VEC:
6915 return accum;
6917 default:
6918 break;
6921 i = GET_RTX_LENGTH (code) - 1;
6922 fmt = GET_RTX_FORMAT (code);
6924 for (; i >= 0; i--)
6926 if (fmt[i] == 'e')
6928 rtx tem = XEXP (x, i);
6930 /* If we are about to do the last recursive call
6931 needed at this level, change it into iteration. */
6932 if (i == 0)
6934 x = tem;
6935 goto repeat;
6938 accum = extract_mentioned_regs_helper (tem, accum);
6940 else if (fmt[i] == 'E')
6942 int j;
6944 for (j = 0; j < XVECLEN (x, i); j++)
6945 accum = extract_mentioned_regs_helper (XVECEXP (x, i, j), accum);
6949 return accum;
6952 /* Determine whether INSN is a MEM store pattern that we will consider moving.
6953 REGS_SET_BEFORE is bitmap of registers set before (and including) the
6954 current insn, REGS_SET_AFTER is bitmap of registers set after (and
6955 including) the insn in this basic block. We must be passing through BB from
6956 head to end, as we are using this fact to speed things up.
6958 The results are stored this way:
6960 -- the first anticipatable expression is added into ANTIC_STORE_LIST
6961 -- if the processed expression is not anticipatable, NULL_RTX is added
6962 there instead, so that we can use it as indicator that no further
6963 expression of this type may be anticipatable
6964 -- if the expression is available, it is added as head of AVAIL_STORE_LIST;
6965 consequently, all of them but this head are dead and may be deleted.
6966 -- if the expression is not available, the insn that causes it to fail
6967 to be available is stored in reaching_reg.
6969 Things are complicated a bit by the fact that there may already be stores
6970 to the same MEM from other blocks; also caller must take care of the
6971 necessary cleanup of the temporary markers after the end of the basic block. */
6974 static void
6975 find_moveable_store (rtx insn, int *regs_set_before, int *regs_set_after)
6977 struct ls_expr * ptr;
6978 rtx dest, set, tmp;
6979 int check_anticipatable, check_available;
6980 basic_block bb = BLOCK_FOR_INSN (insn);
6982 set = single_set (insn);
6983 if (!set)
6984 return;
6986 dest = SET_DEST (set);
6988 if (GET_CODE (dest) != MEM || MEM_VOLATILE_P (dest)
6989 || GET_MODE (dest) == BLKmode)
6990 return;
6992 if (side_effects_p (dest))
6993 return;
6995 /* If we are handling exceptions, we must be careful with memory references
6996 that may trap. If we are not, the behavior is undefined, so we may just
6997 continue. */
6998 if (flag_non_call_exceptions && may_trap_p (dest))
6999 return;
7001 ptr = ldst_entry (dest);
7002 if (!ptr->pattern_regs)
7003 ptr->pattern_regs = extract_mentioned_regs (dest);
7005 /* Do not check for anticipatability if we either found one anticipatable
7006 store already, or tested for one and found out that it was killed. */
7007 check_anticipatable = 0;
7008 if (!ANTIC_STORE_LIST (ptr))
7009 check_anticipatable = 1;
7010 else
7012 tmp = XEXP (ANTIC_STORE_LIST (ptr), 0);
7013 if (tmp != NULL_RTX
7014 && BLOCK_FOR_INSN (tmp) != bb)
7015 check_anticipatable = 1;
7017 if (check_anticipatable)
7019 if (store_killed_before (dest, ptr->pattern_regs, insn, bb, regs_set_before))
7020 tmp = NULL_RTX;
7021 else
7022 tmp = insn;
7023 ANTIC_STORE_LIST (ptr) = alloc_INSN_LIST (tmp,
7024 ANTIC_STORE_LIST (ptr));
7027 /* It is not necessary to check whether the store is available if we did
7028 it successfully before; if we failed before, do not bother to check
7029 until we reach the insn that caused us to fail. */
7030 check_available = 0;
7031 if (!AVAIL_STORE_LIST (ptr))
7032 check_available = 1;
7033 else
7035 tmp = XEXP (AVAIL_STORE_LIST (ptr), 0);
7036 if (BLOCK_FOR_INSN (tmp) != bb)
7037 check_available = 1;
7039 if (check_available)
7041 /* Check that we have already reached the insn at which the check
7042 failed last time. */
7043 if (LAST_AVAIL_CHECK_FAILURE (ptr))
7045 for (tmp = bb->end;
7046 tmp != insn && tmp != LAST_AVAIL_CHECK_FAILURE (ptr);
7047 tmp = PREV_INSN (tmp))
7048 continue;
7049 if (tmp == insn)
7050 check_available = 0;
7052 else
7053 check_available = store_killed_after (dest, ptr->pattern_regs, insn,
7054 bb, regs_set_after,
7055 &LAST_AVAIL_CHECK_FAILURE (ptr));
7057 if (!check_available)
7058 AVAIL_STORE_LIST (ptr) = alloc_INSN_LIST (insn, AVAIL_STORE_LIST (ptr));
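/* A compact reading of the markers maintained above for one MEM as its
   block is scanned from head to end (a summary of the invariants, not
   code): the head of ANTIC_STORE_LIST is either the first store in the
   block that was not killed before it, or NULL_RTX meaning an earlier
   candidate was killed and anticipatability need not be tested again;
   the head of AVAIL_STORE_LIST is the latest store still available at
   this point; and LAST_AVAIL_CHECK_FAILURE records where the last
   availability check failed, so a later store in the same block resumes
   the scan there instead of rechecking the whole prefix.  */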
7061 /* Find available and anticipatable stores. */
7063 static int
7064 compute_store_table (void)
7066 int ret;
7067 basic_block bb;
7068 unsigned regno;
7069 rtx insn, pat, tmp;
7070 int *last_set_in, *already_set;
7071 struct ls_expr * ptr, **prev_next_ptr_ptr;
7073 max_gcse_regno = max_reg_num ();
7075 reg_set_in_block = sbitmap_vector_alloc (last_basic_block,
7076 max_gcse_regno);
7077 sbitmap_vector_zero (reg_set_in_block, last_basic_block);
7078 pre_ldst_mems = 0;
7079 last_set_in = xmalloc (sizeof (int) * max_gcse_regno);
7080 already_set = xmalloc (sizeof (int) * max_gcse_regno);
7082 /* Find all the stores we care about. */
7083 FOR_EACH_BB (bb)
7085 /* First compute the registers set in this block. */
7086 memset (last_set_in, 0, sizeof (int) * max_gcse_regno);
7087 regvec = last_set_in;
7089 for (insn = bb->head;
7090 insn != NEXT_INSN (bb->end);
7091 insn = NEXT_INSN (insn))
7093 if (! INSN_P (insn))
7094 continue;
7096 if (GET_CODE (insn) == CALL_INSN)
7098 bool clobbers_all = false;
7099 #ifdef NON_SAVING_SETJMP
7100 if (NON_SAVING_SETJMP
7101 && find_reg_note (insn, REG_SETJMP, NULL_RTX))
7102 clobbers_all = true;
7103 #endif
7105 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7106 if (clobbers_all
7107 || TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
7108 last_set_in[regno] = INSN_UID (insn);
7111 pat = PATTERN (insn);
7112 compute_store_table_current_insn = insn;
7113 note_stores (pat, reg_set_info, NULL);
7116 /* Record the set registers. */
7117 for (regno = 0; regno < max_gcse_regno; regno++)
7118 if (last_set_in[regno])
7119 SET_BIT (reg_set_in_block[bb->index], regno);
7121 /* Now find the stores. */
7122 memset (already_set, 0, sizeof (int) * max_gcse_regno);
7123 regvec = already_set;
7124 for (insn = bb->head;
7125 insn != NEXT_INSN (bb->end);
7126 insn = NEXT_INSN (insn))
7128 if (! INSN_P (insn))
7129 continue;
7131 if (GET_CODE (insn) == CALL_INSN)
7133 bool clobbers_all = false;
7134 #ifdef NON_SAVING_SETJMP
7135 if (NON_SAVING_SETJMP
7136 && find_reg_note (insn, REG_SETJMP, NULL_RTX))
7137 clobbers_all = true;
7138 #endif
7140 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7141 if (clobbers_all
7142 || TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
7143 already_set[regno] = 1;
7146 pat = PATTERN (insn);
7147 note_stores (pat, reg_set_info, NULL);
7149 /* Now that we've marked regs, look for stores. */
7150 find_moveable_store (insn, already_set, last_set_in);
7152 /* Unmark regs that are no longer set. */
7153 for (regno = 0; regno < max_gcse_regno; regno++)
7154 if (last_set_in[regno] == INSN_UID (insn))
7155 last_set_in[regno] = 0;
7158 /* Clear temporary marks. */
7159 for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
7161 LAST_AVAIL_CHECK_FAILURE(ptr) = NULL_RTX;
7162 if (ANTIC_STORE_LIST (ptr)
7163 && (tmp = XEXP (ANTIC_STORE_LIST (ptr), 0)) == NULL_RTX)
7164 ANTIC_STORE_LIST (ptr) = XEXP (ANTIC_STORE_LIST (ptr), 1);
7168 /* Remove the stores that are not available anywhere, as there will
7169 be no opportunity to optimize them. */
7170 for (ptr = pre_ldst_mems, prev_next_ptr_ptr = &pre_ldst_mems;
7171 ptr != NULL;
7172 ptr = *prev_next_ptr_ptr)
7174 if (!AVAIL_STORE_LIST (ptr))
7176 *prev_next_ptr_ptr = ptr->next;
7177 free_ldst_entry (ptr);
7179 else
7180 prev_next_ptr_ptr = &ptr->next;
7183 ret = enumerate_ldsts ();
7185 if (gcse_file)
7187 fprintf (gcse_file, "ST_avail and ST_antic (shown under loads..)\n");
7188 print_ldst_list (gcse_file);
7191 free (last_set_in);
7192 free (already_set);
7193 return ret;
7196 /* Check to see if the load X is aliased with STORE_PATTERN.
7197 AFTER is true if we are checking the case when STORE_PATTERN occurs
7198 after X. */
7200 static bool
7201 load_kills_store (rtx x, rtx store_pattern, int after)
7203 if (after)
7204 return anti_dependence (x, store_pattern);
7205 else
7206 return true_dependence (store_pattern, GET_MODE (store_pattern), x,
7207 rtx_addr_varies_p);
7210 /* Go through the entire insn X, looking for any loads which might alias
7211 STORE_PATTERN. Return true if found.
7212 AFTER is true if we are checking the case when STORE_PATTERN occurs
7213 after the insn X. */
7215 static bool
7216 find_loads (rtx x, rtx store_pattern, int after)
7218 const char * fmt;
7219 int i, j;
7220 int ret = false;
7222 if (!x)
7223 return false;
7225 if (GET_CODE (x) == SET)
7226 x = SET_SRC (x);
7228 if (GET_CODE (x) == MEM)
7230 if (load_kills_store (x, store_pattern, after))
7231 return true;
7234 /* Recursively process the insn. */
7235 fmt = GET_RTX_FORMAT (GET_CODE (x));
7237 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0 && !ret; i--)
7239 if (fmt[i] == 'e')
7240 ret |= find_loads (XEXP (x, i), store_pattern, after);
7241 else if (fmt[i] == 'E')
7242 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
7243 ret |= find_loads (XVECEXP (x, i, j), store_pattern, after);
7245 return ret;
7248 /* Check if INSN kills the store pattern X (is aliased with it).
7249 AFTER is true if we are checking the case when store X occurs
7250 after the insn. Return true if it does. */
7252 static bool
7253 store_killed_in_insn (rtx x, rtx x_regs, rtx insn, int after)
7255 rtx reg, base;
7257 if (!INSN_P (insn))
7258 return false;
7260 if (GET_CODE (insn) == CALL_INSN)
7262 /* A normal or pure call might read from pattern,
7263 but a const call will not. */
7264 if (! CONST_OR_PURE_CALL_P (insn) || pure_call_p (insn))
7265 return true;
7267 /* But even a const call reads its parameters. Check whether the
7268 base of some of the registers used in the mem is the stack pointer. */
7269 for (reg = x_regs; reg; reg = XEXP (reg, 1))
7271 base = find_base_term (XEXP (reg, 0));
7272 if (!base
7273 || (GET_CODE (base) == ADDRESS
7274 && GET_MODE (base) == Pmode
7275 && XEXP (base, 0) == stack_pointer_rtx))
7276 return true;
7279 return false;
7282 if (GET_CODE (PATTERN (insn)) == SET)
7284 rtx pat = PATTERN (insn);
7285 rtx dest = SET_DEST (pat);
7287 if (GET_CODE (dest) == SIGN_EXTRACT
7288 || GET_CODE (dest) == ZERO_EXTRACT)
7289 dest = XEXP (dest, 0);
7291 /* Check for memory stores to aliased objects. */
7292 if (GET_CODE (dest) == MEM
7293 && !expr_equiv_p (dest, x))
7295 if (after)
7297 if (output_dependence (dest, x))
7298 return true;
7300 else
7302 if (output_dependence (x, dest))
7303 return true;
7306 return find_loads (SET_SRC (pat), x, after);
7308 else
7309 return find_loads (PATTERN (insn), x, after);
7312 /* Returns true if the expression X is loaded or clobbered on or after INSN
7313 within basic block BB. REGS_SET_AFTER is bitmap of registers set in
7314 or after the insn. X_REGS is list of registers mentioned in X. If the store
7315 is killed, return in FAIL_INSN the last insn in which that happens. */
7317 static bool
7318 store_killed_after (rtx x, rtx x_regs, rtx insn, basic_block bb,
7319 int *regs_set_after, rtx *fail_insn)
7321 rtx last = bb->end, act;
7323 if (!store_ops_ok (x_regs, regs_set_after))
7325 /* We do not know where it will happen. */
7326 if (fail_insn)
7327 *fail_insn = NULL_RTX;
7328 return true;
7331 /* Scan from the end, so that fail_insn is determined correctly. */
7332 for (act = last; act != PREV_INSN (insn); act = PREV_INSN (act))
7333 if (store_killed_in_insn (x, x_regs, act, false))
7335 if (fail_insn)
7336 *fail_insn = act;
7337 return true;
7340 return false;
7343 /* Returns true if the expression X is loaded or clobbered on or before INSN
7344 within basic block BB. X_REGS is list of registers mentioned in X.
7345 REGS_SET_BEFORE is bitmap of registers set before or in this insn. */
7346 static bool
7347 store_killed_before (rtx x, rtx x_regs, rtx insn, basic_block bb,
7348 int *regs_set_before)
7350 rtx first = bb->head;
7352 if (!store_ops_ok (x_regs, regs_set_before))
7353 return true;
7355 for ( ; insn != PREV_INSN (first); insn = PREV_INSN (insn))
7356 if (store_killed_in_insn (x, x_regs, insn, true))
7357 return true;
7359 return false;
7362 /* Fill in available, anticipatable, transparent and kill vectors in
7363 STORE_DATA, based on lists of available and anticipatable stores. */
7364 static void
7365 build_store_vectors (void)
7367 basic_block bb;
7368 int *regs_set_in_block;
7369 rtx insn, st;
7370 struct ls_expr * ptr;
7371 unsigned regno;
7373 /* Build the gen_vector. This is any store in the table which is not killed
7374 by aliasing later in its block. */
7375 ae_gen = sbitmap_vector_alloc (last_basic_block, num_stores);
7376 sbitmap_vector_zero (ae_gen, last_basic_block);
7378 st_antloc = sbitmap_vector_alloc (last_basic_block, num_stores);
7379 sbitmap_vector_zero (st_antloc, last_basic_block);
7381 for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
7383 for (st = AVAIL_STORE_LIST (ptr); st != NULL; st = XEXP (st, 1))
7385 insn = XEXP (st, 0);
7386 bb = BLOCK_FOR_INSN (insn);
7388 /* If we've already seen an available expression in this block,
7389 we can delete this one (it occurs earlier in the block). We'll
7390 copy the SRC expression to an unused register in case there
7391 are any side effects. */
7392 if (TEST_BIT (ae_gen[bb->index], ptr->index))
7394 rtx r = gen_reg_rtx (GET_MODE (ptr->pattern));
7395 if (gcse_file)
7396 fprintf (gcse_file, "Removing redundant store:\n");
7397 replace_store_insn (r, XEXP (st, 0), bb);
7398 continue;
7400 SET_BIT (ae_gen[bb->index], ptr->index);
7403 for (st = ANTIC_STORE_LIST (ptr); st != NULL; st = XEXP (st, 1))
7405 insn = XEXP (st, 0);
7406 bb = BLOCK_FOR_INSN (insn);
7407 SET_BIT (st_antloc[bb->index], ptr->index);
7411 ae_kill = sbitmap_vector_alloc (last_basic_block, num_stores);
7412 sbitmap_vector_zero (ae_kill, last_basic_block);
7414 transp = sbitmap_vector_alloc (last_basic_block, num_stores);
7415 sbitmap_vector_zero (transp, last_basic_block);
7416 regs_set_in_block = xmalloc (sizeof (int) * max_gcse_regno);
7418 FOR_EACH_BB (bb)
7420 for (regno = 0; regno < max_gcse_regno; regno++)
7421 regs_set_in_block[regno] = TEST_BIT (reg_set_in_block[bb->index], regno);
7423 for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
7425 if (store_killed_after (ptr->pattern, ptr->pattern_regs, bb->head,
7426 bb, regs_set_in_block, NULL))
7428 /* It should not be necessary to consider the expression
7429 killed if it is both anticipatable and available. */
7430 if (!TEST_BIT (st_antloc[bb->index], ptr->index)
7431 || !TEST_BIT (ae_gen[bb->index], ptr->index))
7432 SET_BIT (ae_kill[bb->index], ptr->index);
7434 else
7435 SET_BIT (transp[bb->index], ptr->index);
7439 free (regs_set_in_block);
7441 if (gcse_file)
7443 dump_sbitmap_vector (gcse_file, "st_antloc", "", st_antloc, last_basic_block);
7444 dump_sbitmap_vector (gcse_file, "st_kill", "", ae_kill, last_basic_block);
7445 dump_sbitmap_vector (gcse_file, "Transpt", "", transp, last_basic_block);
7446 dump_sbitmap_vector (gcse_file, "st_avloc", "", ae_gen, last_basic_block);
7450 /* Insert an instruction at the beginning of a basic block, and update
7451 the BLOCK_HEAD if needed. */
7453 static void
7454 insert_insn_start_bb (rtx insn, basic_block bb)
7456 /* Insert at start of successor block. */
7457 rtx prev = PREV_INSN (bb->head);
7458 rtx before = bb->head;
7459 while (before != 0)
7461 if (GET_CODE (before) != CODE_LABEL
7462 && (GET_CODE (before) != NOTE
7463 || NOTE_LINE_NUMBER (before) != NOTE_INSN_BASIC_BLOCK))
7464 break;
7465 prev = before;
7466 if (prev == bb->end)
7467 break;
7468 before = NEXT_INSN (before);
7471 insn = emit_insn_after (insn, prev);
7473 if (gcse_file)
7475 fprintf (gcse_file, "STORE_MOTION insert store at start of BB %d:\n",
7476 bb->index);
7477 print_inline_rtx (gcse_file, insn, 6);
7478 fprintf (gcse_file, "\n");
7482 /* This routine will insert a store on an edge. EXPR is the ldst entry for
7483 the memory reference, and E is the edge to insert it on. Returns nonzero
7484 if an edge insertion was performed. */
7486 static int
7487 insert_store (struct ls_expr * expr, edge e)
7489 rtx reg, insn;
7490 basic_block bb;
7491 edge tmp;
7493 /* We did all the deletes before this insert, so if we didn't delete a
7494 store, then we haven't set the reaching reg yet either. */
7495 if (expr->reaching_reg == NULL_RTX)
7496 return 0;
7498 if (e->flags & EDGE_FAKE)
7499 return 0;
7501 reg = expr->reaching_reg;
7502 insn = gen_move_insn (copy_rtx (expr->pattern), reg);
7504 /* If we are inserting this expression on ALL predecessor edges of a BB,
7505 insert it at the start of the BB, and reset the insert bits on the other
7506 edges so we don't try to insert it on the other edges. */
7507 bb = e->dest;
7508 for (tmp = e->dest->pred; tmp ; tmp = tmp->pred_next)
7509 if (!(tmp->flags & EDGE_FAKE))
7511 int index = EDGE_INDEX (edge_list, tmp->src, tmp->dest);
7512 if (index == EDGE_INDEX_NO_EDGE)
7513 abort ();
7514 if (! TEST_BIT (pre_insert_map[index], expr->index))
7515 break;
7518 /* If tmp is NULL, we found an insertion on every edge, blank the
7519 insertion vector for these edges, and insert at the start of the BB. */
7520 if (!tmp && bb != EXIT_BLOCK_PTR)
7522 for (tmp = e->dest->pred; tmp ; tmp = tmp->pred_next)
7524 int index = EDGE_INDEX (edge_list, tmp->src, tmp->dest);
7525 RESET_BIT (pre_insert_map[index], expr->index);
7527 insert_insn_start_bb (insn, bb);
7528 return 0;
7531 /* We can't insert on this edge, so we'll insert at the head of the
7532 successor's block. See Morgan, sec 10.5. */
7533 if ((e->flags & EDGE_ABNORMAL) == EDGE_ABNORMAL)
7535 insert_insn_start_bb (insn, bb);
7536 return 0;
7539 insert_insn_on_edge (insn, e);
7541 if (gcse_file)
7543 fprintf (gcse_file, "STORE_MOTION insert insn on edge (%d, %d):\n",
7544 e->src->index, e->dest->index);
7545 print_inline_rtx (gcse_file, insn, 6);
7546 fprintf (gcse_file, "\n");
7549 return 1;
7552 /* This routine will replace a store with a SET to a specified register. */
7554 static void
7555 replace_store_insn (rtx reg, rtx del, basic_block bb)
7557 rtx insn;
7559 insn = gen_move_insn (reg, SET_SRC (single_set (del)));
7560 insn = emit_insn_after (insn, del);
7562 if (gcse_file)
7564 fprintf (gcse_file,
7565 "STORE_MOTION delete insn in BB %d:\n ", bb->index);
7566 print_inline_rtx (gcse_file, del, 6);
7567 fprintf (gcse_file, "\nSTORE MOTION replaced with insn:\n ");
7568 print_inline_rtx (gcse_file, insn, 6);
7569 fprintf (gcse_file, "\n");
7572 delete_insn (del);
7576 /* Delete a store, but copy the value that would have been stored into
7577 the reaching_reg for later storing. */
7579 static void
7580 delete_store (struct ls_expr * expr, basic_block bb)
7582 rtx reg, i, del;
7584 if (expr->reaching_reg == NULL_RTX)
7585 expr->reaching_reg = gen_reg_rtx (GET_MODE (expr->pattern));
7587 reg = expr->reaching_reg;
7589 for (i = AVAIL_STORE_LIST (expr); i; i = XEXP (i, 1))
7591 del = XEXP (i, 0);
7592 if (BLOCK_FOR_INSN (del) == bb)
7594 /* We know there is only one since we deleted redundant
7595 ones during the available computation. */
7596 replace_store_insn (reg, del, bb);
7597 break;
7602 /* Free memory used by store motion. */
7604 static void
7605 free_store_memory (void)
7607 free_ldst_mems ();
7609 if (ae_gen)
7610 sbitmap_vector_free (ae_gen);
7611 if (ae_kill)
7612 sbitmap_vector_free (ae_kill);
7613 if (transp)
7614 sbitmap_vector_free (transp);
7615 if (st_antloc)
7616 sbitmap_vector_free (st_antloc);
7617 if (pre_insert_map)
7618 sbitmap_vector_free (pre_insert_map);
7619 if (pre_delete_map)
7620 sbitmap_vector_free (pre_delete_map);
7621 if (reg_set_in_block)
7622 sbitmap_vector_free (reg_set_in_block);
7624 ae_gen = ae_kill = transp = st_antloc = NULL;
7625 pre_insert_map = pre_delete_map = reg_set_in_block = NULL;
7628 /* Perform store motion. Much like gcse, except we move expressions the
7629 other way by looking at the flowgraph in reverse. */
7631 static void
7632 store_motion (void)
7634 basic_block bb;
7635 int x;
7636 struct ls_expr * ptr;
7637 int update_flow = 0;
7639 if (gcse_file)
7641 fprintf (gcse_file, "before store motion\n");
7642 print_rtl (gcse_file, get_insns ());
7645 init_alias_analysis ();
7647 /* Find all the available and anticipatable stores. */
7648 num_stores = compute_store_table ();
7649 if (num_stores == 0)
7651 sbitmap_vector_free (reg_set_in_block);
7652 end_alias_analysis ();
7653 return;
7656 /* Now compute kill & transp vectors. */
7657 build_store_vectors ();
7658 add_noreturn_fake_exit_edges ();
7659 connect_infinite_loops_to_exit ();
7661 edge_list = pre_edge_rev_lcm (gcse_file, num_stores, transp, ae_gen,
7662 st_antloc, ae_kill, &pre_insert_map,
7663 &pre_delete_map);
7665 /* Now we want to insert the new stores which are going to be needed. */
7666 for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
7668 FOR_EACH_BB (bb)
7669 if (TEST_BIT (pre_delete_map[bb->index], ptr->index))
7670 delete_store (ptr, bb);
7672 for (x = 0; x < NUM_EDGES (edge_list); x++)
7673 if (TEST_BIT (pre_insert_map[x], ptr->index))
7674 update_flow |= insert_store (ptr, INDEX_EDGE (edge_list, x));
7677 if (update_flow)
7678 commit_edge_insertions ();
7680 free_store_memory ();
7681 free_edge_list (edge_list);
7682 remove_fake_edges ();
7683 end_alias_analysis ();
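/* Tying this back to the `a[i] *= scale' example given before ldst_entry:
   once gcse has moved the load of `i' out of the loop and this pass has
   sunk the store, the loop body touches only a pseudo register (a sketch
   of the net effect):

     t = 0;
     for (; t < 10; t++)
       a[t] *= scale;
     i = t;		-- single store, inserted on the exit edge  */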
7687 /* Entry point for jump bypassing optimization pass. */
7689 int
7690 bypass_jumps (FILE *file)
7692 int changed;
7694 /* We do not construct an accurate cfg in functions which call
7695 setjmp, so just punt to be safe. */
7696 if (current_function_calls_setjmp)
7697 return 0;
7699 /* For calling dump_foo fns from gdb. */
7700 debug_stderr = stderr;
7701 gcse_file = file;
7703 /* Identify the basic block information for this function, including
7704 successors and predecessors. */
7705 max_gcse_regno = max_reg_num ();
7707 if (file)
7708 dump_flow_info (file);
7710 /* Return if there's nothing to do, or it is too expensive. */
7711 if (n_basic_blocks <= 1 || is_too_expensive (_ ("jump bypassing disabled")))
7712 return 0;
7714 gcc_obstack_init (&gcse_obstack);
7715 bytes_used = 0;
7717 /* We need alias. */
7718 init_alias_analysis ();
7720 /* Record where pseudo-registers are set. This data is kept accurate
7721 during each pass. ??? We could also record hard-reg information here
7722 [since it's unchanging], however it is currently done during hash table
7723 computation.
7725 It may be tempting to compute MEM set information here too, but MEM sets
7726 will be subject to code motion one day and thus we need to compute
7727 information about memory sets when we build the hash tables. */
7729 alloc_reg_set_mem (max_gcse_regno);
7730 compute_sets (get_insns ());
7732 max_gcse_regno = max_reg_num ();
7733 alloc_gcse_mem (get_insns ());
7734 changed = one_cprop_pass (1, 1, 1);
7735 free_gcse_mem ();
7737 if (file)
7739 fprintf (file, "BYPASS of %s: %d basic blocks, ",
7740 current_function_name, n_basic_blocks);
7741 fprintf (file, "%d bytes\n\n", bytes_used);
7744 obstack_free (&gcse_obstack, NULL);
7745 free_reg_set_mem ();
7747 /* We are finished with alias. */
7748 end_alias_analysis ();
7749 allocate_reg_info (max_reg_num (), FALSE, FALSE);
7751 return changed;
7754 /* Return true if the graph is too expensive to optimize. PASS is the
7755 optimization about to be performed. */
7757 static bool
7758 is_too_expensive (const char *pass)
7760 /* Trying to perform global optimizations on flow graphs which have
7761 a high connectivity will take a long time and is unlikely to be
7762 particularly useful.
7764 In normal circumstances a cfg should have about twice as many
7765 edges as blocks. But we do not want to punish small functions
7766 which have a couple switch statements. Rather than simply
7767 threshold the number of blocks, use something with a more
7768 graceful degradation. */
7769 if (n_edges > 20000 + n_basic_blocks * 4)
7771 if (warn_disabled_optimization)
7772 warning ("%s: %d basic blocks and %d edges/basic block",
7773 pass, n_basic_blocks, n_edges / n_basic_blocks);
7775 return true;
7778 /* If allocating memory for the cprop bitmap would take up too much
7779 storage it's better just to disable the optimization. */
7780 if ((n_basic_blocks
7781 * SBITMAP_SET_SIZE (max_reg_num ())
7782 * sizeof (SBITMAP_ELT_TYPE)) > MAX_GCSE_MEMORY)
7784 if (warn_disabled_optimization)
7785 warning ("%s: %d basic blocks and %d registers",
7786 pass, n_basic_blocks, max_reg_num ());
7788 return true;
7791 return false;
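/* A worked instance of the two tests above (the numbers are
   hypothetical): a function with 1000 basic blocks passes the
   connectivity test until it has more than 20000 + 1000 * 4 == 24000
   edges, i.e. an average of 24 edges per block; independently,
   n_basic_blocks * SBITMAP_SET_SIZE (max_reg_num ()) * sizeof
   (SBITMAP_ELT_TYPE) must stay within MAX_GCSE_MEMORY or the pass is
   skipped with a warning.  */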
7794 #include "gt-gcse.h"