gcc/gcse.c
1 /* Global common subexpression elimination/Partial redundancy elimination
2 and global constant/copy propagation for GNU compiler.
3 Copyright (C) 1997, 1998, 1999, 2000, 2001, 2002, 2003
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 2, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the Free
20 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
21 02111-1307, USA. */
23 /* TODO
24 - reordering of memory allocation and freeing to be more space efficient
25 - do rough calc of how many regs are needed in each block, and a rough
26 calc of how many regs are available in each class and use that to
27 throttle back the code in cases where RTX_COST is minimal.
28 - a store to the same address as a load does not kill the load if the
29 source of the store is also the destination of the load. Handling this
30 allows more load motion, particularly out of loops.
31 - ability to realloc sbitmap vectors would allow one initial computation
32 of reg_set_in_block with only subsequent additions, rather than
33 recomputing it for each pass
37 /* References searched while implementing this.
39 Compilers Principles, Techniques and Tools
40 Aho, Sethi, Ullman
41 Addison-Wesley, 1988
43 Global Optimization by Suppression of Partial Redundancies
44 E. Morel, C. Renvoise
45 communications of the acm, Vol. 22, Num. 2, Feb. 1979
47 A Portable Machine-Independent Global Optimizer - Design and Measurements
48 Frederick Chow
49 Stanford Ph.D. thesis, Dec. 1983
51 A Fast Algorithm for Code Movement Optimization
52 D.M. Dhamdhere
53 SIGPLAN Notices, Vol. 23, Num. 10, Oct. 1988
55 A Solution to a Problem with Morel and Renvoise's
56 Global Optimization by Suppression of Partial Redundancies
57 K-H Drechsler, M.P. Stadel
58 ACM TOPLAS, Vol. 10, Num. 4, Oct. 1988
60 Practical Adaptation of the Global Optimization
61 Algorithm of Morel and Renvoise
62 D.M. Dhamdhere
63 ACM TOPLAS, Vol. 13, Num. 2. Apr. 1991
65 Efficiently Computing Static Single Assignment Form and the Control
66 Dependence Graph
67 R. Cytron, J. Ferrante, B.K. Rosen, M.N. Wegman, and F.K. Zadeck
68 ACM TOPLAS, Vol. 13, Num. 4, Oct. 1991
70 Lazy Code Motion
71 J. Knoop, O. Ruthing, B. Steffen
72 ACM SIGPLAN Notices Vol. 27, Num. 7, Jul. 1992, '92 Conference on PLDI
74 What's In a Region? Or Computing Control Dependence Regions in Near-Linear
75 Time for Reducible Flow Control
76 Thomas Ball
77 ACM Letters on Programming Languages and Systems,
78 Vol. 2, Num. 1-4, Mar-Dec 1993
80 An Efficient Representation for Sparse Sets
81 Preston Briggs, Linda Torczon
82 ACM Letters on Programming Languages and Systems,
83 Vol. 2, Num. 1-4, Mar-Dec 1993
85 A Variation of Knoop, Ruthing, and Steffen's Lazy Code Motion
86 K-H Drechsler, M.P. Stadel
87 ACM SIGPLAN Notices, Vol. 28, Num. 5, May 1993
89 Partial Dead Code Elimination
90 J. Knoop, O. Ruthing, B. Steffen
91 ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994
93 Effective Partial Redundancy Elimination
94 P. Briggs, K.D. Cooper
95 ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994
97 The Program Structure Tree: Computing Control Regions in Linear Time
98 R. Johnson, D. Pearson, K. Pingali
99 ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994
101 Optimal Code Motion: Theory and Practice
102 J. Knoop, O. Ruthing, B. Steffen
103 ACM TOPLAS, Vol. 16, Num. 4, Jul. 1994
105 The power of assignment motion
106 J. Knoop, O. Ruthing, B. Steffen
107 ACM SIGPLAN Notices Vol. 30, Num. 6, Jun. 1995, '95 Conference on PLDI
109 Global code motion / global value numbering
110 C. Click
111 ACM SIGPLAN Notices Vol. 30, Num. 6, Jun. 1995, '95 Conference on PLDI
113 Value Driven Redundancy Elimination
114 L.T. Simpson
115 Rice University Ph.D. thesis, Apr. 1996
117 Value Numbering
118 L.T. Simpson
119 Massively Scalar Compiler Project, Rice University, Sep. 1996
121 High Performance Compilers for Parallel Computing
122 Michael Wolfe
123 Addison-Wesley, 1996
125 Advanced Compiler Design and Implementation
126 Steven Muchnick
127 Morgan Kaufmann, 1997
129 Building an Optimizing Compiler
130 Robert Morgan
131 Digital Press, 1998
133 People wishing to speed up the code here should read:
134 Elimination Algorithms for Data Flow Analysis
135 B.G. Ryder, M.C. Paull
136 ACM Computing Surveys, Vol. 18, Num. 3, Sep. 1986
138 How to Analyze Large Programs Efficiently and Informatively
139 D.M. Dhamdhere, B.K. Rosen, F.K. Zadeck
140 ACM SIGPLAN Notices Vol. 27, Num. 7, Jul. 1992, '92 Conference on PLDI
142 People wishing to do something different can find various possibilities
143 in the above papers and elsewhere.
146 #include "config.h"
147 #include "system.h"
148 #include "coretypes.h"
149 #include "tm.h"
150 #include "toplev.h"
152 #include "rtl.h"
153 #include "tm_p.h"
154 #include "regs.h"
155 #include "hard-reg-set.h"
156 #include "flags.h"
157 #include "real.h"
158 #include "insn-config.h"
159 #include "recog.h"
160 #include "basic-block.h"
161 #include "output.h"
162 #include "function.h"
163 #include "expr.h"
164 #include "except.h"
165 #include "ggc.h"
166 #include "params.h"
167 #include "cselib.h"
169 #include "obstack.h"
171 /* Propagate flow information through back edges and thus enable PRE's
172 moving loop invariant calculations out of loops.
174 Originally this tended to create worse overall code, but several
175 improvements during the development of PRE seem to have made following
176 back edges generally a win.
178 Note much of the loop invariant code motion done here would normally
179 be done by loop.c, which has more heuristics for when to move invariants
180 out of loops. At some point we might need to move some of those
181 heuristics into gcse.c. */
183 /* We support GCSE via Partial Redundancy Elimination. PRE optimizations
184 are a superset of those done by GCSE.
186 We perform the following steps:
188 1) Compute basic block information.
190 2) Compute table of places where registers are set.
192 3) Perform copy/constant propagation.
194 4) Perform global cse.
196 5) Perform another pass of copy/constant propagation.
198 Two passes of copy/constant propagation are done because the first one
199 enables more GCSE and the second one helps to clean up the copies that
200 GCSE creates. This is needed more for PRE than for Classic because Classic
201 GCSE will try to use an existing register containing the common
202 subexpression rather than create a new one. This is harder to do for PRE
203 because of the code motion (which Classic GCSE doesn't do).
205 Expressions we are interested in GCSE-ing are of the form
206 (set (pseudo-reg) (expression)).
207 Function want_to_gcse_p says what these are.
209 PRE handles moving invariant expressions out of loops (by treating them as
210 partially redundant).
212 Eventually it would be nice to replace cse.c/gcse.c with SSA (static single
213 assignment) based GVN (global value numbering). L. T. Simpson's paper
214 (Rice University) on value numbering is a useful reference for this.
216 **********************
218 We used to support multiple passes but there are diminishing returns in
219 doing so. The first pass usually makes 90% of the changes that are doable.
220 A second pass can make a few more changes made possible by the first pass.
221 Experiments show any further passes don't make enough changes to justify
222 the expense.
224 A study of spec92 using an unlimited number of passes:
225 [1 pass] = 1208 substitutions, [2] = 577, [3] = 202, [4] = 192, [5] = 83,
226 [6] = 34, [7] = 17, [8] = 9, [9] = 4, [10] = 4, [11] = 2,
227 [12] = 2, [13] = 1, [15] = 1, [16] = 2, [41] = 1
229 It was found that doing copy propagation between each pass enables further
230 substitutions.
232 PRE is quite expensive in complicated functions because the DFA can take
233 a while to converge. Hence we only perform one pass. The parameter max-gcse-passes can
234 be modified if one wants to experiment.
236 **********************
238 The steps for PRE are:
240 1) Build the hash table of expressions we wish to GCSE (expr_hash_table).
242 2) Perform the data flow analysis for PRE.
244 3) Delete the redundant instructions
246 4) Insert the required copies [if any] that make the partially
247 redundant instructions fully redundant.
249 5) For other reaching expressions, insert an instruction to copy the value
250 to a newly created pseudo that will reach the redundant instruction.
252 The deletion is done first so that when we do insertions we
253 know which pseudo reg to use. (A short source-level example of these steps appears just after this comment.)
255 Various papers have argued that PRE DFA is expensive (O(n^2)) and others
256 argue it is not. The number of iterations for the algorithm to converge
257 is typically 2-4 so I don't view it as that expensive (relatively speaking).
259 PRE GCSE depends heavily on the second CSE pass to clean up the copies
260 we create. To make an expression reach the place where it's redundant,
261 the result of the expression is copied to a new register, and the redundant
262 expression is deleted by replacing it with this new register. Classic GCSE
263 doesn't have this problem as much as it computes the reaching defs of
264 each register in each block and thus can try to use an existing register.
266 **********************
268 A fair bit of simplicity is created by creating small functions for simple
269 tasks, even when the function is only called in one place. This may
270 measurably slow things down [or may not] by creating more function call
271 overhead than is necessary. The source is laid out so that it's trivial
272 to make the affected functions inline so that one can measure what speed
273 up, if any, can be achieved, and maybe later when things settle things can
274 be rearranged.
276 Help stamp out big monolithic functions! */
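/* A source-level sketch of the PRE steps described above.  This is only an
   illustration; the functions `pre_example_before' and `pre_example_after'
   are hypothetical and not part of the compiler.  In the "before" form the
   second computation of a + b is partially redundant, so PRE deletes it,
   inserts a copy of the expression on the path where it was missing, and
   funnels the value through a new pseudo, shown here as `t'.  */

static int
pre_example_before (int a, int b, int p)
{
  int x = 0, y;

  if (p)
    x = a + b;		/* Expression available on this path only.  */
  y = a + b;		/* Partially redundant computation.  */
  return x + y;
}

static int
pre_example_after (int a, int b, int p)
{
  int x = 0, y, t;

  if (p)
    {
      t = a + b;	/* Original computation; result kept in the new pseudo.  */
      x = t;
    }
  else
    t = a + b;		/* Inserted copy makes the expression fully redundant.  */
  y = t;		/* Redundant computation deleted; it now uses the pseudo.  */
  return x + y;
}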
278 /* GCSE global vars. */
280 /* -dG dump file. */
281 static FILE *gcse_file;
283 /* Note whether or not we should run jump optimization after gcse. We
284 want to do this for two cases.
286 * If we changed any jumps via cprop.
288 * If we added any labels via edge splitting. */
290 static int run_jump_opt_after_gcse;
292 /* Bitmaps are normally not included in debugging dumps.
293 However it's useful to be able to print them from GDB.
294 We could create special functions for this, but it's simpler to
295 just allow passing stderr to the dump_foo fns. Since stderr can
296 be a macro, we store a copy here. */
297 static FILE *debug_stderr;
299 /* An obstack for our working variables. */
300 static struct obstack gcse_obstack;
302 struct reg_use {rtx reg_rtx; };
304 /* Hash table of expressions. */
306 struct expr
308 /* The expression (SET_SRC for expressions, PATTERN for assignments). */
309 rtx expr;
310 /* Index in the available expression bitmaps. */
311 int bitmap_index;
312 /* Next entry with the same hash. */
313 struct expr *next_same_hash;
314 /* List of anticipatable occurrences in basic blocks in the function.
315 An "anticipatable occurrence" is one that is the first occurrence in the
316 basic block, the operands are not modified in the basic block prior
317 to the occurrence and the output is not used between the start of
318 the block and the occurrence. */
319 struct occr *antic_occr;
320 /* List of available occurrences in basic blocks in the function.
321 An "available occurrence" is one that is the last occurrence in the
322 basic block and the operands are not modified by following statements in
323 the basic block [including this insn]. */
324 struct occr *avail_occr;
325 /* Non-null if the computation is PRE redundant.
326 The value is the newly created pseudo-reg to record a copy of the
327 expression in all the places that reach the redundant copy. */
328 rtx reaching_reg;
331 /* Occurrence of an expression.
332 There is one per basic block. If a pattern appears more than once the
333 last appearance is used [or first for anticipatable expressions]. */
335 struct occr
337 /* Next occurrence of this expression. */
338 struct occr *next;
339 /* The insn that computes the expression. */
340 rtx insn;
341 /* Nonzero if this [anticipatable] occurrence has been deleted. */
342 char deleted_p;
343 /* Nonzero if this [available] occurrence has been copied to
344 reaching_reg. */
345 /* ??? This is mutually exclusive with deleted_p, so they could share
346 the same byte. */
347 char copied_p;
350 /* Expression and copy propagation hash tables.
351 Each hash table is an array of buckets.
352 ??? It is known that if it were an array of entries, structure elements
353 `next_same_hash' and `bitmap_index' wouldn't be necessary. However, it is
354 not clear whether in the final analysis a sufficient amount of memory would
355 be saved as the size of the available expression bitmaps would be larger
356 [one could build a mapping table without holes afterwards though].
357 Someday I'll perform the computation and figure it out. */
359 struct hash_table
361 /* The table itself.
362 This is an array of `expr_hash_table_size' elements. */
363 struct expr **table;
365 /* Size of the hash table, in elements. */
366 unsigned int size;
368 /* Number of hash table elements. */
369 unsigned int n_elems;
371 /* Whether the table is the expression or the copy propagation one. */
372 int set_p;
375 /* Expression hash table. */
376 static struct hash_table expr_hash_table;
378 /* Copy propagation hash table. */
379 static struct hash_table set_hash_table;
381 /* Mapping of uids to cuids.
382 Only real insns get cuids. */
383 static int *uid_cuid;
385 /* Highest UID in UID_CUID. */
386 static int max_uid;
388 /* Get the cuid of an insn. */
389 #ifdef ENABLE_CHECKING
390 #define INSN_CUID(INSN) (INSN_UID (INSN) > max_uid ? (abort (), 0) : uid_cuid[INSN_UID (INSN)])
391 #else
392 #define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])
393 #endif
395 /* Number of cuids. */
396 static int max_cuid;
398 /* Mapping of cuids to insns. */
399 static rtx *cuid_insn;
401 /* Get insn from cuid. */
402 #define CUID_INSN(CUID) (cuid_insn[CUID])
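/* A small sketch (the helper below is hypothetical, not part of the original
   file) of why the CUID mapping is useful: CUIDs increase monotonically over
   real insns, so comparing them gives the relative order of two insns.  This
   is how the availability tests below compare first_set/last_set against
   INSN_CUID.  */

static int
insn_cuid_precedes (rtx a, rtx b)
{
  return INSN_CUID (a) < INSN_CUID (b);
}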
404 /* Maximum register number in function prior to doing gcse + 1.
405 Registers created during this pass have regno >= max_gcse_regno.
406 This is named with "gcse" to not collide with global of same name. */
407 static unsigned int max_gcse_regno;
409 /* Table of registers that are modified.
411 For each register, each element is a list of places where the pseudo-reg
412 is set.
414 For simplicity, GCSE is done on sets of pseudo-regs only. PRE GCSE only
415 requires knowledge of which blocks kill which regs [and thus could use
416 a bitmap instead of the lists `reg_set_table' uses].
418 `reg_set_table' could be turned into an array of bitmaps (num-bbs x
419 num-regs) [however perhaps it may be useful to keep the data as is]. One
420 advantage of recording things this way is that `reg_set_table' is fairly
421 sparse with respect to pseudo regs but for hard regs could be fairly dense
422 [relatively speaking]. And recording sets of pseudo-regs in lists speeds
423 up functions like compute_transp since in the case of pseudo-regs we only
424 need to iterate over the number of times a pseudo-reg is set, not over the
425 number of basic blocks [clearly there is a bit of a slow down in the cases
426 where a pseudo is set more than once in a block, however it is believed
427 that the net effect is to speed things up]. This isn't done for hard-regs
428 because recording call-clobbered hard-regs in `reg_set_table' at each
429 function call can consume a fair bit of memory, and iterating over
430 hard-regs stored this way in compute_transp will be more expensive. */
432 typedef struct reg_set
434 /* The next setting of this register. */
435 struct reg_set *next;
436 /* The insn where it was set. */
437 rtx insn;
438 } reg_set;
440 static reg_set **reg_set_table;
442 /* Size of `reg_set_table'.
443 The table starts out at max_gcse_regno + slop, and is enlarged as
444 necessary. */
445 static int reg_set_table_size;
447 /* Amount to grow `reg_set_table' by when it's full. */
448 #define REG_SET_TABLE_SLOP 100
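/* A minimal sketch of how the table above is consumed (the helper is
   hypothetical; per the comment above, the real consumer of these lists is
   compute_transp): walk the per-register chain to visit every insn that
   sets REGNO.  */

static int
count_sets_of_reg (int regno)
{
  struct reg_set *r;
  int n = 0;

  if (regno >= reg_set_table_size)
    return 0;
  for (r = reg_set_table[regno]; r != NULL; r = r->next)
    n++;		/* r->insn is one insn that sets REGNO.  */
  return n;
}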
450 /* This is a list of expressions which are MEMs and will be used by load
451 or store motion.
452 Load motion tracks MEMs which aren't killed by
453 anything except themselves (i.e. loads and stores to a single location).
454 We can then allow movement of these MEM refs with a little special
455 allowance. (all stores copy the same value to the reaching reg used
456 for the loads). This means all values used to store into memory must have
457 no side effects so we can re-issue the setter value.
458 Store Motion uses this structure as an expression table to track stores
459 which look interesting, and might be moveable towards the exit block. */
461 struct ls_expr
463 struct expr * expr; /* Gcse expression reference for LM. */
464 rtx pattern; /* Pattern of this mem. */
465 rtx pattern_regs; /* List of registers mentioned by the mem. */
466 rtx loads; /* INSN list of loads seen. */
467 rtx stores; /* INSN list of stores seen. */
468 struct ls_expr * next; /* Next in the list. */
469 int invalid; /* Invalid for some reason. */
470 int index; /* If it maps to a bitmap index. */
471 int hash_index; /* Index when in a hash table. */
472 rtx reaching_reg; /* Register to use when re-writing. */
475 /* Array of implicit set patterns indexed by basic block index. */
476 static rtx *implicit_sets;
478 /* Head of the list of load/store memory refs. */
479 static struct ls_expr * pre_ldst_mems = NULL;
481 /* Bitmap containing one bit for each register in the program.
482 Used when performing GCSE to track which registers have been set since
483 the start of the basic block. */
484 static regset reg_set_bitmap;
486 /* For each block, a bitmap of registers set in the block.
487 This is used by expr_killed_p and compute_transp.
488 It is computed during hash table computation and not by compute_sets
489 as it includes registers added since the last pass (or between cprop and
490 gcse) and it's currently not easy to realloc sbitmap vectors. */
491 static sbitmap *reg_set_in_block;
493 /* Array, indexed by basic block number for a list of insns which modify
494 memory within that block. */
495 static rtx * modify_mem_list;
496 bitmap modify_mem_list_set;
498 /* This array parallels modify_mem_list, but is kept canonicalized. */
499 static rtx * canon_modify_mem_list;
500 bitmap canon_modify_mem_list_set;
501 /* Various variables for statistics gathering. */
503 /* Memory used in a pass.
504 This isn't intended to be absolutely precise. Its intent is only
505 to keep an eye on memory usage. */
506 static int bytes_used;
508 /* GCSE substitutions made. */
509 static int gcse_subst_count;
510 /* Number of copy instructions created. */
511 static int gcse_create_count;
512 /* Number of constants propagated. */
513 static int const_prop_count;
514 /* Number of copies propagated. */
515 static int copy_prop_count;
517 /* These variables are used by classic GCSE.
518 Normally they'd be defined a bit later, but `rd_gen' needs to
519 be declared sooner. */
521 /* Each block has a bitmap of each type.
522 The length of each block's bitmap is:
524 max_cuid - for reaching definitions
525 n_exprs - for available expressions
527 Thus we view the bitmaps as two-dimensional arrays, i.e.
528 rd_kill[block_num][cuid_num]
529 ae_kill[block_num][expr_num] */
531 /* For reaching defs */
532 static sbitmap *rd_kill, *rd_gen, *reaching_defs, *rd_out;
534 /* for available exprs */
535 static sbitmap *ae_kill, *ae_gen, *ae_in, *ae_out;
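/* A minimal sketch (hypothetical helper) of the two-dimensional view
   described above: the row is the basic block, the column is the cuid or
   expression index.  */

static int
expr_killed_in_bb (basic_block bb, struct expr *e)
{
  return TEST_BIT (ae_kill[bb->index], e->bitmap_index);
}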
537 /* Objects of this type are passed around by the null-pointer check
538 removal routines. */
539 struct null_pointer_info
541 /* The basic block being processed. */
542 basic_block current_block;
543 /* The first register to be handled in this pass. */
544 unsigned int min_reg;
545 /* One greater than the last register to be handled in this pass. */
546 unsigned int max_reg;
547 sbitmap *nonnull_local;
548 sbitmap *nonnull_killed;
551 static void compute_can_copy (void);
552 static void *gmalloc (size_t) ATTRIBUTE_MALLOC;
553 static void *gcalloc (size_t, size_t) ATTRIBUTE_MALLOC;
554 static void *grealloc (void *, size_t);
555 static void *gcse_alloc (unsigned long);
556 static void alloc_gcse_mem (rtx);
557 static void free_gcse_mem (void);
558 static void alloc_reg_set_mem (int);
559 static void free_reg_set_mem (void);
560 static int get_bitmap_width (int, int, int);
561 static void record_one_set (int, rtx);
562 static void record_set_info (rtx, rtx, void *);
563 static void compute_sets (rtx);
564 static void hash_scan_insn (rtx, struct hash_table *, int);
565 static void hash_scan_set (rtx, rtx, struct hash_table *);
566 static void hash_scan_clobber (rtx, rtx, struct hash_table *);
567 static void hash_scan_call (rtx, rtx, struct hash_table *);
568 static int want_to_gcse_p (rtx);
569 static bool gcse_constant_p (rtx);
570 static int oprs_unchanged_p (rtx, rtx, int);
571 static int oprs_anticipatable_p (rtx, rtx);
572 static int oprs_available_p (rtx, rtx);
573 static void insert_expr_in_table (rtx, enum machine_mode, rtx, int, int,
574 struct hash_table *);
575 static void insert_set_in_table (rtx, rtx, struct hash_table *);
576 static unsigned int hash_expr (rtx, enum machine_mode, int *, int);
577 static unsigned int hash_expr_1 (rtx, enum machine_mode, int *);
578 static unsigned int hash_string_1 (const char *);
579 static unsigned int hash_set (int, int);
580 static int expr_equiv_p (rtx, rtx);
581 static void record_last_reg_set_info (rtx, int);
582 static void record_last_mem_set_info (rtx);
583 static void record_last_set_info (rtx, rtx, void *);
584 static void compute_hash_table (struct hash_table *);
585 static void alloc_hash_table (int, struct hash_table *, int);
586 static void free_hash_table (struct hash_table *);
587 static void compute_hash_table_work (struct hash_table *);
588 static void dump_hash_table (FILE *, const char *, struct hash_table *);
589 static struct expr *lookup_expr (rtx, struct hash_table *);
590 static struct expr *lookup_set (unsigned int, struct hash_table *);
591 static struct expr *next_set (unsigned int, struct expr *);
592 static void reset_opr_set_tables (void);
593 static int oprs_not_set_p (rtx, rtx);
594 static void mark_call (rtx);
595 static void mark_set (rtx, rtx);
596 static void mark_clobber (rtx, rtx);
597 static void mark_oprs_set (rtx);
598 static void alloc_cprop_mem (int, int);
599 static void free_cprop_mem (void);
600 static void compute_transp (rtx, int, sbitmap *, int);
601 static void compute_transpout (void);
602 static void compute_local_properties (sbitmap *, sbitmap *, sbitmap *,
603 struct hash_table *);
604 static void compute_cprop_data (void);
605 static void find_used_regs (rtx *, void *);
606 static int try_replace_reg (rtx, rtx, rtx);
607 static struct expr *find_avail_set (int, rtx);
608 static int cprop_jump (basic_block, rtx, rtx, rtx, rtx);
609 static void mems_conflict_for_gcse_p (rtx, rtx, void *);
610 static int load_killed_in_block_p (basic_block, int, rtx, int);
611 static void canon_list_insert (rtx, rtx, void *);
612 static int cprop_insn (rtx, int);
613 static int cprop (int);
614 static void find_implicit_sets (void);
615 static int one_cprop_pass (int, int, int);
616 static bool constprop_register (rtx, rtx, rtx, int);
617 static struct expr *find_bypass_set (int, int);
618 static bool reg_killed_on_edge (rtx, edge);
619 static int bypass_block (basic_block, rtx, rtx);
620 static int bypass_conditional_jumps (void);
621 static void alloc_pre_mem (int, int);
622 static void free_pre_mem (void);
623 static void compute_pre_data (void);
624 static int pre_expr_reaches_here_p (basic_block, struct expr *,
625 basic_block);
626 static void insert_insn_end_bb (struct expr *, basic_block, int);
627 static void pre_insert_copy_insn (struct expr *, rtx);
628 static void pre_insert_copies (void);
629 static int pre_delete (void);
630 static int pre_gcse (void);
631 static int one_pre_gcse_pass (int);
632 static void add_label_notes (rtx, rtx);
633 static void alloc_code_hoist_mem (int, int);
634 static void free_code_hoist_mem (void);
635 static void compute_code_hoist_vbeinout (void);
636 static void compute_code_hoist_data (void);
637 static int hoist_expr_reaches_here_p (basic_block, int, basic_block, char *);
638 static void hoist_code (void);
639 static int one_code_hoisting_pass (void);
640 static void alloc_rd_mem (int, int);
641 static void free_rd_mem (void);
642 static void handle_rd_kill_set (rtx, int, basic_block);
643 static void compute_kill_rd (void);
644 static void compute_rd (void);
645 static void alloc_avail_expr_mem (int, int);
646 static void free_avail_expr_mem (void);
647 static void compute_ae_gen (struct hash_table *);
648 static int expr_killed_p (rtx, basic_block);
649 static void compute_ae_kill (sbitmap *, sbitmap *, struct hash_table *);
650 static int expr_reaches_here_p (struct occr *, struct expr *, basic_block,
651 int);
652 static rtx computing_insn (struct expr *, rtx);
653 static int def_reaches_here_p (rtx, rtx);
654 static int can_disregard_other_sets (struct reg_set **, rtx, int);
655 static int handle_avail_expr (rtx, struct expr *);
656 static int classic_gcse (void);
657 static int one_classic_gcse_pass (int);
658 static void invalidate_nonnull_info (rtx, rtx, void *);
659 static int delete_null_pointer_checks_1 (unsigned int *, sbitmap *, sbitmap *,
660 struct null_pointer_info *);
661 static rtx process_insert_insn (struct expr *);
662 static int pre_edge_insert (struct edge_list *, struct expr **);
663 static int expr_reaches_here_p_work (struct occr *, struct expr *,
664 basic_block, int, char *);
665 static int pre_expr_reaches_here_p_work (basic_block, struct expr *,
666 basic_block, char *);
667 static struct ls_expr * ldst_entry (rtx);
668 static void free_ldst_entry (struct ls_expr *);
669 static void free_ldst_mems (void);
670 static void print_ldst_list (FILE *);
671 static struct ls_expr * find_rtx_in_ldst (rtx);
672 static int enumerate_ldsts (void);
673 static inline struct ls_expr * first_ls_expr (void);
674 static inline struct ls_expr * next_ls_expr (struct ls_expr *);
675 static int simple_mem (rtx);
676 static void invalidate_any_buried_refs (rtx);
677 static void compute_ld_motion_mems (void);
678 static void trim_ld_motion_mems (void);
679 static void update_ld_motion_stores (struct expr *);
680 static void reg_set_info (rtx, rtx, void *);
681 static bool store_ops_ok (rtx, int *);
682 static rtx extract_mentioned_regs (rtx);
683 static rtx extract_mentioned_regs_helper (rtx, rtx);
684 static void find_moveable_store (rtx, int *, int *);
685 static int compute_store_table (void);
686 static bool load_kills_store (rtx, rtx, int);
687 static bool find_loads (rtx, rtx, int);
688 static bool store_killed_in_insn (rtx, rtx, rtx, int);
689 static bool store_killed_after (rtx, rtx, rtx, basic_block, int *, rtx *);
690 static bool store_killed_before (rtx, rtx, rtx, basic_block, int *);
691 static void build_store_vectors (void);
692 static void insert_insn_start_bb (rtx, basic_block);
693 static int insert_store (struct ls_expr *, edge);
694 static void replace_store_insn (rtx, rtx, basic_block);
695 static void delete_store (struct ls_expr *, basic_block);
696 static void free_store_memory (void);
697 static void store_motion (void);
698 static void free_insn_expr_list_list (rtx *);
699 static void clear_modify_mem_tables (void);
700 static void free_modify_mem_tables (void);
701 static rtx gcse_emit_move_after (rtx, rtx, rtx);
702 static void local_cprop_find_used_regs (rtx *, void *);
703 static bool do_local_cprop (rtx, rtx, int, rtx*);
704 static bool adjust_libcall_notes (rtx, rtx, rtx, rtx*);
705 static void local_cprop_pass (int);
707 /* Entry point for global common subexpression elimination.
708 F is the first instruction in the function. */
711 gcse_main (rtx f, FILE *file)
713 int changed, pass;
714 /* Bytes used at start of pass. */
715 int initial_bytes_used;
716 /* Maximum number of bytes used by a pass. */
717 int max_pass_bytes;
718 /* Point to release obstack data from for each pass. */
719 char *gcse_obstack_bottom;
721 /* We do not construct an accurate cfg in functions which call
722 setjmp, so just punt to be safe. */
723 if (current_function_calls_setjmp)
724 return 0;
726 /* Assume that we do not need to run jump optimizations after gcse. */
727 run_jump_opt_after_gcse = 0;
729 /* For calling dump_foo fns from gdb. */
730 debug_stderr = stderr;
731 gcse_file = file;
733 /* Identify the basic block information for this function, including
734 successors and predecessors. */
735 max_gcse_regno = max_reg_num ();
737 if (file)
738 dump_flow_info (file);
740 /* Return if there's nothing to do. */
741 if (n_basic_blocks <= 1)
742 return 0;
744 /* Trying to perform global optimizations on flow graphs which have
745 a high connectivity will take a long time and is unlikely to be
746 particularly useful.
748 In normal circumstances a cfg should have about twice as many edges
749 as blocks. But we do not want to punish small functions which have
750 a couple switch statements. So we require a relatively large number
751 of basic blocks and the ratio of edges to blocks to be high. */
752 if (n_basic_blocks > 1000 && n_edges / n_basic_blocks >= 20)
754 if (warn_disabled_optimization)
755 warning ("GCSE disabled: %d > 1000 basic blocks and %d >= 20 edges/basic block",
756 n_basic_blocks, n_edges / n_basic_blocks);
757 return 0;
760 /* If allocating memory for the cprop bitmap would take up too much
761 storage it's better just to disable the optimization. */
762 if ((n_basic_blocks
763 * SBITMAP_SET_SIZE (max_gcse_regno)
764 * sizeof (SBITMAP_ELT_TYPE)) > MAX_GCSE_MEMORY)
766 if (warn_disabled_optimization)
767 warning ("GCSE disabled: %d basic blocks and %d registers",
768 n_basic_blocks, max_gcse_regno);
770 return 0;
773 gcc_obstack_init (&gcse_obstack);
774 bytes_used = 0;
776 /* We need alias. */
777 init_alias_analysis ();
778 /* Record where pseudo-registers are set. This data is kept accurate
779 during each pass. ??? We could also record hard-reg information here
780 [since it's unchanging], however it is currently done during hash table
781 computation.
783 It may be tempting to compute MEM set information here too, but MEM sets
784 will be subject to code motion one day and thus we need to compute
785 information about memory sets when we build the hash tables. */
787 alloc_reg_set_mem (max_gcse_regno);
788 compute_sets (f);
790 pass = 0;
791 initial_bytes_used = bytes_used;
792 max_pass_bytes = 0;
793 gcse_obstack_bottom = gcse_alloc (1);
794 changed = 1;
795 while (changed && pass < MAX_GCSE_PASSES)
797 changed = 0;
798 if (file)
799 fprintf (file, "GCSE pass %d\n\n", pass + 1);
801 /* Initialize bytes_used to the space for the pred/succ lists,
802 and the reg_set_table data. */
803 bytes_used = initial_bytes_used;
805 /* Each pass may create new registers, so recalculate each time. */
806 max_gcse_regno = max_reg_num ();
808 alloc_gcse_mem (f);
810 /* Don't allow constant propagation to modify jumps
811 during this pass. */
812 changed = one_cprop_pass (pass + 1, 0, 0);
814 if (optimize_size)
815 changed |= one_classic_gcse_pass (pass + 1);
816 else
818 changed |= one_pre_gcse_pass (pass + 1);
819 /* We may have just created new basic blocks. Release and
820 recompute various things which are sized on the number of
821 basic blocks. */
822 if (changed)
824 free_modify_mem_tables ();
825 modify_mem_list = gcalloc (last_basic_block, sizeof (rtx));
826 canon_modify_mem_list = gcalloc (last_basic_block, sizeof (rtx));
828 free_reg_set_mem ();
829 alloc_reg_set_mem (max_reg_num ());
830 compute_sets (f);
831 run_jump_opt_after_gcse = 1;
834 if (max_pass_bytes < bytes_used)
835 max_pass_bytes = bytes_used;
837 /* Free up memory, then reallocate for code hoisting. We can
838 not re-use the existing allocated memory because the tables
839 will not have info for the insns or registers created by
840 partial redundancy elimination. */
841 free_gcse_mem ();
843 /* It does not make sense to run code hoisting unless we are optimizing
844 for code size -- it rarely makes programs faster, and can make
845 them bigger if we did partial redundancy elimination (when optimizing
846 for space, we use a classic gcse algorithm instead of partial
847 redundancy algorithms). */
848 if (optimize_size)
850 max_gcse_regno = max_reg_num ();
851 alloc_gcse_mem (f);
852 changed |= one_code_hoisting_pass ();
853 free_gcse_mem ();
855 if (max_pass_bytes < bytes_used)
856 max_pass_bytes = bytes_used;
859 if (file)
861 fprintf (file, "\n");
862 fflush (file);
865 obstack_free (&gcse_obstack, gcse_obstack_bottom);
866 pass++;
869 /* Do one last pass of copy propagation, including cprop into
870 conditional jumps. */
872 max_gcse_regno = max_reg_num ();
873 alloc_gcse_mem (f);
874 /* This time, go ahead and allow cprop to alter jumps. */
875 one_cprop_pass (pass + 1, 1, 0);
876 free_gcse_mem ();
878 if (file)
880 fprintf (file, "GCSE of %s: %d basic blocks, ",
881 current_function_name, n_basic_blocks);
882 fprintf (file, "%d pass%s, %d bytes\n\n",
883 pass, pass > 1 ? "es" : "", max_pass_bytes);
886 obstack_free (&gcse_obstack, NULL);
887 free_reg_set_mem ();
888 /* We are finished with alias. */
889 end_alias_analysis ();
890 allocate_reg_info (max_reg_num (), FALSE, FALSE);
892 if (!optimize_size && flag_gcse_sm)
893 store_motion ();
895 /* Tell the caller whether a jump optimization pass should be run after gcse. */
896 return run_jump_opt_after_gcse;
899 /* Misc. utilities. */
901 /* Nonzero for each mode that supports (set (reg) (reg)).
902 This is trivially true for integer and floating point values.
903 It may or may not be true for condition codes. */
904 static char can_copy[(int) NUM_MACHINE_MODES];
906 /* Compute which modes support reg/reg copy operations. */
908 static void
909 compute_can_copy (void)
911 int i;
912 #ifndef AVOID_CCMODE_COPIES
913 rtx reg, insn;
914 #endif
915 memset (can_copy, 0, NUM_MACHINE_MODES);
917 start_sequence ();
918 for (i = 0; i < NUM_MACHINE_MODES; i++)
919 if (GET_MODE_CLASS (i) == MODE_CC)
921 #ifdef AVOID_CCMODE_COPIES
922 can_copy[i] = 0;
923 #else
924 reg = gen_rtx_REG ((enum machine_mode) i, LAST_VIRTUAL_REGISTER + 1);
925 insn = emit_insn (gen_rtx_SET (VOIDmode, reg, reg));
926 if (recog (PATTERN (insn), insn, NULL) >= 0)
927 can_copy[i] = 1;
928 #endif
930 else
931 can_copy[i] = 1;
933 end_sequence ();
936 /* Returns whether the mode supports reg/reg copy operations. */
938 bool
939 can_copy_p (enum machine_mode mode)
941 static bool can_copy_init_p = false;
943 if (! can_copy_init_p)
945 compute_can_copy ();
946 can_copy_init_p = true;
949 return can_copy[mode] != 0;
952 /* Cover function to xmalloc to record bytes allocated. */
954 static void *
955 gmalloc (size_t size)
957 bytes_used += size;
958 return xmalloc (size);
961 /* Cover function to xcalloc to record bytes allocated. */
963 static void *
964 gcalloc (size_t nelem, size_t elsize)
966 bytes_used += nelem * elsize;
967 return xcalloc (nelem, elsize);
970 /* Cover function to xrealloc.
971 We don't record the additional size since we don't know it.
972 It won't affect memory usage stats much anyway. */
974 static void *
975 grealloc (void *ptr, size_t size)
977 return xrealloc (ptr, size);
980 /* Cover function to obstack_alloc. */
982 static void *
983 gcse_alloc (unsigned long size)
985 bytes_used += size;
986 return obstack_alloc (&gcse_obstack, size);
989 /* Allocate memory for the cuid mapping array,
990 and reg/memory set tracking tables.
992 This is called at the start of each pass. */
994 static void
995 alloc_gcse_mem (rtx f)
997 int i;
998 rtx insn;
1000 /* Find the largest UID and create a mapping from UIDs to CUIDs.
1001 CUIDs are like UIDs except they increase monotonically, have no gaps,
1002 and only apply to real insns. */
1004 max_uid = get_max_uid ();
1005 uid_cuid = gcalloc (max_uid + 1, sizeof (int));
1006 for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
1008 if (INSN_P (insn))
1009 uid_cuid[INSN_UID (insn)] = i++;
1010 else
1011 uid_cuid[INSN_UID (insn)] = i;
1014 /* Create a table mapping cuids to insns. */
1016 max_cuid = i;
1017 cuid_insn = gcalloc (max_cuid + 1, sizeof (rtx));
1018 for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
1019 if (INSN_P (insn))
1020 CUID_INSN (i++) = insn;
1022 /* Allocate vars to track sets of regs. */
1023 reg_set_bitmap = BITMAP_XMALLOC ();
1025 /* Allocate vars to track sets of regs, memory per block. */
1026 reg_set_in_block = sbitmap_vector_alloc (last_basic_block, max_gcse_regno);
1027 /* Allocate array to keep a list of insns which modify memory in each
1028 basic block. */
1029 modify_mem_list = gcalloc (last_basic_block, sizeof (rtx));
1030 canon_modify_mem_list = gcalloc (last_basic_block, sizeof (rtx));
1031 modify_mem_list_set = BITMAP_XMALLOC ();
1032 canon_modify_mem_list_set = BITMAP_XMALLOC ();
1035 /* Free memory allocated by alloc_gcse_mem. */
1037 static void
1038 free_gcse_mem (void)
1040 free (uid_cuid);
1041 free (cuid_insn);
1043 BITMAP_XFREE (reg_set_bitmap);
1045 sbitmap_vector_free (reg_set_in_block);
1046 free_modify_mem_tables ();
1047 BITMAP_XFREE (modify_mem_list_set);
1048 BITMAP_XFREE (canon_modify_mem_list_set);
1051 /* Many of the global optimization algorithms work by solving dataflow
1052 equations for various expressions. Initially, some local value is
1053 computed for each expression in each block. Then, the values across the
1054 various blocks are combined (by following flow graph edges) to arrive at
1055 global values. Conceptually, each set of equations is independent. We
1056 may therefore solve all the equations in parallel, solve them one at a
1057 time, or pick any intermediate approach.
1059 When you're going to need N two-dimensional bitmaps, each X (say, the
1060 number of blocks) by Y (say, the number of expressions), call this
1061 function. It's not important what X and Y represent; only that Y
1062 correspond to the things that can be done in parallel. This function will
1063 return an appropriate chunking factor C; you should solve C sets of
1064 equations in parallel. By going through this function, we can easily
1065 trade space against time; by solving fewer equations in parallel we use
1066 less space. */
1068 static int
1069 get_bitmap_width (int n, int x, int y)
1071 /* It's not really worth figuring out *exactly* how much memory will
1072 be used by a particular choice. The important thing is to get
1073 something approximately right. */
1074 size_t max_bitmap_memory = 10 * 1024 * 1024;
1076 /* The number of bytes we'd use for a single column of minimum
1077 width. */
1078 size_t column_size = n * x * sizeof (SBITMAP_ELT_TYPE);
1080 /* Often, it's reasonable just to solve all the equations in
1081 parallel. */
1082 if (column_size * SBITMAP_SET_SIZE (y) <= max_bitmap_memory)
1083 return y;
1085 /* Otherwise, pick the largest width we can, without going over the
1086 limit. */
1087 return SBITMAP_ELT_BITS * ((max_bitmap_memory + column_size - 1)
1088 / column_size);
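/* A sketch of how a caller is meant to use the chunking factor.  The loop
   below is only an outline and the function is hypothetical; in this file
   the null-pointer-check removal code works this way, handling registers a
   chunk at a time via the min_reg/max_reg fields of struct
   null_pointer_info above.  */

static void
solve_in_chunks (int n_maps, int n_blocks, int n_items)
{
  int chunk = get_bitmap_width (n_maps, n_blocks, n_items);
  int start;

  for (start = 0; start < n_items; start += chunk)
    {
      /* Allocate N_MAPS sbitmap vectors of N_BLOCKS rows by at most CHUNK
         columns, solve the dataflow equations for the items in
         [START, MIN (START + CHUNK, N_ITEMS)), then free the vectors
         before the next chunk.  */
    }
}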
1091 /* Compute the local properties of each recorded expression.
1093 Local properties are those that are defined by the block, irrespective of
1094 other blocks.
1096 An expression is transparent in a block if its operands are not modified
1097 in the block.
1099 An expression is computed (locally available) in a block if it is computed
1100 at least once and the expression would contain the same value if the
1101 computation was moved to the end of the block.
1103 An expression is locally anticipatable in a block if it is computed at
1104 least once and the expression would contain the same value if the computation
1105 was moved to the beginning of the block.
1107 We call this routine for cprop, pre and code hoisting. They all compute
1108 basically the same information and thus can easily share this code.
1110 TRANSP, COMP, and ANTLOC are destination sbitmaps for recording local
1111 properties. If NULL, then it is not necessary to compute or record that
1112 particular property.
1114 TABLE controls which hash table to look at. If it is the set hash table,
1115 additionally, TRANSP is computed as ~TRANSP, since this is really cprop's
1116 ABSALTERED. */
1118 static void
1119 compute_local_properties (sbitmap *transp, sbitmap *comp, sbitmap *antloc, struct hash_table *table)
1121 unsigned int i;
1123 /* Initialize any bitmaps that were passed in. */
1124 if (transp)
1126 if (table->set_p)
1127 sbitmap_vector_zero (transp, last_basic_block);
1128 else
1129 sbitmap_vector_ones (transp, last_basic_block);
1132 if (comp)
1133 sbitmap_vector_zero (comp, last_basic_block);
1134 if (antloc)
1135 sbitmap_vector_zero (antloc, last_basic_block);
1137 for (i = 0; i < table->size; i++)
1139 struct expr *expr;
1141 for (expr = table->table[i]; expr != NULL; expr = expr->next_same_hash)
1143 int indx = expr->bitmap_index;
1144 struct occr *occr;
1146 /* The expression is transparent in this block if it is not killed.
1147 We start by assuming all are transparent [none are killed], and
1148 then reset the bits for those that are. */
1149 if (transp)
1150 compute_transp (expr->expr, indx, transp, table->set_p);
1152 /* The occurrences recorded in antic_occr are exactly those that
1153 we want to set to nonzero in ANTLOC. */
1154 if (antloc)
1155 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
1157 SET_BIT (antloc[BLOCK_NUM (occr->insn)], indx);
1159 /* While we're scanning the table, this is a good place to
1160 initialize this. */
1161 occr->deleted_p = 0;
1164 /* The occurrences recorded in avail_occr are exactly those that
1165 we want to set to nonzero in COMP. */
1166 if (comp)
1167 for (occr = expr->avail_occr; occr != NULL; occr = occr->next)
1169 SET_BIT (comp[BLOCK_NUM (occr->insn)], indx);
1171 /* While we're scanning the table, this is a good place to
1172 initialize this. */
1173 occr->copied_p = 0;
1176 /* While we're scanning the table, this is a good place to
1177 initialize this. */
1178 expr->reaching_reg = 0;
1183 /* Register set information.
1185 `reg_set_table' records where each register is set or otherwise
1186 modified. */
1188 static struct obstack reg_set_obstack;
1190 static void
1191 alloc_reg_set_mem (int n_regs)
1193 reg_set_table_size = n_regs + REG_SET_TABLE_SLOP;
1194 reg_set_table = gcalloc (reg_set_table_size, sizeof (struct reg_set *));
1196 gcc_obstack_init (&reg_set_obstack);
1199 static void
1200 free_reg_set_mem (void)
1202 free (reg_set_table);
1203 obstack_free (&reg_set_obstack, NULL);
1206 /* Record REGNO in the reg_set table. */
1208 static void
1209 record_one_set (int regno, rtx insn)
1211 /* Allocate a new reg_set element and link it onto the list. */
1212 struct reg_set *new_reg_info;
1214 /* If the table isn't big enough, enlarge it. */
1215 if (regno >= reg_set_table_size)
1217 int new_size = regno + REG_SET_TABLE_SLOP;
1219 reg_set_table = grealloc (reg_set_table,
1220 new_size * sizeof (struct reg_set *));
1221 memset (reg_set_table + reg_set_table_size, 0,
1222 (new_size - reg_set_table_size) * sizeof (struct reg_set *));
1223 reg_set_table_size = new_size;
1226 new_reg_info = obstack_alloc (&reg_set_obstack, sizeof (struct reg_set));
1227 bytes_used += sizeof (struct reg_set);
1228 new_reg_info->insn = insn;
1229 new_reg_info->next = reg_set_table[regno];
1230 reg_set_table[regno] = new_reg_info;
1233 /* Called from compute_sets via note_stores to handle one SET or CLOBBER in
1234 an insn. The DATA is really the instruction in which the SET is
1235 occurring. */
1237 static void
1238 record_set_info (rtx dest, rtx setter ATTRIBUTE_UNUSED, void *data)
1240 rtx record_set_insn = (rtx) data;
1242 if (GET_CODE (dest) == REG && REGNO (dest) >= FIRST_PSEUDO_REGISTER)
1243 record_one_set (REGNO (dest), record_set_insn);
1246 /* Scan the function and record each set of each pseudo-register.
1248 This is called once, at the start of the gcse pass. See the comments for
1249 `reg_set_table' for further documentation. */
1251 static void
1252 compute_sets (rtx f)
1254 rtx insn;
1256 for (insn = f; insn != 0; insn = NEXT_INSN (insn))
1257 if (INSN_P (insn))
1258 note_stores (PATTERN (insn), record_set_info, insn);
1261 /* Hash table support. */
1263 struct reg_avail_info
1265 basic_block last_bb;
1266 int first_set;
1267 int last_set;
1270 static struct reg_avail_info *reg_avail_info;
1271 static basic_block current_bb;
1274 /* See whether X, the source of a set, is something we want to consider for
1275 GCSE. */
1277 static GTY(()) rtx test_insn;
1278 static int
1279 want_to_gcse_p (rtx x)
1281 int num_clobbers = 0;
1282 int icode;
1284 switch (GET_CODE (x))
1286 case REG:
1287 case SUBREG:
1288 case CONST_INT:
1289 case CONST_DOUBLE:
1290 case CONST_VECTOR:
1291 case CALL:
1292 case CONSTANT_P_RTX:
1293 return 0;
1295 default:
1296 break;
1299 /* If this is a valid operand, we are OK. If it's VOIDmode, we aren't. */
1300 if (general_operand (x, GET_MODE (x)))
1301 return 1;
1302 else if (GET_MODE (x) == VOIDmode)
1303 return 0;
1305 /* Otherwise, check if we can make a valid insn from it. First initialize
1306 our test insn if we haven't already. */
1307 if (test_insn == 0)
1309 test_insn
1310 = make_insn_raw (gen_rtx_SET (VOIDmode,
1311 gen_rtx_REG (word_mode,
1312 FIRST_PSEUDO_REGISTER * 2),
1313 const0_rtx));
1314 NEXT_INSN (test_insn) = PREV_INSN (test_insn) = 0;
1317 /* Now make an insn like the one we would make when GCSE'ing and see if
1318 valid. */
1319 PUT_MODE (SET_DEST (PATTERN (test_insn)), GET_MODE (x));
1320 SET_SRC (PATTERN (test_insn)) = x;
1321 return ((icode = recog (PATTERN (test_insn), test_insn, &num_clobbers)) >= 0
1322 && (num_clobbers == 0 || ! added_clobbers_hard_reg_p (icode)));
1325 /* Return nonzero if the operands of expression X are unchanged from the
1326 start of INSN's basic block up to but not including INSN (if AVAIL_P == 0),
1327 or from INSN to the end of INSN's basic block (if AVAIL_P != 0). */
1329 static int
1330 oprs_unchanged_p (rtx x, rtx insn, int avail_p)
1332 int i, j;
1333 enum rtx_code code;
1334 const char *fmt;
1336 if (x == 0)
1337 return 1;
1339 code = GET_CODE (x);
1340 switch (code)
1342 case REG:
1344 struct reg_avail_info *info = &reg_avail_info[REGNO (x)];
1346 if (info->last_bb != current_bb)
1347 return 1;
1348 if (avail_p)
1349 return info->last_set < INSN_CUID (insn);
1350 else
1351 return info->first_set >= INSN_CUID (insn);
1354 case MEM:
1355 if (load_killed_in_block_p (current_bb, INSN_CUID (insn),
1356 x, avail_p))
1357 return 0;
1358 else
1359 return oprs_unchanged_p (XEXP (x, 0), insn, avail_p);
1361 case PRE_DEC:
1362 case PRE_INC:
1363 case POST_DEC:
1364 case POST_INC:
1365 case PRE_MODIFY:
1366 case POST_MODIFY:
1367 return 0;
1369 case PC:
1370 case CC0: /*FIXME*/
1371 case CONST:
1372 case CONST_INT:
1373 case CONST_DOUBLE:
1374 case CONST_VECTOR:
1375 case SYMBOL_REF:
1376 case LABEL_REF:
1377 case ADDR_VEC:
1378 case ADDR_DIFF_VEC:
1379 return 1;
1381 default:
1382 break;
1385 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
1387 if (fmt[i] == 'e')
1389 /* If we are about to do the last recursive call needed at this
1390 level, change it into iteration. This function is called enough
1391 to be worth it. */
1392 if (i == 0)
1393 return oprs_unchanged_p (XEXP (x, i), insn, avail_p);
1395 else if (! oprs_unchanged_p (XEXP (x, i), insn, avail_p))
1396 return 0;
1398 else if (fmt[i] == 'E')
1399 for (j = 0; j < XVECLEN (x, i); j++)
1400 if (! oprs_unchanged_p (XVECEXP (x, i, j), insn, avail_p))
1401 return 0;
1404 return 1;
1407 /* Used for communication between mems_conflict_for_gcse_p and
1408 load_killed_in_block_p. Nonzero if mems_conflict_for_gcse_p finds a
1409 conflict between two memory references. */
1410 static int gcse_mems_conflict_p;
1412 /* Used for communication between mems_conflict_for_gcse_p and
1413 load_killed_in_block_p. A memory reference for a load instruction,
1414 mems_conflict_for_gcse_p will see if a memory store conflicts with
1415 this memory load. */
1416 static rtx gcse_mem_operand;
1418 /* DEST is the output of an instruction. If it is a memory reference, and
1419 possibly conflicts with the load found in gcse_mem_operand, then set
1420 gcse_mems_conflict_p to a nonzero value. */
1422 static void
1423 mems_conflict_for_gcse_p (rtx dest, rtx setter ATTRIBUTE_UNUSED,
1424 void *data ATTRIBUTE_UNUSED)
1426 while (GET_CODE (dest) == SUBREG
1427 || GET_CODE (dest) == ZERO_EXTRACT
1428 || GET_CODE (dest) == SIGN_EXTRACT
1429 || GET_CODE (dest) == STRICT_LOW_PART)
1430 dest = XEXP (dest, 0);
1432 /* If DEST is not a MEM, then it will not conflict with the load. Note
1433 that function calls are assumed to clobber memory, but are handled
1434 elsewhere. */
1435 if (GET_CODE (dest) != MEM)
1436 return;
1438 /* If we are setting a MEM in our list of specially recognized MEMs,
1439 don't mark as killed this time. */
1441 if (expr_equiv_p (dest, gcse_mem_operand) && pre_ldst_mems != NULL)
1443 if (!find_rtx_in_ldst (dest))
1444 gcse_mems_conflict_p = 1;
1445 return;
1448 if (true_dependence (dest, GET_MODE (dest), gcse_mem_operand,
1449 rtx_addr_varies_p))
1450 gcse_mems_conflict_p = 1;
1453 /* Return nonzero if the expression in X (a memory reference) is killed
1454 in block BB before or after the insn with the CUID in UID_LIMIT.
1455 AVAIL_P is nonzero for kills after UID_LIMIT, and zero for kills
1456 before UID_LIMIT.
1458 To check the entire block, set UID_LIMIT to max_uid + 1 and
1459 AVAIL_P to 0. */
1461 static int
1462 load_killed_in_block_p (basic_block bb, int uid_limit, rtx x, int avail_p)
1464 rtx list_entry = modify_mem_list[bb->index];
1465 while (list_entry)
1467 rtx setter;
1468 /* Ignore entries in the list that do not apply. */
1469 if ((avail_p
1470 && INSN_CUID (XEXP (list_entry, 0)) < uid_limit)
1471 || (! avail_p
1472 && INSN_CUID (XEXP (list_entry, 0)) > uid_limit))
1474 list_entry = XEXP (list_entry, 1);
1475 continue;
1478 setter = XEXP (list_entry, 0);
1480 /* If SETTER is a call everything is clobbered. Note that calls
1481 to pure functions are never put on the list, so we need not
1482 worry about them. */
1483 if (GET_CODE (setter) == CALL_INSN)
1484 return 1;
1486 /* SETTER must be an INSN of some kind that sets memory. Call
1487 note_stores to examine each hunk of memory that is modified.
1489 The note_stores interface is pretty limited, so we have to
1490 communicate via global variables. Yuk. */
1491 gcse_mem_operand = x;
1492 gcse_mems_conflict_p = 0;
1493 note_stores (PATTERN (setter), mems_conflict_for_gcse_p, NULL);
1494 if (gcse_mems_conflict_p)
1495 return 1;
1496 list_entry = XEXP (list_entry, 1);
1498 return 0;
1501 /* Return nonzero if the operands of expression X are unchanged from
1502 the start of INSN's basic block up to but not including INSN. */
1504 static int
1505 oprs_anticipatable_p (rtx x, rtx insn)
1507 return oprs_unchanged_p (x, insn, 0);
1510 /* Return nonzero if the operands of expression X are unchanged from
1511 INSN to the end of INSN's basic block. */
1513 static int
1514 oprs_available_p (rtx x, rtx insn)
1516 return oprs_unchanged_p (x, insn, 1);
1519 /* Hash expression X.
1521 MODE is only used if X is a CONST_INT. DO_NOT_RECORD_P is a boolean
1522 indicating if a volatile operand is found or if the expression contains
1523 something we don't want to insert in the table.
1525 ??? One might want to merge this with canon_hash. Later. */
1527 static unsigned int
1528 hash_expr (rtx x, enum machine_mode mode, int *do_not_record_p, int hash_table_size)
1530 unsigned int hash;
1532 *do_not_record_p = 0;
1534 hash = hash_expr_1 (x, mode, do_not_record_p);
1535 return hash % hash_table_size;
1538 /* Hash a string. Just add its bytes up. */
1540 static inline unsigned
1541 hash_string_1 (const char *ps)
1543 unsigned hash = 0;
1544 const unsigned char *p = (const unsigned char *) ps;
1546 if (p)
1547 while (*p)
1548 hash += *p++;
1550 return hash;
1553 /* Subroutine of hash_expr to do the actual work. */
1555 static unsigned int
1556 hash_expr_1 (rtx x, enum machine_mode mode, int *do_not_record_p)
1558 int i, j;
1559 unsigned hash = 0;
1560 enum rtx_code code;
1561 const char *fmt;
1563 /* Used to turn recursion into iteration. We can't rely on GCC's
1564 tail-recursion elimination since we need to keep accumulating values
1565 in HASH. */
1567 if (x == 0)
1568 return hash;
1570 repeat:
1571 code = GET_CODE (x);
1572 switch (code)
1574 case REG:
1575 hash += ((unsigned int) REG << 7) + REGNO (x);
1576 return hash;
1578 case CONST_INT:
1579 hash += (((unsigned int) CONST_INT << 7) + (unsigned int) mode
1580 + (unsigned int) INTVAL (x));
1581 return hash;
1583 case CONST_DOUBLE:
1584 /* This is like the general case, except that it only counts
1585 the integers representing the constant. */
1586 hash += (unsigned int) code + (unsigned int) GET_MODE (x);
1587 if (GET_MODE (x) != VOIDmode)
1588 for (i = 2; i < GET_RTX_LENGTH (CONST_DOUBLE); i++)
1589 hash += (unsigned int) XWINT (x, i);
1590 else
1591 hash += ((unsigned int) CONST_DOUBLE_LOW (x)
1592 + (unsigned int) CONST_DOUBLE_HIGH (x));
1593 return hash;
1595 case CONST_VECTOR:
1597 int units;
1598 rtx elt;
1600 units = CONST_VECTOR_NUNITS (x);
1602 for (i = 0; i < units; ++i)
1604 elt = CONST_VECTOR_ELT (x, i);
1605 hash += hash_expr_1 (elt, GET_MODE (elt), do_not_record_p);
1608 return hash;
1611 /* Assume there is only one rtx object for any given label. */
1612 case LABEL_REF:
1613 /* We don't hash on the address of the CODE_LABEL to avoid bootstrap
1614 differences and differences between each stage's debugging dumps. */
1615 hash += (((unsigned int) LABEL_REF << 7)
1616 + CODE_LABEL_NUMBER (XEXP (x, 0)));
1617 return hash;
1619 case SYMBOL_REF:
1621 /* Don't hash on the symbol's address to avoid bootstrap differences.
1622 Different hash values may cause expressions to be recorded in
1623 different orders and thus different registers to be used in the
1624 final assembler. This also avoids differences in the dump files
1625 between various stages. */
1626 unsigned int h = 0;
1627 const unsigned char *p = (const unsigned char *) XSTR (x, 0);
1629 while (*p)
1630 h += (h << 7) + *p++; /* ??? revisit */
1632 hash += ((unsigned int) SYMBOL_REF << 7) + h;
1633 return hash;
1636 case MEM:
1637 if (MEM_VOLATILE_P (x))
1639 *do_not_record_p = 1;
1640 return 0;
1643 hash += (unsigned int) MEM;
1644 /* We used the alias set for hashing, but this is not good, since the alias
1645 set may differ between the -fprofile-arcs and -fbranch-probabilities compilations,
1646 causing the profiles to fail to match. */
1647 x = XEXP (x, 0);
1648 goto repeat;
1650 case PRE_DEC:
1651 case PRE_INC:
1652 case POST_DEC:
1653 case POST_INC:
1654 case PC:
1655 case CC0:
1656 case CALL:
1657 case UNSPEC_VOLATILE:
1658 *do_not_record_p = 1;
1659 return 0;
1661 case ASM_OPERANDS:
1662 if (MEM_VOLATILE_P (x))
1664 *do_not_record_p = 1;
1665 return 0;
1667 else
1669 /* We don't want to take the filename and line into account. */
1670 hash += (unsigned) code + (unsigned) GET_MODE (x)
1671 + hash_string_1 (ASM_OPERANDS_TEMPLATE (x))
1672 + hash_string_1 (ASM_OPERANDS_OUTPUT_CONSTRAINT (x))
1673 + (unsigned) ASM_OPERANDS_OUTPUT_IDX (x);
1675 if (ASM_OPERANDS_INPUT_LENGTH (x))
1677 for (i = 1; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
1679 hash += (hash_expr_1 (ASM_OPERANDS_INPUT (x, i),
1680 GET_MODE (ASM_OPERANDS_INPUT (x, i)),
1681 do_not_record_p)
1682 + hash_string_1 (ASM_OPERANDS_INPUT_CONSTRAINT
1683 (x, i)));
1686 hash += hash_string_1 (ASM_OPERANDS_INPUT_CONSTRAINT (x, 0));
1687 x = ASM_OPERANDS_INPUT (x, 0);
1688 mode = GET_MODE (x);
1689 goto repeat;
1691 return hash;
1694 default:
1695 break;
1698 hash += (unsigned) code + (unsigned) GET_MODE (x);
1699 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
1701 if (fmt[i] == 'e')
1703 /* If we are about to do the last recursive call
1704 needed at this level, change it into iteration.
1705 This function is called enough to be worth it. */
1706 if (i == 0)
1708 x = XEXP (x, i);
1709 goto repeat;
1712 hash += hash_expr_1 (XEXP (x, i), 0, do_not_record_p);
1713 if (*do_not_record_p)
1714 return 0;
1717 else if (fmt[i] == 'E')
1718 for (j = 0; j < XVECLEN (x, i); j++)
1720 hash += hash_expr_1 (XVECEXP (x, i, j), 0, do_not_record_p);
1721 if (*do_not_record_p)
1722 return 0;
1725 else if (fmt[i] == 's')
1726 hash += hash_string_1 (XSTR (x, i));
1727 else if (fmt[i] == 'i')
1728 hash += (unsigned int) XINT (x, i);
1729 else
1730 abort ();
1733 return hash;
1736 /* Hash a set of register REGNO.
1738 Sets are hashed on the register that is set. This simplifies the PRE copy
1739 propagation code.
1741 ??? May need to make things more elaborate. Later, as necessary. */
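/* For illustration only: with a 31-bucket table, a SET of pseudo-reg 100
   hashes to bucket 100 % 31 == 7; every SET of a given register therefore
   lands in the same bucket, which is what lookup_set relies on. */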
1743 static unsigned int
1744 hash_set (int regno, int hash_table_size)
1746 unsigned int hash;
1748 hash = regno;
1749 return hash % hash_table_size;
1752 /* Return nonzero if exp1 is equivalent to exp2.
1753 ??? Borrowed from cse.c. Might want to remerge with cse.c. Later. */
1755 static int
1756 expr_equiv_p (rtx x, rtx y)
1758 int i, j;
1759 enum rtx_code code;
1760 const char *fmt;
1762 if (x == y)
1763 return 1;
1765 if (x == 0 || y == 0)
1766 return 0;
1768 code = GET_CODE (x);
1769 if (code != GET_CODE (y))
1770 return 0;
1772 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
1773 if (GET_MODE (x) != GET_MODE (y))
1774 return 0;
1776 switch (code)
1778 case PC:
1779 case CC0:
1780 case CONST_INT:
1781 return 0;
1783 case LABEL_REF:
1784 return XEXP (x, 0) == XEXP (y, 0);
1786 case SYMBOL_REF:
1787 return XSTR (x, 0) == XSTR (y, 0);
1789 case REG:
1790 return REGNO (x) == REGNO (y);
1792 case MEM:
1793 /* Can't merge two expressions in different alias sets, since we can
1794 decide that the expression is transparent in a block when it isn't,
1795 due to it being set with a different alias set. */
1796 if (MEM_ALIAS_SET (x) != MEM_ALIAS_SET (y))
1797 return 0;
1798 break;
1800 /* For commutative operations, check both orders. */
1801 case PLUS:
1802 case MULT:
1803 case AND:
1804 case IOR:
1805 case XOR:
1806 case NE:
1807 case EQ:
1808 return ((expr_equiv_p (XEXP (x, 0), XEXP (y, 0))
1809 && expr_equiv_p (XEXP (x, 1), XEXP (y, 1)))
1810 || (expr_equiv_p (XEXP (x, 0), XEXP (y, 1))
1811 && expr_equiv_p (XEXP (x, 1), XEXP (y, 0))));
1813 case ASM_OPERANDS:
1814 /* We don't use the generic code below because we want to
1815 disregard filename and line numbers. */
1817 /* A volatile asm isn't equivalent to any other. */
1818 if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
1819 return 0;
1821 if (GET_MODE (x) != GET_MODE (y)
1822 || strcmp (ASM_OPERANDS_TEMPLATE (x), ASM_OPERANDS_TEMPLATE (y))
1823 || strcmp (ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
1824 ASM_OPERANDS_OUTPUT_CONSTRAINT (y))
1825 || ASM_OPERANDS_OUTPUT_IDX (x) != ASM_OPERANDS_OUTPUT_IDX (y)
1826 || ASM_OPERANDS_INPUT_LENGTH (x) != ASM_OPERANDS_INPUT_LENGTH (y))
1827 return 0;
1829 if (ASM_OPERANDS_INPUT_LENGTH (x))
1831 for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
1832 if (! expr_equiv_p (ASM_OPERANDS_INPUT (x, i),
1833 ASM_OPERANDS_INPUT (y, i))
1834 || strcmp (ASM_OPERANDS_INPUT_CONSTRAINT (x, i),
1835 ASM_OPERANDS_INPUT_CONSTRAINT (y, i)))
1836 return 0;
1839 return 1;
1841 default:
1842 break;
1845 /* Compare the elements. If any pair of corresponding elements
1846 fail to match, return 0 for the whole thing. */
1848 fmt = GET_RTX_FORMAT (code);
1849 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1851 switch (fmt[i])
1853 case 'e':
1854 if (! expr_equiv_p (XEXP (x, i), XEXP (y, i)))
1855 return 0;
1856 break;
1858 case 'E':
1859 if (XVECLEN (x, i) != XVECLEN (y, i))
1860 return 0;
1861 for (j = 0; j < XVECLEN (x, i); j++)
1862 if (! expr_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j)))
1863 return 0;
1864 break;
1866 case 's':
1867 if (strcmp (XSTR (x, i), XSTR (y, i)))
1868 return 0;
1869 break;
1871 case 'i':
1872 if (XINT (x, i) != XINT (y, i))
1873 return 0;
1874 break;
1876 case 'w':
1877 if (XWINT (x, i) != XWINT (y, i))
1878 return 0;
1879 break;
1881 case '0':
1882 break;
1884 default:
1885 abort ();
1889 return 1;
1892 /* Insert expression X in INSN in the hash TABLE.
1893 If it is already present, record it as the last occurrence in INSN's
1894 basic block.
1896 MODE is the mode of the value X is being stored into.
1897 It is only used if X is a CONST_INT.
1899 ANTIC_P is nonzero if X is an anticipatable expression.
1900 AVAIL_P is nonzero if X is an available expression. */
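/* A rough sketch of the structure built here: each bucket of TABLE->table
   is a chain of struct expr linked through next_same_hash; each expr keeps
   two occurrence lists, antic_occr and avail_occr, each holding at most one
   insn per basic block -- the first occurrence in the block for antic_occr
   and the last one for avail_occr. */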
1902 static void
1903 insert_expr_in_table (rtx x, enum machine_mode mode, rtx insn, int antic_p,
1904 int avail_p, struct hash_table *table)
1906 int found, do_not_record_p;
1907 unsigned int hash;
1908 struct expr *cur_expr, *last_expr = NULL;
1909 struct occr *antic_occr, *avail_occr;
1910 struct occr *last_occr = NULL;
1912 hash = hash_expr (x, mode, &do_not_record_p, table->size);
1914 /* Do not insert expression in table if it contains volatile operands,
1915 or if hash_expr determines the expression is something we don't want
1916 to or can't handle. */
1917 if (do_not_record_p)
1918 return;
1920 cur_expr = table->table[hash];
1921 found = 0;
1923 while (cur_expr && 0 == (found = expr_equiv_p (cur_expr->expr, x)))
1925 /* If the expression isn't found, save a pointer to the end of
1926 the list. */
1927 last_expr = cur_expr;
1928 cur_expr = cur_expr->next_same_hash;
1931 if (! found)
1933 cur_expr = gcse_alloc (sizeof (struct expr));
1934 bytes_used += sizeof (struct expr);
1935 if (table->table[hash] == NULL)
1936 /* This is the first pattern that hashed to this index. */
1937 table->table[hash] = cur_expr;
1938 else
1939 /* Add EXPR to end of this hash chain. */
1940 last_expr->next_same_hash = cur_expr;
1942 /* Set the fields of the expr element. */
1943 cur_expr->expr = x;
1944 cur_expr->bitmap_index = table->n_elems++;
1945 cur_expr->next_same_hash = NULL;
1946 cur_expr->antic_occr = NULL;
1947 cur_expr->avail_occr = NULL;
1950 /* Now record the occurrence(s). */
1951 if (antic_p)
1953 antic_occr = cur_expr->antic_occr;
1955 /* Search for another occurrence in the same basic block. */
1956 while (antic_occr && BLOCK_NUM (antic_occr->insn) != BLOCK_NUM (insn))
1958 /* If an occurrence isn't found, save a pointer to the end of
1959 the list. */
1960 last_occr = antic_occr;
1961 antic_occr = antic_occr->next;
1964 if (antic_occr)
1965 /* Found another instance of the expression in the same basic block.
1966 Prefer the currently recorded one. We want the first one in the
1967 block and the block is scanned from start to end. */
1968 ; /* nothing to do */
1969 else
1971 /* First occurrence of this expression in this basic block. */
1972 antic_occr = gcse_alloc (sizeof (struct occr));
1973 bytes_used += sizeof (struct occr);
1974 /* First occurrence of this expression in any block? */
1975 if (cur_expr->antic_occr == NULL)
1976 cur_expr->antic_occr = antic_occr;
1977 else
1978 last_occr->next = antic_occr;
1980 antic_occr->insn = insn;
1981 antic_occr->next = NULL;
1985 if (avail_p)
1987 avail_occr = cur_expr->avail_occr;
1989 /* Search for another occurrence in the same basic block. */
1990 while (avail_occr && BLOCK_NUM (avail_occr->insn) != BLOCK_NUM (insn))
1992 /* If an occurrence isn't found, save a pointer to the end of
1993 the list. */
1994 last_occr = avail_occr;
1995 avail_occr = avail_occr->next;
1998 if (avail_occr)
1999 /* Found another instance of the expression in the same basic block.
2000 Prefer this occurrence to the currently recorded one. We want
2001 the last one in the block and the block is scanned from start
2002 to end. */
2003 avail_occr->insn = insn;
2004 else
2006 /* First occurrence of this expression in this basic block. */
2007 avail_occr = gcse_alloc (sizeof (struct occr));
2008 bytes_used += sizeof (struct occr);
2010 /* First occurrence of this expression in any block? */
2011 if (cur_expr->avail_occr == NULL)
2012 cur_expr->avail_occr = avail_occr;
2013 else
2014 last_occr->next = avail_occr;
2016 avail_occr->insn = insn;
2017 avail_occr->next = NULL;
2022 /* Insert pattern X in INSN in the hash table.
2023 X is a SET of a reg to either another reg or a constant.
2024 If it is already present, record it as the last occurrence in INSN's
2025 basic block. */
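/* For example, X might be (set (reg 105) (const_int 0)) or
   (set (reg 105) (reg 106)); either way the entry is keyed on register 105
   via hash_set. */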
2027 static void
2028 insert_set_in_table (rtx x, rtx insn, struct hash_table *table)
2030 int found;
2031 unsigned int hash;
2032 struct expr *cur_expr, *last_expr = NULL;
2033 struct occr *cur_occr, *last_occr = NULL;
2035 if (GET_CODE (x) != SET
2036 || GET_CODE (SET_DEST (x)) != REG)
2037 abort ();
2039 hash = hash_set (REGNO (SET_DEST (x)), table->size);
2041 cur_expr = table->table[hash];
2042 found = 0;
2044 while (cur_expr && 0 == (found = expr_equiv_p (cur_expr->expr, x)))
2046 /* If the expression isn't found, save a pointer to the end of
2047 the list. */
2048 last_expr = cur_expr;
2049 cur_expr = cur_expr->next_same_hash;
2052 if (! found)
2054 cur_expr = gcse_alloc (sizeof (struct expr));
2055 bytes_used += sizeof (struct expr);
2056 if (table->table[hash] == NULL)
2057 /* This is the first pattern that hashed to this index. */
2058 table->table[hash] = cur_expr;
2059 else
2060 /* Add EXPR to end of this hash chain. */
2061 last_expr->next_same_hash = cur_expr;
2063 /* Set the fields of the expr element.
2064 We must copy X because it can be modified when copy propagation is
2065 performed on its operands. */
2066 cur_expr->expr = copy_rtx (x);
2067 cur_expr->bitmap_index = table->n_elems++;
2068 cur_expr->next_same_hash = NULL;
2069 cur_expr->antic_occr = NULL;
2070 cur_expr->avail_occr = NULL;
2073 /* Now record the occurrence. */
2074 cur_occr = cur_expr->avail_occr;
2076 /* Search for another occurrence in the same basic block. */
2077 while (cur_occr && BLOCK_NUM (cur_occr->insn) != BLOCK_NUM (insn))
2079 /* If an occurrence isn't found, save a pointer to the end of
2080 the list. */
2081 last_occr = cur_occr;
2082 cur_occr = cur_occr->next;
2085 if (cur_occr)
2086 /* Found another instance of the expression in the same basic block.
2087 Prefer this occurrence to the currently recorded one. We want the
2088 last one in the block and the block is scanned from start to end. */
2089 cur_occr->insn = insn;
2090 else
2092 /* First occurrence of this expression in this basic block. */
2093 cur_occr = gcse_alloc (sizeof (struct occr));
2094 bytes_used += sizeof (struct occr);
2096 /* First occurrence of this expression in any block? */
2097 if (cur_expr->avail_occr == NULL)
2098 cur_expr->avail_occr = cur_occr;
2099 else
2100 last_occr->next = cur_occr;
2102 cur_occr->insn = insn;
2103 cur_occr->next = NULL;
2107 /* Determine whether the rtx X should be treated as a constant for
2108 the purposes of GCSE's constant propagation. */
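/* E.g. (compare (reg 42) (reg 42)) in an integer mode is treated as a
   constant here, as is anything satisfying CONSTANT_P, such as
   (const_int 4) or (symbol_ref "x"). */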
2110 static bool
2111 gcse_constant_p (rtx x)
2113 /* Consider a COMPARE of two integers constant. */
2114 if (GET_CODE (x) == COMPARE
2115 && GET_CODE (XEXP (x, 0)) == CONST_INT
2116 && GET_CODE (XEXP (x, 1)) == CONST_INT)
2117 return true;
2120 /* Consider a COMPARE of the same registers to be a constant
2121 if they are not floating point registers. */
2122 if (GET_CODE(x) == COMPARE
2123 && GET_CODE (XEXP (x, 0)) == REG
2124 && GET_CODE (XEXP (x, 1)) == REG
2125 && REGNO (XEXP (x, 0)) == REGNO (XEXP (x, 1))
2126 && ! FLOAT_MODE_P (GET_MODE (XEXP (x, 0)))
2127 && ! FLOAT_MODE_P (GET_MODE (XEXP (x, 1))))
2128 return true;
2130 if (GET_CODE (x) == CONSTANT_P_RTX)
2131 return false;
2133 return CONSTANT_P (x);
2136 /* Scan pattern PAT of INSN and add an entry to the hash TABLE (set or
2137 expression one). */
2139 static void
2140 hash_scan_set (rtx pat, rtx insn, struct hash_table *table)
2142 rtx src = SET_SRC (pat);
2143 rtx dest = SET_DEST (pat);
2144 rtx note;
2146 if (GET_CODE (src) == CALL)
2147 hash_scan_call (src, insn, table);
2149 else if (GET_CODE (dest) == REG)
2151 unsigned int regno = REGNO (dest);
2152 rtx tmp;
2154 /* If this is a single set and we are doing constant propagation,
2155 see if a REG_NOTE shows this equivalent to a constant. */
2156 if (table->set_p && (note = find_reg_equal_equiv_note (insn)) != 0
2157 && gcse_constant_p (XEXP (note, 0)))
2158 src = XEXP (note, 0), pat = gen_rtx_SET (VOIDmode, dest, src);
2160 /* Only record sets of pseudo-regs in the hash table. */
2161 if (! table->set_p
2162 && regno >= FIRST_PSEUDO_REGISTER
2163 /* Don't GCSE something if we can't do a reg/reg copy. */
2164 && can_copy_p (GET_MODE (dest))
2165 /* GCSE commonly inserts instructions after the insn. We can't
2166 do that easily for insns with EH_REGION notes, so disable GCSE
2167 on these for now. */
2168 && !find_reg_note (insn, REG_EH_REGION, NULL_RTX)
2169 /* Is SET_SRC something we want to gcse? */
2170 && want_to_gcse_p (src)
2171 /* Don't CSE a nop. */
2172 && ! set_noop_p (pat)
2173 /* Don't GCSE if the insn has an attached REG_EQUIV note.
2174 At this point only function parameters should have
2175 REG_EQUIV notes, and if the argument slot is used somewhere
2176 explicitly, it means the address of the parameter has been taken,
2177 so we should not extend the lifetime of the pseudo. */
2178 && ((note = find_reg_note (insn, REG_EQUIV, NULL_RTX)) == 0
2179 || GET_CODE (XEXP (note, 0)) != MEM))
2181 /* An expression is not anticipatable if its operands are
2182 modified before this insn or if this is not the only SET in
2183 this insn. */
2184 int antic_p = oprs_anticipatable_p (src, insn) && single_set (insn);
2185 /* An expression is not available if its operands are
2186 subsequently modified, including this insn. It's also not
2187 available if this is a branch, because we can't insert
2188 a set after the branch. */
2189 int avail_p = (oprs_available_p (src, insn)
2190 && ! JUMP_P (insn));
2192 insert_expr_in_table (src, GET_MODE (dest), insn, antic_p, avail_p, table);
2195 /* Record sets for constant/copy propagation. */
2196 else if (table->set_p
2197 && regno >= FIRST_PSEUDO_REGISTER
2198 && ((GET_CODE (src) == REG
2199 && REGNO (src) >= FIRST_PSEUDO_REGISTER
2200 && can_copy_p (GET_MODE (dest))
2201 && REGNO (src) != regno)
2202 || gcse_constant_p (src))
2203 /* A copy is not available if its src or dest is subsequently
2204 modified. Here we want to search from INSN+1 on, but
2205 oprs_available_p searches from INSN on. */
2206 && (insn == BLOCK_END (BLOCK_NUM (insn))
2207 || ((tmp = next_nonnote_insn (insn)) != NULL_RTX
2208 && oprs_available_p (pat, tmp))))
2209 insert_set_in_table (pat, insn, table);
2213 static void
2214 hash_scan_clobber (rtx x ATTRIBUTE_UNUSED, rtx insn ATTRIBUTE_UNUSED,
2215 struct hash_table *table ATTRIBUTE_UNUSED)
2217 /* Currently nothing to do. */
2220 static void
2221 hash_scan_call (rtx x ATTRIBUTE_UNUSED, rtx insn ATTRIBUTE_UNUSED,
2222 struct hash_table *table ATTRIBUTE_UNUSED)
2224 /* Currently nothing to do. */
2227 /* Process INSN and add hash table entries as appropriate.
2229 Only available expressions that set a single pseudo-reg are recorded.
2231 Single sets in a PARALLEL could be handled, but it's an extra complication
2232 that isn't dealt with right now. The trick is handling the CLOBBERs that
2233 are also in the PARALLEL. Later.
2235 If TABLE->set_p is nonzero, this is for the assignment hash table,
2236 otherwise it is for the expression hash table.
2237 If IN_LIBCALL_BLOCK is nonzero, we are in a libcall block, and should
2238 not record any expressions. */
2240 static void
2241 hash_scan_insn (rtx insn, struct hash_table *table, int in_libcall_block)
2243 rtx pat = PATTERN (insn);
2244 int i;
2246 if (in_libcall_block)
2247 return;
2249 /* Pick out the sets of INSN and for other forms of instructions record
2250 what's been modified. */
2252 if (GET_CODE (pat) == SET)
2253 hash_scan_set (pat, insn, table);
2254 else if (GET_CODE (pat) == PARALLEL)
2255 for (i = 0; i < XVECLEN (pat, 0); i++)
2257 rtx x = XVECEXP (pat, 0, i);
2259 if (GET_CODE (x) == SET)
2260 hash_scan_set (x, insn, table);
2261 else if (GET_CODE (x) == CLOBBER)
2262 hash_scan_clobber (x, insn, table);
2263 else if (GET_CODE (x) == CALL)
2264 hash_scan_call (x, insn, table);
2267 else if (GET_CODE (pat) == CLOBBER)
2268 hash_scan_clobber (pat, insn, table);
2269 else if (GET_CODE (pat) == CALL)
2270 hash_scan_call (pat, insn, table);
2273 static void
2274 dump_hash_table (FILE *file, const char *name, struct hash_table *table)
2276 int i;
2277 /* Flattened out table, so it's printed in proper order. */
2278 struct expr **flat_table;
2279 unsigned int *hash_val;
2280 struct expr *expr;
2282 flat_table = xcalloc (table->n_elems, sizeof (struct expr *));
2283 hash_val = xmalloc (table->n_elems * sizeof (unsigned int));
2285 for (i = 0; i < (int) table->size; i++)
2286 for (expr = table->table[i]; expr != NULL; expr = expr->next_same_hash)
2288 flat_table[expr->bitmap_index] = expr;
2289 hash_val[expr->bitmap_index] = i;
2292 fprintf (file, "%s hash table (%d buckets, %d entries)\n",
2293 name, table->size, table->n_elems);
2295 for (i = 0; i < (int) table->n_elems; i++)
2296 if (flat_table[i] != 0)
2298 expr = flat_table[i];
2299 fprintf (file, "Index %d (hash value %d)\n ",
2300 expr->bitmap_index, hash_val[i]);
2301 print_rtl (file, expr->expr);
2302 fprintf (file, "\n");
2305 fprintf (file, "\n");
2307 free (flat_table);
2308 free (hash_val);
2311 /* Record register first/last/block set information for REGNO in INSN.
2313 first_set records the first place in the block where the register
2314 is set and is used to compute "anticipatability".
2316 last_set records the last place in the block where the register
2317 is set and is used to compute "availability".
2319 last_bb records the block for which first_set and last_set are
2320 valid, as a quick test to invalidate them.
2322 reg_set_in_block records whether the register is set in the block
2323 and is used to compute "transparency". */
2325 static void
2326 record_last_reg_set_info (rtx insn, int regno)
2328 struct reg_avail_info *info = &reg_avail_info[regno];
2329 int cuid = INSN_CUID (insn);
2331 info->last_set = cuid;
2332 if (info->last_bb != current_bb)
2334 info->last_bb = current_bb;
2335 info->first_set = cuid;
2336 SET_BIT (reg_set_in_block[current_bb->index], regno);
2341 /* Record all of the canonicalized MEMs of record_last_mem_set_info's insn.
2342 Note we store a pair of elements in the list, so they have to be
2343 taken off pairwise. */
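/* Roughly, after a couple of stores the list for a block looks like
   (dest2, dest2_addr, dest1, dest1_addr, ...), i.e. each store contributes
   a (dest, canonicalized address) pair, and later consumers such as
   compute_transp pop the elements two at a time. */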
2345 static void
2346 canon_list_insert (rtx dest ATTRIBUTE_UNUSED, rtx unused1 ATTRIBUTE_UNUSED,
2347 void * v_insn)
2349 rtx dest_addr, insn;
2350 int bb;
2352 while (GET_CODE (dest) == SUBREG
2353 || GET_CODE (dest) == ZERO_EXTRACT
2354 || GET_CODE (dest) == SIGN_EXTRACT
2355 || GET_CODE (dest) == STRICT_LOW_PART)
2356 dest = XEXP (dest, 0);
2358 /* If DEST is not a MEM, then it will not conflict with a load. Note
2359 that function calls are assumed to clobber memory, but are handled
2360 elsewhere. */
2362 if (GET_CODE (dest) != MEM)
2363 return;
2365 dest_addr = get_addr (XEXP (dest, 0));
2366 dest_addr = canon_rtx (dest_addr);
2367 insn = (rtx) v_insn;
2368 bb = BLOCK_NUM (insn);
2370 canon_modify_mem_list[bb] =
2371 alloc_EXPR_LIST (VOIDmode, dest_addr, canon_modify_mem_list[bb]);
2372 canon_modify_mem_list[bb] =
2373 alloc_EXPR_LIST (VOIDmode, dest, canon_modify_mem_list[bb]);
2374 bitmap_set_bit (canon_modify_mem_list_set, bb);
2377 /* Record memory modification information for INSN. We do not actually care
2378 about the memory location(s) that are set, or even how they are set (consider
2379 a CALL_INSN). We merely need to record which insns modify memory. */
2381 static void
2382 record_last_mem_set_info (rtx insn)
2384 int bb = BLOCK_NUM (insn);
2386 /* load_killed_in_block_p will handle the case of calls clobbering
2387 everything. */
2388 modify_mem_list[bb] = alloc_INSN_LIST (insn, modify_mem_list[bb]);
2389 bitmap_set_bit (modify_mem_list_set, bb);
2391 if (GET_CODE (insn) == CALL_INSN)
2393 /* Note that traversals of this loop (other than for free-ing)
2394 will break after encountering a CALL_INSN. So, there's no
2395 need to insert a pair of items, as canon_list_insert does. */
2396 canon_modify_mem_list[bb] =
2397 alloc_INSN_LIST (insn, canon_modify_mem_list[bb]);
2398 bitmap_set_bit (canon_modify_mem_list_set, bb);
2400 else
2401 note_stores (PATTERN (insn), canon_list_insert, (void*) insn);
2404 /* Called from compute_hash_table via note_stores to handle one
2405 SET or CLOBBER in an insn. DATA is really the instruction in which
2406 the SET is taking place. */
2408 static void
2409 record_last_set_info (rtx dest, rtx setter ATTRIBUTE_UNUSED, void *data)
2411 rtx last_set_insn = (rtx) data;
2413 if (GET_CODE (dest) == SUBREG)
2414 dest = SUBREG_REG (dest);
2416 if (GET_CODE (dest) == REG)
2417 record_last_reg_set_info (last_set_insn, REGNO (dest));
2418 else if (GET_CODE (dest) == MEM
2419 /* Ignore pushes, they clobber nothing. */
2420 && ! push_operand (dest, GET_MODE (dest)))
2421 record_last_mem_set_info (last_set_insn);
2424 /* Top level function to create an expression or assignment hash table.
2426 Expression entries are placed in the hash table if
2427 - they are of the form (set (pseudo-reg) src),
2428 - src is something we want to perform GCSE on,
2429 - none of the operands are subsequently modified in the block
2431 Assignment entries are placed in the hash table if
2432 - they are of the form (set (pseudo-reg) src),
2433 - src is something we want to perform const/copy propagation on,
2434 - none of the operands or target are subsequently modified in the block
2436 Currently src must be a pseudo-reg or a const_int.
2438 TABLE is the table computed. */
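/* Illustrative examples: (set (reg 100) (plus:SI (reg 101) (const_int 4)))
   can yield an expression-table entry for the PLUS, while
   (set (reg 102) (reg 103)) or (set (reg 102) (const_int 1)) can yield
   assignment-table entries for const/copy propagation, provided the
   operands are not modified later in the block. */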
2440 static void
2441 compute_hash_table_work (struct hash_table *table)
2443 unsigned int i;
2445 /* While we compute the hash table we also compute a bit array of which
2446 registers are set in which blocks.
2447 ??? This isn't needed during const/copy propagation, but it's cheap to
2448 compute. Later. */
2449 sbitmap_vector_zero (reg_set_in_block, last_basic_block);
2451 /* re-Cache any INSN_LIST nodes we have allocated. */
2452 clear_modify_mem_tables ();
2453 /* Some working arrays used to track first and last set in each block. */
2454 reg_avail_info = gmalloc (max_gcse_regno * sizeof (struct reg_avail_info));
2456 for (i = 0; i < max_gcse_regno; ++i)
2457 reg_avail_info[i].last_bb = NULL;
2459 FOR_EACH_BB (current_bb)
2461 rtx insn;
2462 unsigned int regno;
2463 int in_libcall_block;
2465 /* First pass over the instructions records information used to
2466 determine when registers and memory are first and last set.
2467 ??? hard-reg reg_set_in_block computation
2468 could be moved to compute_sets since they currently don't change. */
2470 for (insn = current_bb->head;
2471 insn && insn != NEXT_INSN (current_bb->end);
2472 insn = NEXT_INSN (insn))
2474 if (! INSN_P (insn))
2475 continue;
2477 if (GET_CODE (insn) == CALL_INSN)
2479 bool clobbers_all = false;
2480 #ifdef NON_SAVING_SETJMP
2481 if (NON_SAVING_SETJMP
2482 && find_reg_note (insn, REG_SETJMP, NULL_RTX))
2483 clobbers_all = true;
2484 #endif
2486 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2487 if (clobbers_all
2488 || TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
2489 record_last_reg_set_info (insn, regno);
2491 mark_call (insn);
2494 note_stores (PATTERN (insn), record_last_set_info, insn);
2497 /* Insert implicit sets in the hash table. */
2498 if (table->set_p
2499 && implicit_sets[current_bb->index] != NULL_RTX)
2500 hash_scan_set (implicit_sets[current_bb->index],
2501 current_bb->head, table);
2503 /* The next pass builds the hash table. */
2505 for (insn = current_bb->head, in_libcall_block = 0;
2506 insn && insn != NEXT_INSN (current_bb->end);
2507 insn = NEXT_INSN (insn))
2508 if (INSN_P (insn))
2510 if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
2511 in_libcall_block = 1;
2512 else if (table->set_p && find_reg_note (insn, REG_RETVAL, NULL_RTX))
2513 in_libcall_block = 0;
2514 hash_scan_insn (insn, table, in_libcall_block);
2515 if (!table->set_p && find_reg_note (insn, REG_RETVAL, NULL_RTX))
2516 in_libcall_block = 0;
2520 free (reg_avail_info);
2521 reg_avail_info = NULL;
2524 /* Allocate space for the set/expr hash TABLE.
2525 N_INSNS is the number of instructions in the function.
2526 It is used to determine the number of buckets to use.
2527 SET_P determines whether set or expression table will
2528 be created. */
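/* E.g. a function with 1000 insns gets 1000 / 4 = 250 buckets, bumped to
   the odd number 251; very small functions still get the 11-bucket
   minimum. */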
2530 static void
2531 alloc_hash_table (int n_insns, struct hash_table *table, int set_p)
2533 int n;
2535 table->size = n_insns / 4;
2536 if (table->size < 11)
2537 table->size = 11;
2539 /* Attempt to maintain efficient use of hash table.
2540 Making it an odd number is simplest for now.
2541 ??? Later take some measurements. */
2542 table->size |= 1;
2543 n = table->size * sizeof (struct expr *);
2544 table->table = gmalloc (n);
2545 table->set_p = set_p;
2548 /* Free things allocated by alloc_hash_table. */
2550 static void
2551 free_hash_table (struct hash_table *table)
2553 free (table->table);
2556 /* Compute the hash TABLE for doing copy/const propagation or
2557 expression hash table. */
2559 static void
2560 compute_hash_table (struct hash_table *table)
2562 /* Initialize count of number of entries in hash table. */
2563 table->n_elems = 0;
2564 memset (table->table, 0, table->size * sizeof (struct expr *));
2566 compute_hash_table_work (table);
2569 /* Expression tracking support. */
2571 /* Lookup pattern PAT in the expression TABLE.
2572 The result is a pointer to the table entry, or NULL if not found. */
2574 static struct expr *
2575 lookup_expr (rtx pat, struct hash_table *table)
2577 int do_not_record_p;
2578 unsigned int hash = hash_expr (pat, GET_MODE (pat), &do_not_record_p,
2579 table->size);
2580 struct expr *expr;
2582 if (do_not_record_p)
2583 return NULL;
2585 expr = table->table[hash];
2587 while (expr && ! expr_equiv_p (expr->expr, pat))
2588 expr = expr->next_same_hash;
2590 return expr;
2593 /* Lookup REGNO in the set TABLE. The result is a pointer to the
2594 table entry, or NULL if not found. */
2596 static struct expr *
2597 lookup_set (unsigned int regno, struct hash_table *table)
2599 unsigned int hash = hash_set (regno, table->size);
2600 struct expr *expr;
2602 expr = table->table[hash];
2604 while (expr && REGNO (SET_DEST (expr->expr)) != regno)
2605 expr = expr->next_same_hash;
2607 return expr;
2610 /* Return the next entry for REGNO in list EXPR. */
2612 static struct expr *
2613 next_set (unsigned int regno, struct expr *expr)
2616 expr = expr->next_same_hash;
2617 while (expr && REGNO (SET_DEST (expr->expr)) != regno);
2619 return expr;
2622 /* Like free_INSN_LIST_list or free_EXPR_LIST_list, except that the node
2623 types may be mixed. */
2625 static void
2626 free_insn_expr_list_list (rtx *listp)
2628 rtx list, next;
2630 for (list = *listp; list ; list = next)
2632 next = XEXP (list, 1);
2633 if (GET_CODE (list) == EXPR_LIST)
2634 free_EXPR_LIST_node (list);
2635 else
2636 free_INSN_LIST_node (list);
2639 *listp = NULL;
2642 /* Clear canon_modify_mem_list and modify_mem_list tables. */
2643 static void
2644 clear_modify_mem_tables (void)
2646 int i;
2648 EXECUTE_IF_SET_IN_BITMAP
2649 (modify_mem_list_set, 0, i, free_INSN_LIST_list (modify_mem_list + i));
2650 bitmap_clear (modify_mem_list_set);
2652 EXECUTE_IF_SET_IN_BITMAP
2653 (canon_modify_mem_list_set, 0, i,
2654 free_insn_expr_list_list (canon_modify_mem_list + i));
2655 bitmap_clear (canon_modify_mem_list_set);
2658 /* Release memory used by modify_mem_list_set and canon_modify_mem_list_set. */
2660 static void
2661 free_modify_mem_tables (void)
2663 clear_modify_mem_tables ();
2664 free (modify_mem_list);
2665 free (canon_modify_mem_list);
2666 modify_mem_list = 0;
2667 canon_modify_mem_list = 0;
2670 /* Reset tables used to keep track of what's still available [since the
2671 start of the block]. */
2673 static void
2674 reset_opr_set_tables (void)
2676 /* Maintain a bitmap of which regs have been set since beginning of
2677 the block. */
2678 CLEAR_REG_SET (reg_set_bitmap);
2680 /* Also keep a record of the last instruction to modify memory.
2681 For now this is very trivial, we only record whether any memory
2682 location has been modified. */
2683 clear_modify_mem_tables ();
2686 /* Return nonzero if the operands of X are not set before INSN in
2687 INSN's basic block. */
2689 static int
2690 oprs_not_set_p (rtx x, rtx insn)
2692 int i, j;
2693 enum rtx_code code;
2694 const char *fmt;
2696 if (x == 0)
2697 return 1;
2699 code = GET_CODE (x);
2700 switch (code)
2702 case PC:
2703 case CC0:
2704 case CONST:
2705 case CONST_INT:
2706 case CONST_DOUBLE:
2707 case CONST_VECTOR:
2708 case SYMBOL_REF:
2709 case LABEL_REF:
2710 case ADDR_VEC:
2711 case ADDR_DIFF_VEC:
2712 return 1;
2714 case MEM:
2715 if (load_killed_in_block_p (BLOCK_FOR_INSN (insn),
2716 INSN_CUID (insn), x, 0))
2717 return 0;
2718 else
2719 return oprs_not_set_p (XEXP (x, 0), insn);
2721 case REG:
2722 return ! REGNO_REG_SET_P (reg_set_bitmap, REGNO (x));
2724 default:
2725 break;
2728 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
2730 if (fmt[i] == 'e')
2732 /* If we are about to do the last recursive call
2733 needed at this level, change it into iteration.
2734 This function is called enough to be worth it. */
2735 if (i == 0)
2736 return oprs_not_set_p (XEXP (x, i), insn);
2738 if (! oprs_not_set_p (XEXP (x, i), insn))
2739 return 0;
2741 else if (fmt[i] == 'E')
2742 for (j = 0; j < XVECLEN (x, i); j++)
2743 if (! oprs_not_set_p (XVECEXP (x, i, j), insn))
2744 return 0;
2747 return 1;
2750 /* Mark things set by a CALL. */
2752 static void
2753 mark_call (rtx insn)
2755 if (! CONST_OR_PURE_CALL_P (insn))
2756 record_last_mem_set_info (insn);
2759 /* Mark things set by a SET. */
2761 static void
2762 mark_set (rtx pat, rtx insn)
2764 rtx dest = SET_DEST (pat);
2766 while (GET_CODE (dest) == SUBREG
2767 || GET_CODE (dest) == ZERO_EXTRACT
2768 || GET_CODE (dest) == SIGN_EXTRACT
2769 || GET_CODE (dest) == STRICT_LOW_PART)
2770 dest = XEXP (dest, 0);
2772 if (GET_CODE (dest) == REG)
2773 SET_REGNO_REG_SET (reg_set_bitmap, REGNO (dest));
2774 else if (GET_CODE (dest) == MEM)
2775 record_last_mem_set_info (insn);
2777 if (GET_CODE (SET_SRC (pat)) == CALL)
2778 mark_call (insn);
2781 /* Record things set by a CLOBBER. */
2783 static void
2784 mark_clobber (rtx pat, rtx insn)
2786 rtx clob = XEXP (pat, 0);
2788 while (GET_CODE (clob) == SUBREG || GET_CODE (clob) == STRICT_LOW_PART)
2789 clob = XEXP (clob, 0);
2791 if (GET_CODE (clob) == REG)
2792 SET_REGNO_REG_SET (reg_set_bitmap, REGNO (clob));
2793 else
2794 record_last_mem_set_info (insn);
2797 /* Record things set by INSN.
2798 This data is used by oprs_not_set_p. */
2800 static void
2801 mark_oprs_set (rtx insn)
2803 rtx pat = PATTERN (insn);
2804 int i;
2806 if (GET_CODE (pat) == SET)
2807 mark_set (pat, insn);
2808 else if (GET_CODE (pat) == PARALLEL)
2809 for (i = 0; i < XVECLEN (pat, 0); i++)
2811 rtx x = XVECEXP (pat, 0, i);
2813 if (GET_CODE (x) == SET)
2814 mark_set (x, insn);
2815 else if (GET_CODE (x) == CLOBBER)
2816 mark_clobber (x, insn);
2817 else if (GET_CODE (x) == CALL)
2818 mark_call (insn);
2821 else if (GET_CODE (pat) == CLOBBER)
2822 mark_clobber (pat, insn);
2823 else if (GET_CODE (pat) == CALL)
2824 mark_call (insn);
2828 /* Classic GCSE reaching definition support. */
2830 /* Allocate reaching def variables. */
2832 static void
2833 alloc_rd_mem (int n_blocks, int n_insns)
2835 rd_kill = sbitmap_vector_alloc (n_blocks, n_insns);
2836 sbitmap_vector_zero (rd_kill, n_blocks);
2838 rd_gen = sbitmap_vector_alloc (n_blocks, n_insns);
2839 sbitmap_vector_zero (rd_gen, n_blocks);
2841 reaching_defs = sbitmap_vector_alloc (n_blocks, n_insns);
2842 sbitmap_vector_zero (reaching_defs, n_blocks);
2844 rd_out = sbitmap_vector_alloc (n_blocks, n_insns);
2845 sbitmap_vector_zero (rd_out, n_blocks);
2848 /* Free reaching def variables. */
2850 static void
2851 free_rd_mem (void)
2853 sbitmap_vector_free (rd_kill);
2854 sbitmap_vector_free (rd_gen);
2855 sbitmap_vector_free (reaching_defs);
2856 sbitmap_vector_free (rd_out);
2859 /* INSN sets REGNO in BB; mark as killed in BB every setting of REGNO outside BB. */
2861 static void
2862 handle_rd_kill_set (rtx insn, int regno, basic_block bb)
2864 struct reg_set *this_reg;
2866 for (this_reg = reg_set_table[regno]; this_reg; this_reg = this_reg ->next)
2867 if (BLOCK_NUM (this_reg->insn) != BLOCK_NUM (insn))
2868 SET_BIT (rd_kill[bb->index], INSN_CUID (this_reg->insn));
2871 /* Compute the set of kill's for reaching definitions. */
2873 static void
2874 compute_kill_rd (void)
2876 int cuid;
2877 unsigned int regno;
2878 int i;
2879 basic_block bb;
2881 /* For each block
2882 For each set bit in `gen' of the block (i.e each insn which
2883 generates a definition in the block)
2884 Call the reg set by the insn corresponding to that bit regx
2885 Look at the linked list starting at reg_set_table[regx]
2886 For each setting of regx in the linked list, which is not in
2887 this block
2888 Set the bit in `kill' corresponding to that insn. */
2889 FOR_EACH_BB (bb)
2890 for (cuid = 0; cuid < max_cuid; cuid++)
2891 if (TEST_BIT (rd_gen[bb->index], cuid))
2893 rtx insn = CUID_INSN (cuid);
2894 rtx pat = PATTERN (insn);
2896 if (GET_CODE (insn) == CALL_INSN)
2898 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2899 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
2900 handle_rd_kill_set (insn, regno, bb);
2903 if (GET_CODE (pat) == PARALLEL)
2905 for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
2907 enum rtx_code code = GET_CODE (XVECEXP (pat, 0, i));
2909 if ((code == SET || code == CLOBBER)
2910 && GET_CODE (XEXP (XVECEXP (pat, 0, i), 0)) == REG)
2911 handle_rd_kill_set (insn,
2912 REGNO (XEXP (XVECEXP (pat, 0, i), 0)),
2913 bb);
2916 else if (GET_CODE (pat) == SET && GET_CODE (SET_DEST (pat)) == REG)
2917 /* Each setting of this register outside of this block
2918 must be marked in the set of kills in this block. */
2919 handle_rd_kill_set (insn, REGNO (SET_DEST (pat)), bb);
2923 /* Compute the reaching definitions as in
2924 Compilers Principles, Techniques, and Tools. Aho, Sethi, Ullman,
2925 Chapter 10. It is the same algorithm as used for computing available
2926 expressions but applied to the gens and kills of reaching definitions. */
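/* In dataflow-equation form, the loop below iterates
   reaching_defs[bb] = union over each predecessor p of rd_out[p]
   rd_out[bb] = rd_gen[bb] U (reaching_defs[bb] - rd_kill[bb])
   until no rd_out set changes. */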
2928 static void
2929 compute_rd (void)
2931 int changed, passes;
2932 basic_block bb;
2934 FOR_EACH_BB (bb)
2935 sbitmap_copy (rd_out[bb->index] /*dst*/, rd_gen[bb->index] /*src*/);
2937 passes = 0;
2938 changed = 1;
2939 while (changed)
2941 changed = 0;
2942 FOR_EACH_BB (bb)
2944 sbitmap_union_of_preds (reaching_defs[bb->index], rd_out, bb->index);
2945 changed |= sbitmap_union_of_diff_cg (rd_out[bb->index], rd_gen[bb->index],
2946 reaching_defs[bb->index], rd_kill[bb->index]);
2948 passes++;
2951 if (gcse_file)
2952 fprintf (gcse_file, "reaching def computation: %d passes\n", passes);
2955 /* Classic GCSE available expression support. */
2957 /* Allocate memory for available expression computation. */
2959 static void
2960 alloc_avail_expr_mem (int n_blocks, int n_exprs)
2962 ae_kill = sbitmap_vector_alloc (n_blocks, n_exprs);
2963 sbitmap_vector_zero (ae_kill, n_blocks);
2965 ae_gen = sbitmap_vector_alloc (n_blocks, n_exprs);
2966 sbitmap_vector_zero (ae_gen, n_blocks);
2968 ae_in = sbitmap_vector_alloc (n_blocks, n_exprs);
2969 sbitmap_vector_zero (ae_in, n_blocks);
2971 ae_out = sbitmap_vector_alloc (n_blocks, n_exprs);
2972 sbitmap_vector_zero (ae_out, n_blocks);
2975 static void
2976 free_avail_expr_mem (void)
2978 sbitmap_vector_free (ae_kill);
2979 sbitmap_vector_free (ae_gen);
2980 sbitmap_vector_free (ae_in);
2981 sbitmap_vector_free (ae_out);
2984 /* Compute the set of available expressions generated in each basic block. */
2986 static void
2987 compute_ae_gen (struct hash_table *expr_hash_table)
2989 unsigned int i;
2990 struct expr *expr;
2991 struct occr *occr;
2993 /* For each recorded occurrence of each expression, set ae_gen[bb][expr].
2994 This is all we have to do because an expression is not recorded if it
2995 is not available, and the only expressions we want to work with are the
2996 ones that are recorded. */
2997 for (i = 0; i < expr_hash_table->size; i++)
2998 for (expr = expr_hash_table->table[i]; expr != 0; expr = expr->next_same_hash)
2999 for (occr = expr->avail_occr; occr != 0; occr = occr->next)
3000 SET_BIT (ae_gen[BLOCK_NUM (occr->insn)], expr->bitmap_index);
3003 /* Return nonzero if expression X is killed in BB. */
3005 static int
3006 expr_killed_p (rtx x, basic_block bb)
3008 int i, j;
3009 enum rtx_code code;
3010 const char *fmt;
3012 if (x == 0)
3013 return 1;
3015 code = GET_CODE (x);
3016 switch (code)
3018 case REG:
3019 return TEST_BIT (reg_set_in_block[bb->index], REGNO (x));
3021 case MEM:
3022 if (load_killed_in_block_p (bb, get_max_uid () + 1, x, 0))
3023 return 1;
3024 else
3025 return expr_killed_p (XEXP (x, 0), bb);
3027 case PC:
3028 case CC0: /*FIXME*/
3029 case CONST:
3030 case CONST_INT:
3031 case CONST_DOUBLE:
3032 case CONST_VECTOR:
3033 case SYMBOL_REF:
3034 case LABEL_REF:
3035 case ADDR_VEC:
3036 case ADDR_DIFF_VEC:
3037 return 0;
3039 default:
3040 break;
3043 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
3045 if (fmt[i] == 'e')
3047 /* If we are about to do the last recursive call
3048 needed at this level, change it into iteration.
3049 This function is called enough to be worth it. */
3050 if (i == 0)
3051 return expr_killed_p (XEXP (x, i), bb);
3052 else if (expr_killed_p (XEXP (x, i), bb))
3053 return 1;
3055 else if (fmt[i] == 'E')
3056 for (j = 0; j < XVECLEN (x, i); j++)
3057 if (expr_killed_p (XVECEXP (x, i, j), bb))
3058 return 1;
3061 return 0;
3064 /* Compute the set of available expressions killed in each basic block. */
3066 static void
3067 compute_ae_kill (sbitmap *ae_gen, sbitmap *ae_kill,
3068 struct hash_table *expr_hash_table)
3070 basic_block bb;
3071 unsigned int i;
3072 struct expr *expr;
3074 FOR_EACH_BB (bb)
3075 for (i = 0; i < expr_hash_table->size; i++)
3076 for (expr = expr_hash_table->table[i]; expr; expr = expr->next_same_hash)
3078 /* Skip EXPR if generated in this block. */
3079 if (TEST_BIT (ae_gen[bb->index], expr->bitmap_index))
3080 continue;
3082 if (expr_killed_p (expr->expr, bb))
3083 SET_BIT (ae_kill[bb->index], expr->bitmap_index);
3087 /* Actually perform the Classic GCSE optimizations. */
3089 /* Return nonzero if occurrence OCCR of expression EXPR reaches block BB.
3091 CHECK_SELF_LOOP is nonzero if we should consider a block reaching itself
3092 as a positive reach. We want to do this when there are two computations
3093 of the expression in the block.
3095 VISITED is a pointer to a working buffer for tracking which BB's have
3096 been visited. It is NULL for the top-level call.
3098 We treat reaching expressions that go through blocks containing the same
3099 reaching expression as "not reaching". E.g. if EXPR is generated in blocks
3100 2 and 3, INSN is in block 4, and 2->3->4, we treat the expression in block
3101 2 as not reaching. The intent is to improve the probability of finding
3102 only one reaching expression and to reduce register lifetimes by picking
3103 the closest such expression. */
3105 static int
3106 expr_reaches_here_p_work (struct occr *occr, struct expr *expr,
3107 basic_block bb, int check_self_loop, char *visited)
3109 edge pred;
3111 for (pred = bb->pred; pred != NULL; pred = pred->pred_next)
3113 basic_block pred_bb = pred->src;
3115 if (visited[pred_bb->index])
3116 /* This predecessor has already been visited. Nothing to do. */
3118 else if (pred_bb == bb)
3120 /* BB loops on itself. */
3121 if (check_self_loop
3122 && TEST_BIT (ae_gen[pred_bb->index], expr->bitmap_index)
3123 && BLOCK_NUM (occr->insn) == pred_bb->index)
3124 return 1;
3126 visited[pred_bb->index] = 1;
3129 /* Ignore this predecessor if it kills the expression. */
3130 else if (TEST_BIT (ae_kill[pred_bb->index], expr->bitmap_index))
3131 visited[pred_bb->index] = 1;
3133 /* Does this predecessor generate this expression? */
3134 else if (TEST_BIT (ae_gen[pred_bb->index], expr->bitmap_index))
3136 /* Is this the occurrence we're looking for?
3137 Note that there's only one generating occurrence per block
3138 so we just need to check the block number. */
3139 if (BLOCK_NUM (occr->insn) == pred_bb->index)
3140 return 1;
3142 visited[pred_bb->index] = 1;
3145 /* Neither gen nor kill. */
3146 else
3148 visited[pred_bb->index] = 1;
3149 if (expr_reaches_here_p_work (occr, expr, pred_bb, check_self_loop,
3150 visited))
3152 return 1;
3156 /* All paths have been checked. */
3157 return 0;
3160 /* This wrapper for expr_reaches_here_p_work() is to ensure that any
3161 memory allocated for that function is returned. */
3163 static int
3164 expr_reaches_here_p (struct occr *occr, struct expr *expr, basic_block bb,
3165 int check_self_loop)
3167 int rval;
3168 char *visited = xcalloc (last_basic_block, 1);
3170 rval = expr_reaches_here_p_work (occr, expr, bb, check_self_loop, visited);
3172 free (visited);
3173 return rval;
3176 /* Return the instruction that computes EXPR that reaches INSN's basic block.
3177 If there is more than one such instruction, return NULL.
3179 Called only by handle_avail_expr. */
3181 static rtx
3182 computing_insn (struct expr *expr, rtx insn)
3184 basic_block bb = BLOCK_FOR_INSN (insn);
3186 if (expr->avail_occr->next == NULL)
3188 if (BLOCK_FOR_INSN (expr->avail_occr->insn) == bb)
3189 /* The available expression is actually itself
3190 (i.e. a loop in the flow graph) so do nothing. */
3191 return NULL;
3193 /* (FIXME) This is the case where we found a pattern that was created
3194 by a substitution that took place. */
3195 return expr->avail_occr->insn;
3197 else
3199 /* Pattern is computed more than once.
3200 Search backwards from this insn to see how many of these
3201 computations actually reach this insn. */
3202 struct occr *occr;
3203 rtx insn_computes_expr = NULL;
3204 int can_reach = 0;
3206 for (occr = expr->avail_occr; occr != NULL; occr = occr->next)
3208 if (BLOCK_FOR_INSN (occr->insn) == bb)
3210 /* The expression is generated in this block.
3211 The only time we care about this is when the expression
3212 is generated later in the block [and thus there's a loop].
3213 We let the normal cse pass handle the other cases. */
3214 if (INSN_CUID (insn) < INSN_CUID (occr->insn)
3215 && expr_reaches_here_p (occr, expr, bb, 1))
3217 can_reach++;
3218 if (can_reach > 1)
3219 return NULL;
3221 insn_computes_expr = occr->insn;
3224 else if (expr_reaches_here_p (occr, expr, bb, 0))
3226 can_reach++;
3227 if (can_reach > 1)
3228 return NULL;
3230 insn_computes_expr = occr->insn;
3234 if (insn_computes_expr == NULL)
3235 abort ();
3237 return insn_computes_expr;
3241 /* Return nonzero if the definition in DEF_INSN can reach INSN.
3242 Only called by can_disregard_other_sets. */
3244 static int
3245 def_reaches_here_p (rtx insn, rtx def_insn)
3247 rtx reg;
3249 if (TEST_BIT (reaching_defs[BLOCK_NUM (insn)], INSN_CUID (def_insn)))
3250 return 1;
3252 if (BLOCK_NUM (insn) == BLOCK_NUM (def_insn))
3254 if (INSN_CUID (def_insn) < INSN_CUID (insn))
3256 if (GET_CODE (PATTERN (def_insn)) == PARALLEL)
3257 return 1;
3258 else if (GET_CODE (PATTERN (def_insn)) == CLOBBER)
3259 reg = XEXP (PATTERN (def_insn), 0);
3260 else if (GET_CODE (PATTERN (def_insn)) == SET)
3261 reg = SET_DEST (PATTERN (def_insn));
3262 else
3263 abort ();
3265 return ! reg_set_between_p (reg, NEXT_INSN (def_insn), insn);
3267 else
3268 return 0;
3271 return 0;
3274 /* Return nonzero if *ADDR_THIS_REG can only have one value at INSN. The
3275 value returned is the number of definitions that reach INSN. Returning a
3276 value of zero means that [maybe] more than one definition reaches INSN and
3277 the caller can't perform whatever optimization it is trying to do; i.e. it
3278 is always safe to return zero. */
3280 static int
3281 can_disregard_other_sets (struct reg_set **addr_this_reg, rtx insn, int for_combine)
3283 int number_of_reaching_defs = 0;
3284 struct reg_set *this_reg;
3286 for (this_reg = *addr_this_reg; this_reg != 0; this_reg = this_reg->next)
3287 if (def_reaches_here_p (insn, this_reg->insn))
3289 number_of_reaching_defs++;
3290 /* Ignore parallels for now. */
3291 if (GET_CODE (PATTERN (this_reg->insn)) == PARALLEL)
3292 return 0;
3294 if (!for_combine
3295 && (GET_CODE (PATTERN (this_reg->insn)) == CLOBBER
3296 || ! rtx_equal_p (SET_SRC (PATTERN (this_reg->insn)),
3297 SET_SRC (PATTERN (insn)))))
3298 /* A setting of the reg to a different value reaches INSN. */
3299 return 0;
3301 if (number_of_reaching_defs > 1)
3303 /* If, in this setting, the value the register is being set to is
3304 equal to the previous value the register was set to, and this
3305 setting reaches the insn we are trying to do the substitution
3306 on, then we are OK. */
3307 if (GET_CODE (PATTERN (this_reg->insn)) == CLOBBER)
3308 return 0;
3309 else if (! rtx_equal_p (SET_SRC (PATTERN (this_reg->insn)),
3310 SET_SRC (PATTERN (insn))))
3311 return 0;
3314 *addr_this_reg = this_reg;
3317 return number_of_reaching_defs;
3320 /* Expression computed by insn is available and the substitution is legal,
3321 so try to perform the substitution.
3323 The result is nonzero if any changes were made. */
3325 static int
3326 handle_avail_expr (rtx insn, struct expr *expr)
3328 rtx pat, insn_computes_expr, expr_set;
3329 rtx to;
3330 struct reg_set *this_reg;
3331 int found_setting, use_src;
3332 int changed = 0;
3334 /* We only handle the case where one computation of the expression
3335 reaches this instruction. */
3336 insn_computes_expr = computing_insn (expr, insn);
3337 if (insn_computes_expr == NULL)
3338 return 0;
3339 expr_set = single_set (insn_computes_expr);
3340 if (!expr_set)
3341 abort ();
3343 found_setting = 0;
3344 use_src = 0;
3346 /* At this point we know only one computation of EXPR outside of this
3347 block reaches this insn. Now try to find a register that the
3348 expression is computed into. */
3349 if (GET_CODE (SET_SRC (expr_set)) == REG)
3351 /* This is the case when the available expression that reaches
3352 here has already been handled as an available expression. */
3353 unsigned int regnum_for_replacing
3354 = REGNO (SET_SRC (expr_set));
3356 /* If the register was created by GCSE we can't use `reg_set_table',
3357 however we know it's set only once. */
3358 if (regnum_for_replacing >= max_gcse_regno
3359 /* If the register the expression is computed into is set only once,
3360 or only one set reaches this insn, we can use it. */
3361 || (((this_reg = reg_set_table[regnum_for_replacing]),
3362 this_reg->next == NULL)
3363 || can_disregard_other_sets (&this_reg, insn, 0)))
3365 use_src = 1;
3366 found_setting = 1;
3370 if (!found_setting)
3372 unsigned int regnum_for_replacing
3373 = REGNO (SET_DEST (expr_set));
3375 /* This shouldn't happen. */
3376 if (regnum_for_replacing >= max_gcse_regno)
3377 abort ();
3379 this_reg = reg_set_table[regnum_for_replacing];
3381 /* If the register the expression is computed into is set only once,
3382 or only one set reaches this insn, use it. */
3383 if (this_reg->next == NULL
3384 || can_disregard_other_sets (&this_reg, insn, 0))
3385 found_setting = 1;
3388 if (found_setting)
3390 pat = PATTERN (insn);
3391 if (use_src)
3392 to = SET_SRC (expr_set);
3393 else
3394 to = SET_DEST (expr_set);
3395 changed = validate_change (insn, &SET_SRC (pat), to, 0);
3397 /* We should be able to ignore the return code from validate_change but
3398 to play it safe we check. */
3399 if (changed)
3401 gcse_subst_count++;
3402 if (gcse_file != NULL)
3404 fprintf (gcse_file, "GCSE: Replacing the source in insn %d with",
3405 INSN_UID (insn));
3406 fprintf (gcse_file, " reg %d %s insn %d\n",
3407 REGNO (to), use_src ? "from" : "set in",
3408 INSN_UID (insn_computes_expr));
3413 /* The register that the expr is computed into is set more than once. */
3414 else if (1 /*expensive_op(this_pattrn->op) && do_expensive_gcse)*/)
3416 /* Insert an insn after INSNX that copies the reg set in INSNX
3417 into a new pseudo register; call this new register REGN.
3418 From INSNB until the end of the basic block, or until REGB is set,
3419 replace all uses of REGB with REGN. */
3420 rtx new_insn;
3422 to = gen_reg_rtx (GET_MODE (SET_DEST (expr_set)));
3424 /* Generate the new insn. */
3425 /* ??? If the change fails, we return 0, even though we created
3426 an insn. I think this is ok. */
3427 new_insn
3428 = emit_insn_after (gen_rtx_SET (VOIDmode, to,
3429 SET_DEST (expr_set)),
3430 insn_computes_expr);
3432 /* Keep register set table up to date. */
3433 record_one_set (REGNO (to), new_insn);
3435 gcse_create_count++;
3436 if (gcse_file != NULL)
3438 fprintf (gcse_file, "GCSE: Creating insn %d to copy value of reg %d",
3439 INSN_UID (NEXT_INSN (insn_computes_expr)),
3440 REGNO (SET_SRC (PATTERN (NEXT_INSN (insn_computes_expr)))));
3441 fprintf (gcse_file, ", computed in insn %d,\n",
3442 INSN_UID (insn_computes_expr));
3443 fprintf (gcse_file, " into newly allocated reg %d\n",
3444 REGNO (to));
3447 pat = PATTERN (insn);
3449 /* Do register replacement for INSN. */
3450 changed = validate_change (insn, &SET_SRC (pat),
3451 SET_DEST (PATTERN
3452 (NEXT_INSN (insn_computes_expr))),
3455 /* We should be able to ignore the return code from validate_change but
3456 to play it safe we check. */
3457 if (changed)
3459 gcse_subst_count++;
3460 if (gcse_file != NULL)
3462 fprintf (gcse_file,
3463 "GCSE: Replacing the source in insn %d with reg %d ",
3464 INSN_UID (insn),
3465 REGNO (SET_DEST (PATTERN (NEXT_INSN
3466 (insn_computes_expr)))));
3467 fprintf (gcse_file, "set in insn %d\n",
3468 INSN_UID (insn_computes_expr));
3473 return changed;
3476 /* Perform classic GCSE. This is called by one_classic_gcse_pass after all
3477 the dataflow analysis has been done.
3479 The result is nonzero if a change was made. */
3481 static int
3482 classic_gcse (void)
3484 int changed;
3485 rtx insn;
3486 basic_block bb;
3488 /* Note we start at block 1. */
3490 if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
3491 return 0;
3493 changed = 0;
3494 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb->next_bb, EXIT_BLOCK_PTR, next_bb)
3496 /* Reset tables used to keep track of what's still valid [since the
3497 start of the block]. */
3498 reset_opr_set_tables ();
3500 for (insn = bb->head;
3501 insn != NULL && insn != NEXT_INSN (bb->end);
3502 insn = NEXT_INSN (insn))
3504 /* Is insn of form (set (pseudo-reg) ...)? */
3505 if (GET_CODE (insn) == INSN
3506 && GET_CODE (PATTERN (insn)) == SET
3507 && GET_CODE (SET_DEST (PATTERN (insn))) == REG
3508 && REGNO (SET_DEST (PATTERN (insn))) >= FIRST_PSEUDO_REGISTER)
3510 rtx pat = PATTERN (insn);
3511 rtx src = SET_SRC (pat);
3512 struct expr *expr;
3514 if (want_to_gcse_p (src)
3515 /* Is the expression recorded? */
3516 && ((expr = lookup_expr (src, &expr_hash_table)) != NULL)
3517 /* Is the expression available [at the start of the
3518 block]? */
3519 && TEST_BIT (ae_in[bb->index], expr->bitmap_index)
3520 /* Are the operands unchanged since the start of the
3521 block? */
3522 && oprs_not_set_p (src, insn))
3523 changed |= handle_avail_expr (insn, expr);
3526 /* Keep track of everything modified by this insn. */
3527 /* ??? Need to be careful w.r.t. mods done to INSN. */
3528 if (INSN_P (insn))
3529 mark_oprs_set (insn);
3533 return changed;
3536 /* Top level routine to perform one classic GCSE pass.
3538 Return nonzero if a change was made. */
3540 static int
3541 one_classic_gcse_pass (int pass)
3543 int changed = 0;
3545 gcse_subst_count = 0;
3546 gcse_create_count = 0;
3548 alloc_hash_table (max_cuid, &expr_hash_table, 0);
3549 alloc_rd_mem (last_basic_block, max_cuid);
3550 compute_hash_table (&expr_hash_table);
3551 if (gcse_file)
3552 dump_hash_table (gcse_file, "Expression", &expr_hash_table);
3554 if (expr_hash_table.n_elems > 0)
3556 compute_kill_rd ();
3557 compute_rd ();
3558 alloc_avail_expr_mem (last_basic_block, expr_hash_table.n_elems);
3559 compute_ae_gen (&expr_hash_table);
3560 compute_ae_kill (ae_gen, ae_kill, &expr_hash_table);
3561 compute_available (ae_gen, ae_kill, ae_out, ae_in);
3562 changed = classic_gcse ();
3563 free_avail_expr_mem ();
3566 free_rd_mem ();
3567 free_hash_table (&expr_hash_table);
3569 if (gcse_file)
3571 fprintf (gcse_file, "\n");
3572 fprintf (gcse_file, "GCSE of %s, pass %d: %d bytes needed, %d substs,",
3573 current_function_name, pass, bytes_used, gcse_subst_count);
3574 fprintf (gcse_file, "%d insns created\n", gcse_create_count);
3577 return changed;
3580 /* Compute copy/constant propagation working variables. */
3582 /* Local properties of assignments. */
3583 static sbitmap *cprop_pavloc;
3584 static sbitmap *cprop_absaltered;
3586 /* Global properties of assignments (computed from the local properties). */
3587 static sbitmap *cprop_avin;
3588 static sbitmap *cprop_avout;
3590 /* Allocate vars used for copy/const propagation. N_BLOCKS is the number of
3591 basic blocks. N_SETS is the number of sets. */
3593 static void
3594 alloc_cprop_mem (int n_blocks, int n_sets)
3596 cprop_pavloc = sbitmap_vector_alloc (n_blocks, n_sets);
3597 cprop_absaltered = sbitmap_vector_alloc (n_blocks, n_sets);
3599 cprop_avin = sbitmap_vector_alloc (n_blocks, n_sets);
3600 cprop_avout = sbitmap_vector_alloc (n_blocks, n_sets);
3603 /* Free vars used by copy/const propagation. */
3605 static void
3606 free_cprop_mem (void)
3608 sbitmap_vector_free (cprop_pavloc);
3609 sbitmap_vector_free (cprop_absaltered);
3610 sbitmap_vector_free (cprop_avin);
3611 sbitmap_vector_free (cprop_avout);
3614 /* For each block, compute whether X is transparent. X is either an
3615 expression or an assignment [though we don't care which, for this context
3616 an assignment is treated as an expression]. For each block where an
3617 element of X is modified, set (SET_P == 1) or reset (SET_P == 0) the INDX
3618 bit in BMAP. */
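/* Roughly: for the assignment (set) hash table SET_P is 1, BMAP records
   where X is altered and bits are set; for the expression hash table SET_P
   is 0, BMAP records transparency and bits are reset instead. */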
3620 static void
3621 compute_transp (rtx x, int indx, sbitmap *bmap, int set_p)
3623 int i, j;
3624 basic_block bb;
3625 enum rtx_code code;
3626 reg_set *r;
3627 const char *fmt;
3629 /* repeat is used to turn tail-recursion into iteration since GCC
3630 can't do it when there's no return value. */
3631 repeat:
3633 if (x == 0)
3634 return;
3636 code = GET_CODE (x);
3637 switch (code)
3639 case REG:
3640 if (set_p)
3642 if (REGNO (x) < FIRST_PSEUDO_REGISTER)
3644 FOR_EACH_BB (bb)
3645 if (TEST_BIT (reg_set_in_block[bb->index], REGNO (x)))
3646 SET_BIT (bmap[bb->index], indx);
3648 else
3650 for (r = reg_set_table[REGNO (x)]; r != NULL; r = r->next)
3651 SET_BIT (bmap[BLOCK_NUM (r->insn)], indx);
3654 else
3656 if (REGNO (x) < FIRST_PSEUDO_REGISTER)
3658 FOR_EACH_BB (bb)
3659 if (TEST_BIT (reg_set_in_block[bb->index], REGNO (x)))
3660 RESET_BIT (bmap[bb->index], indx);
3662 else
3664 for (r = reg_set_table[REGNO (x)]; r != NULL; r = r->next)
3665 RESET_BIT (bmap[BLOCK_NUM (r->insn)], indx);
3669 return;
3671 case MEM:
3672 FOR_EACH_BB (bb)
3674 rtx list_entry = canon_modify_mem_list[bb->index];
3676 while (list_entry)
3678 rtx dest, dest_addr;
3680 if (GET_CODE (XEXP (list_entry, 0)) == CALL_INSN)
3682 if (set_p)
3683 SET_BIT (bmap[bb->index], indx);
3684 else
3685 RESET_BIT (bmap[bb->index], indx);
3686 break;
3688 /* LIST_ENTRY must be an INSN of some kind that sets memory.
3689 Examine each hunk of memory that is modified. */
3691 dest = XEXP (list_entry, 0);
3692 list_entry = XEXP (list_entry, 1);
3693 dest_addr = XEXP (list_entry, 0);
3695 if (canon_true_dependence (dest, GET_MODE (dest), dest_addr,
3696 x, rtx_addr_varies_p))
3698 if (set_p)
3699 SET_BIT (bmap[bb->index], indx);
3700 else
3701 RESET_BIT (bmap[bb->index], indx);
3702 break;
3704 list_entry = XEXP (list_entry, 1);
3708 x = XEXP (x, 0);
3709 goto repeat;
3711 case PC:
3712 case CC0: /*FIXME*/
3713 case CONST:
3714 case CONST_INT:
3715 case CONST_DOUBLE:
3716 case CONST_VECTOR:
3717 case SYMBOL_REF:
3718 case LABEL_REF:
3719 case ADDR_VEC:
3720 case ADDR_DIFF_VEC:
3721 return;
3723 default:
3724 break;
3727 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
3729 if (fmt[i] == 'e')
3731 /* If we are about to do the last recursive call
3732 needed at this level, change it into iteration.
3733 This function is called enough to be worth it. */
3734 if (i == 0)
3736 x = XEXP (x, i);
3737 goto repeat;
3740 compute_transp (XEXP (x, i), indx, bmap, set_p);
3742 else if (fmt[i] == 'E')
3743 for (j = 0; j < XVECLEN (x, i); j++)
3744 compute_transp (XVECEXP (x, i, j), indx, bmap, set_p);
3748 /* Top level routine to do the dataflow analysis needed by copy/const
3749 propagation. */
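/* Roughly, compute_local_properties fills cprop_pavloc (assignments
   generated locally in each block) and cprop_absaltered (assignments whose
   operands are altered in the block), and compute_available then solves
   avout[bb] = pavloc[bb] U (avin[bb] - absaltered[bb])
   avin[bb] = intersection over each predecessor p of avout[p]
   to obtain the assignments available on entry to each block. */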
3751 static void
3752 compute_cprop_data (void)
3754 compute_local_properties (cprop_absaltered, cprop_pavloc, NULL, &set_hash_table);
3755 compute_available (cprop_pavloc, cprop_absaltered,
3756 cprop_avout, cprop_avin);
3759 /* Copy/constant propagation. */
3761 /* Maximum number of register uses in an insn that we handle. */
3762 #define MAX_USES 8
3764 /* Table of uses found in an insn.
3765 Allocated statically to avoid alloc/free complexity and overhead. */
3766 static struct reg_use reg_use_table[MAX_USES];
3768 /* Index into `reg_use_table' while building it. */
3769 static int reg_use_count;
3771 /* Set up a list of register numbers used in INSN. The found uses are stored
3772 in `reg_use_table'. `reg_use_count' is initialized to zero before entry,
3773 and contains the number of uses in the table upon exit.
3775 ??? If a register appears multiple times we will record it multiple times.
3776 This doesn't hurt anything but it will slow things down. */
3778 static void
3779 find_used_regs (rtx *xptr, void *data ATTRIBUTE_UNUSED)
3781 int i, j;
3782 enum rtx_code code;
3783 const char *fmt;
3784 rtx x = *xptr;
3786 /* repeat is used to turn tail-recursion into iteration since GCC
3787 can't do it when there's no return value. */
3788 repeat:
3789 if (x == 0)
3790 return;
3792 code = GET_CODE (x);
3793 if (REG_P (x))
3795 if (reg_use_count == MAX_USES)
3796 return;
3798 reg_use_table[reg_use_count].reg_rtx = x;
3799 reg_use_count++;
3802 /* Recursively scan the operands of this expression. */
3804 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
3806 if (fmt[i] == 'e')
3808 /* If we are about to do the last recursive call
3809 needed at this level, change it into iteration.
3810 This function is called enough to be worth it. */
3811 if (i == 0)
3813 x = XEXP (x, 0);
3814 goto repeat;
3817 find_used_regs (&XEXP (x, i), data);
3819 else if (fmt[i] == 'E')
3820 for (j = 0; j < XVECLEN (x, i); j++)
3821 find_used_regs (&XVECEXP (x, i, j), data);
3825 /* Try to replace all non-SET_DEST occurrences of FROM in INSN with TO.
3826 Returns nonzero if successful. */
3828 static int
3829 try_replace_reg (rtx from, rtx to, rtx insn)
3831 rtx note = find_reg_equal_equiv_note (insn);
3832 rtx src = 0;
3833 int success = 0;
3834 rtx set = single_set (insn);
3836 validate_replace_src_group (from, to, insn);
3837 if (num_changes_pending () && apply_change_group ())
3838 success = 1;
3840 /* Try to simplify SET_SRC if we have substituted a constant. */
3841 if (success && set && CONSTANT_P (to))
3843 src = simplify_rtx (SET_SRC (set));
3845 if (src)
3846 validate_change (insn, &SET_SRC (set), src, 0);
3849 /* If there is already a NOTE, update the expression in it with our
3850 replacement. */
3851 if (note != 0)
3852 XEXP (note, 0) = simplify_replace_rtx (XEXP (note, 0), from, to);
3854 if (!success && set && reg_mentioned_p (from, SET_SRC (set)))
3856 /* If above failed and this is a single set, try to simplify the source of
3857 the set given our substitution. We could perhaps try this for multiple
3858 SETs, but it probably won't buy us anything. */
3859 src = simplify_replace_rtx (SET_SRC (set), from, to);
3861 if (!rtx_equal_p (src, SET_SRC (set))
3862 && validate_change (insn, &SET_SRC (set), src, 0))
3863 success = 1;
3865 /* If we've failed to do replacement, have a single SET, don't already
3866 have a note, and have no special SET, add a REG_EQUAL note to not
3867 lose information. */
3868 if (!success && note == 0 && set != 0
3869 && GET_CODE (XEXP (set, 0)) != ZERO_EXTRACT
3870 && GET_CODE (XEXP (set, 0)) != SIGN_EXTRACT)
3871 note = set_unique_reg_note (insn, REG_EQUAL, copy_rtx (src));
3874 /* The REG_EQUAL note may get simplified into a plain register.
3875 We don't allow that; remove the note in that case. This ought
3876 not to happen, because the code above ought to synthesize a
3877 reg-reg move, but be on the safe side. */
3878 if (note && REG_P (XEXP (note, 0)))
3879 remove_note (insn, note);
3881 return success;
3884 /* Find a set of REGNOs that are available on entry to INSN's block. Returns
3885 NULL if no such set is found. */
3887 static struct expr *
3888 find_avail_set (int regno, rtx insn)
3890 /* SET1 contains the last set found that can be returned to the caller for
3891 use in a substitution. */
3892 struct expr *set1 = 0;
3894 /* Loops are not possible here. To get a loop we would need two sets
3895 available at the start of the block containing INSN. ie we would
3896 need two sets like this available at the start of the block:
3898 (set (reg X) (reg Y))
3899 (set (reg Y) (reg X))
3901 This can not happen since the set of (reg Y) would have killed the
3902 set of (reg X) making it unavailable at the start of this block. */
3903 while (1)
3905 rtx src;
3906 struct expr *set = lookup_set (regno, &set_hash_table);
3908 /* Find a set that is available at the start of the block
3909 which contains INSN. */
3910 while (set)
3912 if (TEST_BIT (cprop_avin[BLOCK_NUM (insn)], set->bitmap_index))
3913 break;
3914 set = next_set (regno, set);
3917 /* If no available set was found we've reached the end of the
3918 (possibly empty) copy chain. */
3919 if (set == 0)
3920 break;
3922 if (GET_CODE (set->expr) != SET)
3923 abort ();
3925 src = SET_SRC (set->expr);
3927 /* We know the set is available.
3928 Now check that SRC is ANTLOC (i.e. none of the source operands
3929 have changed since the start of the block).
3931 If the source operand changed, we may still use it for the next
3932 iteration of this loop, but we may not use it for substitutions. */
3934 if (gcse_constant_p (src) || oprs_not_set_p (src, insn))
3935 set1 = set;
3937 /* If the source of the set is anything except a register, then
3938 we have reached the end of the copy chain. */
3939 if (GET_CODE (src) != REG)
3940 break;
3942 /* Follow the copy chain, ie start another iteration of the loop
3943 and see if we have an available copy into SRC. */
3944 regno = REGNO (src);
3947 /* SET1 holds the last set that was available and anticipatable at
3948 INSN. */
3949 return set1;
3952 /* Subroutine of cprop_insn that tries to propagate constants into
3953 JUMP_INSNS. JUMP must be a conditional jump. If SETCC is non-NULL
3954 it is the instruction that immediately precedes JUMP, and must be a
3955 single SET of a register. FROM is what we will try to replace,
3956 SRC is the constant we will try to substitute for it. Returns nonzero
3957 if a change was made. */
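/* For example, with FROM = (reg 68) and SRC = (const_int 0), a jump whose
   pc_set source is (if_then_else (ne (reg 68) (const_int 0)) (label_ref 23)
   (pc)) simplifies to (pc): the branch is never taken, so the jump insn can
   simply be deleted.  */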
3959 static int
3960 cprop_jump (basic_block bb, rtx setcc, rtx jump, rtx from, rtx src)
3962 rtx new, set_src, note_src;
3963 rtx set = pc_set (jump);
3964 rtx note = find_reg_equal_equiv_note (jump);
3966 if (note)
3968 note_src = XEXP (note, 0);
3969 if (GET_CODE (note_src) == EXPR_LIST)
3970 note_src = NULL_RTX;
3972 else note_src = NULL_RTX;
3974 /* Prefer REG_EQUAL notes except those containing EXPR_LISTs. */
3975 set_src = note_src ? note_src : SET_SRC (set);
3977 /* First substitute the SETCC condition into the JUMP instruction,
3978 then substitute the given values into this expanded JUMP. */
3979 if (setcc != NULL_RTX
3980 && !modified_between_p (from, setcc, jump)
3981 && !modified_between_p (src, setcc, jump))
3983 rtx setcc_src;
3984 rtx setcc_set = single_set (setcc);
3985 rtx setcc_note = find_reg_equal_equiv_note (setcc);
3986 setcc_src = (setcc_note && GET_CODE (XEXP (setcc_note, 0)) != EXPR_LIST)
3987 ? XEXP (setcc_note, 0) : SET_SRC (setcc_set);
3988 set_src = simplify_replace_rtx (set_src, SET_DEST (setcc_set),
3989 setcc_src);
3991 else
3992 setcc = NULL_RTX;
3994 new = simplify_replace_rtx (set_src, from, src);
3996 /* If no simplification can be made, then try the next register. */
3997 if (rtx_equal_p (new, SET_SRC (set)))
3998 return 0;
4000 /* If this is now a no-op delete it, otherwise this must be a valid insn. */
4001 if (new == pc_rtx)
4002 delete_insn (jump);
4003 else
4005 /* Ensure the value computed inside the jump insn is equivalent
4006 to the one computed by setcc. */
4007 if (setcc && modified_in_p (new, setcc))
4008 return 0;
4009 if (! validate_change (jump, &SET_SRC (set), new, 0))
4011 /* When (some) constants are not valid in a comparison, and there
4012 are two registers to be replaced by constants before the entire
4013 comparison can be folded into a constant, we need to keep
4014 intermediate information in REG_EQUAL notes. For targets with
4015 separate compare insns, such notes are added by try_replace_reg.
4016 When we have a combined compare-and-branch instruction, however,
4017 we need to attach a note to the branch itself to make this
4018 optimization work. */
4020 if (!rtx_equal_p (new, note_src))
4021 set_unique_reg_note (jump, REG_EQUAL, copy_rtx (new));
4022 return 0;
4025 /* Remove REG_EQUAL note after simplification. */
4026 if (note_src)
4027 remove_note (jump, note);
4029 /* If this has turned into an unconditional jump,
4030 then put a barrier after it so that the unreachable
4031 code will be deleted. */
4032 if (GET_CODE (SET_SRC (set)) == LABEL_REF)
4033 emit_barrier_after (jump);
4036 #ifdef HAVE_cc0
4037 /* Delete the cc0 setter. */
4038 if (setcc != NULL && CC0_P (SET_DEST (single_set (setcc))))
4039 delete_insn (setcc);
4040 #endif
4042 run_jump_opt_after_gcse = 1;
4044 const_prop_count++;
4045 if (gcse_file != NULL)
4047 fprintf (gcse_file,
4048 "CONST-PROP: Replacing reg %d in jump_insn %d with constant ",
4049 REGNO (from), INSN_UID (jump));
4050 print_rtl (gcse_file, src);
4051 fprintf (gcse_file, "\n");
4053 purge_dead_edges (bb);
4055 return 1;
4058 static bool
4059 constprop_register (rtx insn, rtx from, rtx to, int alter_jumps)
4061 rtx sset;
4063 /* Check for reg or cc0 setting instructions followed by
4064 conditional branch instructions first. */
4065 if (alter_jumps
4066 && (sset = single_set (insn)) != NULL
4067 && NEXT_INSN (insn)
4068 && any_condjump_p (NEXT_INSN (insn)) && onlyjump_p (NEXT_INSN (insn)))
4070 rtx dest = SET_DEST (sset);
4071 if ((REG_P (dest) || CC0_P (dest))
4072 && cprop_jump (BLOCK_FOR_INSN (insn), insn, NEXT_INSN (insn), from, to))
4073 return 1;
4076 /* Handle normal insns next. */
4077 if (GET_CODE (insn) == INSN
4078 && try_replace_reg (from, to, insn))
4079 return 1;
4081 /* Try to propagate a CONST_INT into a conditional jump.
4082 We're pretty specific about what we will handle in this
4083 code; we can extend it as necessary over time.
4085 Right now the insn in question must look like
4086 (set (pc) (if_then_else ...)) */
4087 else if (alter_jumps && any_condjump_p (insn) && onlyjump_p (insn))
4088 return cprop_jump (BLOCK_FOR_INSN (insn), NULL, insn, from, to);
4089 return 0;
4092 /* Perform constant and copy propagation on INSN.
4093 The result is nonzero if a change was made. */
4095 static int
4096 cprop_insn (rtx insn, int alter_jumps)
4098 struct reg_use *reg_used;
4099 int changed = 0;
4100 rtx note;
4102 if (!INSN_P (insn))
4103 return 0;
4105 reg_use_count = 0;
4106 note_uses (&PATTERN (insn), find_used_regs, NULL);
4108 note = find_reg_equal_equiv_note (insn);
4110 /* We may win even when propagating constants into notes. */
4111 if (note)
4112 find_used_regs (&XEXP (note, 0), NULL);
4114 for (reg_used = &reg_use_table[0]; reg_use_count > 0;
4115 reg_used++, reg_use_count--)
4117 unsigned int regno = REGNO (reg_used->reg_rtx);
4118 rtx pat, src;
4119 struct expr *set;
4121 /* Ignore registers created by GCSE.
4122 We do this because ... */
4123 if (regno >= max_gcse_regno)
4124 continue;
4126 /* If the register has already been set in this block, there's
4127 nothing we can do. */
4128 if (! oprs_not_set_p (reg_used->reg_rtx, insn))
4129 continue;
4131 /* Find an assignment that sets reg_used and is available
4132 at the start of the block. */
4133 set = find_avail_set (regno, insn);
4134 if (! set)
4135 continue;
4137 pat = set->expr;
4138 /* ??? We might be able to handle PARALLELs. Later. */
4139 if (GET_CODE (pat) != SET)
4140 abort ();
4142 src = SET_SRC (pat);
4144 /* Constant propagation. */
4145 if (gcse_constant_p (src))
4147 if (constprop_register (insn, reg_used->reg_rtx, src, alter_jumps))
4149 changed = 1;
4150 const_prop_count++;
4151 if (gcse_file != NULL)
4153 fprintf (gcse_file, "GLOBAL CONST-PROP: Replacing reg %d in ", regno);
4154 fprintf (gcse_file, "insn %d with constant ", INSN_UID (insn));
4155 print_rtl (gcse_file, src);
4156 fprintf (gcse_file, "\n");
4158 if (INSN_DELETED_P (insn))
4159 return 1;
4162 else if (GET_CODE (src) == REG
4163 && REGNO (src) >= FIRST_PSEUDO_REGISTER
4164 && REGNO (src) != regno)
4166 if (try_replace_reg (reg_used->reg_rtx, src, insn))
4168 changed = 1;
4169 copy_prop_count++;
4170 if (gcse_file != NULL)
4172 fprintf (gcse_file, "GLOBAL COPY-PROP: Replacing reg %d in insn %d",
4173 regno, INSN_UID (insn));
4174 fprintf (gcse_file, " with reg %d\n", REGNO (src));
4177 /* The original insn setting reg_used may or may not now be
4178 deletable. We leave the deletion to flow. */
4179 /* FIXME: If it turns out that the insn isn't deletable,
4180 then we may have unnecessarily extended register lifetimes
4181 and made things worse. */
4186 return changed;
4189 /* Like find_used_regs, but avoid recording uses that appear in
4190 input-output contexts such as zero_extract or pre_dec. This
4191 restricts the cases we consider to those for which local cprop
4192 can legitimately make replacements. */
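/* For example, the register in (set (zero_extract (reg 66) ...) ...) or in
   (mem (pre_dec (reg 7))) is both read and written, so substituting a
   constant or a different register for it would be wrong.  */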
4194 static void
4195 local_cprop_find_used_regs (rtx *xptr, void *data)
4197 rtx x = *xptr;
4199 if (x == 0)
4200 return;
4202 switch (GET_CODE (x))
4204 case ZERO_EXTRACT:
4205 case SIGN_EXTRACT:
4206 case STRICT_LOW_PART:
4207 return;
4209 case PRE_DEC:
4210 case PRE_INC:
4211 case POST_DEC:
4212 case POST_INC:
4213 case PRE_MODIFY:
4214 case POST_MODIFY:
4215 /* Can only legitimately appear this early in the context of
4216 stack pushes for function arguments, but handle all of the
4217 codes nonetheless. */
4218 return;
4220 case SUBREG:
4221 /* Setting a subreg of a register larger than word_mode leaves
4222 the non-written words unchanged. */
4223 if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) > BITS_PER_WORD)
4224 return;
4225 break;
4227 default:
4228 break;
4231 find_used_regs (xptr, data);
4234 /* LIBCALL_SP is a zero-terminated array of insns at the end of a libcall;
4235 their REG_EQUAL notes need updating. */
4237 static bool
4238 do_local_cprop (rtx x, rtx insn, int alter_jumps, rtx *libcall_sp)
4240 rtx newreg = NULL, newcnst = NULL;
4242 /* Rule out USE instructions and ASM statements as we don't want to
4243 change the hard registers mentioned. */
4244 if (GET_CODE (x) == REG
4245 && (REGNO (x) >= FIRST_PSEUDO_REGISTER
4246 || (GET_CODE (PATTERN (insn)) != USE
4247 && asm_noperands (PATTERN (insn)) < 0)))
4249 cselib_val *val = cselib_lookup (x, GET_MODE (x), 0);
4250 struct elt_loc_list *l;
4252 if (!val)
4253 return false;
4254 for (l = val->locs; l; l = l->next)
4256 rtx this_rtx = l->loc;
4257 rtx note;
4259 if (l->in_libcall)
4260 continue;
4262 if (gcse_constant_p (this_rtx))
4263 newcnst = this_rtx;
4264 if (REG_P (this_rtx) && REGNO (this_rtx) >= FIRST_PSEUDO_REGISTER
4265 /* Don't copy propagate if it has an attached REG_EQUIV note.
4266 At this point only function parameters should have
4267 REG_EQUIV notes, and if the argument slot is used somewhere
4268 explicitly, it means the address of the parameter has been taken,
4269 so we should not extend the lifetime of the pseudo. */
4270 && (!(note = find_reg_note (l->setting_insn, REG_EQUIV, NULL_RTX))
4271 || GET_CODE (XEXP (note, 0)) != MEM))
4272 newreg = this_rtx;
4274 if (newcnst && constprop_register (insn, x, newcnst, alter_jumps))
4276 /* If we find a case where we can't fix the retval REG_EQUAL notes to
4277 match the new register, we either have to abandon this replacement,
4278 fix delete_trivially_dead_insns to preserve the setting insn,
4279 or make it delete the REG_EQUAL note, and fix up all passes that
4280 require the REG_EQUAL note there. */
4281 if (!adjust_libcall_notes (x, newcnst, insn, libcall_sp))
4282 abort ();
4283 if (gcse_file != NULL)
4285 fprintf (gcse_file, "LOCAL CONST-PROP: Replacing reg %d in ",
4286 REGNO (x));
4287 fprintf (gcse_file, "insn %d with constant ",
4288 INSN_UID (insn));
4289 print_rtl (gcse_file, newcnst);
4290 fprintf (gcse_file, "\n");
4292 const_prop_count++;
4293 return true;
4295 else if (newreg && newreg != x && try_replace_reg (x, newreg, insn))
4297 adjust_libcall_notes (x, newreg, insn, libcall_sp);
4298 if (gcse_file != NULL)
4300 fprintf (gcse_file,
4301 "LOCAL COPY-PROP: Replacing reg %d in insn %d",
4302 REGNO (x), INSN_UID (insn));
4303 fprintf (gcse_file, " with reg %d\n", REGNO (newreg));
4305 copy_prop_count++;
4306 return true;
4309 return false;
4312 /* LIBCALL_SP is a zero-terminated array of insns at the end of a libcall;
4313 their REG_EQUAL notes need updating to reflect that OLDREG has been
4314 replaced with NEWVAL in INSN. Return true if all substitutions could
4315 be made. */
4316 static bool
4317 adjust_libcall_notes (rtx oldreg, rtx newval, rtx insn, rtx *libcall_sp)
4319 rtx end;
4321 while ((end = *libcall_sp++))
4323 rtx note = find_reg_equal_equiv_note (end);
4325 if (! note)
4326 continue;
4328 if (REG_P (newval))
4330 if (reg_set_between_p (newval, PREV_INSN (insn), end))
4334 note = find_reg_equal_equiv_note (end);
4335 if (! note)
4336 continue;
4337 if (reg_mentioned_p (newval, XEXP (note, 0)))
4338 return false;
4340 while ((end = *libcall_sp++));
4341 return true;
4344 XEXP (note, 0) = replace_rtx (XEXP (note, 0), oldreg, newval);
4345 insn = end;
4347 return true;
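/* Maximum depth of nested libcall sequences (REG_LIBCALL/REG_RETVAL note
   pairs) that local_cprop_pass tracks on its libcall stack.  */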
4350 #define MAX_NESTED_LIBCALLS 9
4352 static void
4353 local_cprop_pass (int alter_jumps)
4355 rtx insn;
4356 struct reg_use *reg_used;
4357 rtx libcall_stack[MAX_NESTED_LIBCALLS + 1], *libcall_sp;
4358 bool changed = false;
4360 cselib_init ();
4361 libcall_sp = &libcall_stack[MAX_NESTED_LIBCALLS];
4362 *libcall_sp = 0;
4363 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4365 if (INSN_P (insn))
4367 rtx note = find_reg_note (insn, REG_LIBCALL, NULL_RTX);
4369 if (note)
4371 if (libcall_sp == libcall_stack)
4372 abort ();
4373 *--libcall_sp = XEXP (note, 0);
4375 note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
4376 if (note)
4377 libcall_sp++;
4378 note = find_reg_equal_equiv_note (insn);
4381 reg_use_count = 0;
4382 note_uses (&PATTERN (insn), local_cprop_find_used_regs, NULL);
4383 if (note)
4384 local_cprop_find_used_regs (&XEXP (note, 0), NULL);
4386 for (reg_used = &reg_use_table[0]; reg_use_count > 0;
4387 reg_used++, reg_use_count--)
4388 if (do_local_cprop (reg_used->reg_rtx, insn, alter_jumps,
4389 libcall_sp))
4391 changed = true;
4392 break;
4394 if (INSN_DELETED_P (insn))
4395 break;
4397 while (reg_use_count);
4399 cselib_process_insn (insn);
4401 cselib_finish ();
4402 /* Global analysis may get into infinite loops for unreachable blocks. */
4403 if (changed && alter_jumps)
4405 delete_unreachable_blocks ();
4406 free_reg_set_mem ();
4407 alloc_reg_set_mem (max_reg_num ());
4408 compute_sets (get_insns ());
4412 /* Forward propagate copies. This includes copies and constants. Return
4413 nonzero if a change was made. */
4415 static int
4416 cprop (int alter_jumps)
4418 int changed;
4419 basic_block bb;
4420 rtx insn;
4422 /* Note we start at block 1. */
4423 if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
4425 if (gcse_file != NULL)
4426 fprintf (gcse_file, "\n");
4427 return 0;
4430 changed = 0;
4431 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb->next_bb, EXIT_BLOCK_PTR, next_bb)
4433 /* Reset tables used to keep track of what's still valid [since the
4434 start of the block]. */
4435 reset_opr_set_tables ();
4437 for (insn = bb->head;
4438 insn != NULL && insn != NEXT_INSN (bb->end);
4439 insn = NEXT_INSN (insn))
4440 if (INSN_P (insn))
4442 changed |= cprop_insn (insn, alter_jumps);
4444 /* Keep track of everything modified by this insn. */
4445 /* ??? Need to be careful w.r.t. mods done to INSN. Don't
4446 call mark_oprs_set if we turned the insn into a NOTE. */
4447 if (GET_CODE (insn) != NOTE)
4448 mark_oprs_set (insn);
4452 if (gcse_file != NULL)
4453 fprintf (gcse_file, "\n");
4455 return changed;
4458 /* Similar to get_condition, only the resulting condition must be
4459 valid at JUMP, instead of at EARLIEST.
4461 This differs from noce_get_condition in ifcvt.c in that we prefer not to
4462 settle for the condition variable in the jump instruction being integral.
4463 We prefer to be able to record the value of a user variable, rather than
4464 the value of a temporary used in a condition. This could be solved by
4465 recording the value of *every* register scanned by canonicalize_condition,
4466 but this would require some code reorganization. */
4468 static rtx
4469 fis_get_condition (rtx jump)
4471 rtx cond, set, tmp, insn, earliest;
4472 bool reverse;
4474 if (! any_condjump_p (jump))
4475 return NULL_RTX;
4477 set = pc_set (jump);
4478 cond = XEXP (SET_SRC (set), 0);
4480 /* If this branches to JUMP_LABEL when the condition is false,
4481 reverse the condition. */
4482 reverse = (GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
4483 && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (jump));
4485 /* Use canonicalize_condition to do the dirty work of manipulating
4486 MODE_CC values and COMPARE rtx codes. */
4487 tmp = canonicalize_condition (jump, cond, reverse, &earliest, NULL_RTX);
4488 if (!tmp)
4489 return NULL_RTX;
4491 /* Verify that the given condition is valid at JUMP by virtue of not
4492 having been modified since EARLIEST. */
4493 for (insn = earliest; insn != jump; insn = NEXT_INSN (insn))
4494 if (INSN_P (insn) && modified_in_p (tmp, insn))
4495 break;
4496 if (insn == jump)
4497 return tmp;
4499 /* The condition was modified. See if we can get a partial result
4500 that doesn't follow all the reversals. Perhaps combine can fold
4501 them together later. */
4502 tmp = XEXP (tmp, 0);
4503 if (!REG_P (tmp) || GET_MODE_CLASS (GET_MODE (tmp)) != MODE_INT)
4504 return NULL_RTX;
4505 tmp = canonicalize_condition (jump, cond, reverse, &earliest, tmp);
4506 if (!tmp)
4507 return NULL_RTX;
4509 /* For sanity's sake, re-validate the new result. */
4510 for (insn = earliest; insn != jump; insn = NEXT_INSN (insn))
4511 if (INSN_P (insn) && modified_in_p (tmp, insn))
4512 return NULL_RTX;
4514 return tmp;
4517 /* Find the implicit sets of a function. An "implicit set" is a constraint
4518 on the value of a variable, implied by a conditional jump. For example,
4519 following "if (x == 2)", the then branch may be optimized as though the
4520 conditional performed an "explicit set", in this example, "x = 2". This
4521 function records the set patterns that are implicit at the start of each
4522 basic block. */
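/* For example, if a block ends in
     (set (pc) (if_then_else (eq (reg 70) (const_int 2))
                             (label_ref 24) (pc)))
   then the successor reached only when the branch is taken is given the
   implicit set (set (reg 70) (const_int 2)), provided that block has no
   other predecessors.  */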
4524 static void
4525 find_implicit_sets (void)
4527 basic_block bb, dest;
4528 unsigned int count;
4529 rtx cond, new;
4531 count = 0;
4532 FOR_EACH_BB (bb)
4533 /* Check for more than one successor. */
4534 if (bb->succ && bb->succ->succ_next)
4536 cond = fis_get_condition (bb->end);
4538 if (cond
4539 && (GET_CODE (cond) == EQ || GET_CODE (cond) == NE)
4540 && GET_CODE (XEXP (cond, 0)) == REG
4541 && REGNO (XEXP (cond, 0)) >= FIRST_PSEUDO_REGISTER
4542 && gcse_constant_p (XEXP (cond, 1)))
4544 dest = GET_CODE (cond) == EQ ? BRANCH_EDGE (bb)->dest
4545 : FALLTHRU_EDGE (bb)->dest;
4547 if (dest && ! dest->pred->pred_next
4548 && dest != EXIT_BLOCK_PTR)
4550 new = gen_rtx_SET (VOIDmode, XEXP (cond, 0),
4551 XEXP (cond, 1));
4552 implicit_sets[dest->index] = new;
4553 if (gcse_file)
4555 fprintf(gcse_file, "Implicit set of reg %d in ",
4556 REGNO (XEXP (cond, 0)));
4557 fprintf(gcse_file, "basic block %d\n", dest->index);
4559 count++;
4564 if (gcse_file)
4565 fprintf (gcse_file, "Found %d implicit sets\n", count);
4568 /* Perform one copy/constant propagation pass.
4569 PASS is the pass count. If CPROP_JUMPS is true, perform constant
4570 propagation into conditional jumps. If BYPASS_JUMPS is true,
4571 perform conditional jump bypassing optimizations. */
4573 static int
4574 one_cprop_pass (int pass, int cprop_jumps, int bypass_jumps)
4576 int changed = 0;
4578 const_prop_count = 0;
4579 copy_prop_count = 0;
4581 local_cprop_pass (cprop_jumps);
4583 /* Determine implicit sets. */
4584 implicit_sets = xcalloc (last_basic_block, sizeof (rtx));
4585 find_implicit_sets ();
4587 alloc_hash_table (max_cuid, &set_hash_table, 1);
4588 compute_hash_table (&set_hash_table);
4590 /* Free implicit_sets before peak usage. */
4591 free (implicit_sets);
4592 implicit_sets = NULL;
4594 if (gcse_file)
4595 dump_hash_table (gcse_file, "SET", &set_hash_table);
4596 if (set_hash_table.n_elems > 0)
4598 alloc_cprop_mem (last_basic_block, set_hash_table.n_elems);
4599 compute_cprop_data ();
4600 changed = cprop (cprop_jumps);
4601 if (bypass_jumps)
4602 changed |= bypass_conditional_jumps ();
4603 free_cprop_mem ();
4606 free_hash_table (&set_hash_table);
4608 if (gcse_file)
4610 fprintf (gcse_file, "CPROP of %s, pass %d: %d bytes needed, ",
4611 current_function_name, pass, bytes_used);
4612 fprintf (gcse_file, "%d const props, %d copy props\n\n",
4613 const_prop_count, copy_prop_count);
4615 /* Global analysis may get into infinite loops for unreachable blocks. */
4616 if (changed && cprop_jumps)
4617 delete_unreachable_blocks ();
4619 return changed;
4622 /* Bypass conditional jumps. */
4624 /* The value of last_basic_block at the beginning of the jump_bypass
4625 pass. The use of redirect_edge_and_branch_force may introduce new
4626 basic blocks, but the data flow analysis is only valid for basic
4627 block indices less than bypass_last_basic_block. */
4629 static int bypass_last_basic_block;
4631 /* Find a set of REGNO to a constant that is available at the end of basic
4632 block BB. Returns NULL if no such set is found. Based heavily upon
4633 find_avail_set. */
4635 static struct expr *
4636 find_bypass_set (int regno, int bb)
4638 struct expr *result = 0;
4640 for (;;)
4642 rtx src;
4643 struct expr *set = lookup_set (regno, &set_hash_table);
4645 while (set)
4647 if (TEST_BIT (cprop_avout[bb], set->bitmap_index))
4648 break;
4649 set = next_set (regno, set);
4652 if (set == 0)
4653 break;
4655 if (GET_CODE (set->expr) != SET)
4656 abort ();
4658 src = SET_SRC (set->expr);
4659 if (gcse_constant_p (src))
4660 result = set;
4662 if (GET_CODE (src) != REG)
4663 break;
4665 regno = REGNO (src);
4667 return result;
4671 /* Subroutine of bypass_block that checks whether a pseudo is killed by
4672 any of the instructions inserted on an edge. Jump bypassing places
4673 condition code setters on CFG edges using insert_insn_on_edge. This
4674 function is required to check that our data flow analysis is still
4675 valid prior to commit_edge_insertions. */
4677 static bool
4678 reg_killed_on_edge (rtx reg, edge e)
4680 rtx insn;
4682 for (insn = e->insns; insn; insn = NEXT_INSN (insn))
4683 if (INSN_P (insn) && reg_set_p (reg, insn))
4684 return true;
4686 return false;
4689 /* Subroutine of bypass_conditional_jumps that attempts to bypass the given
4690 basic block BB which has more than one predecessor. If not NULL, SETCC
4691 is the first instruction of BB, which is immediately followed by JUMP_INSN
4692 JUMP. Otherwise, SETCC is NULL, and JUMP is the first insn of BB.
4693 Returns nonzero if a change was made.
4695 During the jump bypassing pass, we may place copies of SETCC instructions
4696 on CFG edges. The following routine must be careful to pay attention to
4697 these inserted insns when performing its transformations. */
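/* For example, if the edge E into BB makes (reg 70) available as
   (const_int 0) and JUMP tests (eq (reg 70) (const_int 0)), the condition
   folds to a constant along E, so E can be redirected straight to the known
   destination (the jump target or the fall-through block), bypassing BB's
   test entirely.  */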
4699 static int
4700 bypass_block (basic_block bb, rtx setcc, rtx jump)
4702 rtx insn, note;
4703 edge e, enext, edest;
4704 int i, change;
4705 int may_be_loop_header;
4707 insn = (setcc != NULL) ? setcc : jump;
4709 /* Determine set of register uses in INSN. */
4710 reg_use_count = 0;
4711 note_uses (&PATTERN (insn), find_used_regs, NULL);
4712 note = find_reg_equal_equiv_note (insn);
4713 if (note)
4714 find_used_regs (&XEXP (note, 0), NULL);
4716 may_be_loop_header = false;
4717 for (e = bb->pred; e; e = e->pred_next)
4718 if (e->flags & EDGE_DFS_BACK)
4720 may_be_loop_header = true;
4721 break;
4724 change = 0;
4725 for (e = bb->pred; e; e = enext)
4727 enext = e->pred_next;
4728 if (e->flags & EDGE_COMPLEX)
4729 continue;
4731 /* We can't redirect edges from new basic blocks. */
4732 if (e->src->index >= bypass_last_basic_block)
4733 continue;
4735 /* The irreducible loops created by redirecting edges entering the
4736 loop from outside would decrease the effectiveness of some of the
4737 following optimizations, so prevent this. */
4738 if (may_be_loop_header
4739 && !(e->flags & EDGE_DFS_BACK))
4740 continue;
4742 for (i = 0; i < reg_use_count; i++)
4744 struct reg_use *reg_used = &reg_use_table[i];
4745 unsigned int regno = REGNO (reg_used->reg_rtx);
4746 basic_block dest, old_dest;
4747 struct expr *set;
4748 rtx src, new;
4750 if (regno >= max_gcse_regno)
4751 continue;
4753 set = find_bypass_set (regno, e->src->index);
4755 if (! set)
4756 continue;
4758 /* Check that the data flow is still valid after edge insertions. */
4759 if (e->insns && reg_killed_on_edge (reg_used->reg_rtx, e))
4760 continue;
4762 src = SET_SRC (pc_set (jump));
4764 if (setcc != NULL)
4765 src = simplify_replace_rtx (src,
4766 SET_DEST (PATTERN (setcc)),
4767 SET_SRC (PATTERN (setcc)));
4769 new = simplify_replace_rtx (src, reg_used->reg_rtx,
4770 SET_SRC (set->expr));
4772 /* Jump bypassing may have already placed instructions on
4773 edges of the CFG. We can't bypass an outgoing edge that
4774 has instructions associated with it, as these insns won't
4775 get executed if the incoming edge is redirected. */
4777 if (new == pc_rtx)
4779 edest = FALLTHRU_EDGE (bb);
4780 dest = edest->insns ? NULL : edest->dest;
4782 else if (GET_CODE (new) == LABEL_REF)
4784 dest = BLOCK_FOR_INSN (XEXP (new, 0));
4785 /* Don't bypass edges containing instructions. */
4786 for (edest = bb->succ; edest; edest = edest->succ_next)
4787 if (edest->dest == dest && edest->insns)
4789 dest = NULL;
4790 break;
4793 else
4794 dest = NULL;
4796 old_dest = e->dest;
4797 if (dest != NULL
4798 && dest != old_dest
4799 && dest != EXIT_BLOCK_PTR)
4801 redirect_edge_and_branch_force (e, dest);
4803 /* Copy the register setter to the redirected edge.
4804 Don't copy CC0 setters, as CC0 is dead after jump. */
4805 if (setcc)
4807 rtx pat = PATTERN (setcc);
4808 if (!CC0_P (SET_DEST (pat)))
4809 insert_insn_on_edge (copy_insn (pat), e);
4812 if (gcse_file != NULL)
4814 fprintf (gcse_file, "JUMP-BYPASS: Proved reg %d in jump_insn %d equals constant ",
4815 regno, INSN_UID (jump));
4816 print_rtl (gcse_file, SET_SRC (set->expr));
4817 fprintf (gcse_file, "\nBypass edge from %d->%d to %d\n",
4818 e->src->index, old_dest->index, dest->index);
4820 change = 1;
4821 break;
4825 return change;
4828 /* Find basic blocks with more than one predecessor that only contain a
4829 single conditional jump. If the result of the comparison is known at
4830 compile-time from any incoming edge, redirect that edge to the
4831 appropriate target. Returns nonzero if a change was made.
4833 This function is now mis-named, because we also handle indirect jumps. */
4835 static int
4836 bypass_conditional_jumps (void)
4838 basic_block bb;
4839 int changed;
4840 rtx setcc;
4841 rtx insn;
4842 rtx dest;
4844 /* Note we start at block 1. */
4845 if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
4846 return 0;
4848 bypass_last_basic_block = last_basic_block;
4849 mark_dfs_back_edges ();
4851 changed = 0;
4852 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb->next_bb,
4853 EXIT_BLOCK_PTR, next_bb)
4855 /* Check for more than one predecessor. */
4856 if (bb->pred && bb->pred->pred_next)
4858 setcc = NULL_RTX;
4859 for (insn = bb->head;
4860 insn != NULL && insn != NEXT_INSN (bb->end);
4861 insn = NEXT_INSN (insn))
4862 if (GET_CODE (insn) == INSN)
4864 if (setcc)
4865 break;
4866 if (GET_CODE (PATTERN (insn)) != SET)
4867 break;
4869 dest = SET_DEST (PATTERN (insn));
4870 if (REG_P (dest) || CC0_P (dest))
4871 setcc = insn;
4872 else
4873 break;
4875 else if (GET_CODE (insn) == JUMP_INSN)
4877 if ((any_condjump_p (insn) || computed_jump_p (insn))
4878 && onlyjump_p (insn))
4879 changed |= bypass_block (bb, setcc, insn);
4880 break;
4882 else if (INSN_P (insn))
4883 break;
4887 /* If we bypassed any register setting insns, we inserted a
4888 copy on the redirected edge. These need to be committed. */
4889 if (changed)
4890 commit_edge_insertions ();
4892 return changed;
4895 /* Compute PRE+LCM working variables. */
4897 /* Local properties of expressions. */
4898 /* Nonzero for expressions that are transparent in the block. */
4899 static sbitmap *transp;
4901 /* Nonzero for expressions that are transparent at the end of the block.
4902 This is only zero for expressions killed by an abnormal critical edge
4903 created by a call. */
4904 static sbitmap *transpout;
4906 /* Nonzero for expressions that are computed (available) in the block. */
4907 static sbitmap *comp;
4909 /* Nonzero for expressions that are locally anticipatable in the block. */
4910 static sbitmap *antloc;
4912 /* Nonzero for expressions where this block is an optimal computation
4913 point. */
4914 static sbitmap *pre_optimal;
4916 /* Nonzero for expressions which are redundant in a particular block. */
4917 static sbitmap *pre_redundant;
4919 /* Nonzero for expressions which should be inserted on a specific edge. */
4920 static sbitmap *pre_insert_map;
4922 /* Nonzero for expressions which should be deleted in a specific block. */
4923 static sbitmap *pre_delete_map;
4925 /* Contains the edge_list returned by pre_edge_lcm. */
4926 static struct edge_list *edge_list;
4928 /* Redundant insns. */
4929 static sbitmap pre_redundant_insns;
4931 /* Allocate vars used for PRE analysis. */
4933 static void
4934 alloc_pre_mem (int n_blocks, int n_exprs)
4936 transp = sbitmap_vector_alloc (n_blocks, n_exprs);
4937 comp = sbitmap_vector_alloc (n_blocks, n_exprs);
4938 antloc = sbitmap_vector_alloc (n_blocks, n_exprs);
4940 pre_optimal = NULL;
4941 pre_redundant = NULL;
4942 pre_insert_map = NULL;
4943 pre_delete_map = NULL;
4944 ae_in = NULL;
4945 ae_out = NULL;
4946 ae_kill = sbitmap_vector_alloc (n_blocks, n_exprs);
4948 /* pre_insert and pre_delete are allocated later. */
4951 /* Free vars used for PRE analysis. */
4953 static void
4954 free_pre_mem (void)
4956 sbitmap_vector_free (transp);
4957 sbitmap_vector_free (comp);
4959 /* ANTLOC and AE_KILL are freed just after pre_lcm finishes. */
4961 if (pre_optimal)
4962 sbitmap_vector_free (pre_optimal);
4963 if (pre_redundant)
4964 sbitmap_vector_free (pre_redundant);
4965 if (pre_insert_map)
4966 sbitmap_vector_free (pre_insert_map);
4967 if (pre_delete_map)
4968 sbitmap_vector_free (pre_delete_map);
4969 if (ae_in)
4970 sbitmap_vector_free (ae_in);
4971 if (ae_out)
4972 sbitmap_vector_free (ae_out);
4974 transp = comp = NULL;
4975 pre_optimal = pre_redundant = pre_insert_map = pre_delete_map = NULL;
4976 ae_in = ae_out = NULL;
4979 /* Top level routine to do the dataflow analysis needed by PRE. */
4981 static void
4982 compute_pre_data (void)
4984 sbitmap trapping_expr;
4985 basic_block bb;
4986 unsigned int ui;
4988 compute_local_properties (transp, comp, antloc, &expr_hash_table);
4989 sbitmap_vector_zero (ae_kill, last_basic_block);
4991 /* Collect expressions which might trap. */
4992 trapping_expr = sbitmap_alloc (expr_hash_table.n_elems);
4993 sbitmap_zero (trapping_expr);
4994 for (ui = 0; ui < expr_hash_table.size; ui++)
4996 struct expr *e;
4997 for (e = expr_hash_table.table[ui]; e != NULL; e = e->next_same_hash)
4998 if (may_trap_p (e->expr))
4999 SET_BIT (trapping_expr, e->bitmap_index);
5002 /* Compute ae_kill for each basic block using:
5004 ~(TRANSP | COMP)
5006 This is significantly faster than compute_ae_kill. */
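  /* That is, an expression is killed at the end of a block exactly when it
     is neither transparent through the block nor (re)computed within it.  */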
5008 FOR_EACH_BB (bb)
5010 edge e;
5012 /* If the current block is the destination of an abnormal edge, we
5013 kill all trapping expressions because we won't be able to properly
5014 place the instruction on the edge. So make them neither
5015 anticipatable nor transparent. This is fairly conservative. */
5016 for (e = bb->pred; e ; e = e->pred_next)
5017 if (e->flags & EDGE_ABNORMAL)
5019 sbitmap_difference (antloc[bb->index], antloc[bb->index], trapping_expr);
5020 sbitmap_difference (transp[bb->index], transp[bb->index], trapping_expr);
5021 break;
5024 sbitmap_a_or_b (ae_kill[bb->index], transp[bb->index], comp[bb->index]);
5025 sbitmap_not (ae_kill[bb->index], ae_kill[bb->index]);
5028 edge_list = pre_edge_lcm (gcse_file, expr_hash_table.n_elems, transp, comp, antloc,
5029 ae_kill, &pre_insert_map, &pre_delete_map);
5030 sbitmap_vector_free (antloc);
5031 antloc = NULL;
5032 sbitmap_vector_free (ae_kill);
5033 ae_kill = NULL;
5034 sbitmap_free (trapping_expr);
5037 /* PRE utilities */
5039 /* Return nonzero if an occurrence of expression EXPR in OCCR_BB would reach
5040 block BB.
5042 VISITED is a pointer to a working buffer for tracking which BB's have
5043 been visited. It is NULL for the top-level call.
5045 We treat reaching expressions that go through blocks containing the same
5046 reaching expression as "not reaching". E.g. if EXPR is generated in blocks
5047 2 and 3, INSN is in block 4, and 2->3->4, we treat the expression in block
5048 2 as not reaching. The intent is to improve the probability of finding
5049 only one reaching expression and to reduce register lifetimes by picking
5050 the closest such expression. */
5052 static int
5053 pre_expr_reaches_here_p_work (basic_block occr_bb, struct expr *expr, basic_block bb, char *visited)
5055 edge pred;
5057 for (pred = bb->pred; pred != NULL; pred = pred->pred_next)
5059 basic_block pred_bb = pred->src;
5061 if (pred->src == ENTRY_BLOCK_PTR
5062 /* Has this predecessor already been visited? */
5063 || visited[pred_bb->index])
5064 ;/* Nothing to do. */
5066 /* Does this predecessor generate this expression? */
5067 else if (TEST_BIT (comp[pred_bb->index], expr->bitmap_index))
5069 /* Is this the occurrence we're looking for?
5070 Note that there's only one generating occurrence per block
5071 so we just need to check the block number. */
5072 if (occr_bb == pred_bb)
5073 return 1;
5075 visited[pred_bb->index] = 1;
5077 /* Ignore this predecessor if it kills the expression. */
5078 else if (! TEST_BIT (transp[pred_bb->index], expr->bitmap_index))
5079 visited[pred_bb->index] = 1;
5081 /* Neither gen nor kill. */
5082 else
5084 visited[pred_bb->index] = 1;
5085 if (pre_expr_reaches_here_p_work (occr_bb, expr, pred_bb, visited))
5086 return 1;
5090 /* All paths have been checked. */
5091 return 0;
5094 /* The wrapper for pre_expr_reaches_here_p_work that ensures that any
5095 memory allocated for that function is returned. */
5097 static int
5098 pre_expr_reaches_here_p (basic_block occr_bb, struct expr *expr, basic_block bb)
5100 int rval;
5101 char *visited = xcalloc (last_basic_block, 1);
5103 rval = pre_expr_reaches_here_p_work (occr_bb, expr, bb, visited);
5105 free (visited);
5106 return rval;
5110 /* Given an expr, generate RTL which we can insert at the end of a BB,
5111 or on an edge. Set the block number of any insns generated to
5112 the value of BB. */
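/* For example, if EXPR's expression is (plus:SI (reg:SI 60) (const_int 4))
   and its reaching_reg is (reg:SI 105), the generated sequence is roughly
   (set (reg:SI 105) (plus:SI (reg:SI 60) (const_int 4))), plus whatever
   CLOBBERs recognition adds.  */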
5114 static rtx
5115 process_insert_insn (struct expr *expr)
5117 rtx reg = expr->reaching_reg;
5118 rtx exp = copy_rtx (expr->expr);
5119 rtx pat;
5121 start_sequence ();
5123 /* If the expression is something that's an operand, like a constant,
5124 just copy it to a register. */
5125 if (general_operand (exp, GET_MODE (reg)))
5126 emit_move_insn (reg, exp);
5128 /* Otherwise, make a new insn to compute this expression and make sure the
5129 insn will be recognized (this also adds any needed CLOBBERs). Copy the
5130 expression to make sure we don't have any sharing issues. */
5131 else if (insn_invalid_p (emit_insn (gen_rtx_SET (VOIDmode, reg, exp))))
5132 abort ();
5134 pat = get_insns ();
5135 end_sequence ();
5137 return pat;
5140 /* Add EXPR to the end of basic block BB.
5142 This is used by both PRE and code hoisting.
5144 For PRE, we want to verify that the expr is either transparent
5145 or locally anticipatable in the target block. This check makes
5146 no sense for code hoisting. */
5148 static void
5149 insert_insn_end_bb (struct expr *expr, basic_block bb, int pre)
5151 rtx insn = bb->end;
5152 rtx new_insn;
5153 rtx reg = expr->reaching_reg;
5154 int regno = REGNO (reg);
5155 rtx pat, pat_end;
5157 pat = process_insert_insn (expr);
5158 if (pat == NULL_RTX || ! INSN_P (pat))
5159 abort ();
5161 pat_end = pat;
5162 while (NEXT_INSN (pat_end) != NULL_RTX)
5163 pat_end = NEXT_INSN (pat_end);
5165 /* If the last insn is a jump, insert EXPR in front [taking care to
5166 handle cc0, etc. properly]. Similarly we need to take care with
5167 trapping instructions in the presence of non-call exceptions. */
5169 if (GET_CODE (insn) == JUMP_INSN
5170 || (GET_CODE (insn) == INSN
5171 && (bb->succ->succ_next || (bb->succ->flags & EDGE_ABNORMAL))))
5173 #ifdef HAVE_cc0
5174 rtx note;
5175 #endif
5176 /* It should always be the case that we can put these instructions
5177 anywhere in the basic block when performing PRE optimizations.
5178 Check this. */
5179 if (GET_CODE (insn) == INSN && pre
5180 && !TEST_BIT (antloc[bb->index], expr->bitmap_index)
5181 && !TEST_BIT (transp[bb->index], expr->bitmap_index))
5182 abort ();
5184 /* If this is a jump table, then we can't insert stuff here. Since
5185 we know the previous real insn must be the tablejump, we insert
5186 the new instruction just before the tablejump. */
5187 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
5188 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
5189 insn = prev_real_insn (insn);
5191 #ifdef HAVE_cc0
5192 /* FIXME: 'twould be nice to call prev_cc0_setter here but it aborts
5193 if cc0 isn't set. */
5194 note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
5195 if (note)
5196 insn = XEXP (note, 0);
5197 else
5199 rtx maybe_cc0_setter = prev_nonnote_insn (insn);
5200 if (maybe_cc0_setter
5201 && INSN_P (maybe_cc0_setter)
5202 && sets_cc0_p (PATTERN (maybe_cc0_setter)))
5203 insn = maybe_cc0_setter;
5205 #endif
5206 /* FIXME: What if something in cc0/jump uses value set in new insn? */
5207 new_insn = emit_insn_before (pat, insn);
5210 /* Likewise if the last insn is a call, as will happen in the presence
5211 of exception handling. */
5212 else if (GET_CODE (insn) == CALL_INSN
5213 && (bb->succ->succ_next || (bb->succ->flags & EDGE_ABNORMAL)))
5215 /* Keeping in mind SMALL_REGISTER_CLASSES and parameters in registers,
5216 we search backward and place the instructions before the first
5217 parameter is loaded. Do this for everyone for consistency, on the
5218 presumption that we'll get better code elsewhere as well.
5220 It should always be the case that we can put these instructions
5221 anywhere in the basic block when performing PRE optimizations.
5222 Check this. */
5224 if (pre
5225 && !TEST_BIT (antloc[bb->index], expr->bitmap_index)
5226 && !TEST_BIT (transp[bb->index], expr->bitmap_index))
5227 abort ();
5229 /* Since different machines initialize their parameter registers
5230 in different orders, assume nothing. Collect the set of all
5231 parameter registers. */
5232 insn = find_first_parameter_load (insn, bb->head);
5234 /* If we found all the parameter loads, then we want to insert
5235 before the first parameter load.
5237 If we did not find all the parameter loads, then we might have
5238 stopped on the head of the block, which could be a CODE_LABEL.
5239 If we inserted before the CODE_LABEL, then we would be putting
5240 the insn in the wrong basic block. In that case, put the insn
5241 after the CODE_LABEL. Also, respect NOTE_INSN_BASIC_BLOCK. */
5242 while (GET_CODE (insn) == CODE_LABEL
5243 || NOTE_INSN_BASIC_BLOCK_P (insn))
5244 insn = NEXT_INSN (insn);
5246 new_insn = emit_insn_before (pat, insn);
5248 else
5249 new_insn = emit_insn_after (pat, insn);
5251 while (1)
5253 if (INSN_P (pat))
5255 add_label_notes (PATTERN (pat), new_insn);
5256 note_stores (PATTERN (pat), record_set_info, pat);
5258 if (pat == pat_end)
5259 break;
5260 pat = NEXT_INSN (pat);
5263 gcse_create_count++;
5265 if (gcse_file)
5267 fprintf (gcse_file, "PRE/HOIST: end of bb %d, insn %d, ",
5268 bb->index, INSN_UID (new_insn));
5269 fprintf (gcse_file, "copying expression %d to reg %d\n",
5270 expr->bitmap_index, regno);
5274 /* Insert partially redundant expressions on edges in the CFG to make
5275 the expressions fully redundant. */
5277 static int
5278 pre_edge_insert (struct edge_list *edge_list, struct expr **index_map)
5280 int e, i, j, num_edges, set_size, did_insert = 0;
5281 sbitmap *inserted;
5283 /* Where PRE_INSERT_MAP is nonzero, we add the expression on that edge
5284 if it reaches any of the deleted expressions. */
5286 set_size = pre_insert_map[0]->size;
5287 num_edges = NUM_EDGES (edge_list);
5288 inserted = sbitmap_vector_alloc (num_edges, expr_hash_table.n_elems);
5289 sbitmap_vector_zero (inserted, num_edges);
5291 for (e = 0; e < num_edges; e++)
5293 int indx;
5294 basic_block bb = INDEX_EDGE_PRED_BB (edge_list, e);
5296 for (i = indx = 0; i < set_size; i++, indx += SBITMAP_ELT_BITS)
5298 SBITMAP_ELT_TYPE insert = pre_insert_map[e]->elms[i];
5300 for (j = indx; insert && j < (int) expr_hash_table.n_elems; j++, insert >>= 1)
5301 if ((insert & 1) != 0 && index_map[j]->reaching_reg != NULL_RTX)
5303 struct expr *expr = index_map[j];
5304 struct occr *occr;
5306 /* Now look at each deleted occurrence of this expression. */
5307 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
5309 if (! occr->deleted_p)
5310 continue;
5312 /* Insert this expression on this edge if it would
5313 reach the deleted occurrence in BB. */
5314 if (!TEST_BIT (inserted[e], j))
5316 rtx insn;
5317 edge eg = INDEX_EDGE (edge_list, e);
5319 /* We can't insert anything on an abnormal and
5320 critical edge, so we insert the insn at the end of
5321 the previous block. There are several alternatives
5322 detailed in Morgan's book, p. 277 (sec 10.5), for
5323 handling this situation. This one is easiest for
5324 now. */
5326 if ((eg->flags & EDGE_ABNORMAL) == EDGE_ABNORMAL)
5327 insert_insn_end_bb (index_map[j], bb, 0);
5328 else
5330 insn = process_insert_insn (index_map[j]);
5331 insert_insn_on_edge (insn, eg);
5334 if (gcse_file)
5336 fprintf (gcse_file, "PRE/HOIST: edge (%d,%d), ",
5337 bb->index,
5338 INDEX_EDGE_SUCC_BB (edge_list, e)->index);
5339 fprintf (gcse_file, "copy expression %d\n",
5340 expr->bitmap_index);
5343 update_ld_motion_stores (expr);
5344 SET_BIT (inserted[e], j);
5345 did_insert = 1;
5346 gcse_create_count++;
5353 sbitmap_vector_free (inserted);
5354 return did_insert;
5357 /* Copy the result of INSN to REG. INDX is the expression number. */
5359 static void
5360 pre_insert_copy_insn (struct expr *expr, rtx insn)
5362 rtx reg = expr->reaching_reg;
5363 int regno = REGNO (reg);
5364 int indx = expr->bitmap_index;
5365 rtx set = single_set (insn);
5366 rtx new_insn;
5368 if (!set)
5369 abort ();
5371 new_insn = emit_insn_after (gen_move_insn (reg, copy_rtx (SET_DEST (set))), insn);
5373 /* Keep register set table up to date. */
5374 record_one_set (regno, new_insn);
5376 gcse_create_count++;
5378 if (gcse_file)
5379 fprintf (gcse_file,
5380 "PRE: bb %d, insn %d, copy expression %d in insn %d to reg %d\n",
5381 BLOCK_NUM (insn), INSN_UID (new_insn), indx,
5382 INSN_UID (insn), regno);
5383 update_ld_motion_stores (expr);
5386 /* Copy available expressions that reach the redundant expression
5387 to `reaching_reg'. */
5389 static void
5390 pre_insert_copies (void)
5392 unsigned int i;
5393 struct expr *expr;
5394 struct occr *occr;
5395 struct occr *avail;
5397 /* For each available expression in the table, copy the result to
5398 `reaching_reg' if the expression reaches a deleted one.
5400 ??? The current algorithm is rather brute force.
5401 Need to do some profiling. */
5403 for (i = 0; i < expr_hash_table.size; i++)
5404 for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash)
5406 /* If the basic block isn't reachable, PPOUT will be TRUE. However,
5407 we don't want to insert a copy here because the expression may not
5408 really be redundant. So only insert an insn if the expression was
5409 deleted. This test also avoids further processing if the
5410 expression wasn't deleted anywhere. */
5411 if (expr->reaching_reg == NULL)
5412 continue;
5414 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
5416 if (! occr->deleted_p)
5417 continue;
5419 for (avail = expr->avail_occr; avail != NULL; avail = avail->next)
5421 rtx insn = avail->insn;
5423 /* No need to handle this one if handled already. */
5424 if (avail->copied_p)
5425 continue;
5427 /* Don't handle this one if it's a redundant one. */
5428 if (TEST_BIT (pre_redundant_insns, INSN_CUID (insn)))
5429 continue;
5431 /* Or if the expression doesn't reach the deleted one. */
5432 if (! pre_expr_reaches_here_p (BLOCK_FOR_INSN (avail->insn),
5433 expr,
5434 BLOCK_FOR_INSN (occr->insn)))
5435 continue;
5437 /* Copy the result of avail to reaching_reg. */
5438 pre_insert_copy_insn (expr, insn);
5439 avail->copied_p = 1;
5445 /* Emit move from SRC to DEST noting the equivalence with expression computed
5446 in INSN. */
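/* The REG_EQUAL note attached below records the expression (or the existing
   equivalence) computed by INSN, so later passes still know what value DEST
   holds after the copy.  */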
5447 static rtx
5448 gcse_emit_move_after (rtx src, rtx dest, rtx insn)
5450 rtx new;
5451 rtx set = single_set (insn), set2;
5452 rtx note;
5453 rtx eqv;
5455 /* This should never fail since we're creating a reg->reg copy
5456 we've verified to be valid. */
5458 new = emit_insn_after (gen_move_insn (dest, src), insn);
5460 /* Note the equivalence for local CSE pass. */
5461 set2 = single_set (new);
5462 if (!set2 || !rtx_equal_p (SET_DEST (set2), dest))
5463 return new;
5464 if ((note = find_reg_equal_equiv_note (insn)))
5465 eqv = XEXP (note, 0);
5466 else
5467 eqv = SET_SRC (set);
5469 set_unique_reg_note (new, REG_EQUAL, copy_insn_1 (eqv));
5471 return new;
5474 /* Delete redundant computations.
5475 Deletion is done by changing the insn to copy the `reaching_reg' of
5476 the expression into the result of the SET. It is left to later passes
5477 (cprop, cse2, flow, combine, regmove) to propagate the copy or eliminate it.
5479 Returns nonzero if a change is made. */
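/* For example, a redundant computation such as
     (set (reg 100) (plus:SI (reg:SI 60) (reg:SI 61)))
   is replaced by a copy
     (set (reg 100) (reg <reaching_reg>))
   where <reaching_reg> stands for the pseudo created below to hold the
   expression's value at its remaining computation points.  */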
5481 static int
5482 pre_delete (void)
5484 unsigned int i;
5485 int changed;
5486 struct expr *expr;
5487 struct occr *occr;
5489 changed = 0;
5490 for (i = 0; i < expr_hash_table.size; i++)
5491 for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash)
5493 int indx = expr->bitmap_index;
5495 /* We only need to search antic_occr since we require
5496 ANTLOC != 0. */
5498 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
5500 rtx insn = occr->insn;
5501 rtx set;
5502 basic_block bb = BLOCK_FOR_INSN (insn);
5504 if (TEST_BIT (pre_delete_map[bb->index], indx))
5506 set = single_set (insn);
5507 if (! set)
5508 abort ();
5510 /* Create a pseudo-reg to store the result of reaching
5511 expressions into. Get the mode for the new pseudo from
5512 the mode of the original destination pseudo. */
5513 if (expr->reaching_reg == NULL)
5514 expr->reaching_reg
5515 = gen_reg_rtx (GET_MODE (SET_DEST (set)));
5517 gcse_emit_move_after (expr->reaching_reg, SET_DEST (set), insn);
5518 delete_insn (insn);
5519 occr->deleted_p = 1;
5520 SET_BIT (pre_redundant_insns, INSN_CUID (insn));
5521 changed = 1;
5522 gcse_subst_count++;
5524 if (gcse_file)
5526 fprintf (gcse_file,
5527 "PRE: redundant insn %d (expression %d) in ",
5528 INSN_UID (insn), indx);
5529 fprintf (gcse_file, "bb %d, reaching reg is %d\n",
5530 bb->index, REGNO (expr->reaching_reg));
5536 return changed;
5539 /* Perform GCSE optimizations using PRE.
5540 This is called by one_pre_gcse_pass after all the dataflow analysis
5541 has been done.
5543 This is based on the original Morel-Renvoise paper, Fred Chow's thesis, and
5544 lazy code motion from Knoop, Ruthing and Steffen as described in Advanced
5545 Compiler Design and Implementation.
5547 ??? A new pseudo reg is created to hold the reaching expression. The nice
5548 thing about the classical approach is that it would try to use an existing
5549 reg. If the register can't be adequately optimized [i.e. we introduce
5550 reload problems], one could add a pass here to propagate the new register
5551 through the block.
5553 ??? We don't handle single sets in PARALLELs because we're [currently] not
5554 able to copy the rest of the parallel when we insert copies to create full
5555 redundancies from partial redundancies. However, there's no reason why we
5556 can't handle PARALLELs in the cases where there are no partial
5557 redundancies. */
5559 static int
5560 pre_gcse (void)
5562 unsigned int i;
5563 int did_insert, changed;
5564 struct expr **index_map;
5565 struct expr *expr;
5567 /* Compute a mapping from expression number (`bitmap_index') to
5568 hash table entry. */
5570 index_map = xcalloc (expr_hash_table.n_elems, sizeof (struct expr *));
5571 for (i = 0; i < expr_hash_table.size; i++)
5572 for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash)
5573 index_map[expr->bitmap_index] = expr;
5575 /* Reset bitmap used to track which insns are redundant. */
5576 pre_redundant_insns = sbitmap_alloc (max_cuid);
5577 sbitmap_zero (pre_redundant_insns);
5579 /* Delete the redundant insns first so that
5580 - we know what register to use for the new insns and for the other
5581 ones with reaching expressions
5582 - we know which insns are redundant when we go to create copies */
5584 changed = pre_delete ();
5586 did_insert = pre_edge_insert (edge_list, index_map);
5588 /* In other places with reaching expressions, copy the expression to the
5589 specially allocated pseudo-reg that reaches the redundant expr. */
5590 pre_insert_copies ();
5591 if (did_insert)
5593 commit_edge_insertions ();
5594 changed = 1;
5597 free (index_map);
5598 sbitmap_free (pre_redundant_insns);
5599 return changed;
5602 /* Top level routine to perform one PRE GCSE pass.
5604 Return nonzero if a change was made. */
5606 static int
5607 one_pre_gcse_pass (int pass)
5609 int changed = 0;
5611 gcse_subst_count = 0;
5612 gcse_create_count = 0;
5614 alloc_hash_table (max_cuid, &expr_hash_table, 0);
5615 add_noreturn_fake_exit_edges ();
5616 if (flag_gcse_lm)
5617 compute_ld_motion_mems ();
5619 compute_hash_table (&expr_hash_table);
5620 trim_ld_motion_mems ();
5621 if (gcse_file)
5622 dump_hash_table (gcse_file, "Expression", &expr_hash_table);
5624 if (expr_hash_table.n_elems > 0)
5626 alloc_pre_mem (last_basic_block, expr_hash_table.n_elems);
5627 compute_pre_data ();
5628 changed |= pre_gcse ();
5629 free_edge_list (edge_list);
5630 free_pre_mem ();
5633 free_ldst_mems ();
5634 remove_fake_edges ();
5635 free_hash_table (&expr_hash_table);
5637 if (gcse_file)
5639 fprintf (gcse_file, "\nPRE GCSE of %s, pass %d: %d bytes needed, ",
5640 current_function_name, pass, bytes_used);
5641 fprintf (gcse_file, "%d substs, %d insns created\n",
5642 gcse_subst_count, gcse_create_count);
5645 return changed;
5648 /* If X contains any LABEL_REF's, add REG_LABEL notes for them to INSN.
5649 If notes are added to an insn which references a CODE_LABEL, the
5650 LABEL_NUSES count is incremented. We have to add REG_LABEL notes,
5651 because the following loop optimization pass requires them. */
5653 /* ??? This is very similar to the loop.c add_label_notes function. We
5654 could probably share code here. */
5656 /* ??? If there was a jump optimization pass after gcse and before loop,
5657 then we would not need to do this here, because jump would add the
5658 necessary REG_LABEL notes. */
5660 static void
5661 add_label_notes (rtx x, rtx insn)
5663 enum rtx_code code = GET_CODE (x);
5664 int i, j;
5665 const char *fmt;
5667 if (code == LABEL_REF && !LABEL_REF_NONLOCAL_P (x))
5669 /* This code used to ignore labels that referred to dispatch tables to
5670 avoid flow generating (slightly) worse code.
5672 We no longer ignore such label references (see LABEL_REF handling in
5673 mark_jump_label for additional information). */
5675 REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_LABEL, XEXP (x, 0),
5676 REG_NOTES (insn));
5677 if (LABEL_P (XEXP (x, 0)))
5678 LABEL_NUSES (XEXP (x, 0))++;
5679 return;
5682 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
5684 if (fmt[i] == 'e')
5685 add_label_notes (XEXP (x, i), insn);
5686 else if (fmt[i] == 'E')
5687 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
5688 add_label_notes (XVECEXP (x, i, j), insn);
5692 /* Compute transparent outgoing information for each block.
5694 An expression is transparent to an edge unless it is killed by
5695 the edge itself. This can only happen with abnormal control flow,
5696 when the edge is traversed through a call. This happens with
5697 non-local labels and exceptions.
5699 This would not be necessary if we split the edge. While this is
5700 normally impossible for abnormal critical edges, with some effort
5701 it should be possible with exception handling, since we still have
5702 control over which handler should be invoked. But due to increased
5703 EH table sizes, this may not be worthwhile. */
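/* For example, a MEM expression whose address is not a constant pool
   reference is treated as killed by any block ending in a CALL_INSN, since
   the call might store to it.  */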
5705 static void
5706 compute_transpout (void)
5708 basic_block bb;
5709 unsigned int i;
5710 struct expr *expr;
5712 sbitmap_vector_ones (transpout, last_basic_block);
5714 FOR_EACH_BB (bb)
5716 /* Note that flow inserted a nop at the end of basic blocks that
5717 end in call instructions for reasons other than abnormal
5718 control flow. */
5719 if (GET_CODE (bb->end) != CALL_INSN)
5720 continue;
5722 for (i = 0; i < expr_hash_table.size; i++)
5723 for (expr = expr_hash_table.table[i]; expr ; expr = expr->next_same_hash)
5724 if (GET_CODE (expr->expr) == MEM)
5726 if (GET_CODE (XEXP (expr->expr, 0)) == SYMBOL_REF
5727 && CONSTANT_POOL_ADDRESS_P (XEXP (expr->expr, 0)))
5728 continue;
5730 /* ??? Optimally, we would use interprocedural alias
5731 analysis to determine if this mem is actually killed
5732 by this call. */
5733 RESET_BIT (transpout[bb->index], expr->bitmap_index);
5738 /* Removal of useless null pointer checks */
5740 /* Called via note_stores. X is set by SETTER. If X is a register we must
5741 invalidate nonnull_local and set nonnull_killed. DATA is really a
5742 `null_pointer_info *'.
5744 We ignore hard registers. */
5746 static void
5747 invalidate_nonnull_info (rtx x, rtx setter ATTRIBUTE_UNUSED, void *data)
5749 unsigned int regno;
5750 struct null_pointer_info *npi = (struct null_pointer_info *) data;
5752 while (GET_CODE (x) == SUBREG)
5753 x = SUBREG_REG (x);
5755 /* Ignore anything that is not a register or is a hard register. */
5756 if (GET_CODE (x) != REG
5757 || REGNO (x) < npi->min_reg
5758 || REGNO (x) >= npi->max_reg)
5759 return;
5761 regno = REGNO (x) - npi->min_reg;
5763 RESET_BIT (npi->nonnull_local[npi->current_block->index], regno);
5764 SET_BIT (npi->nonnull_killed[npi->current_block->index], regno);
5767 /* Do null-pointer check elimination for the registers indicated in
5768 NPI. NONNULL_AVIN and NONNULL_AVOUT are pre-allocated sbitmaps;
5769 they are not our responsibility to free. */
5771 static int
5772 delete_null_pointer_checks_1 (unsigned int *block_reg, sbitmap *nonnull_avin,
5773 sbitmap *nonnull_avout,
5774 struct null_pointer_info *npi)
5776 basic_block bb, current_block;
5777 sbitmap *nonnull_local = npi->nonnull_local;
5778 sbitmap *nonnull_killed = npi->nonnull_killed;
5779 int something_changed = 0;
5781 /* Compute local properties, nonnull and killed. A register will have
5782 the nonnull property if at the end of the current block its value is
5783 known to be nonnull. The killed property indicates that somewhere in
5784 the block any information we had about the register is killed.
5786 Note that a register can have both properties in a single block. That
5787 indicates that it's killed, then later in the block a new value is
5788 computed. */
5789 sbitmap_vector_zero (nonnull_local, last_basic_block);
5790 sbitmap_vector_zero (nonnull_killed, last_basic_block);
5792 FOR_EACH_BB (current_block)
5794 rtx insn, stop_insn;
5796 /* Set the current block for invalidate_nonnull_info. */
5797 npi->current_block = current_block;
5799 /* Scan each insn in the basic block looking for memory references and
5800 register sets. */
5801 stop_insn = NEXT_INSN (current_block->end);
5802 for (insn = current_block->head;
5803 insn != stop_insn;
5804 insn = NEXT_INSN (insn))
5806 rtx set;
5807 rtx reg;
5809 /* Ignore anything that is not a normal insn. */
5810 if (! INSN_P (insn))
5811 continue;
5813 /* Basically ignore anything that is not a simple SET. We do have
5814 to make sure to invalidate nonnull_local and set nonnull_killed
5815 for such insns though. */
5816 set = single_set (insn);
5817 if (!set)
5819 note_stores (PATTERN (insn), invalidate_nonnull_info, npi);
5820 continue;
5823 /* See if we've got a usable memory load. We handle it first
5824 in case it uses its address register as a dest (which kills
5825 the nonnull property). */
5826 if (GET_CODE (SET_SRC (set)) == MEM
5827 && GET_CODE ((reg = XEXP (SET_SRC (set), 0))) == REG
5828 && REGNO (reg) >= npi->min_reg
5829 && REGNO (reg) < npi->max_reg)
5830 SET_BIT (nonnull_local[current_block->index],
5831 REGNO (reg) - npi->min_reg);
5833 /* Now invalidate stuff clobbered by this insn. */
5834 note_stores (PATTERN (insn), invalidate_nonnull_info, npi);
5836 /* And handle stores; we do these last since any sets in INSN cannot
5837 kill the nonnull property if it is derived from a MEM
5838 appearing in a SET_DEST. */
5839 if (GET_CODE (SET_DEST (set)) == MEM
5840 && GET_CODE ((reg = XEXP (SET_DEST (set), 0))) == REG
5841 && REGNO (reg) >= npi->min_reg
5842 && REGNO (reg) < npi->max_reg)
5843 SET_BIT (nonnull_local[current_block->index],
5844 REGNO (reg) - npi->min_reg);
5848 /* Now compute global properties based on the local properties. This
5849 is a classic global availability algorithm. */
5850 compute_available (nonnull_local, nonnull_killed,
5851 nonnull_avout, nonnull_avin);
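  /* As a sketch only (compute_available is the authoritative form), the
     global step solves the classic availability equations

         AVIN(b)  = intersection over predecessors p of AVOUT(p)
         AVOUT(b) = NONNULL_LOCAL(b) | (AVIN(b) & ~NONNULL_KILLED(b))

     with nothing available on entry, so a register is known nonnull at
     the end of B only if every path to B establishes that fact.  */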
5853 /* Now look at each bb and see if it ends with a compare of a value
5854 against zero. */
5855 FOR_EACH_BB (bb)
5857 rtx last_insn = bb->end;
5858 rtx condition, earliest;
5859 int compare_and_branch;
5861 /* Since MIN_REG is always at least FIRST_PSEUDO_REGISTER, and
5862 since BLOCK_REG[BB] is zero if this block did not end with a
5863 comparison against zero, this condition works. */
5864 if (block_reg[bb->index] < npi->min_reg
5865 || block_reg[bb->index] >= npi->max_reg)
5866 continue;
5868 /* LAST_INSN is a conditional jump. Get its condition. */
5869 condition = get_condition (last_insn, &earliest);
5871 /* If we can't determine the condition then skip. */
5872 if (! condition)
5873 continue;
5875 /* Is the register known to have a nonzero value? */
5876 if (!TEST_BIT (nonnull_avout[bb->index], block_reg[bb->index] - npi->min_reg))
5877 continue;
5879 /* Try to compute whether the compare/branch at the loop end is one or
5880 two instructions. */
5881 if (earliest == last_insn)
5882 compare_and_branch = 1;
5883 else if (earliest == prev_nonnote_insn (last_insn))
5884 compare_and_branch = 2;
5885 else
5886 continue;
5888 /* We know the register in this comparison is nonnull at exit from
5889 this block. We can optimize this comparison. */
5890 if (GET_CODE (condition) == NE)
5892 rtx new_jump;
5894 new_jump = emit_jump_insn_after (gen_jump (JUMP_LABEL (last_insn)),
5895 last_insn);
5896 JUMP_LABEL (new_jump) = JUMP_LABEL (last_insn);
5897 LABEL_NUSES (JUMP_LABEL (new_jump))++;
5898 emit_barrier_after (new_jump);
5901 something_changed = 1;
5902 delete_insn (last_insn);
5903 if (compare_and_branch == 2)
5904 delete_insn (earliest);
5905 purge_dead_edges (bb);
5907 /* Don't check this block again. (Note that BLOCK_END is
5908 invalid here; we deleted the last instruction in the
5909 block.) */
5910 block_reg[bb->index] = 0;
5913 return something_changed;
5916 /* Find EQ/NE comparisons against zero which can be (indirectly) evaluated
5917 at compile time.
5919 This is conceptually similar to global constant/copy propagation and
5920 classic global CSE (it even uses the same dataflow equations as cprop).
5922 If a register is used as a memory address with the form (mem (reg)), then we
5923 know that REG cannot be zero at that point in the program. Any instruction
5924 which sets REG "kills" this property.
5926 So, if every path leading to a conditional branch has an available memory
5927 reference of that form, then we know the register cannot have the value
5928 zero at the conditional branch.
5930 So we merely need to compute the local properties and propagate that data
5931 around the cfg, then optimize where possible.
5933 We run this pass two times. Once before CSE, then again after CSE. This
5934 has proven to be the most profitable approach. It is rare for new
5935 optimization opportunities of this nature to appear after the first CSE
5936 pass.
5938 This could probably be integrated with global cprop with a little work. */
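/* Purely illustrative example (a hypothetical function, not from any test
   case) of the kind of check this pass removes:

       int
       example (int *p)
       {
         int x = *p;
         if (p != 0)
           return x;
         return 0;
       }

   The load *p generates (set (reg X) (mem (reg P))), so P is known
   nonnull on every path to the conditional branch and the comparison of
   p against zero can be deleted, exactly as described above.  */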
5941 delete_null_pointer_checks (rtx f ATTRIBUTE_UNUSED)
5943 sbitmap *nonnull_avin, *nonnull_avout;
5944 unsigned int *block_reg;
5945 basic_block bb;
5946 int reg;
5947 int regs_per_pass;
5948 int max_reg;
5949 struct null_pointer_info npi;
5950 int something_changed = 0;
5952 /* If we have only a single block, then there's nothing to do. */
5953 if (n_basic_blocks <= 1)
5954 return 0;
5956 /* Trying to perform global optimizations on flow graphs which have
5957 a high connectivity will take a long time and is unlikely to be
5958 particularly useful.
5960 In normal circumstances a cfg should have about twice as many edges
5961 as blocks. But we do not want to punish small functions which have
5962 a couple switch statements. So we require a relatively large number
5963 of basic blocks and the ratio of edges to blocks to be high. */
5964 if (n_basic_blocks > 1000 && n_edges / n_basic_blocks >= 20)
5965 return 0;
5967 /* We need four bitmaps, each with a bit for each register in each
5968 basic block. */
5969 max_reg = max_reg_num ();
5970 regs_per_pass = get_bitmap_width (4, last_basic_block, max_reg);
5972 /* Allocate bitmaps to hold local and global properties. */
5973 npi.nonnull_local = sbitmap_vector_alloc (last_basic_block, regs_per_pass);
5974 npi.nonnull_killed = sbitmap_vector_alloc (last_basic_block, regs_per_pass);
5975 nonnull_avin = sbitmap_vector_alloc (last_basic_block, regs_per_pass);
5976 nonnull_avout = sbitmap_vector_alloc (last_basic_block, regs_per_pass);
5978 /* Go through the basic blocks, seeing whether or not each block
5979 ends with a conditional branch whose condition is a comparison
5980 against zero. Record the register compared in BLOCK_REG. */
5981 block_reg = xcalloc (last_basic_block, sizeof (int));
5982 FOR_EACH_BB (bb)
5984 rtx last_insn = bb->end;
5985 rtx condition, earliest, reg;
5987 /* We only want conditional branches. */
5988 if (GET_CODE (last_insn) != JUMP_INSN
5989 || !any_condjump_p (last_insn)
5990 || !onlyjump_p (last_insn))
5991 continue;
5993 /* LAST_INSN is a conditional jump. Get its condition. */
5994 condition = get_condition (last_insn, &earliest);
5996 /* If we were unable to get the condition, or it is not an equality
5997 comparison against zero then there's nothing we can do. */
5998 if (!condition
5999 || (GET_CODE (condition) != NE && GET_CODE (condition) != EQ)
6000 || GET_CODE (XEXP (condition, 1)) != CONST_INT
6001 || (XEXP (condition, 1)
6002 != CONST0_RTX (GET_MODE (XEXP (condition, 0)))))
6003 continue;
6005 /* We must be checking a register against zero. */
6006 reg = XEXP (condition, 0);
6007 if (GET_CODE (reg) != REG)
6008 continue;
6010 block_reg[bb->index] = REGNO (reg);
6013 /* Go through the algorithm for each block of registers. */
6014 for (reg = FIRST_PSEUDO_REGISTER; reg < max_reg; reg += regs_per_pass)
6016 npi.min_reg = reg;
6017 npi.max_reg = MIN (reg + regs_per_pass, max_reg);
6018 something_changed |= delete_null_pointer_checks_1 (block_reg,
6019 nonnull_avin,
6020 nonnull_avout,
6021 &npi);
6024 /* Free the table of registers compared at the end of every block. */
6025 free (block_reg);
6027 /* Free bitmaps. */
6028 sbitmap_vector_free (npi.nonnull_local);
6029 sbitmap_vector_free (npi.nonnull_killed);
6030 sbitmap_vector_free (nonnull_avin);
6031 sbitmap_vector_free (nonnull_avout);
6033 return something_changed;
6036 /* Code Hoisting variables and subroutines. */
6038 /* Very busy expressions. */
6039 static sbitmap *hoist_vbein;
6040 static sbitmap *hoist_vbeout;
6042 /* Hoistable expressions. */
6043 static sbitmap *hoist_exprs;
6045 /* Dominator bitmaps. */
6046 dominance_info dominators;
6048 /* ??? We could compute post dominators and run this algorithm in
6049 reverse to perform tail merging; doing so would probably be
6050 more effective than the tail merging code in jump.c.
6052 It's unclear if tail merging could be run in parallel with
6053 code hoisting. It would be nice. */
6055 /* Allocate vars used for code hoisting analysis. */
6057 static void
6058 alloc_code_hoist_mem (int n_blocks, int n_exprs)
6060 antloc = sbitmap_vector_alloc (n_blocks, n_exprs);
6061 transp = sbitmap_vector_alloc (n_blocks, n_exprs);
6062 comp = sbitmap_vector_alloc (n_blocks, n_exprs);
6064 hoist_vbein = sbitmap_vector_alloc (n_blocks, n_exprs);
6065 hoist_vbeout = sbitmap_vector_alloc (n_blocks, n_exprs);
6066 hoist_exprs = sbitmap_vector_alloc (n_blocks, n_exprs);
6067 transpout = sbitmap_vector_alloc (n_blocks, n_exprs);
6070 /* Free vars used for code hoisting analysis. */
6072 static void
6073 free_code_hoist_mem (void)
6075 sbitmap_vector_free (antloc);
6076 sbitmap_vector_free (transp);
6077 sbitmap_vector_free (comp);
6079 sbitmap_vector_free (hoist_vbein);
6080 sbitmap_vector_free (hoist_vbeout);
6081 sbitmap_vector_free (hoist_exprs);
6082 sbitmap_vector_free (transpout);
6084 free_dominance_info (dominators);
6087 /* Compute the very busy expressions at entry/exit from each block.
6089 An expression is very busy if all paths from a given point
6090 compute the expression. */
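/* A sketch of the recurrence iterated below (the loop itself is the
   authoritative form):

       VBEIN(b)  = ANTLOC(b) | (VBEOUT(b) & TRANSP(b))
       VBEOUT(b) = intersection over successors s of VBEIN(s)

   starting from empty sets and iterating until nothing changes.  */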
6092 static void
6093 compute_code_hoist_vbeinout (void)
6095 int changed, passes;
6096 basic_block bb;
6098 sbitmap_vector_zero (hoist_vbeout, last_basic_block);
6099 sbitmap_vector_zero (hoist_vbein, last_basic_block);
6101 passes = 0;
6102 changed = 1;
6104 while (changed)
6106 changed = 0;
6108 /* We scan the blocks in the reverse order to speed up
6109 the convergence. */
6110 FOR_EACH_BB_REVERSE (bb)
6112 changed |= sbitmap_a_or_b_and_c_cg (hoist_vbein[bb->index], antloc[bb->index],
6113 hoist_vbeout[bb->index], transp[bb->index]);
6114 if (bb->next_bb != EXIT_BLOCK_PTR)
6115 sbitmap_intersection_of_succs (hoist_vbeout[bb->index], hoist_vbein, bb->index);
6118 passes++;
6121 if (gcse_file)
6122 fprintf (gcse_file, "hoisting vbeinout computation: %d passes\n", passes);
6125 /* Top level routine to do the dataflow analysis needed by code hoisting. */
6127 static void
6128 compute_code_hoist_data (void)
6130 compute_local_properties (transp, comp, antloc, &expr_hash_table);
6131 compute_transpout ();
6132 compute_code_hoist_vbeinout ();
6133 dominators = calculate_dominance_info (CDI_DOMINATORS);
6134 if (gcse_file)
6135 fprintf (gcse_file, "\n");
6138 /* Determine if the expression identified by EXPR_INDEX would
6139 reach BB unimpaired if it was placed at the end of EXPR_BB.
6141 It's unclear exactly what Muchnick meant by "unimpaired". It seems
6142 to me that the expression must either be computed or transparent in
6143 *every* block in the path(s) from EXPR_BB to BB. Any other definition
6144 would allow the expression to be hoisted out of loops, even if
6145 the expression wasn't a loop invariant.
6147 Contrast this to reachability for PRE where an expression is
6148 considered reachable if *any* path reaches instead of *all*
6149 paths. */
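/* Tiny worked example (hypothetical blocks, illustration only): with

       EXPR_BB -> B1 -> BB
       EXPR_BB -> B2 -> BB

   the walk below starts at BB's predecessors and succeeds only if every
   path back to EXPR_BB passes through blocks that are transparent for the
   expression; a predecessor that kills the expression (or, as the code is
   written, recomputes it) stops the walk and the answer is "no".  */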
6151 static int
6152 hoist_expr_reaches_here_p (basic_block expr_bb, int expr_index, basic_block bb, char *visited)
6154 edge pred;
6155 int visited_allocated_locally = 0;
6158 if (visited == NULL)
6160 visited_allocated_locally = 1;
6161 visited = xcalloc (last_basic_block, 1);
6164 for (pred = bb->pred; pred != NULL; pred = pred->pred_next)
6166 basic_block pred_bb = pred->src;
6168 if (pred->src == ENTRY_BLOCK_PTR)
6169 break;
6170 else if (pred_bb == expr_bb)
6171 continue;
6172 else if (visited[pred_bb->index])
6173 continue;
6175 /* Does this predecessor generate this expression? */
6176 else if (TEST_BIT (comp[pred_bb->index], expr_index))
6177 break;
6178 else if (! TEST_BIT (transp[pred_bb->index], expr_index))
6179 break;
6181 /* Not killed. */
6182 else
6184 visited[pred_bb->index] = 1;
6185 if (! hoist_expr_reaches_here_p (expr_bb, expr_index,
6186 pred_bb, visited))
6187 break;
6190 if (visited_allocated_locally)
6191 free (visited);
6193 return (pred == NULL);
6196 /* Actually perform code hoisting. */
6198 static void
6199 hoist_code (void)
6201 basic_block bb, dominated;
6202 basic_block *domby;
6203 unsigned int domby_len;
6204 unsigned int i,j;
6205 struct expr **index_map;
6206 struct expr *expr;
6208 sbitmap_vector_zero (hoist_exprs, last_basic_block);
6210 /* Compute a mapping from expression number (`bitmap_index') to
6211 hash table entry. */
6213 index_map = xcalloc (expr_hash_table.n_elems, sizeof (struct expr *));
6214 for (i = 0; i < expr_hash_table.size; i++)
6215 for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash)
6216 index_map[expr->bitmap_index] = expr;
6218 /* Walk over each basic block looking for potentially hoistable
6219 expressions; nothing gets hoisted from the entry block.
6220 FOR_EACH_BB (bb)
6222 int found = 0;
6223 int insn_inserted_p;
6225 domby_len = get_dominated_by (dominators, bb, &domby);
6226 /* Examine each expression that is very busy at the exit of this
6227 block. These are the potentially hoistable expressions. */
6228 for (i = 0; i < hoist_vbeout[bb->index]->n_bits; i++)
6230 int hoistable = 0;
6232 if (TEST_BIT (hoist_vbeout[bb->index], i)
6233 && TEST_BIT (transpout[bb->index], i))
6235 /* We've found a potentially hoistable expression; now
6236 we look at every block BB dominates to see if it
6237 computes the expression. */
6238 for (j = 0; j < domby_len; j++)
6240 dominated = domby[j];
6241 /* Ignore self dominance. */
6242 if (bb == dominated)
6243 continue;
6244 /* We've found a dominated block, now see if it computes
6245 the busy expression and whether or not moving that
6246 expression to the "beginning" of that block is safe. */
6247 if (!TEST_BIT (antloc[dominated->index], i))
6248 continue;
6250 /* Note if the expression would reach the dominated block
6251 unimpaired if it was placed at the end of BB.
6253 Keep track of how many times this expression is hoistable
6254 from a dominated block into BB. */
6255 if (hoist_expr_reaches_here_p (bb, i, dominated, NULL))
6256 hoistable++;
6259 /* If we found more than one hoistable occurrence of this
6260 expression, then note it in the bitmap of expressions to
6261 hoist. It makes no sense to hoist things which are computed
6262 in only one BB, and doing so tends to pessimize register
6263 allocation. One could increase this value to try harder
6264 to avoid any possible code expansion due to register
6265 allocation issues; however experiments have shown that
6266 the vast majority of hoistable expressions are only movable
6267 from two successors, so raising this threshold is likely
6268 to nullify any benefit we get from code hoisting. */
6269 if (hoistable > 1)
6271 SET_BIT (hoist_exprs[bb->index], i);
6272 found = 1;
6276 /* If we found nothing to hoist, then quit now. */
6277 if (! found)
6279 free (domby);
6280 continue;
6283 /* Loop over all the hoistable expressions. */
6284 for (i = 0; i < hoist_exprs[bb->index]->n_bits; i++)
6286 /* We want to insert the expression into BB only once, so
6287 note when we've inserted it. */
6288 insn_inserted_p = 0;
6290 /* These tests should be the same as the tests above. */
6291 if (TEST_BIT (hoist_vbeout[bb->index], i))
6293 /* We've found a potentially hoistable expression; now
6294 we look at every block BB dominates to see if it
6295 computes the expression. */
6296 for (j = 0; j < domby_len; j++)
6298 dominated = domby[j];
6299 /* Ignore self dominance. */
6300 if (bb == dominated)
6301 continue;
6303 /* We've found a dominated block, now see if it computes
6304 the busy expression and whether or not moving that
6305 expression to the "beginning" of that block is safe. */
6306 if (!TEST_BIT (antloc[dominated->index], i))
6307 continue;
6309 /* The expression is computed in the dominated block and
6310 it would be safe to compute it at the start of the
6311 dominated block. Now we have to determine if the
6312 expression would reach the dominated block if it was
6313 placed at the end of BB. */
6314 if (hoist_expr_reaches_here_p (bb, i, dominated, NULL))
6316 struct expr *expr = index_map[i];
6317 struct occr *occr = expr->antic_occr;
6318 rtx insn;
6319 rtx set;
6321 /* Find the right occurrence of this expression. */
6322 while (occr && BLOCK_FOR_INSN (occr->insn) != dominated)
6323 occr = occr->next;
6325 /* Should never happen. */
6326 if (!occr)
6327 abort ();
6329 insn = occr->insn;
6331 set = single_set (insn);
6332 if (! set)
6333 abort ();
6335 /* Create a pseudo-reg to store the result of reaching
6336 expressions into. Get the mode for the new pseudo
6337 from the mode of the original destination pseudo. */
6338 if (expr->reaching_reg == NULL)
6339 expr->reaching_reg
6340 = gen_reg_rtx (GET_MODE (SET_DEST (set)));
6342 gcse_emit_move_after (expr->reaching_reg, SET_DEST (set), insn);
6343 delete_insn (insn);
6344 occr->deleted_p = 1;
6345 if (!insn_inserted_p)
6347 insert_insn_end_bb (index_map[i], bb, 0);
6348 insn_inserted_p = 1;
6354 free (domby);
6357 free (index_map);
6360 /* Top level routine to perform one code hoisting (aka unification) pass
6362 Return nonzero if a change was made. */
6364 static int
6365 one_code_hoisting_pass (void)
6367 int changed = 0;
6369 alloc_hash_table (max_cuid, &expr_hash_table, 0);
6370 compute_hash_table (&expr_hash_table);
6371 if (gcse_file)
6372 dump_hash_table (gcse_file, "Code Hoisting Expressions", &expr_hash_table);
6374 if (expr_hash_table.n_elems > 0)
6376 alloc_code_hoist_mem (last_basic_block, expr_hash_table.n_elems);
6377 compute_code_hoist_data ();
6378 hoist_code ();
6379 free_code_hoist_mem ();
6382 free_hash_table (&expr_hash_table);
6384 return changed;
6387 /* Here we provide the things required to do store motion towards
6388 the exit. In order for this to be effective, gcse also needed to
6389 be taught how to move a load when it is killed only by a store to itself.
6391 int i;
6392 float a[10];
6394 void foo(float scale)
6396 for (i=0; i<10; i++)
6397 a[i] *= scale;
6400 'i' is both loaded and stored to in the loop. Normally, gcse cannot move
6401 the load out since it is live around the loop, and stored at the bottom
6402 of the loop.
6404 The 'Load Motion' referred to and implemented in this file is
6405 an enhancement to gcse which, when using edge-based LCM, recognizes
6406 this situation and allows gcse to move the load out of the loop.
6408 Once gcse has hoisted the load, store motion can then push this
6409 load towards the exit, and we end up with no loads or stores of 'i'
6410 in the loop. */
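/* Roughly (a hedged sketch of the intended result, not literal compiler
   output), after load motion and store motion 'i' lives in a pseudo
   inside the loop and is written back once:

       reg = 0;
       for (; reg < 10; reg++)
         a[reg] *= scale;
       i = reg;

   leaving no load or store of 'i' inside the loop body.  */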
6412 /* This will search the ldst list for a matching expression. If it
6413 doesn't find one, we create one and initialize it. */
6415 static struct ls_expr *
6416 ldst_entry (rtx x)
6418 struct ls_expr * ptr;
6420 for (ptr = first_ls_expr(); ptr != NULL; ptr = next_ls_expr (ptr))
6421 if (expr_equiv_p (ptr->pattern, x))
6422 break;
6424 if (!ptr)
6426 ptr = xmalloc (sizeof (struct ls_expr));
6428 ptr->next = pre_ldst_mems;
6429 ptr->expr = NULL;
6430 ptr->pattern = x;
6431 ptr->pattern_regs = NULL_RTX;
6432 ptr->loads = NULL_RTX;
6433 ptr->stores = NULL_RTX;
6434 ptr->reaching_reg = NULL_RTX;
6435 ptr->invalid = 0;
6436 ptr->index = 0;
6437 ptr->hash_index = 0;
6438 pre_ldst_mems = ptr;
6441 return ptr;
6444 /* Free up an individual ldst entry. */
6446 static void
6447 free_ldst_entry (struct ls_expr * ptr)
6449 free_INSN_LIST_list (& ptr->loads);
6450 free_INSN_LIST_list (& ptr->stores);
6452 free (ptr);
6455 /* Free up all memory associated with the ldst list. */
6457 static void
6458 free_ldst_mems (void)
6460 while (pre_ldst_mems)
6462 struct ls_expr * tmp = pre_ldst_mems;
6464 pre_ldst_mems = pre_ldst_mems->next;
6466 free_ldst_entry (tmp);
6469 pre_ldst_mems = NULL;
6472 /* Dump debugging info about the ldst list. */
6474 static void
6475 print_ldst_list (FILE * file)
6477 struct ls_expr * ptr;
6479 fprintf (file, "LDST list: \n");
6481 for (ptr = first_ls_expr(); ptr != NULL; ptr = next_ls_expr (ptr))
6483 fprintf (file, " Pattern (%3d): ", ptr->index);
6485 print_rtl (file, ptr->pattern);
6487 fprintf (file, "\n Loads : ");
6489 if (ptr->loads)
6490 print_rtl (file, ptr->loads);
6491 else
6492 fprintf (file, "(nil)");
6494 fprintf (file, "\n Stores : ");
6496 if (ptr->stores)
6497 print_rtl (file, ptr->stores);
6498 else
6499 fprintf (file, "(nil)");
6501 fprintf (file, "\n\n");
6504 fprintf (file, "\n");
6507 /* Return X's entry in the list of ldst-only expressions, or NULL. */
6509 static struct ls_expr *
6510 find_rtx_in_ldst (rtx x)
6512 struct ls_expr * ptr;
6514 for (ptr = pre_ldst_mems; ptr != NULL; ptr = ptr->next)
6515 if (expr_equiv_p (ptr->pattern, x) && ! ptr->invalid)
6516 return ptr;
6518 return NULL;
6521 /* Assign each element of the list of mems a monotonically increasing value. */
6523 static int
6524 enumerate_ldsts (void)
6526 struct ls_expr * ptr;
6527 int n = 0;
6529 for (ptr = pre_ldst_mems; ptr != NULL; ptr = ptr->next)
6530 ptr->index = n++;
6532 return n;
6535 /* Return first item in the list. */
6537 static inline struct ls_expr *
6538 first_ls_expr (void)
6540 return pre_ldst_mems;
6543 /* Return the next item in the list after the specified one. */
6545 static inline struct ls_expr *
6546 next_ls_expr (struct ls_expr * ptr)
6548 return ptr->next;
6551 /* Load Motion for loads which only kill themselves. */
6553 /* Return true if x is a simple MEM operation, with no registers or
6554 side effects. These are the types of loads we consider for the
6555 ld_motion list; otherwise we let the usual aliasing take care of it. */
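/* Hedged examples of what the tests below accept and reject (hypothetical
   RTL):

       (mem:SI (symbol_ref "i"))               accepted
       (mem:SI (plus (reg sp) (const_int 8)))  rejected: mentions the
                                               stack pointer
       a volatile or BLKmode MEM               rejected: side effects /
                                               wrong mode  */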
6557 static int
6558 simple_mem (rtx x)
6560 if (GET_CODE (x) != MEM)
6561 return 0;
6563 if (MEM_VOLATILE_P (x))
6564 return 0;
6566 if (GET_MODE (x) == BLKmode)
6567 return 0;
6569 /* If we are handling exceptions, we must be careful with memory references
6570 that may trap. If we are not, the behavior is undefined, so we may just
6571 continue. */
6572 if (flag_non_call_exceptions && may_trap_p (x))
6573 return 0;
6575 if (side_effects_p (x))
6576 return 0;
6578 /* Do not consider function arguments passed on stack. */
6579 if (reg_mentioned_p (stack_pointer_rtx, x))
6580 return 0;
6582 if (flag_float_store && FLOAT_MODE_P (GET_MODE (x)))
6583 return 0;
6585 return 1;
6588 /* Make sure there isn't a buried reference in this pattern anywhere.
6589 If there is, invalidate the entry for it since we're not capable
6590 of fixing it up just yet. We have to be sure we know about ALL
6591 loads, since the aliasing code will treat each entry in the
6592 ld_motion list as not aliasing itself. If we miss a load, we will get
6593 the wrong value since gcse might common it and we won't know to
6594 fix it up. */
6596 static void
6597 invalidate_any_buried_refs (rtx x)
6599 const char * fmt;
6600 int i, j;
6601 struct ls_expr * ptr;
6603 /* Invalidate it in the list. */
6604 if (GET_CODE (x) == MEM && simple_mem (x))
6606 ptr = ldst_entry (x);
6607 ptr->invalid = 1;
6610 /* Recursively process the insn. */
6611 fmt = GET_RTX_FORMAT (GET_CODE (x));
6613 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
6615 if (fmt[i] == 'e')
6616 invalidate_any_buried_refs (XEXP (x, i));
6617 else if (fmt[i] == 'E')
6618 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
6619 invalidate_any_buried_refs (XVECEXP (x, i, j));
6623 /* Find all the 'simple' MEMs which are used in LOADs and STORES. 'Simple'
6624 means MEM loads and stores to symbols, with no side effects
6625 and no registers in the expression. For a MEM destination, we also
6626 check that the insn is still valid if we replace the destination with a
6627 REG, as is done in update_ld_motion_stores. If there are any uses/defs
6628 which don't match these criteria, they are invalidated and trimmed out
6629 later. */
6631 static void
6632 compute_ld_motion_mems (void)
6634 struct ls_expr * ptr;
6635 basic_block bb;
6636 rtx insn;
6638 pre_ldst_mems = NULL;
6640 FOR_EACH_BB (bb)
6642 for (insn = bb->head;
6643 insn && insn != NEXT_INSN (bb->end);
6644 insn = NEXT_INSN (insn))
6646 if (INSN_P (insn))
6648 if (GET_CODE (PATTERN (insn)) == SET)
6650 rtx src = SET_SRC (PATTERN (insn));
6651 rtx dest = SET_DEST (PATTERN (insn));
6653 /* Check for a simple LOAD... */
6654 if (GET_CODE (src) == MEM && simple_mem (src))
6656 ptr = ldst_entry (src);
6657 if (GET_CODE (dest) == REG)
6658 ptr->loads = alloc_INSN_LIST (insn, ptr->loads);
6659 else
6660 ptr->invalid = 1;
6662 else
6664 /* Make sure there isn't a buried load somewhere. */
6665 invalidate_any_buried_refs (src);
6668 /* Check for stores. Don't worry about aliased ones; they
6669 will block any movement we might do later. We only care
6670 about this exact pattern since this is the only
6671 circumstance in which we will ignore the aliasing info. */
6672 if (GET_CODE (dest) == MEM && simple_mem (dest))
6674 ptr = ldst_entry (dest);
6676 if (GET_CODE (src) != MEM
6677 && GET_CODE (src) != ASM_OPERANDS
6678 /* Check for REG manually since want_to_gcse_p
6679 returns 0 for all REGs. */
6680 && (REG_P (src) || want_to_gcse_p (src)))
6681 ptr->stores = alloc_INSN_LIST (insn, ptr->stores);
6682 else
6683 ptr->invalid = 1;
6686 else
6687 invalidate_any_buried_refs (PATTERN (insn));
6693 /* Remove any references that either have been invalidated or are not in the
6694 expression list for pre gcse. */
6696 static void
6697 trim_ld_motion_mems (void)
6699 struct ls_expr * last = NULL;
6700 struct ls_expr * ptr = first_ls_expr ();
6702 while (ptr != NULL)
6704 int del = ptr->invalid;
6705 struct expr * expr = NULL;
6707 /* Delete if entry has been made invalid. */
6708 if (!del)
6710 unsigned int i;
6712 del = 1;
6713 /* Delete if we cannot find this mem in the expression list. */
6714 for (i = 0; i < expr_hash_table.size && del; i++)
6716 for (expr = expr_hash_table.table[i];
6717 expr != NULL;
6718 expr = expr->next_same_hash)
6719 if (expr_equiv_p (expr->expr, ptr->pattern))
6721 del = 0;
6722 break;
6727 if (del)
6729 if (last != NULL)
6731 last->next = ptr->next;
6732 free_ldst_entry (ptr);
6733 ptr = last->next;
6735 else
6737 pre_ldst_mems = pre_ldst_mems->next;
6738 free_ldst_entry (ptr);
6739 ptr = pre_ldst_mems;
6742 else
6744 /* Set the expression field if we are keeping it. */
6745 last = ptr;
6746 ptr->expr = expr;
6747 ptr = ptr->next;
6751 /* Show the world what we've found. */
6752 if (gcse_file && pre_ldst_mems != NULL)
6753 print_ldst_list (gcse_file);
6756 /* This routine will take an expression which we are replacing with
6757 a reaching register, and update any stores that are needed if
6758 that expression is in the ld_motion list. Stores are updated by
6759 copying their SRC to the reaching register, and then storing
6760 the reaching register into the store location. This keeps the
6761 correct value in the reaching register for the loads. */
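/* Concretely (an illustrative sketch with made-up register numbers): a
   store such as

       (set (mem (symbol_ref "i")) (plus (reg 100) (const_int 1)))

   becomes the pair

       (set (reg 200) (plus (reg 100) (const_int 1)))
       (set (mem (symbol_ref "i")) (reg 200))

   where (reg 200) is the reaching register already chosen for the
   corresponding hoisted load.  */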
6763 static void
6764 update_ld_motion_stores (struct expr * expr)
6766 struct ls_expr * mem_ptr;
6768 if ((mem_ptr = find_rtx_in_ldst (expr->expr)))
6770 /* We could try to find just the REACHED stores, but it shouldn't
6771 matter if we set the reaching reg everywhere... some might be
6772 dead and should be eliminated later. */
6774 /* We replace (set mem expr) with (set reg expr) (set mem reg)
6775 where reg is the reaching reg used in the load. We checked in
6776 compute_ld_motion_mems that we can replace (set mem expr) with
6777 (set reg expr) in that insn. */
6778 rtx list = mem_ptr->stores;
6780 for ( ; list != NULL_RTX; list = XEXP (list, 1))
6782 rtx insn = XEXP (list, 0);
6783 rtx pat = PATTERN (insn);
6784 rtx src = SET_SRC (pat);
6785 rtx reg = expr->reaching_reg;
6786 rtx copy, new;
6788 /* If we've already copied it, continue. */
6789 if (expr->reaching_reg == src)
6790 continue;
6792 if (gcse_file)
6794 fprintf (gcse_file, "PRE: store updated with reaching reg ");
6795 print_rtl (gcse_file, expr->reaching_reg);
6796 fprintf (gcse_file, ":\n ");
6797 print_inline_rtx (gcse_file, insn, 8);
6798 fprintf (gcse_file, "\n");
6801 copy = gen_move_insn ( reg, copy_rtx (SET_SRC (pat)));
6802 new = emit_insn_before (copy, insn);
6803 record_one_set (REGNO (reg), new);
6804 SET_SRC (pat) = reg;
6806 /* un-recognize this pattern since it's probably different now. */
6807 INSN_CODE (insn) = -1;
6808 gcse_create_count++;
6813 /* Store motion code. */
6815 #define ANTIC_STORE_LIST(x) ((x)->loads)
6816 #define AVAIL_STORE_LIST(x) ((x)->stores)
6817 #define LAST_AVAIL_CHECK_FAILURE(x) ((x)->reaching_reg)
6819 /* This is used to communicate the target bitvector we want to use in the
6820 reg_set_info routine when called via the note_stores mechanism. */
6821 static int * regvec;
6823 /* And current insn, for the same routine. */
6824 static rtx compute_store_table_current_insn;
6826 /* Used in computing the reverse edge graph bit vectors. */
6827 static sbitmap * st_antloc;
6829 /* Global holding the number of store expressions we are dealing with. */
6830 static int num_stores;
6832 /* Check whether we need to mark a register set. Called from note_stores. */
6834 static void
6835 reg_set_info (rtx dest, rtx setter ATTRIBUTE_UNUSED,
6836 void *data ATTRIBUTE_UNUSED)
6838 if (GET_CODE (dest) == SUBREG)
6839 dest = SUBREG_REG (dest);
6841 if (GET_CODE (dest) == REG)
6842 regvec[REGNO (dest)] = INSN_UID (compute_store_table_current_insn);
6845 /* Return false if some of the registers in list X are killed
6846 by a set of a register recorded in REGS_SET. */
6848 static bool
6849 store_ops_ok (rtx x, int *regs_set)
6851 rtx reg;
6853 for (; x; x = XEXP (x, 1))
6855 reg = XEXP (x, 0);
6856 if (regs_set[REGNO(reg)])
6857 return false;
6860 return true;
6863 /* Returns a list of registers mentioned in X. */
6864 static rtx
6865 extract_mentioned_regs (rtx x)
6867 return extract_mentioned_regs_helper (x, NULL_RTX);
6870 /* Helper for extract_mentioned_regs; ACCUM is used to accumulate used
6871 registers. */
6872 static rtx
6873 extract_mentioned_regs_helper (rtx x, rtx accum)
6875 int i;
6876 enum rtx_code code;
6877 const char * fmt;
6879 /* Repeat is used to turn tail-recursion into iteration. */
6880 repeat:
6882 if (x == 0)
6883 return accum;
6885 code = GET_CODE (x);
6886 switch (code)
6888 case REG:
6889 return alloc_EXPR_LIST (0, x, accum);
6891 case MEM:
6892 x = XEXP (x, 0);
6893 goto repeat;
6895 case PRE_DEC:
6896 case PRE_INC:
6897 case POST_DEC:
6898 case POST_INC:
6899 /* We do not run this function with arguments having side effects. */
6900 abort ();
6902 case PC:
6903 case CC0: /*FIXME*/
6904 case CONST:
6905 case CONST_INT:
6906 case CONST_DOUBLE:
6907 case CONST_VECTOR:
6908 case SYMBOL_REF:
6909 case LABEL_REF:
6910 case ADDR_VEC:
6911 case ADDR_DIFF_VEC:
6912 return accum;
6914 default:
6915 break;
6918 i = GET_RTX_LENGTH (code) - 1;
6919 fmt = GET_RTX_FORMAT (code);
6921 for (; i >= 0; i--)
6923 if (fmt[i] == 'e')
6925 rtx tem = XEXP (x, i);
6927 /* If we are about to do the last recursive call
6928 needed at this level, change it into iteration. */
6929 if (i == 0)
6931 x = tem;
6932 goto repeat;
6935 accum = extract_mentioned_regs_helper (tem, accum);
6937 else if (fmt[i] == 'E')
6939 int j;
6941 for (j = 0; j < XVECLEN (x, i); j++)
6942 accum = extract_mentioned_regs_helper (XVECEXP (x, i, j), accum);
6946 return accum;
6949 /* Determine whether INSN is a MEM store pattern that we will consider moving.
6950 REGS_SET_BEFORE is a bitmap of registers set before (and including) the
6951 current insn; REGS_SET_AFTER is a bitmap of registers set after (and
6952 including) the insn in this basic block. We must be passing through BB from
6953 head to end, as we are using this fact to speed things up.
6955 The results are stored this way:
6957 -- the first anticipatable expression is added into ANTIC_STORE_LIST
6958 -- if the processed expression is not anticipatable, NULL_RTX is added
6959 there instead, so that we can use it as an indicator that no further
6960 expression of this type may be anticipatable
6961 -- if the expression is available, it is added as head of AVAIL_STORE_LIST;
6962 consequently, all of them but this head are dead and may be deleted.
6963 -- if the expression is not available, the insn that causes it to fail to be
6964 available is stored in reaching_reg.
6966 Things are complicated a bit by the fact that there may already be stores
6967 to the same MEM from other blocks; also, the caller must take care of the
6968 necessary cleanup of the temporary markers after the end of the basic block. */
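/* A small illustrative trace (hypothetical insns A and B in one block,
   both storing to the same MEM):

       A: (set (mem (symbol_ref "x")) (reg 100))
       B: (set (mem (symbol_ref "x")) (reg 101))

   Processing A can add it to both ANTIC_STORE_LIST and AVAIL_STORE_LIST.
   When B is reached, anticipatability need not be rechecked (the list
   head is already in this block); B is simply pushed onto the head of
   AVAIL_STORE_LIST, and the earlier store A is the dead copy that
   build_store_vectors later removes.  */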
6971 static void
6972 find_moveable_store (rtx insn, int *regs_set_before, int *regs_set_after)
6974 struct ls_expr * ptr;
6975 rtx dest, set, tmp;
6976 int check_anticipatable, check_available;
6977 basic_block bb = BLOCK_FOR_INSN (insn);
6979 set = single_set (insn);
6980 if (!set)
6981 return;
6983 dest = SET_DEST (set);
6985 if (GET_CODE (dest) != MEM || MEM_VOLATILE_P (dest)
6986 || GET_MODE (dest) == BLKmode)
6987 return;
6989 if (side_effects_p (dest))
6990 return;
6992 /* If we are handling exceptions, we must be careful with memory references
6993 that may trap. If we are not, the behavior is undefined, so we may just
6994 continue. */
6995 if (flag_non_call_exceptions && may_trap_p (dest))
6996 return;
6998 ptr = ldst_entry (dest);
6999 if (!ptr->pattern_regs)
7000 ptr->pattern_regs = extract_mentioned_regs (dest);
7002 /* Do not check for anticipatability if we either found one anticipatable
7003 store already, or tested for one and found out that it was killed. */
7004 check_anticipatable = 0;
7005 if (!ANTIC_STORE_LIST (ptr))
7006 check_anticipatable = 1;
7007 else
7009 tmp = XEXP (ANTIC_STORE_LIST (ptr), 0);
7010 if (tmp != NULL_RTX
7011 && BLOCK_FOR_INSN (tmp) != bb)
7012 check_anticipatable = 1;
7014 if (check_anticipatable)
7016 if (store_killed_before (dest, ptr->pattern_regs, insn, bb, regs_set_before))
7017 tmp = NULL_RTX;
7018 else
7019 tmp = insn;
7020 ANTIC_STORE_LIST (ptr) = alloc_INSN_LIST (tmp,
7021 ANTIC_STORE_LIST (ptr));
7024 /* It is not necessary to check whether the store is available if we did
7025 it successfully before; if we failed before, do not bother to check
7026 until we reach the insn that caused us to fail. */
7027 check_available = 0;
7028 if (!AVAIL_STORE_LIST (ptr))
7029 check_available = 1;
7030 else
7032 tmp = XEXP (AVAIL_STORE_LIST (ptr), 0);
7033 if (BLOCK_FOR_INSN (tmp) != bb)
7034 check_available = 1;
7036 if (check_available)
7038 /* Check whether we have already reached the insn at which the check
7039 failed last time. */
7040 if (LAST_AVAIL_CHECK_FAILURE (ptr))
7042 for (tmp = bb->end;
7043 tmp != insn && tmp != LAST_AVAIL_CHECK_FAILURE (ptr);
7044 tmp = PREV_INSN (tmp))
7045 continue;
7046 if (tmp == insn)
7047 check_available = 0;
7049 else
7050 check_available = store_killed_after (dest, ptr->pattern_regs, insn,
7051 bb, regs_set_after,
7052 &LAST_AVAIL_CHECK_FAILURE (ptr));
7054 if (!check_available)
7055 AVAIL_STORE_LIST (ptr) = alloc_INSN_LIST (insn, AVAIL_STORE_LIST (ptr));
7058 /* Find available and anticipatable stores. */
7060 static int
7061 compute_store_table (void)
7063 int ret;
7064 basic_block bb;
7065 unsigned regno;
7066 rtx insn, pat, tmp;
7067 int *last_set_in, *already_set;
7068 struct ls_expr * ptr, **prev_next_ptr_ptr;
7070 max_gcse_regno = max_reg_num ();
7072 reg_set_in_block = sbitmap_vector_alloc (last_basic_block,
7073 max_gcse_regno);
7074 sbitmap_vector_zero (reg_set_in_block, last_basic_block);
7075 pre_ldst_mems = 0;
7076 last_set_in = xmalloc (sizeof (int) * max_gcse_regno);
7077 already_set = xmalloc (sizeof (int) * max_gcse_regno);
7079 /* Find all the stores we care about. */
7080 FOR_EACH_BB (bb)
7082 /* First compute the registers set in this block. */
7083 memset (last_set_in, 0, sizeof (int) * max_gcse_regno);
7084 regvec = last_set_in;
7086 for (insn = bb->head;
7087 insn != NEXT_INSN (bb->end);
7088 insn = NEXT_INSN (insn))
7090 if (! INSN_P (insn))
7091 continue;
7093 if (GET_CODE (insn) == CALL_INSN)
7095 bool clobbers_all = false;
7096 #ifdef NON_SAVING_SETJMP
7097 if (NON_SAVING_SETJMP
7098 && find_reg_note (insn, REG_SETJMP, NULL_RTX))
7099 clobbers_all = true;
7100 #endif
7102 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7103 if (clobbers_all
7104 || TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
7105 last_set_in[regno] = INSN_UID (insn);
7108 pat = PATTERN (insn);
7109 compute_store_table_current_insn = insn;
7110 note_stores (pat, reg_set_info, NULL);
7113 /* Record the set registers. */
7114 for (regno = 0; regno < max_gcse_regno; regno++)
7115 if (last_set_in[regno])
7116 SET_BIT (reg_set_in_block[bb->index], regno);
7118 /* Now find the stores. */
7119 memset (already_set, 0, sizeof (int) * max_gcse_regno);
7120 regvec = already_set;
7121 for (insn = bb->head;
7122 insn != NEXT_INSN (bb->end);
7123 insn = NEXT_INSN (insn))
7125 if (! INSN_P (insn))
7126 continue;
7128 if (GET_CODE (insn) == CALL_INSN)
7130 bool clobbers_all = false;
7131 #ifdef NON_SAVING_SETJMP
7132 if (NON_SAVING_SETJMP
7133 && find_reg_note (insn, REG_SETJMP, NULL_RTX))
7134 clobbers_all = true;
7135 #endif
7137 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7138 if (clobbers_all
7139 || TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
7140 already_set[regno] = 1;
7143 pat = PATTERN (insn);
7144 note_stores (pat, reg_set_info, NULL);
7146 /* Now that we've marked regs, look for stores. */
7147 find_moveable_store (insn, already_set, last_set_in);
7149 /* Unmark regs that are no longer set. */
7150 for (regno = 0; regno < max_gcse_regno; regno++)
7151 if (last_set_in[regno] == INSN_UID (insn))
7152 last_set_in[regno] = 0;
7155 /* Clear temporary marks. */
7156 for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
7158 LAST_AVAIL_CHECK_FAILURE(ptr) = NULL_RTX;
7159 if (ANTIC_STORE_LIST (ptr)
7160 && (tmp = XEXP (ANTIC_STORE_LIST (ptr), 0)) == NULL_RTX)
7161 ANTIC_STORE_LIST (ptr) = XEXP (ANTIC_STORE_LIST (ptr), 1);
7165 /* Remove the stores that are not available anywhere, as there will
7166 be no opportunity to optimize them. */
7167 for (ptr = pre_ldst_mems, prev_next_ptr_ptr = &pre_ldst_mems;
7168 ptr != NULL;
7169 ptr = *prev_next_ptr_ptr)
7171 if (!AVAIL_STORE_LIST (ptr))
7173 *prev_next_ptr_ptr = ptr->next;
7174 free_ldst_entry (ptr);
7176 else
7177 prev_next_ptr_ptr = &ptr->next;
7180 ret = enumerate_ldsts ();
7182 if (gcse_file)
7184 fprintf (gcse_file, "ST_avail and ST_antic (shown under loads..)\n");
7185 print_ldst_list (gcse_file);
7188 free (last_set_in);
7189 free (already_set);
7190 return ret;
7193 /* Check to see if the load X is aliased with STORE_PATTERN.
7194 AFTER is true if we are checking the case when STORE_PATTERN occurs
7195 after X. */
7197 static bool
7198 load_kills_store (rtx x, rtx store_pattern, int after)
7200 if (after)
7201 return anti_dependence (x, store_pattern);
7202 else
7203 return true_dependence (store_pattern, GET_MODE (store_pattern), x,
7204 rtx_addr_varies_p);
7207 /* Go through the entire insn X, looking for any loads which might alias
7208 STORE_PATTERN. Return true if found.
7209 AFTER is true if we are checking the case when STORE_PATTERN occurs
7210 after the insn X. */
7212 static bool
7213 find_loads (rtx x, rtx store_pattern, int after)
7215 const char * fmt;
7216 int i, j;
7217 int ret = false;
7219 if (!x)
7220 return false;
7222 if (GET_CODE (x) == SET)
7223 x = SET_SRC (x);
7225 if (GET_CODE (x) == MEM)
7227 if (load_kills_store (x, store_pattern, after))
7228 return true;
7231 /* Recursively process the insn. */
7232 fmt = GET_RTX_FORMAT (GET_CODE (x));
7234 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0 && !ret; i--)
7236 if (fmt[i] == 'e')
7237 ret |= find_loads (XEXP (x, i), store_pattern, after);
7238 else if (fmt[i] == 'E')
7239 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
7240 ret |= find_loads (XVECEXP (x, i, j), store_pattern, after);
7242 return ret;
7245 /* Check if INSN kills the store pattern X (is aliased with it).
7246 AFTER is true if we are checking the case when store X occurs
7247 after the insn. Return true if it does. */
7249 static bool
7250 store_killed_in_insn (rtx x, rtx x_regs, rtx insn, int after)
7252 rtx reg, base;
7254 if (!INSN_P (insn))
7255 return false;
7257 if (GET_CODE (insn) == CALL_INSN)
7259 /* A normal or pure call might read from pattern,
7260 but a const call will not. */
7261 if (! CONST_OR_PURE_CALL_P (insn) || pure_call_p (insn))
7262 return true;
7264 /* But even a const call reads its parameters. Check whether the
7265 base of some of the registers used in the mem is the stack pointer. */
7266 for (reg = x_regs; reg; reg = XEXP (reg, 1))
7268 base = find_base_term (XEXP (reg, 0));
7269 if (!base
7270 || (GET_CODE (base) == ADDRESS
7271 && GET_MODE (base) == Pmode
7272 && XEXP (base, 0) == stack_pointer_rtx))
7273 return true;
7276 return false;
7279 if (GET_CODE (PATTERN (insn)) == SET)
7281 rtx pat = PATTERN (insn);
7282 rtx dest = SET_DEST (pat);
7284 if (GET_CODE (dest) == SIGN_EXTRACT
7285 || GET_CODE (dest) == ZERO_EXTRACT)
7286 dest = XEXP (dest, 0);
7288 /* Check for memory stores to aliased objects. */
7289 if (GET_CODE (dest) == MEM
7290 && !expr_equiv_p (dest, x))
7292 if (after)
7294 if (output_dependence (dest, x))
7295 return true;
7297 else
7299 if (output_dependence (x, dest))
7300 return true;
7303 return find_loads (SET_SRC (pat), x, after);
7305 else
7306 return find_loads (PATTERN (insn), x, after);
7309 /* Returns true if the expression X is loaded or clobbered on or after INSN
7310 within basic block BB. REGS_SET_AFTER is a bitmap of registers set in
7311 or after the insn. X_REGS is the list of registers mentioned in X. If the
7312 store is killed, return in FAIL_INSN the last insn that kills it. */
7314 static bool
7315 store_killed_after (rtx x, rtx x_regs, rtx insn, basic_block bb,
7316 int *regs_set_after, rtx *fail_insn)
7318 rtx last = bb->end, act;
7320 if (!store_ops_ok (x_regs, regs_set_after))
7322 /* We do not know where it will happen. */
7323 if (fail_insn)
7324 *fail_insn = NULL_RTX;
7325 return true;
7328 /* Scan from the end, so that fail_insn is determined correctly. */
7329 for (act = last; act != PREV_INSN (insn); act = PREV_INSN (act))
7330 if (store_killed_in_insn (x, x_regs, act, false))
7332 if (fail_insn)
7333 *fail_insn = act;
7334 return true;
7337 return false;
7340 /* Returns true if the expression X is loaded or clobbered on or before INSN
7341 within basic block BB. X_REGS is the list of registers mentioned in X.
7342 REGS_SET_BEFORE is a bitmap of registers set before or in this insn. */
7343 static bool
7344 store_killed_before (rtx x, rtx x_regs, rtx insn, basic_block bb,
7345 int *regs_set_before)
7347 rtx first = bb->head;
7349 if (!store_ops_ok (x_regs, regs_set_before))
7350 return true;
7352 for ( ; insn != PREV_INSN (first); insn = PREV_INSN (insn))
7353 if (store_killed_in_insn (x, x_regs, insn, true))
7354 return true;
7356 return false;
7359 /* Fill in available, anticipatable, transparent and kill vectors in
7360 STORE_DATA, based on lists of available and anticipatable stores. */
7361 static void
7362 build_store_vectors (void)
7364 basic_block bb;
7365 int *regs_set_in_block;
7366 rtx insn, st;
7367 struct ls_expr * ptr;
7368 unsigned regno;
7370 /* Build the gen_vector. This is any store in the table which is not killed
7371 by aliasing later in its block. */
7372 ae_gen = sbitmap_vector_alloc (last_basic_block, num_stores);
7373 sbitmap_vector_zero (ae_gen, last_basic_block);
7375 st_antloc = sbitmap_vector_alloc (last_basic_block, num_stores);
7376 sbitmap_vector_zero (st_antloc, last_basic_block);
7378 for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
7380 for (st = AVAIL_STORE_LIST (ptr); st != NULL; st = XEXP (st, 1))
7382 insn = XEXP (st, 0);
7383 bb = BLOCK_FOR_INSN (insn);
7385 /* If we've already seen an available expression in this block,
7386 we can delete this one (it occurs earlier in the block). We'll
7387 copy the SRC expression to an unused register in case there
7388 are any side effects. */
7389 if (TEST_BIT (ae_gen[bb->index], ptr->index))
7391 rtx r = gen_reg_rtx (GET_MODE (ptr->pattern));
7392 if (gcse_file)
7393 fprintf (gcse_file, "Removing redundant store:\n");
7394 replace_store_insn (r, XEXP (st, 0), bb);
7395 continue;
7397 SET_BIT (ae_gen[bb->index], ptr->index);
7400 for (st = ANTIC_STORE_LIST (ptr); st != NULL; st = XEXP (st, 1))
7402 insn = XEXP (st, 0);
7403 bb = BLOCK_FOR_INSN (insn);
7404 SET_BIT (st_antloc[bb->index], ptr->index);
7408 ae_kill = sbitmap_vector_alloc (last_basic_block, num_stores);
7409 sbitmap_vector_zero (ae_kill, last_basic_block);
7411 transp = sbitmap_vector_alloc (last_basic_block, num_stores);
7412 sbitmap_vector_zero (transp, last_basic_block);
7413 regs_set_in_block = xmalloc (sizeof (int) * max_gcse_regno);
7415 FOR_EACH_BB (bb)
7417 for (regno = 0; regno < max_gcse_regno; regno++)
7418 regs_set_in_block[regno] = TEST_BIT (reg_set_in_block[bb->index], regno);
7420 for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
7422 if (store_killed_after (ptr->pattern, ptr->pattern_regs, bb->head,
7423 bb, regs_set_in_block, NULL))
7425 /* It should not be necessary to consider the expression
7426 killed if it is both anticipatable and available. */
7427 if (!TEST_BIT (st_antloc[bb->index], ptr->index)
7428 || !TEST_BIT (ae_gen[bb->index], ptr->index))
7429 SET_BIT (ae_kill[bb->index], ptr->index);
7431 else
7432 SET_BIT (transp[bb->index], ptr->index);
7436 free (regs_set_in_block);
7438 if (gcse_file)
7440 dump_sbitmap_vector (gcse_file, "st_antloc", "", st_antloc, last_basic_block);
7441 dump_sbitmap_vector (gcse_file, "st_kill", "", ae_kill, last_basic_block);
7442 dump_sbitmap_vector (gcse_file, "Transpt", "", transp, last_basic_block);
7443 dump_sbitmap_vector (gcse_file, "st_avloc", "", ae_gen, last_basic_block);
7447 /* Insert an instruction at the beginning of a basic block, and update
7448 the BLOCK_HEAD if needed. */
7450 static void
7451 insert_insn_start_bb (rtx insn, basic_block bb)
7453 /* Insert at start of successor block. */
7454 rtx prev = PREV_INSN (bb->head);
7455 rtx before = bb->head;
7456 while (before != 0)
7458 if (GET_CODE (before) != CODE_LABEL
7459 && (GET_CODE (before) != NOTE
7460 || NOTE_LINE_NUMBER (before) != NOTE_INSN_BASIC_BLOCK))
7461 break;
7462 prev = before;
7463 if (prev == bb->end)
7464 break;
7465 before = NEXT_INSN (before);
7468 insn = emit_insn_after (insn, prev);
7470 if (gcse_file)
7472 fprintf (gcse_file, "STORE_MOTION insert store at start of BB %d:\n",
7473 bb->index);
7474 print_inline_rtx (gcse_file, insn, 6);
7475 fprintf (gcse_file, "\n");
7479 /* This routine will insert a store on an edge. EXPR is the ldst entry for
7480 the memory reference, and E is the edge to insert it on. Returns nonzero
7481 if an edge insertion was performed. */
7483 static int
7484 insert_store (struct ls_expr * expr, edge e)
7486 rtx reg, insn;
7487 basic_block bb;
7488 edge tmp;
7490 /* We did all the deletes before this insert, so if we didn't delete a
7491 store, then we haven't set the reaching reg yet either. */
7492 if (expr->reaching_reg == NULL_RTX)
7493 return 0;
7495 if (e->flags & EDGE_FAKE)
7496 return 0;
7498 reg = expr->reaching_reg;
7499 insn = gen_move_insn (copy_rtx (expr->pattern), reg);
7501 /* If we are inserting this expression on ALL predecessor edges of a BB,
7502 insert it at the start of the BB, and reset the insert bits on the other
7503 edges so we don't try to insert it there as well. */
7504 bb = e->dest;
7505 for (tmp = e->dest->pred; tmp ; tmp = tmp->pred_next)
7506 if (!(tmp->flags & EDGE_FAKE))
7508 int index = EDGE_INDEX (edge_list, tmp->src, tmp->dest);
7509 if (index == EDGE_INDEX_NO_EDGE)
7510 abort ();
7511 if (! TEST_BIT (pre_insert_map[index], expr->index))
7512 break;
7515 /* If tmp is NULL, we found an insertion on every edge; blank the
7516 insertion vector for these edges, and insert at the start of the BB. */
7517 if (!tmp && bb != EXIT_BLOCK_PTR)
7519 for (tmp = e->dest->pred; tmp ; tmp = tmp->pred_next)
7521 int index = EDGE_INDEX (edge_list, tmp->src, tmp->dest);
7522 RESET_BIT (pre_insert_map[index], expr->index);
7524 insert_insn_start_bb (insn, bb);
7525 return 0;
7528 /* We can't insert on this edge, so we'll insert at the head of the
7529 successor's block. See Morgan, sec 10.5. */
7530 if ((e->flags & EDGE_ABNORMAL) == EDGE_ABNORMAL)
7532 insert_insn_start_bb (insn, bb);
7533 return 0;
7536 insert_insn_on_edge (insn, e);
7538 if (gcse_file)
7540 fprintf (gcse_file, "STORE_MOTION insert insn on edge (%d, %d):\n",
7541 e->src->index, e->dest->index);
7542 print_inline_rtx (gcse_file, insn, 6);
7543 fprintf (gcse_file, "\n");
7546 return 1;
7549 /* This routine will replace a store with a SET to a specified register. */
7551 static void
7552 replace_store_insn (rtx reg, rtx del, basic_block bb)
7554 rtx insn;
7556 insn = gen_move_insn (reg, SET_SRC (single_set (del)));
7557 insn = emit_insn_after (insn, del);
7559 if (gcse_file)
7561 fprintf (gcse_file,
7562 "STORE_MOTION delete insn in BB %d:\n ", bb->index);
7563 print_inline_rtx (gcse_file, del, 6);
7564 fprintf (gcse_file, "\nSTORE MOTION replaced with insn:\n ");
7565 print_inline_rtx (gcse_file, insn, 6);
7566 fprintf (gcse_file, "\n");
7569 delete_insn (del);
7573 /* Delete a store, but copy the value that would have been stored into
7574 the reaching_reg for later storing. */
7576 static void
7577 delete_store (struct ls_expr * expr, basic_block bb)
7579 rtx reg, i, del;
7581 if (expr->reaching_reg == NULL_RTX)
7582 expr->reaching_reg = gen_reg_rtx (GET_MODE (expr->pattern));
7584 reg = expr->reaching_reg;
7586 for (i = AVAIL_STORE_LIST (expr); i; i = XEXP (i, 1))
7588 del = XEXP (i, 0);
7589 if (BLOCK_FOR_INSN (del) == bb)
7591 /* We know there is only one since we deleted redundant
7592 ones during the available computation. */
7593 replace_store_insn (reg, del, bb);
7594 break;
7599 /* Free memory used by store motion. */
7601 static void
7602 free_store_memory (void)
7604 free_ldst_mems ();
7606 if (ae_gen)
7607 sbitmap_vector_free (ae_gen);
7608 if (ae_kill)
7609 sbitmap_vector_free (ae_kill);
7610 if (transp)
7611 sbitmap_vector_free (transp);
7612 if (st_antloc)
7613 sbitmap_vector_free (st_antloc);
7614 if (pre_insert_map)
7615 sbitmap_vector_free (pre_insert_map);
7616 if (pre_delete_map)
7617 sbitmap_vector_free (pre_delete_map);
7618 if (reg_set_in_block)
7619 sbitmap_vector_free (reg_set_in_block);
7621 ae_gen = ae_kill = transp = st_antloc = NULL;
7622 pre_insert_map = pre_delete_map = reg_set_in_block = NULL;
7625 /* Perform store motion. Much like gcse, except we move expressions the
7626 other way by looking at the flowgraph in reverse. */
7628 static void
7629 store_motion (void)
7631 basic_block bb;
7632 int x;
7633 struct ls_expr * ptr;
7634 int update_flow = 0;
7636 if (gcse_file)
7638 fprintf (gcse_file, "before store motion\n");
7639 print_rtl (gcse_file, get_insns ());
7642 init_alias_analysis ();
7644 /* Find all the available and anticipatable stores. */
7645 num_stores = compute_store_table ();
7646 if (num_stores == 0)
7648 sbitmap_vector_free (reg_set_in_block);
7649 end_alias_analysis ();
7650 return;
7653 /* Now compute kill & transp vectors. */
7654 build_store_vectors ();
7655 add_noreturn_fake_exit_edges ();
7656 connect_infinite_loops_to_exit ();
7658 edge_list = pre_edge_rev_lcm (gcse_file, num_stores, transp, ae_gen,
7659 st_antloc, ae_kill, &pre_insert_map,
7660 &pre_delete_map);
7662 /* Now we want to insert the new stores which are going to be needed. */
7663 for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
7665 FOR_EACH_BB (bb)
7666 if (TEST_BIT (pre_delete_map[bb->index], ptr->index))
7667 delete_store (ptr, bb);
7669 for (x = 0; x < NUM_EDGES (edge_list); x++)
7670 if (TEST_BIT (pre_insert_map[x], ptr->index))
7671 update_flow |= insert_store (ptr, INDEX_EDGE (edge_list, x));
7674 if (update_flow)
7675 commit_edge_insertions ();
7677 free_store_memory ();
7678 free_edge_list (edge_list);
7679 remove_fake_edges ();
7680 end_alias_analysis ();
7684 /* Entry point for jump bypassing optimization pass. */
7687 bypass_jumps (FILE *file)
7689 int changed;
7691 /* We do not construct an accurate cfg in functions which call
7692 setjmp, so just punt to be safe. */
7693 if (current_function_calls_setjmp)
7694 return 0;
7696 /* For calling dump_foo fns from gdb. */
7697 debug_stderr = stderr;
7698 gcse_file = file;
7700 /* Identify the basic block information for this function, including
7701 successors and predecessors. */
7702 max_gcse_regno = max_reg_num ();
7704 if (file)
7705 dump_flow_info (file);
7707 /* Return if there's nothing to do. */
7708 if (n_basic_blocks <= 1)
7709 return 0;
7711 /* Trying to perform global optimizations on flow graphs which have
7712 a high connectivity will take a long time and is unlikely to be
7713 particularly useful.
7715 In normal circumstances a cfg should have about twice as many edges
7716 as blocks. But we do not want to punish small functions which have
7717 a couple switch statements. So we require a relatively large number
7718 of basic blocks and the ratio of edges to blocks to be high. */
7719 if (n_basic_blocks > 1000 && n_edges / n_basic_blocks >= 20)
7721 if (warn_disabled_optimization)
7722 warning ("BYPASS disabled: %d > 1000 basic blocks and %d >= 20 edges/basic block",
7723 n_basic_blocks, n_edges / n_basic_blocks);
7724 return 0;
7727 /* If allocating memory for the cprop bitmap would take up too much
7728 storage it's better just to disable the optimization. */
7729 if ((n_basic_blocks
7730 * SBITMAP_SET_SIZE (max_gcse_regno)
7731 * sizeof (SBITMAP_ELT_TYPE)) > MAX_GCSE_MEMORY)
7733 if (warn_disabled_optimization)
7734 warning ("GCSE disabled: %d basic blocks and %d registers",
7735 n_basic_blocks, max_gcse_regno);
7737 return 0;
7740 gcc_obstack_init (&gcse_obstack);
7741 bytes_used = 0;
7743 /* We need alias. */
7744 init_alias_analysis ();
7746 /* Record where pseudo-registers are set. This data is kept accurate
7747 during each pass. ??? We could also record hard-reg information here
7748 [since it's unchanging], however it is currently done during hash table
7749 computation.
7751 It may be tempting to compute MEM set information here too, but MEM sets
7752 will be subject to code motion one day and thus we need to compute
7753 information about memory sets when we build the hash tables. */
7755 alloc_reg_set_mem (max_gcse_regno);
7756 compute_sets (get_insns ());
7758 max_gcse_regno = max_reg_num ();
7759 alloc_gcse_mem (get_insns ());
7760 changed = one_cprop_pass (1, 1, 1);
7761 free_gcse_mem ();
7763 if (file)
7765 fprintf (file, "BYPASS of %s: %d basic blocks, ",
7766 current_function_name, n_basic_blocks);
7767 fprintf (file, "%d bytes\n\n", bytes_used);
7770 obstack_free (&gcse_obstack, NULL);
7771 free_reg_set_mem ();
7773 /* We are finished with alias. */
7774 end_alias_analysis ();
7775 allocate_reg_info (max_reg_num (), FALSE, FALSE);
7777 return changed;
7780 #include "gt-gcse.h"