/* Global common subexpression elimination/Partial redundancy elimination
   and global constant/copy propagation for GNU compiler.
   Copyright (C) 1997, 1998, 1999 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */

/* TODO
   - reordering of memory allocation and freeing to be more space efficient
   - do rough calc of how many regs are needed in each block, and a rough
     calc of how many regs are available in each class and use that to
     throttle back the code in cases where RTX_COST is minimal.
   - dead store elimination
   - a store to the same address as a load does not kill the load if the
     source of the store is also the destination of the load.  Handling this
     allows more load motion, particularly out of loops.
   - ability to realloc sbitmap vectors would allow one initial computation
     of reg_set_in_block with only subsequent additions, rather than
     recomputing it for each pass
*/

/* References searched while implementing this.

   Compilers: Principles, Techniques, and Tools
   Aho, Sethi, Ullman
   Addison-Wesley, 1988

   Global Optimization by Suppression of Partial Redundancies
   E. Morel, C. Renvoise
   Communications of the ACM, Vol. 22, Num. 2, Feb. 1979

   A Portable Machine-Independent Global Optimizer - Design and Measurements
   Frederick Chow
   Stanford Ph.D. thesis, Dec. 1983

   A Fast Algorithm for Code Movement Optimization
   D.M. Dhamdhere
   SIGPLAN Notices, Vol. 23, Num. 10, Oct. 1988

   A Solution to a Problem with Morel and Renvoise's
   Global Optimization by Suppression of Partial Redundancies
   K-H Drechsler, M.P. Stadel
   ACM TOPLAS, Vol. 10, Num. 4, Oct. 1988

   Practical Adaptation of the Global Optimization
   Algorithm of Morel and Renvoise
   D.M. Dhamdhere
   ACM TOPLAS, Vol. 13, Num. 2, Apr. 1991

   Efficiently Computing Static Single Assignment Form and the Control
   Dependence Graph
   R. Cytron, J. Ferrante, B.K. Rosen, M.N. Wegman, and F.K. Zadeck
   ACM TOPLAS, Vol. 13, Num. 4, Oct. 1991

   Lazy Code Motion
   J. Knoop, O. Ruthing, B. Steffen
   ACM SIGPLAN Notices Vol. 27, Num. 7, Jul. 1992, '92 Conference on PLDI

   What's In a Region? Or Computing Control Dependence Regions in Near-Linear
   Time for Reducible Control Flow
   Thomas Ball
   ACM Letters on Programming Languages and Systems,
   Vol. 2, Num. 1-4, Mar-Dec 1993

   An Efficient Representation for Sparse Sets
   Preston Briggs, Linda Torczon
   ACM Letters on Programming Languages and Systems,
   Vol. 2, Num. 1-4, Mar-Dec 1993

   A Variation of Knoop, Ruthing, and Steffen's Lazy Code Motion
   K-H Drechsler, M.P. Stadel
   ACM SIGPLAN Notices, Vol. 28, Num. 5, May 1993

   Partial Dead Code Elimination
   J. Knoop, O. Ruthing, B. Steffen
   ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994

   Effective Partial Redundancy Elimination
   P. Briggs, K.D. Cooper
   ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994

   The Program Structure Tree: Computing Control Regions in Linear Time
   R. Johnson, D. Pearson, K. Pingali
   ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994

   Optimal Code Motion: Theory and Practice
   J. Knoop, O. Ruthing, B. Steffen
   ACM TOPLAS, Vol. 16, Num. 4, Jul. 1994

   The Power of Assignment Motion
   J. Knoop, O. Ruthing, B. Steffen
   ACM SIGPLAN Notices Vol. 30, Num. 6, Jun. 1995, '95 Conference on PLDI

   Global Code Motion / Global Value Numbering
   C. Click
   ACM SIGPLAN Notices Vol. 30, Num. 6, Jun. 1995, '95 Conference on PLDI

   Value Driven Redundancy Elimination
   L.T. Simpson
   Rice University Ph.D. thesis, Apr. 1996

   Value Numbering
   L.T. Simpson
   Massively Scalar Compiler Project, Rice University, Sep. 1996

   High Performance Compilers for Parallel Computing
   Michael Wolfe
   Addison-Wesley, 1996

   Advanced Compiler Design and Implementation
   Steven Muchnick
   Morgan Kaufmann, 1997

   People wishing to speed up the code here should read:
     Elimination Algorithms for Data Flow Analysis
     B.G. Ryder, M.C. Paull
     ACM Computing Surveys, Vol. 18, Num. 3, Sep. 1986

     How to Analyze Large Programs Efficiently and Informatively
     D.M. Dhamdhere, B.K. Rosen, F.K. Zadeck
     ACM SIGPLAN Notices Vol. 27, Num. 7, Jul. 1992, '92 Conference on PLDI

   People wishing to do something different can find various possibilities
   in the above papers and elsewhere.
*/

#include "config.h"
#include "system.h"
#include "toplev.h"

#include "rtl.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "flags.h"
#include "real.h"
#include "insn-config.h"
#include "recog.h"
#include "basic-block.h"
#include "output.h"
#include "expr.h"

#include "obstack.h"
#define obstack_chunk_alloc gmalloc
#define obstack_chunk_free free

/* Maximum number of passes to perform.  */
#define MAX_PASSES 1

/* Propagate flow information through back edges and thus enable PRE's
   moving loop invariant calculations out of loops.

   Originally this tended to create worse overall code, but several
   improvements during the development of PRE seem to have made following
   back edges generally a win.

   Note much of the loop invariant code motion done here would normally
   be done by loop.c, which has more heuristics for when to move invariants
   out of loops.  At some point we might need to move some of those
   heuristics into gcse.c.  */
#define FOLLOW_BACK_EDGES 1

/* We support GCSE via Partial Redundancy Elimination.  PRE optimizations
   are a superset of those done by GCSE.

   We perform the following steps:

   1) Compute basic block information.

   2) Compute table of places where registers are set.

   3) Perform copy/constant propagation.

   4) Perform global cse.

   5) Perform another pass of copy/constant propagation.

   Two passes of copy/constant propagation are done because the first one
   enables more GCSE and the second one helps to clean up the copies that
   GCSE creates.  This is needed more for PRE than for Classic because Classic
   GCSE will try to use an existing register containing the common
   subexpression rather than create a new one.  This is harder to do for PRE
   because of the code motion (which Classic GCSE doesn't do).

   Expressions we are interested in GCSE-ing are of the form
   (set (pseudo-reg) (expression)).
   Function want_to_gcse_p says what these are.

   PRE handles moving invariant expressions out of loops (by treating them as
   partially redundant).
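
   For instance [an illustrative source-level sketch, not actual compiler
   output; `step' is a hypothetical function]:

       before PRE:                 after PRE:

       while (p)                   t = a + b;
         {                         while (p)
           x = a + b;                {
           p = step (p);               x = t;
         }                             p = step (p);
                                     }

   The computation of `a + b' is available along the loop's back edge, so
   inside the loop it is partially redundant; PRE hoists it to a pseudo
   computed once before the loop.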

   Eventually it would be nice to replace cse.c/gcse.c with SSA (static single
   assignment) based GVN (global value numbering).  L. T. Simpson's paper
   (Rice University) on value numbering is a useful reference for this.

   **********************

   We used to support multiple passes but there are diminishing returns in
   doing so.  The first pass usually makes 90% of the changes that are doable.
   A second pass can make a few more changes made possible by the first pass.
   Experiments show any further passes don't make enough changes to justify
   the expense.

   A study of spec92 using an unlimited number of passes:
   [1 pass] = 1208 substitutions, [2] = 577, [3] = 202, [4] = 192, [5] = 83,
   [6] = 34, [7] = 17, [8] = 9, [9] = 4, [10] = 4, [11] = 2,
   [12] = 2, [13] = 1, [15] = 1, [16] = 2, [41] = 1

   It was found doing copy propagation between each pass enables further
   substitutions.

   PRE is quite expensive in complicated functions because the DFA can take
   a while to converge.  Hence we only perform one pass.  The macro MAX_PASSES
   can be modified if one wants to experiment.

   **********************

   The steps for PRE are:

   1) Build the hash table of expressions we wish to GCSE (expr_hash_table).

   2) Perform the data flow analysis for PRE.

   3) Delete the redundant instructions.

   4) Insert the required copies [if any] that make the partially
      redundant instructions fully redundant.

   5) For other reaching expressions, insert an instruction to copy the value
      to a newly created pseudo that will reach the redundant instruction.

   The deletion is done first so that when we do insertions we
   know which pseudo reg to use.
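
   As an illustration of steps 3-5 [a sketch of the net effect, not actual
   compiler output; `other' is just a placeholder]:

       before PRE:                 after PRE:

       if (cond)                   if (cond)
         x = a + b;                  { t = a + b; x = t; }
       else                        else
         other ();                   { other (); t = a + b; }
       y = a + b;                  y = t;

   The second computation of `a + b' is partially redundant [available only
   when COND is true].  Inserting a computation on the `else' path makes it
   fully redundant, so it can be deleted in favor of a copy from the new
   pseudo t.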

   Various papers have argued that PRE DFA is expensive (O(n^2)) and others
   argue it is not.  The number of iterations for the algorithm to converge
   is typically 2-4 so I don't view it as that expensive (relatively speaking).

   PRE GCSE depends heavily on the second CSE pass to clean up the copies
   we create.  To make an expression reach the place where it's redundant,
   the result of the expression is copied to a new register, and the redundant
   expression is deleted by replacing it with this new register.  Classic GCSE
   doesn't have this problem as much as it computes the reaching defs of
   each register in each block and thus can try to use an existing register.

   **********************

   A fair bit of simplicity is gained by creating small functions for simple
   tasks, even when the function is only called in one place.  This may
   measurably slow things down [or may not] by creating more function call
   overhead than is necessary.  The source is laid out so that it's trivial
   to make the affected functions inline so that one can measure what speed
   up, if any, can be achieved, and maybe later, when things settle down,
   things can be rearranged.

   Help stamp out big monolithic functions!  */

/* GCSE global vars.  */

/* -dG dump file.  */
static FILE *gcse_file;

/* Note whether or not we should run jump optimization after gcse.  We
   want to do this for two cases.

    * If we changed any jumps via cprop.

    * If we added any labels via edge splitting.  */

static int run_jump_opt_after_gcse;

/* Element I is a list of I's predecessors/successors.  */
static int_list_ptr *s_preds;
static int_list_ptr *s_succs;

/* Element I is the number of predecessors/successors of basic block I.  */
static int *num_preds;
static int *num_succs;

/* Bitmaps are normally not included in debugging dumps.
   However it's useful to be able to print them from GDB.
   We could create special functions for this, but it's simpler to
   just allow passing stderr to the dump_foo fns.  Since stderr can
   be a macro, we store a copy here.  */
static FILE *debug_stderr;

/* An obstack for our working variables.  */
static struct obstack gcse_obstack;

/* Non-zero for each mode that supports (set (reg) (reg)).
   This is trivially true for integer and floating point values.
   It may or may not be true for condition codes.  */
static char can_copy_p[(int) NUM_MACHINE_MODES];

/* Non-zero if can_copy_p has been initialized.  */
static int can_copy_init_p;

/* Hash table of expressions.  */

struct expr
{
  /* The expression (SET_SRC for expressions, PATTERN for assignments).  */
  rtx expr;
  /* Index in the available expression bitmaps.  */
  int bitmap_index;
  /* Next entry with the same hash.  */
  struct expr *next_same_hash;
  /* List of anticipatable occurrences in basic blocks in the function.
     An "anticipatable occurrence" is one that is the first occurrence in the
     basic block, the operands are not modified in the basic block prior
     to the occurrence and the output is not used between the start of
     the block and the occurrence.  */
  struct occr *antic_occr;
  /* List of available occurrences in basic blocks in the function.
     An "available occurrence" is one that is the last occurrence in the
     basic block and the operands are not modified by following statements in
     the basic block [including this insn].  */
  struct occr *avail_occr;
  /* Non-null if the computation is PRE redundant.
     The value is the newly created pseudo-reg to record a copy of the
     expression in all the places that reach the redundant copy.  */
  rtx reaching_reg;
};

/* Occurrence of an expression.
   There is one per basic block.  If a pattern appears more than once the
   last appearance is used [or first for anticipatable expressions].  */

struct occr
{
  /* Next occurrence of this expression.  */
  struct occr *next;
  /* The insn that computes the expression.  */
  rtx insn;
  /* Non-zero if this [anticipatable] occurrence has been deleted.  */
  char deleted_p;
  /* Non-zero if this [available] occurrence has been copied to
     reaching_reg.  */
  /* ??? This is mutually exclusive with deleted_p, so they could share
     the same byte.  */
  char copied_p;
};

/* Expression and copy propagation hash tables.
   Each hash table is an array of buckets.
   ??? It is known that if it were an array of entries, structure elements
   `next_same_hash' and `bitmap_index' wouldn't be necessary.  However, it is
   not clear whether in the final analysis a sufficient amount of memory would
   be saved as the size of the available expression bitmaps would be larger
   [one could build a mapping table without holes afterwards though].
   Someday I'll perform the computation and figure it out.  */

/* Total size of the expression hash table, in elements.  */
static int expr_hash_table_size;
/* The table itself.
   This is an array of `expr_hash_table_size' elements.  */
static struct expr **expr_hash_table;

/* Total size of the copy propagation hash table, in elements.  */
static int set_hash_table_size;
/* The table itself.
   This is an array of `set_hash_table_size' elements.  */
static struct expr **set_hash_table;

/* Mapping of uids to cuids.
   Only real insns get cuids.  */
static int *uid_cuid;

/* Highest UID in UID_CUID.  */
static int max_uid;

/* Get the cuid of an insn.  */
#define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])

/* Number of cuids.  */
static int max_cuid;

/* Mapping of cuids to insns.  */
static rtx *cuid_insn;

/* Get insn from cuid.  */
#define CUID_INSN(CUID) (cuid_insn[CUID])

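#if 0
/* Illustrative sketch only, not used by the pass: after alloc_gcse_mem
   has run, INSN_CUID and CUID_INSN are inverse mappings for real insns,
   so this check would never abort.  */
static void
check_cuid_mapping (insn)
     rtx insn;
{
  if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
      && CUID_INSN (INSN_CUID (insn)) != insn)
    abort ();
}
#endif
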
/* Maximum register number in function prior to doing gcse + 1.
   Registers created during this pass have regno >= max_gcse_regno.
   This is named with "gcse" to not collide with global of same name.  */
static int max_gcse_regno;

/* Maximum number of cse-able expressions found.  */
static int n_exprs;
/* Maximum number of assignments for copy propagation found.  */
static int n_sets;

/* Table of registers that are modified.
   For each register, each element is a list of places where the pseudo-reg
   is set.

   For simplicity, GCSE is done on sets of pseudo-regs only.  PRE GCSE only
   requires knowledge of which blocks kill which regs [and thus could use
   a bitmap instead of the lists `reg_set_table' uses].

   `reg_set_table' could be turned into an array of bitmaps
   (num-bbs x num-regs)
   [however perhaps it may be useful to keep the data as is].
   One advantage of recording things this way is that `reg_set_table' is
   fairly sparse with respect to pseudo regs but for hard regs could be
   fairly dense [relatively speaking].
   And recording sets of pseudo-regs in lists speeds
   up functions like compute_transp since in the case of pseudo-regs we only
   need to iterate over the number of times a pseudo-reg is set, not over the
   number of basic blocks [clearly there is a bit of a slow down in the cases
   where a pseudo is set more than once in a block, however it is believed
   that the net effect is to speed things up].  This isn't done for hard-regs
   because recording call-clobbered hard-regs in `reg_set_table' at each
   function call can consume a fair bit of memory, and iterating over
   hard-regs stored this way in compute_transp will be more expensive.  */

typedef struct reg_set {
  /* The next setting of this register.  */
  struct reg_set *next;
  /* The insn where it was set.  */
  rtx insn;
} reg_set;
static reg_set **reg_set_table;
/* Size of `reg_set_table'.
   The table starts out at max_gcse_regno + slop, and is enlarged as
   necessary.  */
static int reg_set_table_size;
/* Amount to grow `reg_set_table' by when it's full.  */
#define REG_SET_TABLE_SLOP 100

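#if 0
/* Illustrative sketch only, not used by the pass: this is how one walks
   the list of recorded sets of a pseudo-reg, e.g. to count them.  */
static int
count_sets_of_reg (regno)
     int regno;
{
  reg_set *r;
  int n = 0;

  for (r = reg_set_table[regno]; r != NULL; r = r->next)
    n++;
  return n;
}
#endif
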
/* Bitmap containing one bit for each register in the program.
   Used when performing GCSE to track which registers have been set since
   the start of the basic block.  */
static sbitmap reg_set_bitmap;

/* For each block, a bitmap of registers set in the block.
   This is used by expr_killed_p and compute_transp.
   It is computed during hash table computation and not by compute_sets
   as it includes registers added since the last pass (or between cprop and
   gcse) and it's currently not easy to realloc sbitmap vectors.  */
static sbitmap *reg_set_in_block;

/* For each block, non-zero if memory is set in that block.
   This is computed during hash table computation and is used by
   expr_killed_p and compute_transp.
   ??? Handling of memory is very simple, we don't make any attempt
   to optimize things (later).
   ??? This can be computed by compute_sets since the information
   doesn't change.  */
static char *mem_set_in_block;

/* Various variables for statistics gathering.  */

/* Memory used in a pass.
   This isn't intended to be absolutely precise.  Its intent is only
   to keep an eye on memory usage.  */
static int bytes_used;
/* GCSE substitutions made.  */
static int gcse_subst_count;
/* Number of copy instructions created.  */
static int gcse_create_count;
/* Number of constants propagated.  */
static int const_prop_count;
/* Number of copies propagated.  */
static int copy_prop_count;

extern char *current_function_name;
extern int current_function_calls_setjmp;

/* These variables are used by classic GCSE.
   Normally they'd be defined a bit later, but `rd_gen' needs to
   be declared sooner.  */

/* A bitmap of all ones for implementing the algorithm for available
   expressions and reaching definitions.  */
/* ??? Available expression bitmaps have a different size than reaching
   definition bitmaps.  This should be the larger of the two, however, it
   is not currently used for reaching definitions.  */
static sbitmap u_bitmap;

/* Each block has a bitmap of each type.
   The length of each block's bitmap is:

       max_cuid  - for reaching definitions
       n_exprs   - for available expressions

   Thus we view the bitmaps as 2 dimensional arrays, i.e.

       rd_kill[block_num][cuid_num]
       ae_kill[block_num][expr_num]  */

/* For reaching defs.  */
static sbitmap *rd_kill, *rd_gen, *reaching_defs, *rd_out;

/* For available exprs.  */
static sbitmap *ae_kill, *ae_gen, *ae_in, *ae_out;

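#if 0
/* Illustrative sketch only, not used by the pass: with the layout above,
   testing whether expression EXPR_NUM is killed in block BB is a single
   sbitmap bit test.  */
static int
example_expr_killed_in_bb_p (bb, expr_num)
     int bb, expr_num;
{
  return TEST_BIT (ae_kill[bb], expr_num);
}
#endif
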
static void compute_can_copy PROTO ((void));

static char *gmalloc PROTO ((unsigned int));
static char *grealloc PROTO ((char *, unsigned int));
static char *gcse_alloc PROTO ((unsigned long));
static void alloc_gcse_mem PROTO ((rtx));
static void free_gcse_mem PROTO ((void));
static void alloc_reg_set_mem PROTO ((int));
static void free_reg_set_mem PROTO ((void));
static void record_one_set PROTO ((int, rtx));
static void record_set_info PROTO ((rtx, rtx));
static void compute_sets PROTO ((rtx));

static void hash_scan_insn PROTO ((rtx, int, int));
static void hash_scan_set PROTO ((rtx, rtx, int));
static void hash_scan_clobber PROTO ((rtx, rtx));
static void hash_scan_call PROTO ((rtx, rtx));
static int want_to_gcse_p PROTO ((rtx));
static int oprs_unchanged_p PROTO ((rtx, rtx, int));
static int oprs_anticipatable_p PROTO ((rtx, rtx));
static int oprs_available_p PROTO ((rtx, rtx));
static void insert_expr_in_table PROTO ((rtx, enum machine_mode,
					 rtx, int, int));
static void insert_set_in_table PROTO ((rtx, rtx));
static unsigned int hash_expr PROTO ((rtx, enum machine_mode,
				      int *, int));
static unsigned int hash_expr_1 PROTO ((rtx, enum machine_mode, int *));
static unsigned int hash_set PROTO ((int, int));
static int expr_equiv_p PROTO ((rtx, rtx));
static void record_last_reg_set_info PROTO ((rtx, int));
static void record_last_mem_set_info PROTO ((rtx));
static void record_last_set_info PROTO ((rtx, rtx));
static void compute_hash_table PROTO ((int));
static void alloc_set_hash_table PROTO ((int));
static void free_set_hash_table PROTO ((void));
static void compute_set_hash_table PROTO ((void));
static void alloc_expr_hash_table PROTO ((int));
static void free_expr_hash_table PROTO ((void));
static void compute_expr_hash_table PROTO ((void));
static void dump_hash_table PROTO ((FILE *, const char *, struct expr **,
				    int, int));
static struct expr *lookup_expr PROTO ((rtx));
static struct expr *lookup_set PROTO ((int, rtx));
static struct expr *next_set PROTO ((int, struct expr *));
static void reset_opr_set_tables PROTO ((void));
static int oprs_not_set_p PROTO ((rtx, rtx));
static void mark_call PROTO ((rtx));
static void mark_set PROTO ((rtx, rtx));
static void mark_clobber PROTO ((rtx, rtx));
static void mark_oprs_set PROTO ((rtx));

static void alloc_cprop_mem PROTO ((int, int));
static void free_cprop_mem PROTO ((void));
static void compute_transp PROTO ((rtx, int, sbitmap *, int));
static void compute_transpout PROTO ((void));
static void compute_local_properties PROTO ((sbitmap *, sbitmap *,
					     sbitmap *, int));
static void compute_cprop_avinout PROTO ((void));
static void compute_cprop_data PROTO ((void));
static void find_used_regs PROTO ((rtx));
static int try_replace_reg PROTO ((rtx, rtx, rtx));
static struct expr *find_avail_set PROTO ((int, rtx));
static int cprop_insn PROTO ((rtx, int));
static int cprop PROTO ((int));
static int one_cprop_pass PROTO ((int, int));

static void alloc_pre_mem PROTO ((int, int));
static void free_pre_mem PROTO ((void));
static void compute_pre_data PROTO ((void));
static int pre_expr_reaches_here_p PROTO ((int, struct expr *,
					   int, int, char *));
static void insert_insn_end_bb PROTO ((struct expr *, int, int));
static void pre_insert PROTO ((struct expr **));
static void pre_insert_copy_insn PROTO ((struct expr *, rtx));
static void pre_insert_copies PROTO ((void));
static int pre_delete PROTO ((void));
static int pre_gcse PROTO ((void));
static int one_pre_gcse_pass PROTO ((int));

static void add_label_notes PROTO ((rtx, rtx));

static void alloc_rd_mem PROTO ((int, int));
static void free_rd_mem PROTO ((void));
static void handle_rd_kill_set PROTO ((rtx, int, int));
static void compute_kill_rd PROTO ((void));
static void compute_rd PROTO ((void));
static void alloc_avail_expr_mem PROTO ((int, int));
static void free_avail_expr_mem PROTO ((void));
static void compute_ae_gen PROTO ((void));
static int expr_killed_p PROTO ((rtx, int));
static void compute_ae_kill PROTO ((void));
static void compute_available PROTO ((void));
static int expr_reaches_here_p PROTO ((struct occr *, struct expr *,
				       int, int, char *));
static rtx computing_insn PROTO ((struct expr *, rtx));
static int def_reaches_here_p PROTO ((rtx, rtx));
static int can_disregard_other_sets PROTO ((struct reg_set **, rtx, int));
static int handle_avail_expr PROTO ((rtx, struct expr *));
static int classic_gcse PROTO ((void));
static int one_classic_gcse_pass PROTO ((int));

/* Entry point for global common subexpression elimination.
   F is the first instruction in the function.  */

int
gcse_main (f, file)
     rtx f;
     FILE *file;
{
  int changed, pass;
  /* Bytes used at start of pass.  */
  int initial_bytes_used;
  /* Maximum number of bytes used by a pass.  */
  int max_pass_bytes;
  /* Point to release obstack data from for each pass.  */
  char *gcse_obstack_bottom;

  /* We do not construct an accurate cfg in functions which call
     setjmp, so just punt to be safe.  */
  if (current_function_calls_setjmp)
    return 0;

  /* Assume that we do not need to run jump optimizations after gcse.  */
  run_jump_opt_after_gcse = 0;

  /* For calling dump_foo fns from gdb.  */
  debug_stderr = stderr;
  gcse_file = file;

  /* Identify the basic block information for this function, including
     successors and predecessors.  */
  max_gcse_regno = max_reg_num ();
  find_basic_blocks (f, max_gcse_regno, file, 1);

  /* Return if there's nothing to do.  */
  if (n_basic_blocks <= 1)
    {
      /* Free storage allocated by find_basic_blocks.  */
      free_basic_block_vars (0);
      return 0;
    }

  /* See what modes support reg/reg copy operations.  */
  if (! can_copy_init_p)
    {
      compute_can_copy ();
      can_copy_init_p = 1;
    }

  gcc_obstack_init (&gcse_obstack);

  /* Allocate and compute predecessors/successors.  */

  s_preds = (int_list_ptr *) alloca (n_basic_blocks * sizeof (int_list_ptr));
  s_succs = (int_list_ptr *) alloca (n_basic_blocks * sizeof (int_list_ptr));
  num_preds = (int *) alloca (n_basic_blocks * sizeof (int));
  num_succs = (int *) alloca (n_basic_blocks * sizeof (int));
  bytes_used = 4 * n_basic_blocks * sizeof (int_list_ptr);
  compute_preds_succs (s_preds, s_succs, num_preds, num_succs);

  if (file)
    dump_bb_data (file, s_preds, s_succs, 0);

  /* Record where pseudo-registers are set.
     This data is kept accurate during each pass.
     ??? We could also record hard-reg information here
     [since it's unchanging], however it is currently done during
     hash table computation.

     It may be tempting to compute MEM set information here too, but MEM
     sets will be subject to code motion one day and thus we need to compute
     information about memory sets when we build the hash tables.  */

  alloc_reg_set_mem (max_gcse_regno);
  compute_sets (f);

  pass = 0;
  initial_bytes_used = bytes_used;
  max_pass_bytes = 0;
  gcse_obstack_bottom = gcse_alloc (1);
  changed = 1;
  while (changed && pass < MAX_PASSES)
    {
      changed = 0;
      if (file)
        fprintf (file, "GCSE pass %d\n\n", pass + 1);

      /* Initialize bytes_used to the space for the pred/succ lists,
         and the reg_set_table data.  */
      bytes_used = initial_bytes_used;

      /* Each pass may create new registers, so recalculate each time.  */
      max_gcse_regno = max_reg_num ();

      alloc_gcse_mem (f);

      /* Don't allow constant propagation to modify jumps
         during this pass.  */
      changed = one_cprop_pass (pass + 1, 0);

      if (optimize_size)
        changed |= one_classic_gcse_pass (pass + 1);
      else
        changed |= one_pre_gcse_pass (pass + 1);

      if (max_pass_bytes < bytes_used)
        max_pass_bytes = bytes_used;

      free_gcse_mem ();

      if (file)
        {
          fprintf (file, "\n");
          fflush (file);
        }
      obstack_free (&gcse_obstack, gcse_obstack_bottom);
      pass++;
    }

  /* Do one last pass of copy propagation, including cprop into
     conditional jumps.  */

  max_gcse_regno = max_reg_num ();
  alloc_gcse_mem (f);
  /* This time, go ahead and allow cprop to alter jumps.  */
  one_cprop_pass (pass + 1, 1);
  free_gcse_mem ();

  if (file)
    {
      fprintf (file, "GCSE of %s: %d basic blocks, ",
               current_function_name, n_basic_blocks);
      fprintf (file, "%d pass%s, %d bytes\n\n",
               pass, pass > 1 ? "es" : "", max_pass_bytes);
    }

  /* Free our obstack.  */
  obstack_free (&gcse_obstack, NULL_PTR);
  /* Free reg_set_table.  */
  free_reg_set_mem ();
  /* Free storage used to record predecessor/successor data.  */
  free_bb_mem ();
  /* Free storage allocated by find_basic_blocks.  */
  free_basic_block_vars (0);
  return run_jump_opt_after_gcse;
}

/* Misc. utilities.  */

/* Compute which modes support reg/reg copy operations.  */

static void
compute_can_copy ()
{
  int i;
#ifndef AVOID_CCMODE_COPIES
  rtx reg, insn;
#endif
  char *free_point = (char *) oballoc (1);

  bzero (can_copy_p, NUM_MACHINE_MODES);

  start_sequence ();
  for (i = 0; i < NUM_MACHINE_MODES; i++)
    {
      switch (GET_MODE_CLASS (i))
        {
        case MODE_CC :
#ifdef AVOID_CCMODE_COPIES
          can_copy_p[i] = 0;
#else
          reg = gen_rtx_REG ((enum machine_mode) i, LAST_VIRTUAL_REGISTER + 1);
          insn = emit_insn (gen_rtx_SET (VOIDmode, reg, reg));
          if (recog (PATTERN (insn), insn, NULL_PTR) >= 0)
            can_copy_p[i] = 1;
#endif
          break;
        default :
          can_copy_p[i] = 1;
          break;
        }
    }
  end_sequence ();

  /* Free the objects we just allocated.  */
  obfree (free_point);
}

/* Cover function to xmalloc to record bytes allocated.  */

static char *
gmalloc (size)
     unsigned int size;
{
  bytes_used += size;
  return xmalloc (size);
}

/* Cover function to xrealloc.
   We don't record the additional size since we don't know it.
   It won't affect memory usage stats much anyway.  */

static char *
grealloc (ptr, size)
     char *ptr;
     unsigned int size;
{
  return xrealloc (ptr, size);
}

/* Cover function to obstack_alloc.
   We don't need to record the bytes allocated here since
   obstack_chunk_alloc is set to gmalloc.  */

static char *
gcse_alloc (size)
     unsigned long size;
{
  return (char *) obstack_alloc (&gcse_obstack, size);
}

/* Allocate memory for the cuid mapping array,
   and reg/memory set tracking tables.

   This is called at the start of each pass.  */

static void
alloc_gcse_mem (f)
     rtx f;
{
  int i, n;
  rtx insn;

  /* Find the largest UID and create a mapping from UIDs to CUIDs.
     CUIDs are like UIDs except they increase monotonically, have no gaps,
     and only apply to real insns.  */

  max_uid = get_max_uid ();
  n = (max_uid + 1) * sizeof (int);
  uid_cuid = (int *) gmalloc (n);
  bzero ((char *) uid_cuid, n);
  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    {
      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
        INSN_CUID (insn) = i++;
      else
        INSN_CUID (insn) = i;
    }

  /* Create a table mapping cuids to insns.  */

  max_cuid = i;
  n = (max_cuid + 1) * sizeof (rtx);
  cuid_insn = (rtx *) gmalloc (n);
  bzero ((char *) cuid_insn, n);
  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    {
      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
        {
          CUID_INSN (i) = insn;
          i++;
        }
    }

  /* Allocate vars to track sets of regs.  */

  reg_set_bitmap = (sbitmap) sbitmap_alloc (max_gcse_regno);

  /* Allocate vars to track sets of regs, memory per block.  */

  reg_set_in_block = (sbitmap *) sbitmap_vector_alloc (n_basic_blocks,
                                                       max_gcse_regno);
  mem_set_in_block = (char *) gmalloc (n_basic_blocks);
}

/* Free memory allocated by alloc_gcse_mem.  */

static void
free_gcse_mem ()
{
  free (uid_cuid);
  free (cuid_insn);

  free (reg_set_bitmap);

  free (reg_set_in_block);
  free (mem_set_in_block);
}

/* Compute the local properties of each recorded expression.
   Local properties are those that are defined by the block, irrespective
   of other blocks.

   An expression is transparent in a block if its operands are not modified
   in the block.

   An expression is computed (locally available) in a block if it is computed
   at least once and the expression would contain the same value if the
   computation was moved to the end of the block.

   An expression is locally anticipatable in a block if it is computed at
   least once and the expression would contain the same value if the
   computation was moved to the beginning of the block.

   We call this routine for cprop, pre and code hoisting.  They all
   compute basically the same information and thus can easily share
   this code.
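
   As a small illustration [a sketch, not compiler output], consider a
   block containing

       r3 = r1 + r2
       r1 = ...

   Here (plus r1 r2) is locally anticipatable [it is computed before its
   operands are modified in the block] but not locally available [r1 is
   modified after the computation], and it is not transparent [the block
   modifies one of its operands].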

   TRANSP, COMP, and ANTLOC are destination sbitmaps for recording
   local properties.  If NULL, then it is not necessary to compute
   or record that particular property.

   SETP controls which hash table to look at.  If zero, this routine
   looks at the expr hash table; if nonzero this routine looks at
   the set hash table.  Additionally, TRANSP is computed as ~TRANSP,
   since this is really cprop's ABSALTERED.  */

static void
compute_local_properties (transp, comp, antloc, setp)
     sbitmap *transp;
     sbitmap *comp;
     sbitmap *antloc;
     int setp;
{
  int i, hash_table_size;
  struct expr **hash_table;

  /* Initialize any bitmaps that were passed in.  */
  if (transp)
    {
      if (setp)
        sbitmap_vector_zero (transp, n_basic_blocks);
      else
        sbitmap_vector_ones (transp, n_basic_blocks);
    }
  if (comp)
    sbitmap_vector_zero (comp, n_basic_blocks);
  if (antloc)
    sbitmap_vector_zero (antloc, n_basic_blocks);

  /* We use the same code for cprop, pre and hoisting.  For cprop
     we care about the set hash table, for pre and hoisting we
     care about the expr hash table.  */
  hash_table_size = setp ? set_hash_table_size : expr_hash_table_size;
  hash_table = setp ? set_hash_table : expr_hash_table;

  for (i = 0; i < hash_table_size; i++)
    {
      struct expr *expr;

      for (expr = hash_table[i]; expr != NULL; expr = expr->next_same_hash)
        {
          struct occr *occr;
          int indx = expr->bitmap_index;

          /* The expression is transparent in this block if it is not killed.
             We start by assuming all are transparent [none are killed], and
             then reset the bits for those that are.  */

          if (transp)
            compute_transp (expr->expr, indx, transp, setp);

          /* The occurrences recorded in antic_occr are exactly those that
             we want to set to non-zero in ANTLOC.  */

          if (antloc)
            {
              for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
                {
                  int bb = BLOCK_NUM (occr->insn);
                  SET_BIT (antloc[bb], indx);

                  /* While we're scanning the table, this is a good place to
                     initialize this.  */
                  occr->deleted_p = 0;
                }
            }

          /* The occurrences recorded in avail_occr are exactly those that
             we want to set to non-zero in COMP.  */
          if (comp)
            {
              for (occr = expr->avail_occr; occr != NULL; occr = occr->next)
                {
                  int bb = BLOCK_NUM (occr->insn);
                  SET_BIT (comp[bb], indx);

                  /* While we're scanning the table, this is a good place to
                     initialize this.  */
                  occr->copied_p = 0;
                }
            }

          /* While we're scanning the table, this is a good place to
             initialize this.  */
          expr->reaching_reg = 0;
        }
    }
}

/* Register set information.

   `reg_set_table' records where each register is set or otherwise
   modified.  */

static struct obstack reg_set_obstack;

static void
alloc_reg_set_mem (n_regs)
     int n_regs;
{
  int n;

  reg_set_table_size = n_regs + REG_SET_TABLE_SLOP;
  n = reg_set_table_size * sizeof (struct reg_set *);
  reg_set_table = (struct reg_set **) gmalloc (n);
  bzero ((char *) reg_set_table, n);

  gcc_obstack_init (&reg_set_obstack);
}

static void
free_reg_set_mem ()
{
  free (reg_set_table);
  obstack_free (&reg_set_obstack, NULL_PTR);
}

/* Record REGNO in the reg_set table.  */

static void
record_one_set (regno, insn)
     int regno;
     rtx insn;
{
  /* Allocate a new reg_set element and link it onto the list.  */
  struct reg_set *new_reg_info, *reg_info_ptr1, *reg_info_ptr2;

  /* If the table isn't big enough, enlarge it.  */
  if (regno >= reg_set_table_size)
    {
      int new_size = regno + REG_SET_TABLE_SLOP;
      reg_set_table = (struct reg_set **)
        grealloc ((char *) reg_set_table,
                  new_size * sizeof (struct reg_set *));
      bzero ((char *) (reg_set_table + reg_set_table_size),
             (new_size - reg_set_table_size) * sizeof (struct reg_set *));
      reg_set_table_size = new_size;
    }

  new_reg_info = (struct reg_set *) obstack_alloc (&reg_set_obstack,
                                                   sizeof (struct reg_set));
  bytes_used += sizeof (struct reg_set);
  new_reg_info->insn = insn;
  new_reg_info->next = NULL;
  if (reg_set_table[regno] == NULL)
    reg_set_table[regno] = new_reg_info;
  else
    {
      reg_info_ptr1 = reg_info_ptr2 = reg_set_table[regno];
      /* ??? One could keep a "last" pointer to speed this up.  */
      while (reg_info_ptr1 != NULL)
        {
          reg_info_ptr2 = reg_info_ptr1;
          reg_info_ptr1 = reg_info_ptr1->next;
        }
      reg_info_ptr2->next = new_reg_info;
    }
}

/* For communication between next two functions (via note_stores).  */
static rtx record_set_insn;

/* Called from compute_sets via note_stores to handle one
   SET or CLOBBER in an insn.  */

static void
record_set_info (dest, setter)
     rtx dest, setter ATTRIBUTE_UNUSED;
{
  if (GET_CODE (dest) == SUBREG)
    dest = SUBREG_REG (dest);

  if (GET_CODE (dest) == REG)
    {
      if (REGNO (dest) >= FIRST_PSEUDO_REGISTER)
        record_one_set (REGNO (dest), record_set_insn);
    }
}

/* Scan the function and record each set of each pseudo-register.

   This is called once, at the start of the gcse pass.
   See the comments for `reg_set_table' for further docs.  */

static void
compute_sets (f)
     rtx f;
{
  rtx insn = f;

  while (insn)
    {
      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
        {
          record_set_insn = insn;
          note_stores (PATTERN (insn), record_set_info);
        }
      insn = NEXT_INSN (insn);
    }
}

/* Hash table support.  */

#define NEVER_SET -1

/* For each register, the cuid of the first/last insn in the block to set it,
   or -1 if not set.  */
static int *reg_first_set;
static int *reg_last_set;

/* While computing "first/last set" info, this is the CUID of first/last insn
   to set memory or -1 if not set.  `mem_last_set' is also used when
   performing GCSE to record whether memory has been set since the beginning
   of the block.
   Note that handling of memory is very simple, we don't make any attempt
   to optimize things (later).  */
static int mem_first_set;
static int mem_last_set;

/* Perform a quick check whether X, the source of a set, is something
   we want to consider for GCSE.  */

static int
want_to_gcse_p (x)
     rtx x;
{
  enum rtx_code code = GET_CODE (x);

  switch (code)
    {
    case REG:
    case SUBREG:
    case CONST_INT:
    case CONST_DOUBLE:
    case CALL:
      return 0;

    default:
      break;
    }

  return 1;
}

/* Return non-zero if the operands of expression X are unchanged from the
   start of INSN's basic block up to but not including INSN (if AVAIL_P == 0),
   or from INSN to the end of INSN's basic block (if AVAIL_P != 0).  */

static int
oprs_unchanged_p (x, insn, avail_p)
     rtx x, insn;
     int avail_p;
{
  int i;
  enum rtx_code code;
  char *fmt;

  /* repeat is used to turn tail-recursion into iteration.  */
 repeat:

  if (x == 0)
    return 1;

  code = GET_CODE (x);
  switch (code)
    {
    case REG:
      if (avail_p)
        return (reg_last_set[REGNO (x)] == NEVER_SET
                || reg_last_set[REGNO (x)] < INSN_CUID (insn));
      else
        return (reg_first_set[REGNO (x)] == NEVER_SET
                || reg_first_set[REGNO (x)] >= INSN_CUID (insn));

    case MEM:
      if (avail_p)
        {
          if (mem_last_set != NEVER_SET
              && mem_last_set >= INSN_CUID (insn))
            return 0;
        }
      else
        {
          if (mem_first_set != NEVER_SET
              && mem_first_set < INSN_CUID (insn))
            return 0;
        }
      x = XEXP (x, 0);
      goto repeat;

    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
      return 0;

    case PC:
    case CC0: /*FIXME*/
    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case SYMBOL_REF:
    case LABEL_REF:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return 1;

    default:
      break;
    }

  i = GET_RTX_LENGTH (code) - 1;
  fmt = GET_RTX_FORMAT (code);
  for (; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          rtx tem = XEXP (x, i);

          /* If we are about to do the last recursive call
             needed at this level, change it into iteration.
             This function is called enough to be worth it.  */
          if (i == 0)
            {
              x = tem;
              goto repeat;
            }
          if (! oprs_unchanged_p (tem, insn, avail_p))
            return 0;
        }
      else if (fmt[i] == 'E')
        {
          int j;
          for (j = 0; j < XVECLEN (x, i); j++)
            {
              if (! oprs_unchanged_p (XVECEXP (x, i, j), insn, avail_p))
                return 0;
            }
        }
    }

  return 1;
}

/* Return non-zero if the operands of expression X are unchanged from
   the start of INSN's basic block up to but not including INSN.  */

static int
oprs_anticipatable_p (x, insn)
     rtx x, insn;
{
  return oprs_unchanged_p (x, insn, 0);
}

/* Return non-zero if the operands of expression X are unchanged from
   INSN to the end of INSN's basic block.  */

static int
oprs_available_p (x, insn)
     rtx x, insn;
{
  return oprs_unchanged_p (x, insn, 1);
}

/* Hash expression X.
   MODE is only used if X is a CONST_INT.
   A boolean indicating if a volatile operand is found or if the expression
   contains something we don't want to insert in the table is stored in
   DO_NOT_RECORD_P.

   ??? One might want to merge this with canon_hash.  Later.  */

static unsigned int
hash_expr (x, mode, do_not_record_p, hash_table_size)
     rtx x;
     enum machine_mode mode;
     int *do_not_record_p;
     int hash_table_size;
{
  unsigned int hash;

  *do_not_record_p = 0;

  hash = hash_expr_1 (x, mode, do_not_record_p);
  return hash % hash_table_size;
}

/* Subroutine of hash_expr to do the actual work.  */

static unsigned int
hash_expr_1 (x, mode, do_not_record_p)
     rtx x;
     enum machine_mode mode;
     int *do_not_record_p;
{
  int i, j;
  unsigned hash = 0;
  enum rtx_code code;
  char *fmt;

  /* repeat is used to turn tail-recursion into iteration.  */
 repeat:

  if (x == 0)
    return hash;

  code = GET_CODE (x);
  switch (code)
    {
    case REG:
      {
        register int regno = REGNO (x);
        hash += ((unsigned) REG << 7) + regno;
        return hash;
      }

    case CONST_INT:
      {
        unsigned HOST_WIDE_INT tem = INTVAL (x);
        hash += ((unsigned) CONST_INT << 7) + (unsigned) mode + tem;
        return hash;
      }

    case CONST_DOUBLE:
      /* This is like the general case, except that it only counts
         the integers representing the constant.  */
      hash += (unsigned) code + (unsigned) GET_MODE (x);
      if (GET_MODE (x) != VOIDmode)
        for (i = 2; i < GET_RTX_LENGTH (CONST_DOUBLE); i++)
          {
            unsigned tem = XINT (x, i);
            hash += tem;
          }
      else
        hash += ((unsigned) CONST_DOUBLE_LOW (x)
                 + (unsigned) CONST_DOUBLE_HIGH (x));
      return hash;

      /* Assume there is only one rtx object for any given label.  */
    case LABEL_REF:
      /* We don't hash on the address of the CODE_LABEL to avoid bootstrap
         differences and differences between each stage's debugging dumps.  */
      hash += ((unsigned) LABEL_REF << 7) + CODE_LABEL_NUMBER (XEXP (x, 0));
      return hash;

    case SYMBOL_REF:
      {
        /* Don't hash on the symbol's address to avoid bootstrap differences.
           Different hash values may cause expressions to be recorded in
           different orders and thus different registers to be used in the
           final assembler.  This also avoids differences in the dump files
           between various stages.  */
        unsigned int h = 0;
        unsigned char *p = (unsigned char *) XSTR (x, 0);
        while (*p)
          h += (h << 7) + *p++; /* ??? revisit */
        hash += ((unsigned) SYMBOL_REF << 7) + h;
        return hash;
      }

    case MEM:
      if (MEM_VOLATILE_P (x))
        {
          *do_not_record_p = 1;
          return 0;
        }
      hash += (unsigned) MEM;
      x = XEXP (x, 0);
      goto repeat;

    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
    case PC:
    case CC0:
    case CALL:
    case UNSPEC_VOLATILE:
      *do_not_record_p = 1;
      return 0;

    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
        {
          *do_not_record_p = 1;
          return 0;
        }

    default:
      break;
    }

  i = GET_RTX_LENGTH (code) - 1;
  hash += (unsigned) code + (unsigned) GET_MODE (x);
  fmt = GET_RTX_FORMAT (code);
  for (; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          rtx tem = XEXP (x, i);

          /* If we are about to do the last recursive call
             needed at this level, change it into iteration.
             This function is called enough to be worth it.  */
          if (i == 0)
            {
              x = tem;
              goto repeat;
            }
          hash += hash_expr_1 (tem, 0, do_not_record_p);
          if (*do_not_record_p)
            return 0;
        }
      else if (fmt[i] == 'E')
        for (j = 0; j < XVECLEN (x, i); j++)
          {
            hash += hash_expr_1 (XVECEXP (x, i, j), 0, do_not_record_p);
            if (*do_not_record_p)
              return 0;
          }
      else if (fmt[i] == 's')
        {
          register unsigned char *p = (unsigned char *) XSTR (x, i);
          if (p)
            while (*p)
              hash += *p++;
        }
      else if (fmt[i] == 'i')
        {
          register unsigned tem = XINT (x, i);
          hash += tem;
        }
      else
        abort ();
    }

  return hash;
}

/* Hash a set of register REGNO.

   Sets are hashed on the register that is set.
   This simplifies the PRE copy propagation code.

   ??? May need to make things more elaborate.  Later, as necessary.  */

static unsigned int
hash_set (regno, hash_table_size)
     int regno;
     int hash_table_size;
{
  unsigned int hash;

  hash = regno;
  return hash % hash_table_size;
}

/* Return non-zero if exp1 is equivalent to exp2.
   ??? Borrowed from cse.c.  Might want to remerge with cse.c.  Later.  */

static int
expr_equiv_p (x, y)
     rtx x, y;
{
  register int i, j;
  register enum rtx_code code;
  register char *fmt;

  if (x == y)
    return 1;
  if (x == 0 || y == 0)
    return x == y;

  code = GET_CODE (x);
  if (code != GET_CODE (y))
    return 0;

  /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.  */
  if (GET_MODE (x) != GET_MODE (y))
    return 0;

  switch (code)
    {
    case PC:
    case CC0:
      return x == y;

    case CONST_INT:
      return INTVAL (x) == INTVAL (y);

    case LABEL_REF:
      return XEXP (x, 0) == XEXP (y, 0);

    case SYMBOL_REF:
      return XSTR (x, 0) == XSTR (y, 0);

    case REG:
      return REGNO (x) == REGNO (y);

    /* For commutative operations, check both orders.  */
    case PLUS:
    case MULT:
    case AND:
    case IOR:
    case XOR:
    case NE:
    case EQ:
      return ((expr_equiv_p (XEXP (x, 0), XEXP (y, 0))
               && expr_equiv_p (XEXP (x, 1), XEXP (y, 1)))
              || (expr_equiv_p (XEXP (x, 0), XEXP (y, 1))
                  && expr_equiv_p (XEXP (x, 1), XEXP (y, 0))));

    default:
      break;
    }

  /* Compare the elements.  If any pair of corresponding elements
     fail to match, return 0 for the whole thing.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      switch (fmt[i])
        {
        case 'e':
          if (! expr_equiv_p (XEXP (x, i), XEXP (y, i)))
            return 0;
          break;

        case 'E':
          if (XVECLEN (x, i) != XVECLEN (y, i))
            return 0;
          for (j = 0; j < XVECLEN (x, i); j++)
            {
              if (! expr_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j)))
                return 0;
            }
          break;

        case 's':
          if (strcmp (XSTR (x, i), XSTR (y, i)))
            return 0;
          break;

        case 'i':
          if (XINT (x, i) != XINT (y, i))
            return 0;
          break;

        case 'w':
          if (XWINT (x, i) != XWINT (y, i))
            return 0;
          break;

        case '0':
          break;

        default:
          abort ();
        }
    }

  return 1;
}

/* Insert expression X in INSN in the hash table.
   If it is already present, record it as the last occurrence in INSN's
   basic block.

   MODE is the mode of the value X is being stored into.
   It is only used if X is a CONST_INT.

   ANTIC_P is non-zero if X is an anticipatable expression.
   AVAIL_P is non-zero if X is an available expression.  */

static void
insert_expr_in_table (x, mode, insn, antic_p, avail_p)
     rtx x;
     enum machine_mode mode;
     rtx insn;
     int antic_p, avail_p;
{
  int found, do_not_record_p;
  unsigned int hash;
  struct expr *cur_expr, *last_expr = NULL;
  struct occr *antic_occr, *avail_occr;
  struct occr *last_occr = NULL;

  hash = hash_expr (x, mode, &do_not_record_p, expr_hash_table_size);

  /* Do not insert expression in table if it contains volatile operands,
     or if hash_expr determines the expression is something we don't want
     to or can't handle.  */
  if (do_not_record_p)
    return;

  cur_expr = expr_hash_table[hash];
  found = 0;

  while (cur_expr && ! (found = expr_equiv_p (cur_expr->expr, x)))
    {
      /* If the expression isn't found, save a pointer to the end of
         the list.  */
      last_expr = cur_expr;
      cur_expr = cur_expr->next_same_hash;
    }

  if (! found)
    {
      cur_expr = (struct expr *) gcse_alloc (sizeof (struct expr));
      bytes_used += sizeof (struct expr);
      if (expr_hash_table[hash] == NULL)
        {
          /* This is the first pattern that hashed to this index.  */
          expr_hash_table[hash] = cur_expr;
        }
      else
        {
          /* Add EXPR to end of this hash chain.  */
          last_expr->next_same_hash = cur_expr;
        }
      /* Set the fields of the expr element.  */
      cur_expr->expr = x;
      cur_expr->bitmap_index = n_exprs++;
      cur_expr->next_same_hash = NULL;
      cur_expr->antic_occr = NULL;
      cur_expr->avail_occr = NULL;
    }

  /* Now record the occurrence(s).  */

  if (antic_p)
    {
      antic_occr = cur_expr->antic_occr;

      /* Search for another occurrence in the same basic block.  */
      while (antic_occr && BLOCK_NUM (antic_occr->insn) != BLOCK_NUM (insn))
        {
          /* If an occurrence isn't found, save a pointer to the end of
             the list.  */
          last_occr = antic_occr;
          antic_occr = antic_occr->next;
        }

      if (antic_occr)
        {
          /* Found another instance of the expression in the same basic block.
             Prefer the currently recorded one.  We want the first one in the
             block and the block is scanned from start to end.  */
          ; /* nothing to do */
        }
      else
        {
          /* First occurrence of this expression in this basic block.  */
          antic_occr = (struct occr *) gcse_alloc (sizeof (struct occr));
          bytes_used += sizeof (struct occr);
          /* First occurrence of this expression in any block?  */
          if (cur_expr->antic_occr == NULL)
            cur_expr->antic_occr = antic_occr;
          else
            last_occr->next = antic_occr;
          antic_occr->insn = insn;
          antic_occr->next = NULL;
        }
    }

  if (avail_p)
    {
      avail_occr = cur_expr->avail_occr;

      /* Search for another occurrence in the same basic block.  */
      while (avail_occr && BLOCK_NUM (avail_occr->insn) != BLOCK_NUM (insn))
        {
          /* If an occurrence isn't found, save a pointer to the end of
             the list.  */
          last_occr = avail_occr;
          avail_occr = avail_occr->next;
        }

      if (avail_occr)
        {
          /* Found another instance of the expression in the same basic block.
             Prefer this occurrence to the currently recorded one.  We want
             the last one in the block and the block is scanned from start
             to end.  */
          avail_occr->insn = insn;
        }
      else
        {
          /* First occurrence of this expression in this basic block.  */
          avail_occr = (struct occr *) gcse_alloc (sizeof (struct occr));
          bytes_used += sizeof (struct occr);
          /* First occurrence of this expression in any block?  */
          if (cur_expr->avail_occr == NULL)
            cur_expr->avail_occr = avail_occr;
          else
            last_occr->next = avail_occr;
          avail_occr->insn = insn;
          avail_occr->next = NULL;
        }
    }
}

/* Insert pattern X in INSN in the hash table.
   X is a SET of a reg to either another reg or a constant.
   If it is already present, record it as the last occurrence in INSN's
   basic block.  */

static void
insert_set_in_table (x, insn)
     rtx x;
     rtx insn;
{
  int found;
  unsigned int hash;
  struct expr *cur_expr, *last_expr = NULL;
  struct occr *cur_occr, *last_occr = NULL;

  if (GET_CODE (x) != SET
      || GET_CODE (SET_DEST (x)) != REG)
    abort ();

  hash = hash_set (REGNO (SET_DEST (x)), set_hash_table_size);

  cur_expr = set_hash_table[hash];
  found = 0;

  while (cur_expr && ! (found = expr_equiv_p (cur_expr->expr, x)))
    {
      /* If the expression isn't found, save a pointer to the end of
         the list.  */
      last_expr = cur_expr;
      cur_expr = cur_expr->next_same_hash;
    }

  if (! found)
    {
      cur_expr = (struct expr *) gcse_alloc (sizeof (struct expr));
      bytes_used += sizeof (struct expr);
      if (set_hash_table[hash] == NULL)
        {
          /* This is the first pattern that hashed to this index.  */
          set_hash_table[hash] = cur_expr;
        }
      else
        {
          /* Add EXPR to end of this hash chain.  */
          last_expr->next_same_hash = cur_expr;
        }
      /* Set the fields of the expr element.
         We must copy X because it can be modified when copy propagation is
         performed on its operands.  */
      /* ??? Should this go in a different obstack?  */
      cur_expr->expr = copy_rtx (x);
      cur_expr->bitmap_index = n_sets++;
      cur_expr->next_same_hash = NULL;
      cur_expr->antic_occr = NULL;
      cur_expr->avail_occr = NULL;
    }

  /* Now record the occurrence.  */

  cur_occr = cur_expr->avail_occr;

  /* Search for another occurrence in the same basic block.  */
  while (cur_occr && BLOCK_NUM (cur_occr->insn) != BLOCK_NUM (insn))
    {
      /* If an occurrence isn't found, save a pointer to the end of
         the list.  */
      last_occr = cur_occr;
      cur_occr = cur_occr->next;
    }

  if (cur_occr)
    {
      /* Found another instance of the expression in the same basic block.
         Prefer this occurrence to the currently recorded one.  We want
         the last one in the block and the block is scanned from start
         to end.  */
      cur_occr->insn = insn;
    }
  else
    {
      /* First occurrence of this expression in this basic block.  */
      cur_occr = (struct occr *) gcse_alloc (sizeof (struct occr));
      bytes_used += sizeof (struct occr);
      /* First occurrence of this expression in any block?  */
      if (cur_expr->avail_occr == NULL)
        cur_expr->avail_occr = cur_occr;
      else
        last_occr->next = cur_occr;
      cur_occr->insn = insn;
      cur_occr->next = NULL;
    }
}

/* Scan pattern PAT of INSN and add an entry to the hash table.
   If SET_P is non-zero, this is for the assignment hash table,
   otherwise it is for the expression hash table.  */

static void
hash_scan_set (pat, insn, set_p)
     rtx pat, insn;
     int set_p;
{
  rtx src = SET_SRC (pat);
  rtx dest = SET_DEST (pat);

  if (GET_CODE (src) == CALL)
    hash_scan_call (src, insn);

  if (GET_CODE (dest) == REG)
    {
      int regno = REGNO (dest);
      rtx tmp;

      /* Only record sets of pseudo-regs in the hash table.  */
      if (! set_p
          && regno >= FIRST_PSEUDO_REGISTER
          /* Don't GCSE something if we can't do a reg/reg copy.  */
          && can_copy_p [GET_MODE (dest)]
          /* Is SET_SRC something we want to gcse?  */
          && want_to_gcse_p (src))
        {
          /* An expression is not anticipatable if its operands are
             modified before this insn.  */
          int antic_p = ! optimize_size && oprs_anticipatable_p (src, insn);
          /* An expression is not available if its operands are
             subsequently modified, including this insn.  */
          int avail_p = oprs_available_p (src, insn);
          insert_expr_in_table (src, GET_MODE (dest), insn, antic_p, avail_p);
        }
      /* Record sets for constant/copy propagation.  */
      else if (set_p
               && regno >= FIRST_PSEUDO_REGISTER
               && ((GET_CODE (src) == REG
                    && REGNO (src) >= FIRST_PSEUDO_REGISTER
                    && can_copy_p [GET_MODE (dest)])
                   /* ??? CONST_INT:wip */
                   || GET_CODE (src) == CONST_INT
                   || GET_CODE (src) == CONST_DOUBLE)
               /* A copy is not available if its src or dest is subsequently
                  modified.  Here we want to search from INSN+1 on, but
                  oprs_available_p searches from INSN on.  */
               && (insn == BLOCK_END (BLOCK_NUM (insn))
                   || ((tmp = next_nonnote_insn (insn)) != NULL_RTX
                       && oprs_available_p (pat, tmp))))
        insert_set_in_table (pat, insn);
    }
}

static void
hash_scan_clobber (x, insn)
     rtx x ATTRIBUTE_UNUSED, insn ATTRIBUTE_UNUSED;
{
  /* Currently nothing to do.  */
}

static void
hash_scan_call (x, insn)
     rtx x ATTRIBUTE_UNUSED, insn ATTRIBUTE_UNUSED;
{
  /* Currently nothing to do.  */
}

1893 /* Process INSN and add hash table entries as appropriate.
1895 Only available expressions that set a single pseudo-reg are recorded.
1897 Single sets in a PARALLEL could be handled, but it's an extra complication
1898 that isn't dealt with right now. The trick is handling the CLOBBERs that
1899 are also in the PARALLEL. Later.
1901 If SET_P is non-zero, this is for the assignment hash table,
1902 otherwise it is for the expression hash table.
1903 If IN_LIBCALL_BLOCK is nonzero, we are in a libcall block, and should
1904 not record any expressions. */
1906 static void
1907 hash_scan_insn (insn, set_p, in_libcall_block)
1908 rtx insn;
1909 int set_p;
1910 int in_libcall_block;
1912 rtx pat = PATTERN (insn);
1914 /* Pick out the sets of INSN and for other forms of instructions record
1915 what's been modified. */
1917 if (GET_CODE (pat) == SET && ! in_libcall_block)
1918 hash_scan_set (pat, insn, set_p);
1919 else if (GET_CODE (pat) == PARALLEL)
1921 int i;
1923 for (i = 0; i < XVECLEN (pat, 0); i++)
1925 rtx x = XVECEXP (pat, 0, i);
1927 if (GET_CODE (x) == SET)
1929 if (GET_CODE (SET_SRC (x)) == CALL)
1930 hash_scan_call (SET_SRC (x), insn);
1932 else if (GET_CODE (x) == CLOBBER)
1933 hash_scan_clobber (x, insn);
1934 else if (GET_CODE (x) == CALL)
1935 hash_scan_call (x, insn);
1938 else if (GET_CODE (pat) == CLOBBER)
1939 hash_scan_clobber (pat, insn);
1940 else if (GET_CODE (pat) == CALL)
1941 hash_scan_call (pat, insn);
1944 static void
1945 dump_hash_table (file, name, table, table_size, total_size)
1946 FILE *file;
1947 const char *name;
1948 struct expr **table;
1949 int table_size, total_size;
1951 int i;
1952 /* Flattened out table, so it's printed in proper order. */
1953 struct expr **flat_table = (struct expr **) alloca (total_size * sizeof (struct expr *));
1954 unsigned int *hash_val = (unsigned int *) alloca (total_size * sizeof (unsigned int));
1956 bzero ((char *) flat_table, total_size * sizeof (struct expr *));
1957 for (i = 0; i < table_size; i++)
1959 struct expr *expr;
1961 for (expr = table[i]; expr != NULL; expr = expr->next_same_hash)
1963 flat_table[expr->bitmap_index] = expr;
1964 hash_val[expr->bitmap_index] = i;
1968 fprintf (file, "%s hash table (%d buckets, %d entries)\n",
1969 name, table_size, total_size);
1971 for (i = 0; i < total_size; i++)
1973 struct expr *expr = flat_table[i];
1975 fprintf (file, "Index %d (hash value %d)\n ",
1976 expr->bitmap_index, hash_val[i]);
1977 print_rtl (file, expr->expr);
1978 fprintf (file, "\n");
1981 fprintf (file, "\n");
1984 /* Record register first/last/block set information for REGNO in INSN.
1985 reg_first_set records the first place in the block where the register
1986 is set and is used to compute "anticipatability".
1987 reg_last_set records the last place in the block where the register
1988 is set and is used to compute "availability".
1989 reg_set_in_block records whether the register is set in the block
1990 and is used to compute "transparency". */
1992 static void
1993 record_last_reg_set_info (insn, regno)
1994 rtx insn;
1995 int regno;
1997 if (reg_first_set[regno] == NEVER_SET)
1998 reg_first_set[regno] = INSN_CUID (insn);
1999 reg_last_set[regno] = INSN_CUID (insn);
2000 SET_BIT (reg_set_in_block[BLOCK_NUM (insn)], regno);
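/* A small worked example with hypothetical CUIDs: if register 50 is set
   at CUIDs 12 and 20 within one block, then after scanning that block
   reg_first_set[50] == 12 and reg_last_set[50] == 20.  An expression
   using reg 50 is then anticipatable only before CUID 12 and available
   only after CUID 20, and reg_set_in_block makes the block
   non-transparent for it.  */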
2003 /* Record memory first/last/block set information for INSN. */
2005 static void
2006 record_last_mem_set_info (insn)
2007 rtx insn;
2009 if (mem_first_set == NEVER_SET)
2010 mem_first_set = INSN_CUID (insn);
2011 mem_last_set = INSN_CUID (insn);
2012 mem_set_in_block[BLOCK_NUM (insn)] = 1;
2015 /* Used for communicating between next two routines. */
2016 static rtx last_set_insn;
2018 /* Called from compute_hash_table via note_stores to handle one
2019 SET or CLOBBER in an insn. */
2021 static void
2022 record_last_set_info (dest, setter)
2023 rtx dest, setter ATTRIBUTE_UNUSED;
2025 if (GET_CODE (dest) == SUBREG)
2026 dest = SUBREG_REG (dest);
2028 if (GET_CODE (dest) == REG)
2029 record_last_reg_set_info (last_set_insn, REGNO (dest));
2030 else if (GET_CODE (dest) == MEM
2031 /* Ignore pushes, they clobber nothing. */
2032 && ! push_operand (dest, GET_MODE (dest)))
2033 record_last_mem_set_info (last_set_insn);
2036 /* Top level function to create an expression or assignment hash table.
2038 Expression entries are placed in the hash table if
2039 - they are of the form (set (pseudo-reg) src),
2040 - src is something we want to perform GCSE on,
2041 - none of the operands are subsequently modified in the block
2043 Assignment entries are placed in the hash table if
2044 - they are of the form (set (pseudo-reg) src),
2045 - src is something we want to perform const/copy propagation on,
2046 - none of the operands or target are subsequently modified in the block
2047 Currently src must be a pseudo-reg or a const_int.
2049 F is the first insn.
2050 SET_P is non-zero for computing the assignment hash table. */
2052 static void
2053 compute_hash_table (set_p)
2054 int set_p;
2056 int bb;
2058 /* While we compute the hash table we also compute a bit array of which
2059 registers are set in which blocks.
2060 We also compute which blocks set memory, in the absence of aliasing
2061 support [which is TODO].
2062 ??? This isn't needed during const/copy propagation, but it's cheap to
2063 compute. Later. */
2064 sbitmap_vector_zero (reg_set_in_block, n_basic_blocks);
2065 bzero ((char *) mem_set_in_block, n_basic_blocks);
2067 /* Some working arrays used to track first and last set in each block. */
2068 /* ??? One could use alloca here, but at some size a threshold is crossed
2069 beyond which one should use malloc. Are we at that threshold here? */
2070 reg_first_set = (int *) gmalloc (max_gcse_regno * sizeof (int));
2071 reg_last_set = (int *) gmalloc (max_gcse_regno * sizeof (int));
2073 for (bb = 0; bb < n_basic_blocks; bb++)
2075 rtx insn;
2076 int regno;
2077 int in_libcall_block;
2078 int i;
2080 /* First pass over the instructions records information used to
2081 determine when registers and memory are first and last set.
2082 ??? The mem_set_in_block and hard-reg reg_set_in_block computation
2083 could be moved to compute_sets since they currently don't change. */
2085 for (i = 0; i < max_gcse_regno; i++)
2086 reg_first_set[i] = reg_last_set[i] = NEVER_SET;
2087 mem_first_set = NEVER_SET;
2088 mem_last_set = NEVER_SET;
2090 for (insn = BLOCK_HEAD (bb);
2091 insn && insn != NEXT_INSN (BLOCK_END (bb));
2092 insn = NEXT_INSN (insn))
2094 #ifdef NON_SAVING_SETJMP
2095 if (NON_SAVING_SETJMP && GET_CODE (insn) == NOTE
2096 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
2098 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2099 record_last_reg_set_info (insn, regno);
2100 continue;
2102 #endif
2104 if (GET_RTX_CLASS (GET_CODE (insn)) != 'i')
2105 continue;
2107 if (GET_CODE (insn) == CALL_INSN)
2109 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2110 if ((call_used_regs[regno]
2111 && regno != STACK_POINTER_REGNUM
2112 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2113 && regno != HARD_FRAME_POINTER_REGNUM
2114 #endif
2115 #if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
2116 && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
2117 #endif
2118 #if defined (PIC_OFFSET_TABLE_REGNUM) && !defined (PIC_OFFSET_TABLE_REG_CALL_CLOBBERED)
2119 && ! (regno == PIC_OFFSET_TABLE_REGNUM && flag_pic)
2120 #endif
2122 && regno != FRAME_POINTER_REGNUM)
2123 || global_regs[regno])
2124 record_last_reg_set_info (insn, regno);
2125 if (! CONST_CALL_P (insn))
2126 record_last_mem_set_info (insn);
2129 last_set_insn = insn;
2130 note_stores (PATTERN (insn), record_last_set_info);
2133 /* The next pass builds the hash table. */
2135 for (insn = BLOCK_HEAD (bb), in_libcall_block = 0;
2136 insn && insn != NEXT_INSN (BLOCK_END (bb));
2137 insn = NEXT_INSN (insn))
2139 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
2141 if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
2142 in_libcall_block = 1;
2143 else if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
2144 in_libcall_block = 0;
2145 hash_scan_insn (insn, set_p, in_libcall_block);
2150 free (reg_first_set);
2151 free (reg_last_set);
2152 /* Catch bugs early. */
2153 reg_first_set = reg_last_set = 0;
2156 /* Allocate space for the set hash table.
2157 N_INSNS is the number of instructions in the function.
2158 It is used to determine the number of buckets to use. */
2160 static void
2161 alloc_set_hash_table (n_insns)
2162 int n_insns;
2164 int n;
2166 set_hash_table_size = n_insns / 4;
2167 if (set_hash_table_size < 11)
2168 set_hash_table_size = 11;
2169 /* Attempt to maintain efficient use of hash table.
2170 Making it an odd number is simplest for now.
2171 ??? Later take some measurements. */
2172 set_hash_table_size |= 1;
2173 n = set_hash_table_size * sizeof (struct expr *);
2174 set_hash_table = (struct expr **) gmalloc (n);
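/* Sizing sketch: for a function of 1000 insns, n_insns / 4 gives 250,
   which is above the minimum of 11, and OR-ing in the low bit yields
   251 buckets.  The expression table below is sized the same way from
   n_insns / 2.  */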
2177 /* Free things allocated by alloc_set_hash_table. */
2179 static void
2180 free_set_hash_table ()
2182 free (set_hash_table);
2185 /* Compute the hash table for doing copy/const propagation. */
2187 static void
2188 compute_set_hash_table ()
2190 /* Initialize count of number of entries in hash table. */
2191 n_sets = 0;
2192 bzero ((char *) set_hash_table, set_hash_table_size * sizeof (struct expr *));
2194 compute_hash_table (1);
2197 /* Allocate space for the expression hash table.
2198 N_INSNS is the number of instructions in the function.
2199 It is used to determine the number of buckets to use. */
2201 static void
2202 alloc_expr_hash_table (n_insns)
2203 int n_insns;
2205 int n;
2207 expr_hash_table_size = n_insns / 2;
2208 /* Make sure the amount is usable. */
2209 if (expr_hash_table_size < 11)
2210 expr_hash_table_size = 11;
2211 /* Attempt to maintain efficient use of hash table.
2212 Making it an odd number is simplest for now.
2213 ??? Later take some measurements. */
2214 expr_hash_table_size |= 1;
2215 n = expr_hash_table_size * sizeof (struct expr *);
2216 expr_hash_table = (struct expr **) gmalloc (n);
2219 /* Free things allocated by alloc_expr_hash_table. */
2221 static void
2222 free_expr_hash_table ()
2224 free (expr_hash_table);
2227 /* Compute the hash table for doing GCSE. */
2229 static void
2230 compute_expr_hash_table ()
2232 /* Initialize count of number of entries in hash table. */
2233 n_exprs = 0;
2234 bzero ((char *) expr_hash_table, expr_hash_table_size * sizeof (struct expr *));
2236 compute_hash_table (0);
2239 /* Expression tracking support. */
2241 /* Lookup pattern PAT in the expression table.
2242 The result is a pointer to the table entry, or NULL if not found. */
2244 static struct expr *
2245 lookup_expr (pat)
2246 rtx pat;
2248 int do_not_record_p;
2249 unsigned int hash = hash_expr (pat, GET_MODE (pat), &do_not_record_p,
2250 expr_hash_table_size);
2251 struct expr *expr;
2253 if (do_not_record_p)
2254 return NULL;
2256 expr = expr_hash_table[hash];
2258 while (expr && ! expr_equiv_p (expr->expr, pat))
2259 expr = expr->next_same_hash;
2261 return expr;
2264 /* Lookup REGNO in the set table.
2265 If PAT is non-NULL look for the entry that matches it, otherwise return
2266 the first entry for REGNO.
2267 The result is a pointer to the table entry, or NULL if not found. */
2269 static struct expr *
2270 lookup_set (regno, pat)
2271 int regno;
2272 rtx pat;
2274 unsigned int hash = hash_set (regno, set_hash_table_size);
2275 struct expr *expr;
2277 expr = set_hash_table[hash];
2279 if (pat)
2281 while (expr && ! expr_equiv_p (expr->expr, pat))
2282 expr = expr->next_same_hash;
2284 else
2286 while (expr && REGNO (SET_DEST (expr->expr)) != regno)
2287 expr = expr->next_same_hash;
2290 return expr;
2293 /* Return the next entry for REGNO in list EXPR. */
2295 static struct expr *
2296 next_set (regno, expr)
2297 int regno;
2298 struct expr *expr;
2301 expr = expr->next_same_hash;
2302 while (expr && REGNO (SET_DEST (expr->expr)) != regno);
2303 return expr;
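/* A minimal usage sketch, not part of the pass: to walk every recorded
   set of a register, combine lookup_set with next_set, as find_avail_set
   does further below.  REGNO here is a hypothetical pseudo-reg and
   examine () is a placeholder.  */
#if 0
{
  struct expr *set;

  for (set = lookup_set (regno, NULL_RTX);
       set != NULL;
       set = next_set (regno, set))
    /* set->expr has the form (set (reg REGNO) src).  */
    examine (set->expr);
}
#endif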
2306 /* Reset tables used to keep track of what's still available [since the
2307 start of the block]. */
2309 static void
2310 reset_opr_set_tables ()
2312 /* Maintain a bitmap of which regs have been set since beginning of
2313 the block. */
2314 sbitmap_zero (reg_set_bitmap);
2315 /* Also keep a record of the last instruction to modify memory.
2316 For now this is very trivial, we only record whether any memory
2317 location has been modified. */
2318 mem_last_set = 0;
2321 /* Return non-zero if the operands of X are not set before INSN in
2322 INSN's basic block. */
2324 static int
2325 oprs_not_set_p (x, insn)
2326 rtx x, insn;
2328 int i;
2329 enum rtx_code code;
2330 char *fmt;
2332 /* repeat is used to turn tail-recursion into iteration. */
2333 repeat:
2335 if (x == 0)
2336 return 1;
2338 code = GET_CODE (x);
2339 switch (code)
2341 case PC:
2342 case CC0:
2343 case CONST:
2344 case CONST_INT:
2345 case CONST_DOUBLE:
2346 case SYMBOL_REF:
2347 case LABEL_REF:
2348 case ADDR_VEC:
2349 case ADDR_DIFF_VEC:
2350 return 1;
2352 case MEM:
2353 if (mem_last_set != 0)
2354 return 0;
2355 x = XEXP (x, 0);
2356 goto repeat;
2358 case REG:
2359 return ! TEST_BIT (reg_set_bitmap, REGNO (x));
2361 default:
2362 break;
2365 fmt = GET_RTX_FORMAT (code);
2366 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2368 if (fmt[i] == 'e')
2370 int not_set_p;
2371 /* If we are about to do the last recursive call
2372 needed at this level, change it into iteration.
2373 This function is called enough to be worth it. */
2374 if (i == 0)
2376 x = XEXP (x, 0);
2377 goto repeat;
2379 not_set_p = oprs_not_set_p (XEXP (x, i), insn);
2380 if (! not_set_p)
2381 return 0;
2383 else if (fmt[i] == 'E')
2385 int j;
2386 for (j = 0; j < XVECLEN (x, i); j++)
2388 int not_set_p = oprs_not_set_p (XVECEXP (x, i, j), insn);
2389 if (! not_set_p)
2390 return 0;
2395 return 1;
2398 /* Mark things set by a CALL. */
2400 static void
2401 mark_call (insn)
2402 rtx insn;
2404 mem_last_set = INSN_CUID (insn);
2407 /* Mark things set by a SET. */
2409 static void
2410 mark_set (pat, insn)
2411 rtx pat, insn;
2413 rtx dest = SET_DEST (pat);
2415 while (GET_CODE (dest) == SUBREG
2416 || GET_CODE (dest) == ZERO_EXTRACT
2417 || GET_CODE (dest) == SIGN_EXTRACT
2418 || GET_CODE (dest) == STRICT_LOW_PART)
2419 dest = XEXP (dest, 0);
2421 if (GET_CODE (dest) == REG)
2422 SET_BIT (reg_set_bitmap, REGNO (dest));
2423 else if (GET_CODE (dest) == MEM)
2424 mem_last_set = INSN_CUID (insn);
2426 if (GET_CODE (SET_SRC (pat)) == CALL)
2427 mark_call (insn);
2430 /* Record things set by a CLOBBER. */
2432 static void
2433 mark_clobber (pat, insn)
2434 rtx pat, insn;
2436 rtx clob = XEXP (pat, 0);
2438 while (GET_CODE (clob) == SUBREG || GET_CODE (clob) == STRICT_LOW_PART)
2439 clob = XEXP (clob, 0);
2441 if (GET_CODE (clob) == REG)
2442 SET_BIT (reg_set_bitmap, REGNO (clob));
2443 else
2444 mem_last_set = INSN_CUID (insn);
2447 /* Record things set by INSN.
2448 This data is used by oprs_not_set_p. */
2450 static void
2451 mark_oprs_set (insn)
2452 rtx insn;
2454 rtx pat = PATTERN (insn);
2456 if (GET_CODE (pat) == SET)
2457 mark_set (pat, insn);
2458 else if (GET_CODE (pat) == PARALLEL)
2460 int i;
2462 for (i = 0; i < XVECLEN (pat, 0); i++)
2464 rtx x = XVECEXP (pat, 0, i);
2466 if (GET_CODE (x) == SET)
2467 mark_set (x, insn);
2468 else if (GET_CODE (x) == CLOBBER)
2469 mark_clobber (x, insn);
2470 else if (GET_CODE (x) == CALL)
2471 mark_call (insn);
2474 else if (GET_CODE (pat) == CLOBBER)
2475 mark_clobber (pat, insn);
2476 else if (GET_CODE (pat) == CALL)
2477 mark_call (insn);
2481 /* Classic GCSE reaching definition support. */
2483 /* Allocate reaching def variables. */
2485 static void
2486 alloc_rd_mem (n_blocks, n_insns)
2487 int n_blocks, n_insns;
2489 rd_kill = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_insns);
2490 sbitmap_vector_zero (rd_kill, n_basic_blocks);
2492 rd_gen = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_insns);
2493 sbitmap_vector_zero (rd_gen, n_basic_blocks);
2495 reaching_defs = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_insns);
2496 sbitmap_vector_zero (reaching_defs, n_basic_blocks);
2498 rd_out = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_insns);
2499 sbitmap_vector_zero (rd_out, n_basic_blocks);
2502 /* Free reaching def variables. */
2504 static void
2505 free_rd_mem ()
2507 free (rd_kill);
2508 free (rd_gen);
2509 free (reaching_defs);
2510 free (rd_out);
2513 /* Add INSN to the kills of BB.
2514 REGNO, set in BB, is killed by INSN. */
2516 static void
2517 handle_rd_kill_set (insn, regno, bb)
2518 rtx insn;
2519 int regno, bb;
2521 struct reg_set *this_reg = reg_set_table[regno];
2523 while (this_reg)
2525 if (BLOCK_NUM (this_reg->insn) != BLOCK_NUM (insn))
2526 SET_BIT (rd_kill[bb], INSN_CUID (this_reg->insn));
2527 this_reg = this_reg->next;
2531 /* Compute the set of kill's for reaching definitions. */
2533 static void
2534 compute_kill_rd ()
2536 int bb,cuid;
2538 /* For each block
2539 For each set bit in `gen' of the block (i.e. each insn which
2540 generates a definition in the block)
2541 Call the reg set by the insn corresponding to that bit regx
2542 Look at the linked list starting at reg_set_table[regx]
2543 For each setting of regx in the linked list, which is not in
2544 this block
2545 Set the bit in `kill' corresponding to that insn
2548 for (bb = 0; bb < n_basic_blocks; bb++)
2550 for (cuid = 0; cuid < max_cuid; cuid++)
2552 if (TEST_BIT (rd_gen[bb], cuid))
2554 rtx insn = CUID_INSN (cuid);
2555 rtx pat = PATTERN (insn);
2557 if (GET_CODE (insn) == CALL_INSN)
2559 int regno;
2561 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2563 if ((call_used_regs[regno]
2564 && regno != STACK_POINTER_REGNUM
2565 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2566 && regno != HARD_FRAME_POINTER_REGNUM
2567 #endif
2568 #if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
2569 && ! (regno == ARG_POINTER_REGNUM
2570 && fixed_regs[regno])
2571 #endif
2572 #if defined (PIC_OFFSET_TABLE_REGNUM) && !defined (PIC_OFFSET_TABLE_REG_CALL_CLOBBERED)
2573 && ! (regno == PIC_OFFSET_TABLE_REGNUM && flag_pic)
2574 #endif
2575 && regno != FRAME_POINTER_REGNUM)
2576 || global_regs[regno])
2577 handle_rd_kill_set (insn, regno, bb);
2581 if (GET_CODE (pat) == PARALLEL)
2583 int i;
2585 /* We work backwards because ... */
2586 for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
2588 enum rtx_code code = GET_CODE (XVECEXP (pat, 0, i));
2589 if ((code == SET || code == CLOBBER)
2590 && GET_CODE (XEXP (XVECEXP (pat, 0, i), 0)) == REG)
2591 handle_rd_kill_set (insn,
2592 REGNO (XEXP (XVECEXP (pat, 0, i), 0)),
2593 bb);
2596 else if (GET_CODE (pat) == SET)
2598 if (GET_CODE (SET_DEST (pat)) == REG)
2600 /* Each setting of this register outside of this block
2601 must be marked in the set of kills in this block. */
2602 handle_rd_kill_set (insn, REGNO (SET_DEST (pat)), bb);
2605 /* FIXME: CLOBBER? */
2611 /* Compute the reaching definitions as in
2612 Compilers Principles, Techniques, and Tools. Aho, Sethi, Ullman,
2613 Chapter 10. It is the same algorithm as used for computing available
2614 expressions but applied to the gens and kills of reaching definitions. */
2616 static void
2617 compute_rd ()
2619 int bb, changed, passes;
2621 for (bb = 0; bb < n_basic_blocks; bb++)
2622 sbitmap_copy (rd_out[bb] /*dst*/, rd_gen[bb] /*src*/);
2624 passes = 0;
2625 changed = 1;
2626 while (changed)
2628 changed = 0;
2629 for (bb = 0; bb < n_basic_blocks; bb++)
2631 sbitmap_union_of_predecessors (reaching_defs[bb], rd_out,
2632 bb, s_preds);
2633 changed |= sbitmap_union_of_diff (rd_out[bb], rd_gen[bb],
2634 reaching_defs[bb], rd_kill[bb]);
2636 passes++;
2639 if (gcse_file)
2640 fprintf (gcse_file, "reaching def computation: %d passes\n", passes);
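/* Restated as dataflow equations (a paraphrase of the loop above), with
   IN = reaching_defs, OUT = rd_out, GEN = rd_gen, KILL = rd_kill:

     IN(b)  = union over predecessors p of b of OUT(p)
     OUT(b) = GEN(b) | (IN(b) & ~KILL(b))

   OUT is seeded with GEN and the equations are iterated until no OUT
   bitmap changes.  */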
2643 /* Classic GCSE available expression support. */
2645 /* Allocate memory for available expression computation. */
2647 static void
2648 alloc_avail_expr_mem (n_blocks, n_exprs)
2649 int n_blocks, n_exprs;
2651 ae_kill = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_exprs);
2652 sbitmap_vector_zero (ae_kill, n_basic_blocks);
2654 ae_gen = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_exprs);
2655 sbitmap_vector_zero (ae_gen, n_basic_blocks);
2657 ae_in = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_exprs);
2658 sbitmap_vector_zero (ae_in, n_basic_blocks);
2660 ae_out = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_exprs);
2661 sbitmap_vector_zero (ae_out, n_basic_blocks);
2663 u_bitmap = (sbitmap) sbitmap_alloc (n_exprs);
2664 sbitmap_ones (u_bitmap);
2667 static void
2668 free_avail_expr_mem ()
2670 free (ae_kill);
2671 free (ae_gen);
2672 free (ae_in);
2673 free (ae_out);
2674 free (u_bitmap);
2677 /* Compute the set of available expressions generated in each basic block. */
2679 static void
2680 compute_ae_gen ()
2682 int i;
2684 /* For each recorded occurrence of each expression, set ae_gen[bb][expr].
2685 This is all we have to do because an expression is not recorded if it
2686 is not available, and the only expressions we want to work with are the
2687 ones that are recorded. */
2689 for (i = 0; i < expr_hash_table_size; i++)
2691 struct expr *expr = expr_hash_table[i];
2692 while (expr != NULL)
2694 struct occr *occr = expr->avail_occr;
2695 while (occr != NULL)
2697 SET_BIT (ae_gen[BLOCK_NUM (occr->insn)], expr->bitmap_index);
2698 occr = occr->next;
2700 expr = expr->next_same_hash;
2705 /* Return non-zero if expression X is killed in BB. */
2707 static int
2708 expr_killed_p (x, bb)
2709 rtx x;
2710 int bb;
2712 int i;
2713 enum rtx_code code;
2714 char *fmt;
2716 /* repeat is used to turn tail-recursion into iteration. */
2717 repeat:
2719 if (x == 0)
2720 return 1;
2722 code = GET_CODE (x);
2723 switch (code)
2725 case REG:
2726 return TEST_BIT (reg_set_in_block[bb], REGNO (x));
2728 case MEM:
2729 if (mem_set_in_block[bb])
2730 return 1;
2731 x = XEXP (x, 0);
2732 goto repeat;
2734 case PC:
2735 case CC0: /*FIXME*/
2736 case CONST:
2737 case CONST_INT:
2738 case CONST_DOUBLE:
2739 case SYMBOL_REF:
2740 case LABEL_REF:
2741 case ADDR_VEC:
2742 case ADDR_DIFF_VEC:
2743 return 0;
2745 default:
2746 break;
2749 i = GET_RTX_LENGTH (code) - 1;
2750 fmt = GET_RTX_FORMAT (code);
2751 for (; i >= 0; i--)
2753 if (fmt[i] == 'e')
2755 rtx tem = XEXP (x, i);
2757 /* If we are about to do the last recursive call
2758 needed at this level, change it into iteration.
2759 This function is called enough to be worth it. */
2760 if (i == 0)
2762 x = tem;
2763 goto repeat;
2765 if (expr_killed_p (tem, bb))
2766 return 1;
2768 else if (fmt[i] == 'E')
2770 int j;
2771 for (j = 0; j < XVECLEN (x, i); j++)
2773 if (expr_killed_p (XVECEXP (x, i, j), bb))
2774 return 1;
2779 return 0;
2782 /* Compute the set of available expressions killed in each basic block. */
2784 static void
2785 compute_ae_kill ()
2787 int bb,i;
2789 for (bb = 0; bb < n_basic_blocks; bb++)
2791 for (i = 0; i < expr_hash_table_size; i++)
2793 struct expr *expr = expr_hash_table[i];
2795 for ( ; expr != NULL; expr = expr->next_same_hash)
2797 /* Skip EXPR if generated in this block. */
2798 if (TEST_BIT (ae_gen[bb], expr->bitmap_index))
2799 continue;
2801 if (expr_killed_p (expr->expr, bb))
2802 SET_BIT (ae_kill[bb], expr->bitmap_index);
2808 /* Compute available expressions.
2810 Implement the algorithm to find available expressions
2811 as given in the Aho Sethi Ullman book, pages 627-631. */
2813 static void
2814 compute_available ()
2816 int bb, changed, passes;
2818 sbitmap_zero (ae_in[0]);
2820 sbitmap_copy (ae_out[0] /*dst*/, ae_gen[0] /*src*/);
2822 for (bb = 1; bb < n_basic_blocks; bb++)
2823 sbitmap_difference (ae_out[bb], u_bitmap, ae_kill[bb]);
2825 passes = 0;
2826 changed = 1;
2827 while (changed)
2829 changed = 0;
2830 for (bb = 1; bb < n_basic_blocks; bb++)
2832 sbitmap_intersect_of_predecessors (ae_in[bb], ae_out, bb, s_preds);
2833 changed |= sbitmap_union_of_diff (ae_out[bb], ae_gen[bb],
2834 ae_in[bb], ae_kill[bb]);
2836 passes++;
2839 if (gcse_file)
2840 fprintf (gcse_file, "avail expr computation: %d passes\n", passes);
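/* Restated as equations: AE_IN(0) is forced empty, AE_OUT(0) is seeded
   with AE_GEN(0), and every other AE_OUT is seeded with U & ~AE_KILL.
   Then, for each block b other than the entry block,

     AE_IN(b)  = intersection over predecessors p of b of AE_OUT(p)
     AE_OUT(b) = AE_GEN(b) | (AE_IN(b) & ~AE_KILL(b))

   iterated to a fixed point.  */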
2843 /* Actually perform the Classic GCSE optimizations. */
2845 /* Return non-zero if occurrence OCCR of expression EXPR reaches block BB.
2847 CHECK_SELF_LOOP is non-zero if we should consider a block reaching itself
2848 as a positive reach. We want to do this when there are two computations
2849 of the expression in the block.
2851 VISITED is a pointer to a working buffer for tracking which BB's have
2852 been visited. It is NULL for the top-level call.
2854 We treat reaching expressions that go through blocks containing the same
2855 reaching expression as "not reaching". E.g. if EXPR is generated in blocks
2856 2 and 3, INSN is in block 4, and 2->3->4, we treat the expression in block
2857 2 as not reaching. The intent is to improve the probability of finding
2858 only one reaching expression and to reduce register lifetimes by picking
2859 the closest such expression. */
2861 static int
2862 expr_reaches_here_p (occr, expr, bb, check_self_loop, visited)
2863 struct occr *occr;
2864 struct expr *expr;
2865 int bb;
2866 int check_self_loop;
2867 char *visited;
2869 int_list_ptr pred;
2871 if (visited == NULL)
2873 visited = (char *) alloca (n_basic_blocks);
2874 bzero (visited, n_basic_blocks);
2877 for (pred = s_preds[bb]; pred != NULL; pred = pred->next)
2879 int pred_bb = INT_LIST_VAL (pred);
2881 if (visited[pred_bb])
2883 /* This predecessor has already been visited.
2884 Nothing to do. */
2887 else if (pred_bb == bb)
2889 /* BB loops on itself. */
2890 if (check_self_loop
2891 && TEST_BIT (ae_gen[pred_bb], expr->bitmap_index)
2892 && BLOCK_NUM (occr->insn) == pred_bb)
2893 return 1;
2894 visited[pred_bb] = 1;
2896 /* Ignore this predecessor if it kills the expression. */
2897 else if (TEST_BIT (ae_kill[pred_bb], expr->bitmap_index))
2898 visited[pred_bb] = 1;
2899 /* Does this predecessor generate this expression? */
2900 else if (TEST_BIT (ae_gen[pred_bb], expr->bitmap_index))
2902 /* Is this the occurrence we're looking for?
2903 Note that there's only one generating occurrence per block
2904 so we just need to check the block number. */
2905 if (BLOCK_NUM (occr->insn) == pred_bb)
2906 return 1;
2907 visited[pred_bb] = 1;
2909 /* Neither gen nor kill. */
2910 else
2912 visited[pred_bb] = 1;
2913 if (expr_reaches_here_p (occr, expr, pred_bb, check_self_loop, visited))
2914 return 1;
2918 /* All paths have been checked. */
2919 return 0;
2922 /* Return the instruction that computes EXPR that reaches INSN's basic block.
2923 If there is more than one such instruction, return NULL.
2925 Called only by handle_avail_expr. */
2927 static rtx
2928 computing_insn (expr, insn)
2929 struct expr *expr;
2930 rtx insn;
2932 int bb = BLOCK_NUM (insn);
2934 if (expr->avail_occr->next == NULL)
2936 if (BLOCK_NUM (expr->avail_occr->insn) == bb)
2938 /* The available expression is actually itself
2939 (i.e. a loop in the flow graph) so do nothing. */
2940 return NULL;
2942 /* (FIXME) Case where we found a pattern that was created by
2943 a substitution that took place. */
2944 return expr->avail_occr->insn;
2946 else
2948 /* Pattern is computed more than once.
2949 Search backwards from this insn to see how many of these
2950 computations actually reach this insn. */
2951 struct occr *occr;
2952 rtx insn_computes_expr = NULL;
2953 int can_reach = 0;
2955 for (occr = expr->avail_occr; occr != NULL; occr = occr->next)
2957 if (BLOCK_NUM (occr->insn) == bb)
2959 /* The expression is generated in this block.
2960 The only time we care about this is when the expression
2961 is generated later in the block [and thus there's a loop].
2962 We let the normal cse pass handle the other cases. */
2963 if (INSN_CUID (insn) < INSN_CUID (occr->insn))
2965 if (expr_reaches_here_p (occr, expr, bb, 1, NULL))
2967 can_reach++;
2968 if (can_reach > 1)
2969 return NULL;
2970 insn_computes_expr = occr->insn;
2974 else /* Computation of the pattern outside this block. */
2976 if (expr_reaches_here_p (occr, expr, bb, 0, NULL))
2978 can_reach++;
2979 if (can_reach > 1)
2980 return NULL;
2981 insn_computes_expr = occr->insn;
2986 if (insn_computes_expr == NULL)
2987 abort ();
2988 return insn_computes_expr;
2992 /* Return non-zero if the definition in DEF_INSN can reach INSN.
2993 Only called by can_disregard_other_sets. */
2995 static int
2996 def_reaches_here_p (insn, def_insn)
2997 rtx insn, def_insn;
2999 rtx reg;
3001 if (TEST_BIT (reaching_defs[BLOCK_NUM (insn)], INSN_CUID (def_insn)))
3002 return 1;
3004 if (BLOCK_NUM (insn) == BLOCK_NUM (def_insn))
3006 if (INSN_CUID (def_insn) < INSN_CUID (insn))
3008 if (GET_CODE (PATTERN (def_insn)) == PARALLEL)
3009 return 1;
3010 if (GET_CODE (PATTERN (def_insn)) == CLOBBER)
3011 reg = XEXP (PATTERN (def_insn), 0);
3012 else if (GET_CODE (PATTERN (def_insn)) == SET)
3013 reg = SET_DEST (PATTERN (def_insn));
3014 else
3015 abort ();
3016 return ! reg_set_between_p (reg, NEXT_INSN (def_insn), insn);
3018 else
3019 return 0;
3022 return 0;
3025 /* Return non-zero if *ADDR_THIS_REG can only have one value at INSN.
3026 The value returned is the number of definitions that reach INSN.
3027 Returning a value of zero means that [maybe] more than one definition
3028 reaches INSN and the caller can't perform whatever optimization it is
3029 trying; i.e. it is always safe to return zero. */
3031 static int
3032 can_disregard_other_sets (addr_this_reg, insn, for_combine)
3033 struct reg_set **addr_this_reg;
3034 rtx insn;
3035 int for_combine;
3037 int number_of_reaching_defs = 0;
3038 struct reg_set *this_reg = *addr_this_reg;
3040 while (this_reg)
3042 if (def_reaches_here_p (insn, this_reg->insn))
3044 number_of_reaching_defs++;
3045 /* Ignore parallels for now. */
3046 if (GET_CODE (PATTERN (this_reg->insn)) == PARALLEL)
3047 return 0;
3048 if (!for_combine
3049 && (GET_CODE (PATTERN (this_reg->insn)) == CLOBBER
3050 || ! rtx_equal_p (SET_SRC (PATTERN (this_reg->insn)),
3051 SET_SRC (PATTERN (insn)))))
3053 /* A setting of the reg to a different value reaches INSN. */
3054 return 0;
3056 if (number_of_reaching_defs > 1)
3058 /* If the value the register is being set to here is
3059 equal to the previous value it was set to, and this
3060 setting reaches the insn we are trying to do the
3061 substitution on, then we are ok. */
3063 if (GET_CODE (PATTERN (this_reg->insn)) == CLOBBER)
3064 return 0;
3065 if (! rtx_equal_p (SET_SRC (PATTERN (this_reg->insn)),
3066 SET_SRC (PATTERN (insn))))
3067 return 0;
3069 *addr_this_reg = this_reg;
3072 /* prev_this_reg = this_reg; */
3073 this_reg = this_reg->next;
3076 return number_of_reaching_defs;
3079 /* Expression computed by insn is available and the substitution is legal,
3080 so try to perform the substitution.
3082 The result is non-zero if any changes were made. */
3084 static int
3085 handle_avail_expr (insn, expr)
3086 rtx insn;
3087 struct expr *expr;
3089 rtx pat, insn_computes_expr;
3090 rtx to;
3091 struct reg_set *this_reg;
3092 int found_setting, use_src;
3093 int changed = 0;
3095 /* We only handle the case where one computation of the expression
3096 reaches this instruction. */
3097 insn_computes_expr = computing_insn (expr, insn);
3098 if (insn_computes_expr == NULL)
3099 return 0;
3101 found_setting = 0;
3102 use_src = 0;
3104 /* At this point we know only one computation of EXPR outside of this
3105 block reaches this insn. Now try to find a register that the
3106 expression is computed into. */
3108 if (GET_CODE (SET_SRC (PATTERN (insn_computes_expr))) == REG)
3110 /* This is the case when the available expression that reaches
3111 here has already been handled as an available expression. */
3112 int regnum_for_replacing = REGNO (SET_SRC (PATTERN (insn_computes_expr)));
3113 /* If the register was created by GCSE we can't use `reg_set_table',
3114 however we know it's set only once. */
3115 if (regnum_for_replacing >= max_gcse_regno
3116 /* If the register the expression is computed into is set only once,
3117 or only one set reaches this insn, we can use it. */
3118 || (((this_reg = reg_set_table[regnum_for_replacing]),
3119 this_reg->next == NULL)
3120 || can_disregard_other_sets (&this_reg, insn, 0)))
3122 use_src = 1;
3123 found_setting = 1;
3127 if (!found_setting)
3129 int regnum_for_replacing = REGNO (SET_DEST (PATTERN (insn_computes_expr)));
3130 /* This shouldn't happen. */
3131 if (regnum_for_replacing >= max_gcse_regno)
3132 abort ();
3133 this_reg = reg_set_table[regnum_for_replacing];
3134 /* If the register the expression is computed into is set only once,
3135 or only one set reaches this insn, use it. */
3136 if (this_reg->next == NULL
3137 || can_disregard_other_sets (&this_reg, insn, 0))
3138 found_setting = 1;
3141 if (found_setting)
3143 pat = PATTERN (insn);
3144 if (use_src)
3145 to = SET_SRC (PATTERN (insn_computes_expr));
3146 else
3147 to = SET_DEST (PATTERN (insn_computes_expr));
3148 changed = validate_change (insn, &SET_SRC (pat), to, 0);
3150 /* We should be able to ignore the return code from validate_change but
3151 to play it safe we check. */
3152 if (changed)
3154 gcse_subst_count++;
3155 if (gcse_file != NULL)
3157 fprintf (gcse_file, "GCSE: Replacing the source in insn %d with reg %d %s insn %d\n",
3158 INSN_UID (insn), REGNO (to),
3159 use_src ? "from" : "set in",
3160 INSN_UID (insn_computes_expr));
3165 /* The register that the expr is computed into is set more than once. */
3166 else if (1 /*expensive_op(this_pattrn->op) && do_expensive_gcse)*/)
3168 /* Insert an insn after insnx that copies the reg set in insnx
3169 into a new pseudo register; call this new register REGN.
3170 From insnb until the end of the basic block, or until REGB is set,
3171 replace all uses of REGB with REGN. */
3172 rtx new_insn;
3174 to = gen_reg_rtx (GET_MODE (SET_DEST (PATTERN (insn_computes_expr))));
3176 /* Generate the new insn. */
3177 /* ??? If the change fails, we return 0, even though we created
3178 an insn. I think this is ok. */
3179 new_insn
3180 = emit_insn_after (gen_rtx_SET (VOIDmode, to,
3181 SET_DEST (PATTERN (insn_computes_expr))),
3182 insn_computes_expr);
3183 /* Keep block number table up to date. */
3184 set_block_num (new_insn, BLOCK_NUM (insn_computes_expr));
3185 /* Keep register set table up to date. */
3186 record_one_set (REGNO (to), new_insn);
3188 gcse_create_count++;
3189 if (gcse_file != NULL)
3191 fprintf (gcse_file, "GCSE: Creating insn %d to copy value of reg %d, computed in insn %d,\n",
3192 INSN_UID (NEXT_INSN (insn_computes_expr)),
3193 REGNO (SET_SRC (PATTERN (NEXT_INSN (insn_computes_expr)))),
3194 INSN_UID (insn_computes_expr));
3195 fprintf (gcse_file, " into newly allocated reg %d\n", REGNO (to));
3198 pat = PATTERN (insn);
3200 /* Do register replacement for INSN. */
3201 changed = validate_change (insn, &SET_SRC (pat),
3202 SET_DEST (PATTERN (NEXT_INSN (insn_computes_expr))),
3205 /* We should be able to ignore the return code from validate_change but
3206 to play it safe we check. */
3207 if (changed)
3209 gcse_subst_count++;
3210 if (gcse_file != NULL)
3212 fprintf (gcse_file, "GCSE: Replacing the source in insn %d with reg %d set in insn %d\n",
3213 INSN_UID (insn),
3214 REGNO (SET_DEST (PATTERN (NEXT_INSN (insn_computes_expr)))),
3215 INSN_UID (insn_computes_expr));
3221 return changed;
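/* A hedged before/after sketch (register numbers hypothetical).  With a
   single reaching computation

     insn A: (set (reg 100) (plus:SI (reg 101) (reg 102)))
     ...
     insn B: (set (reg 103) (plus:SI (reg 101) (reg 102)))

   the found_setting path rewrites insn B into (set (reg 103) (reg 100)).
   If reg 100 is set more than once, the fallback path instead emits
   (set (reg NEW) (reg 100)) immediately after insn A, where NEW is a
   fresh pseudo, and rewrites insn B to use reg NEW.  */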
3224 /* Perform classic GCSE.
3225 This is called by one_classic_gcse_pass after all the dataflow analysis
3226 has been done.
3228 The result is non-zero if a change was made. */
3230 static int
3231 classic_gcse ()
3233 int bb, changed;
3234 rtx insn;
3236 /* Note we start at block 1. */
3238 changed = 0;
3239 for (bb = 1; bb < n_basic_blocks; bb++)
3241 /* Reset tables used to keep track of what's still valid [since the
3242 start of the block]. */
3243 reset_opr_set_tables ();
3245 for (insn = BLOCK_HEAD (bb);
3246 insn != NULL && insn != NEXT_INSN (BLOCK_END (bb));
3247 insn = NEXT_INSN (insn))
3249 /* Is insn of form (set (pseudo-reg) ...)? */
3251 if (GET_CODE (insn) == INSN
3252 && GET_CODE (PATTERN (insn)) == SET
3253 && GET_CODE (SET_DEST (PATTERN (insn))) == REG
3254 && REGNO (SET_DEST (PATTERN (insn))) >= FIRST_PSEUDO_REGISTER)
3256 rtx pat = PATTERN (insn);
3257 rtx src = SET_SRC (pat);
3258 struct expr *expr;
3260 if (want_to_gcse_p (src)
3261 /* Is the expression recorded? */
3262 && ((expr = lookup_expr (src)) != NULL)
3263 /* Is the expression available [at the start of the
3264 block]? */
3265 && TEST_BIT (ae_in[bb], expr->bitmap_index)
3266 /* Are the operands unchanged since the start of the
3267 block? */
3268 && oprs_not_set_p (src, insn))
3269 changed |= handle_avail_expr (insn, expr);
3272 /* Keep track of everything modified by this insn. */
3273 /* ??? Need to be careful w.r.t. mods done to INSN. */
3274 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
3275 mark_oprs_set (insn);
3279 return changed;
3282 /* Top level routine to perform one classic GCSE pass.
3284 Return non-zero if a change was made. */
3286 static int
3287 one_classic_gcse_pass (pass)
3288 int pass;
3290 int changed = 0;
3292 gcse_subst_count = 0;
3293 gcse_create_count = 0;
3295 alloc_expr_hash_table (max_cuid);
3296 alloc_rd_mem (n_basic_blocks, max_cuid);
3297 compute_expr_hash_table ();
3298 if (gcse_file)
3299 dump_hash_table (gcse_file, "Expression", expr_hash_table,
3300 expr_hash_table_size, n_exprs);
3301 if (n_exprs > 0)
3303 compute_kill_rd ();
3304 compute_rd ();
3305 alloc_avail_expr_mem (n_basic_blocks, n_exprs);
3306 compute_ae_gen ();
3307 compute_ae_kill ();
3308 compute_available ();
3309 changed = classic_gcse ();
3310 free_avail_expr_mem ();
3312 free_rd_mem ();
3313 free_expr_hash_table ();
3315 if (gcse_file)
3317 fprintf (gcse_file, "\n");
3318 fprintf (gcse_file, "GCSE of %s, pass %d: %d bytes needed, %d substs, %d insns created\n",
3319 current_function_name, pass,
3320 bytes_used, gcse_subst_count, gcse_create_count);
3323 return changed;
3326 /* Compute copy/constant propagation working variables. */
3328 /* Local properties of assignments. */
3330 static sbitmap *cprop_pavloc;
3331 static sbitmap *cprop_absaltered;
3333 /* Global properties of assignments (computed from the local properties). */
3335 static sbitmap *cprop_avin;
3336 static sbitmap *cprop_avout;
3338 /* Allocate vars used for copy/const propagation.
3339 N_BLOCKS is the number of basic blocks.
3340 N_SETS is the number of sets. */
3342 static void
3343 alloc_cprop_mem (n_blocks, n_sets)
3344 int n_blocks, n_sets;
3346 cprop_pavloc = sbitmap_vector_alloc (n_blocks, n_sets);
3347 cprop_absaltered = sbitmap_vector_alloc (n_blocks, n_sets);
3349 cprop_avin = sbitmap_vector_alloc (n_blocks, n_sets);
3350 cprop_avout = sbitmap_vector_alloc (n_blocks, n_sets);
3353 /* Free vars used by copy/const propagation. */
3355 static void
3356 free_cprop_mem ()
3358 free (cprop_pavloc);
3359 free (cprop_absaltered);
3360 free (cprop_avin);
3361 free (cprop_avout);
3364 /* For each block, compute whether X is transparent.
3365 X is either an expression or an assignment [though we don't care which,
3366 for this context an assignment is treated as an expression].
3367 For each block where an element of X is modified, set (SET_P == 1) or reset
3368 (SET_P == 0) the INDX bit in BMAP. */
3370 static void
3371 compute_transp (x, indx, bmap, set_p)
3372 rtx x;
3373 int indx;
3374 sbitmap *bmap;
3375 int set_p;
3377 int bb,i;
3378 enum rtx_code code;
3379 char *fmt;
3381 /* repeat is used to turn tail-recursion into iteration. */
3382 repeat:
3384 if (x == 0)
3385 return;
3387 code = GET_CODE (x);
3388 switch (code)
3390 case REG:
3392 reg_set *r;
3393 int regno = REGNO (x);
3395 if (set_p)
3397 if (regno < FIRST_PSEUDO_REGISTER)
3399 for (bb = 0; bb < n_basic_blocks; bb++)
3400 if (TEST_BIT (reg_set_in_block[bb], regno))
3401 SET_BIT (bmap[bb], indx);
3403 else
3405 for (r = reg_set_table[regno]; r != NULL; r = r->next)
3407 bb = BLOCK_NUM (r->insn);
3408 SET_BIT (bmap[bb], indx);
3412 else
3414 if (regno < FIRST_PSEUDO_REGISTER)
3416 for (bb = 0; bb < n_basic_blocks; bb++)
3417 if (TEST_BIT (reg_set_in_block[bb], regno))
3418 RESET_BIT (bmap[bb], indx);
3420 else
3422 for (r = reg_set_table[regno]; r != NULL; r = r->next)
3424 bb = BLOCK_NUM (r->insn);
3425 RESET_BIT (bmap[bb], indx);
3429 return;
3432 case MEM:
3433 if (set_p)
3435 for (bb = 0; bb < n_basic_blocks; bb++)
3436 if (mem_set_in_block[bb])
3437 SET_BIT (bmap[bb], indx);
3439 else
3441 for (bb = 0; bb < n_basic_blocks; bb++)
3442 if (mem_set_in_block[bb])
3443 RESET_BIT (bmap[bb], indx);
3445 x = XEXP (x, 0);
3446 goto repeat;
3448 case PC:
3449 case CC0: /*FIXME*/
3450 case CONST:
3451 case CONST_INT:
3452 case CONST_DOUBLE:
3453 case SYMBOL_REF:
3454 case LABEL_REF:
3455 case ADDR_VEC:
3456 case ADDR_DIFF_VEC:
3457 return;
3459 default:
3460 break;
3463 i = GET_RTX_LENGTH (code) - 1;
3464 fmt = GET_RTX_FORMAT (code);
3465 for (; i >= 0; i--)
3467 if (fmt[i] == 'e')
3469 rtx tem = XEXP (x, i);
3471 /* If we are about to do the last recursive call
3472 needed at this level, change it into iteration.
3473 This function is called enough to be worth it. */
3474 if (i == 0)
3476 x = tem;
3477 goto repeat;
3479 compute_transp (tem, indx, bmap, set_p);
3481 else if (fmt[i] == 'E')
3483 int j;
3484 for (j = 0; j < XVECLEN (x, i); j++)
3485 compute_transp (XVECEXP (x, i, j), indx, bmap, set_p);
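/* A hedged example: for X = (plus:SI (reg 6) (mem:SI (reg 7))) at bit
   INDX (register numbers hypothetical), the walk above sets or resets
   BMAP[bb][INDX] in every block bb that sets reg 6, sets reg 7, or
   stores to memory at all -- any store kills the MEM because there is
   no aliasing support yet.  */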
3490 /* Compute the available expressions at the start and end of each
3491 basic block for cprop. This particular dataflow equation is
3492 used often enough that we might want to generalize it and make
3493 it a subroutine for other global optimizations that need available
3494 in/out information. */
3495 static void
3496 compute_cprop_avinout ()
3498 int bb, changed, passes;
3500 sbitmap_zero (cprop_avin[0]);
3501 sbitmap_vector_ones (cprop_avout, n_basic_blocks);
3503 passes = 0;
3504 changed = 1;
3505 while (changed)
3507 changed = 0;
3508 for (bb = 0; bb < n_basic_blocks; bb++)
3510 if (bb != 0)
3511 sbitmap_intersect_of_predecessors (cprop_avin[bb],
3512 cprop_avout, bb, s_preds);
3513 changed |= sbitmap_union_of_diff (cprop_avout[bb],
3514 cprop_pavloc[bb],
3515 cprop_avin[bb],
3516 cprop_absaltered[bb]);
3518 passes++;
3521 if (gcse_file)
3522 fprintf (gcse_file, "cprop avail expr computation: %d passes\n", passes);
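/* Restated as equations: AVIN(0) is forced empty and every AVOUT is
   seeded to all-ones; then, on each pass,

     AVIN(b)  = intersection over predecessors p of b of AVOUT(p), b != 0
     AVOUT(b) = PAVLOC(b) | (AVIN(b) & ~ABSALTERED(b))

   until no AVOUT changes.  */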
3525 /* Top level routine to do the dataflow analysis needed by copy/const
3526 propagation. */
3528 static void
3529 compute_cprop_data ()
3531 compute_local_properties (cprop_absaltered, cprop_pavloc, NULL, 1);
3532 compute_cprop_avinout ();
3535 /* Copy/constant propagation. */
3537 struct reg_use {
3538 rtx reg_rtx;
3541 /* Maximum number of register uses in an insn that we handle. */
3542 #define MAX_USES 8
3544 /* Table of uses found in an insn.
3545 Allocated statically to avoid alloc/free complexity and overhead. */
3546 static struct reg_use reg_use_table[MAX_USES];
3548 /* Index into `reg_use_table' while building it. */
3549 static int reg_use_count;
3551 /* Set up a list of register numbers used in INSN.
3552 The found uses are stored in `reg_use_table'.
3553 `reg_use_count' is initialized to zero before entry, and
3554 contains the number of uses in the table upon exit.
3556 ??? If a register appears multiple times we will record it multiple
3557 times. This doesn't hurt anything but it will slow things down. */
3559 static void
3560 find_used_regs (x)
3561 rtx x;
3563 int i;
3564 enum rtx_code code;
3565 char *fmt;
3567 /* repeat is used to turn tail-recursion into iteration. */
3568 repeat:
3570 if (x == 0)
3571 return;
3573 code = GET_CODE (x);
3574 switch (code)
3576 case REG:
3577 if (reg_use_count == MAX_USES)
3578 return;
3579 reg_use_table[reg_use_count].reg_rtx = x;
3580 reg_use_count++;
3581 return;
3583 case MEM:
3584 x = XEXP (x, 0);
3585 goto repeat;
3587 case PC:
3588 case CC0:
3589 case CONST:
3590 case CONST_INT:
3591 case CONST_DOUBLE:
3592 case SYMBOL_REF:
3593 case LABEL_REF:
3594 case CLOBBER:
3595 case ADDR_VEC:
3596 case ADDR_DIFF_VEC:
3597 case ASM_INPUT: /*FIXME*/
3598 return;
3600 case SET:
3601 if (GET_CODE (SET_DEST (x)) == MEM)
3602 find_used_regs (SET_DEST (x));
3603 x = SET_SRC (x);
3604 goto repeat;
3606 default:
3607 break;
3610 /* Recursively scan the operands of this expression. */
3612 fmt = GET_RTX_FORMAT (code);
3613 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3615 if (fmt[i] == 'e')
3617 /* If we are about to do the last recursive call
3618 needed at this level, change it into iteration.
3619 This function is called enough to be worth it. */
3620 if (i == 0)
3622 x = XEXP (x, 0);
3623 goto repeat;
3625 find_used_regs (XEXP (x, i));
3627 else if (fmt[i] == 'E')
3629 int j;
3630 for (j = 0; j < XVECLEN (x, i); j++)
3631 find_used_regs (XVECEXP (x, i, j));
3636 /* Try to replace all non-SET_DEST occurrences of FROM in INSN with TO.
3637 Returns non-zero if successful. */
3639 static int
3640 try_replace_reg (from, to, insn)
3641 rtx from, to, insn;
3643 /* If this fails we could try to simplify the result of the
3644 replacement and attempt to recognize the simplified insn.
3646 But we need a general simplify_rtx that doesn't have pass
3647 specific state variables. I'm not aware of one at the moment. */
3648 return validate_replace_src (from, to, insn);
3651 /* Find a set of REGNO that is available on entry to INSN's block.
3652 Returns NULL if not found. */
3654 static struct expr *
3655 find_avail_set (regno, insn)
3656 int regno;
3657 rtx insn;
3659 struct expr *set = lookup_set (regno, NULL_RTX);
3661 while (set)
3663 if (TEST_BIT (cprop_avin[BLOCK_NUM (insn)], set->bitmap_index))
3664 break;
3665 set = next_set (regno, set);
3668 return set;
3671 /* Perform constant and copy propagation on INSN.
3672 The result is non-zero if a change was made. */
3674 static int
3675 cprop_insn (insn, alter_jumps)
3676 rtx insn;
3677 int alter_jumps;
3679 struct reg_use *reg_used;
3680 int changed = 0;
3682 /* Only propagate into SETs. Note that a conditional jump is a
3683 SET with pc_rtx as the destination. */
3684 if ((GET_CODE (insn) != INSN
3685 && GET_CODE (insn) != JUMP_INSN)
3686 || GET_CODE (PATTERN (insn)) != SET)
3687 return 0;
3689 reg_use_count = 0;
3690 find_used_regs (PATTERN (insn));
3692 reg_used = &reg_use_table[0];
3693 for ( ; reg_use_count > 0; reg_used++, reg_use_count--)
3695 rtx pat, src;
3696 struct expr *set;
3697 int regno = REGNO (reg_used->reg_rtx);
3699 /* Ignore registers created by GCSE.
3700 We do this because ... */
3701 if (regno >= max_gcse_regno)
3702 continue;
3704 /* If the register has already been set in this block, there's
3705 nothing we can do. */
3706 if (! oprs_not_set_p (reg_used->reg_rtx, insn))
3707 continue;
3709 /* Find an assignment that sets reg_used and is available
3710 at the start of the block. */
3711 set = find_avail_set (regno, insn);
3712 if (! set)
3713 continue;
3715 pat = set->expr;
3716 /* ??? We might be able to handle PARALLELs. Later. */
3717 if (GET_CODE (pat) != SET)
3718 abort ();
3719 src = SET_SRC (pat);
3721 /* Constant propagation. */
3722 if (GET_CODE (src) == CONST_INT || GET_CODE (src) == CONST_DOUBLE)
3724 /* Handle normal insns first. */
3725 if (GET_CODE (insn) == INSN
3726 && try_replace_reg (reg_used->reg_rtx, src, insn))
3728 changed = 1;
3729 const_prop_count++;
3730 if (gcse_file != NULL)
3732 fprintf (gcse_file, "CONST-PROP: Replacing reg %d in insn %d with constant ",
3733 regno, INSN_UID (insn));
3734 print_rtl (gcse_file, src);
3735 fprintf (gcse_file, "\n");
3738 /* The original insn setting reg_used may or may not now be
3739 deletable. We leave the deletion to flow. */
3742 /* Try to propagate a CONST_INT into a conditional jump.
3743 We're pretty specific about what we will handle in this
3744 code; we can extend this as necessary over time.
3746 Right now the insn in question must look like
3748 (set (pc) (if_then_else ...))
3750 Note this does not currently handle machines which use cc0. */
3751 else if (alter_jumps
3752 && GET_CODE (insn) == JUMP_INSN
3753 && condjump_p (insn)
3754 && ! simplejump_p (insn))
3756 /* We want a copy of the JUMP_INSN so we can modify it
3757 in-place as needed without affecting the original. */
3758 rtx copy = copy_rtx (insn);
3759 rtx set = PATTERN (copy);
3760 rtx temp;
3762 /* Replace the register with the appropriate constant. */
3763 replace_rtx (SET_SRC (set), reg_used->reg_rtx, src);
3765 temp = simplify_ternary_operation (GET_CODE (SET_SRC (set)),
3766 GET_MODE (SET_SRC (set)),
3767 GET_MODE (XEXP (SET_SRC (set), 0)),
3768 XEXP (SET_SRC (set), 0),
3769 XEXP (SET_SRC (set), 1),
3770 XEXP (SET_SRC (set), 2));
3772 /* If no simplification can be made, then try the next
3773 register. */
3774 if (temp)
3775 SET_SRC (set) = temp;
3776 else
3777 continue;
3779 /* That may have changed the structure of TEMP, so
3780 force it to be rerecognized if it has not turned
3781 into a nop or unconditional jump. */
3783 INSN_CODE (copy) = -1;
3784 if ((SET_DEST (set) == pc_rtx
3785 && (SET_SRC (set) == pc_rtx
3786 || GET_CODE (SET_SRC (set)) == LABEL_REF))
3787 || recog (PATTERN (copy), copy, NULL) >= 0)
3789 /* This has either become an unconditional jump
3790 or a nop-jump. We'd like to delete nop jumps
3791 here, but doing so confuses gcse. So we just
3792 make the replacement and let later passes
3793 sort things out. */
3794 PATTERN (insn) = set;
3795 INSN_CODE (insn) = -1;
3797 /* One less use of the label this insn used to jump to
3798 if we turned this into a NOP jump. */
3799 if (SET_SRC (set) == pc_rtx && JUMP_LABEL (insn) != 0)
3800 --LABEL_NUSES (JUMP_LABEL (insn));
3802 /* If this has turned into an unconditional jump,
3803 then put a barrier after it so that the unreachable
3804 code will be deleted. */
3805 if (GET_CODE (SET_SRC (set)) == LABEL_REF)
3806 emit_barrier_after (insn);
3808 run_jump_opt_after_gcse = 1;
3810 changed = 1;
3811 const_prop_count++;
3812 if (gcse_file != NULL)
3814 fprintf (gcse_file, "CONST-PROP: Replacing reg %d in insn %d with constant ",
3815 regno, INSN_UID (insn));
3816 print_rtl (gcse_file, src);
3817 fprintf (gcse_file, "\n");
3822 else if (GET_CODE (src) == REG
3823 && REGNO (src) >= FIRST_PSEUDO_REGISTER
3824 && REGNO (src) != regno)
3826 /* We know the set is available.
3827 Now check that SET_SRC is ANTLOC (i.e. none of the source operands
3828 have changed since the start of the block). */
3829 if (oprs_not_set_p (src, insn))
3831 if (try_replace_reg (reg_used->reg_rtx, src, insn))
3833 changed = 1;
3834 copy_prop_count++;
3835 if (gcse_file != NULL)
3837 fprintf (gcse_file, "COPY-PROP: Replacing reg %d in insn %d with reg %d\n",
3838 regno, INSN_UID (insn), REGNO (src));
3841 /* The original insn setting reg_used may or may not now be
3842 deletable. We leave the deletion to flow. */
3843 /* FIXME: If it turns out that the insn isn't deletable,
3844 then we may have unnecessarily extended register lifetimes
3845 and made things worse. */
3851 return changed;
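/* A hedged end-to-end example (register numbers hypothetical).  If the
   assignment (set (reg 100) (const_int 7)) is available on entry to the
   block and reg 100 has not been set earlier in the block, then

     (set (reg 105) (plus:SI (reg 100) (reg 106)))

   becomes (set (reg 105) (plus:SI (const_int 7) (reg 106))) via
   try_replace_reg and bumps const_prop_count.  A copy source such as
   (set (reg 100) (reg 101)) is propagated the same way when reg 101 is
   also unmodified since the block start, bumping copy_prop_count.  */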
3854 /* Forward propagate copies.
3855 This includes copies and constants.
3856 Return non-zero if a change was made. */
3858 static int
3859 cprop (alter_jumps)
3860 int alter_jumps;
3862 int bb, changed;
3863 rtx insn;
3865 /* Note we start at block 1. */
3867 changed = 0;
3868 for (bb = 1; bb < n_basic_blocks; bb++)
3870 /* Reset tables used to keep track of what's still valid [since the
3871 start of the block]. */
3872 reset_opr_set_tables ();
3874 for (insn = BLOCK_HEAD (bb);
3875 insn != NULL && insn != NEXT_INSN (BLOCK_END (bb));
3876 insn = NEXT_INSN (insn))
3878 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
3880 changed |= cprop_insn (insn, alter_jumps);
3882 /* Keep track of everything modified by this insn. */
3883 /* ??? Need to be careful w.r.t. mods done to INSN. */
3884 mark_oprs_set (insn);
3889 if (gcse_file != NULL)
3890 fprintf (gcse_file, "\n");
3892 return changed;
3895 /* Perform one copy/constant propagation pass.
3896 F is the first insn in the function.
3897 PASS is the pass count. */
3899 static int
3900 one_cprop_pass (pass, alter_jumps)
3901 int pass;
3902 int alter_jumps;
3904 int changed = 0;
3906 const_prop_count = 0;
3907 copy_prop_count = 0;
3909 alloc_set_hash_table (max_cuid);
3910 compute_set_hash_table ();
3911 if (gcse_file)
3912 dump_hash_table (gcse_file, "SET", set_hash_table, set_hash_table_size,
3913 n_sets);
3914 if (n_sets > 0)
3916 alloc_cprop_mem (n_basic_blocks, n_sets);
3917 compute_cprop_data ();
3918 changed = cprop (alter_jumps);
3919 free_cprop_mem ();
3921 free_set_hash_table ();
3923 if (gcse_file)
3925 fprintf (gcse_file, "CPROP of %s, pass %d: %d bytes needed, %d const props, %d copy props\n",
3926 current_function_name, pass,
3927 bytes_used, const_prop_count, copy_prop_count);
3928 fprintf (gcse_file, "\n");
3931 return changed;
3934 /* Compute PRE+LCM working variables. */
3936 /* Local properties of expressions. */
3937 /* Nonzero for expressions that are transparent in the block. */
3938 static sbitmap *transp;
3940 /* Nonzero for expressions that are transparent at the end of the block.
3941 This is only zero for expressions killed by an abnormal critical
3942 edge created by a call. */
3943 static sbitmap *transpout;
3945 /* Nonzero for expressions that are computed (available) in the block. */
3946 static sbitmap *comp;
3948 /* Nonzero for expressions that are locally anticipatable in the block. */
3949 static sbitmap *antloc;
3951 /* Nonzero for expressions where this block is an optimal computation
3952 point. */
3953 static sbitmap *pre_optimal;
3955 /* Nonzero for expressions which are redundant in a particular block. */
3956 static sbitmap *pre_redundant;
3958 static sbitmap *temp_bitmap;
3960 /* Redundant insns. */
3961 static sbitmap pre_redundant_insns;
3963 /* Allocate vars used for PRE analysis. */
3965 static void
3966 alloc_pre_mem (n_blocks, n_exprs)
3967 int n_blocks, n_exprs;
3969 transp = sbitmap_vector_alloc (n_blocks, n_exprs);
3970 comp = sbitmap_vector_alloc (n_blocks, n_exprs);
3971 antloc = sbitmap_vector_alloc (n_blocks, n_exprs);
3973 temp_bitmap = sbitmap_vector_alloc (n_blocks, n_exprs);
3974 pre_optimal = sbitmap_vector_alloc (n_blocks, n_exprs);
3975 pre_redundant = sbitmap_vector_alloc (n_blocks, n_exprs);
3976 transpout = sbitmap_vector_alloc (n_blocks, n_exprs);
3979 /* Free vars used for PRE analysis. */
3981 static void
3982 free_pre_mem ()
3984 free (transp);
3985 free (comp);
3986 free (antloc);
3988 free (pre_optimal);
3989 free (pre_redundant);
3990 free (transpout);
3993 /* Top level routine to do the dataflow analysis needed by PRE. */
3995 static void
3996 compute_pre_data ()
3998 compute_local_properties (transp, comp, antloc, 0);
3999 compute_transpout ();
4000 pre_lcm (n_basic_blocks, n_exprs, s_preds, s_succs, transp,
4001 antloc, pre_redundant, pre_optimal);
4005 /* PRE utilities */
4007 /* Return non-zero if an occurrence of expression EXPR in OCCR_BB would reach
4008 block BB.
4010 VISITED is a pointer to a working buffer for tracking which BB's have
4011 been visited. It is NULL for the top-level call.
4013 CHECK_PRE_COMP controls whether or not we check for a computation of
4014 EXPR in OCCR_BB.
4016 We treat reaching expressions that go through blocks containing the same
4017 reaching expression as "not reaching". E.g. if EXPR is generated in blocks
4018 2 and 3, INSN is in block 4, and 2->3->4, we treat the expression in block
4019 2 as not reaching. The intent is to improve the probability of finding
4020 only one reaching expression and to reduce register lifetimes by picking
4021 the closest such expression. */
4023 static int
4024 pre_expr_reaches_here_p (occr_bb, expr, bb, check_pre_comp, visited)
4025 int occr_bb;
4026 struct expr *expr;
4027 int bb;
4028 int check_pre_comp;
4029 char *visited;
4031 int_list_ptr pred;
4033 if (visited == NULL)
4035 visited = (char *) alloca (n_basic_blocks);
4036 bzero (visited, n_basic_blocks);
4039 for (pred = s_preds[bb]; pred != NULL; pred = pred->next)
4041 int pred_bb = INT_LIST_VAL (pred);
4043 if (pred_bb == ENTRY_BLOCK
4044 /* Has this predecessor already been visited? */
4045 || visited[pred_bb])
4047 /* Nothing to do. */
4049 /* Does this predecessor generate this expression? */
4050 else if ((!check_pre_comp && occr_bb == pred_bb)
4051 || TEST_BIT (comp[pred_bb], expr->bitmap_index))
4053 /* Is this the occurrence we're looking for?
4054 Note that there's only one generating occurrence per block
4055 so we just need to check the block number. */
4056 if (occr_bb == pred_bb)
4057 return 1;
4058 visited[pred_bb] = 1;
4060 /* Ignore this predecessor if it kills the expression. */
4061 else if (! TEST_BIT (transp[pred_bb], expr->bitmap_index))
4062 visited[pred_bb] = 1;
4063 /* Neither gen nor kill. */
4064 else
4066 visited[pred_bb] = 1;
4067 if (pre_expr_reaches_here_p (occr_bb, expr, pred_bb,
4068 check_pre_comp, visited))
4069 return 1;
4073 /* All paths have been checked. */
4074 return 0;
4077 /* Add EXPR to the end of basic block BB.
4079 This is used by both PRE and code hoisting.
4081 For PRE, we want to verify that the expr is either transparent
4082 or locally anticipatable in the target block. This check makes
4083 no sense for code hoisting. */
4085 static void
4086 insert_insn_end_bb (expr, bb, pre)
4087 struct expr *expr;
4088 int bb;
4089 int pre;
4091 rtx insn = BLOCK_END (bb);
4092 rtx new_insn;
4093 rtx reg = expr->reaching_reg;
4094 int regno = REGNO (reg);
4095 rtx pat, copied_expr;
4096 rtx first_new_insn;
4098 start_sequence ();
4099 copied_expr = copy_rtx (expr->expr);
4100 emit_move_insn (reg, copied_expr);
4101 first_new_insn = get_insns ();
4102 pat = gen_sequence ();
4103 end_sequence ();
4105 /* If the last insn is a jump, insert EXPR in front [taking care to
4106 handle cc0, etc. properly]. */
4108 if (GET_CODE (insn) == JUMP_INSN)
4110 #ifdef HAVE_cc0
4111 rtx note;
4112 #endif
4114 /* If this is a jump table, then we can't insert stuff here. Since
4115 we know the previous real insn must be the tablejump, we insert
4116 the new instruction just before the tablejump. */
4117 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
4118 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
4119 insn = prev_real_insn (insn);
4121 #ifdef HAVE_cc0
4122 /* FIXME: 'twould be nice to call prev_cc0_setter here but it aborts
4123 if cc0 isn't set. */
4124 note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
4125 if (note)
4126 insn = XEXP (note, 0);
4127 else
4129 rtx maybe_cc0_setter = prev_nonnote_insn (insn);
4130 if (maybe_cc0_setter
4131 && GET_RTX_CLASS (GET_CODE (maybe_cc0_setter)) == 'i'
4132 && sets_cc0_p (PATTERN (maybe_cc0_setter)))
4133 insn = maybe_cc0_setter;
4135 #endif
4136 /* FIXME: What if something in cc0/jump uses value set in new insn? */
4137 new_insn = emit_insn_before (pat, insn);
4138 if (BLOCK_HEAD (bb) == insn)
4139 BLOCK_HEAD (bb) = new_insn;
4141 /* Likewise if the last insn is a call, as will happen in the presence
4142 of exception handling. */
4143 else if (GET_CODE (insn) == CALL_INSN)
4145 HARD_REG_SET parm_regs;
4146 int nparm_regs;
4147 rtx p;
4149 /* Keeping in mind SMALL_REGISTER_CLASSES and parameters in registers,
4150 we search backward and place the instructions before the first
4151 parameter is loaded. Do this for everyone for consistency and a
4152 presumtion that we'll get better code elsewhere as well. */
4154 /* It should always be the case that we can put these instructions
4155 anywhere in the basic block with performing PRE optimizations.
4156 Check this. */
4157 if (pre
4158 && !TEST_BIT (antloc[bb], expr->bitmap_index)
4159 && !TEST_BIT (transp[bb], expr->bitmap_index))
4160 abort ();
4162 /* Since different machines initialize their parameter registers
4163 in different orders, assume nothing. Collect the set of all
4164 parameter registers. */
4165 CLEAR_HARD_REG_SET (parm_regs);
4166 nparm_regs = 0;
4167 for (p = CALL_INSN_FUNCTION_USAGE (insn); p ; p = XEXP (p, 1))
4168 if (GET_CODE (XEXP (p, 0)) == USE
4169 && GET_CODE (XEXP (XEXP (p, 0), 0)) == REG)
4171 int regno = REGNO (XEXP (XEXP (p, 0), 0));
4172 if (regno >= FIRST_PSEUDO_REGISTER)
4173 abort ();
4174 SET_HARD_REG_BIT (parm_regs, regno);
4175 nparm_regs++;
4178 /* Search backward for the first set of a register in this set. */
4179 while (nparm_regs && BLOCK_HEAD (bb) != insn)
4181 insn = PREV_INSN (insn);
4182 p = single_set (insn);
4183 if (p && GET_CODE (SET_DEST (p)) == REG
4184 && REGNO (SET_DEST (p)) < FIRST_PSEUDO_REGISTER
4185 && TEST_HARD_REG_BIT (parm_regs, REGNO (SET_DEST (p))))
4187 CLEAR_HARD_REG_BIT (parm_regs, REGNO (SET_DEST (p)));
4188 nparm_regs--;
4192 /* If we found all the parameter loads, then we want to insert
4193 before the first parameter load.
4195 If we did not find all the parameter loads, then we might have
4196 stopped on the head of the block, which could be a CODE_LABEL.
4197 If we inserted before the CODE_LABEL, then we would be putting
4198 the insn in the wrong basic block. In that case, put the insn
4199 after the CODE_LABEL.
4201 ?!? Do we need to account for NOTE_INSN_BASIC_BLOCK here? */
4202 if (GET_CODE (insn) != CODE_LABEL)
4204 new_insn = emit_insn_before (pat, insn);
4205 if (BLOCK_HEAD (bb) == insn)
4206 BLOCK_HEAD (bb) = new_insn;
4208 else
4210 new_insn = emit_insn_after (pat, insn);
4213 else
4215 new_insn = emit_insn_after (pat, insn);
4216 BLOCK_END (bb) = new_insn;
4219 /* Keep block number table up to date.
4220 Note, PAT could be a multiple insn sequence, we have to make
4221 sure that each insn in the sequence is handled. */
4222 if (GET_CODE (pat) == SEQUENCE)
4224 int i;
4226 for (i = 0; i < XVECLEN (pat, 0); i++)
4228 rtx insn = XVECEXP (pat, 0, i);
4229 set_block_num (insn, bb);
4230 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
4231 add_label_notes (PATTERN (insn), new_insn);
4232 record_set_insn = insn;
4233 note_stores (PATTERN (insn), record_set_info);
4236 else
4238 add_label_notes (SET_SRC (pat), new_insn);
4239 set_block_num (new_insn, bb);
4240 /* Keep register set table up to date. */
4241 record_one_set (regno, new_insn);
4244 gcse_create_count++;
4246 if (gcse_file)
4248 fprintf (gcse_file, "PRE/HOIST: end of bb %d, insn %d, copying expression %d to reg %d\n",
4249 bb, INSN_UID (new_insn), expr->bitmap_index, regno);
/* Insert partially redundant expressions at the ends of appropriate basic
   blocks, making them fully redundant.  */

static void
pre_insert (index_map)
     struct expr **index_map;
{
  int bb, i, set_size;
  sbitmap *inserted;

  /* Compute INSERT = PRE_OPTIMAL & ~PRE_REDUNDANT.
     Where INSERT is nonzero, we add the expression at the end of the basic
     block if it reaches any of the deleted expressions.  */

  set_size = pre_optimal[0]->size;
  inserted = sbitmap_vector_alloc (n_basic_blocks, n_exprs);
  sbitmap_vector_zero (inserted, n_basic_blocks);

  for (bb = 0; bb < n_basic_blocks; bb++)
    {
      int indx;

      /* This computes the number of potential insertions we need.  */
      sbitmap_not (temp_bitmap[bb], pre_redundant[bb]);
      sbitmap_a_and_b (temp_bitmap[bb], temp_bitmap[bb], pre_optimal[bb]);

      /* TEMP_BITMAP[bb] now contains a bitmap of the expressions that we need
         to insert at the end of this basic block.  */
      for (i = indx = 0; i < set_size; i++, indx += SBITMAP_ELT_BITS)
        {
          SBITMAP_ELT_TYPE insert = temp_bitmap[bb]->elms[i];
          int j;

          for (j = indx; insert && j < n_exprs; j++, insert >>= 1)
            {
              if ((insert & 1) != 0 && index_map[j]->reaching_reg != NULL_RTX)
                {
                  struct expr *expr = index_map[j];
                  struct occr *occr;

                  /* Now look at each deleted occurrence of this expression.  */
                  for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
                    {
                      if (! occr->deleted_p)
                        continue;

                      /* Insert this expression at the end of BB if it would
                         reach the deleted occurrence.  */
                      if (!TEST_BIT (inserted[bb], j)
                          && pre_expr_reaches_here_p (bb, expr,
                                                      BLOCK_NUM (occr->insn), 0,
                                                      NULL))
                        {
                          SET_BIT (inserted[bb], j);
                          insert_insn_end_bb (index_map[j], bb, 1);
                        }
                    }
                }
            }
        }
    }

  /* Release the scratch bitmap of insertions made.  */
  free (inserted);
}
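/* Worked example with made-up bits and three expressions: if for some
   block pre_optimal[bb] = 110 and pre_redundant[bb] = 100 (bit 0 on the
   right), then

        temp_bitmap[bb] = pre_optimal[bb] & ~pre_redundant[bb] = 010

   so only expression 1 is an insertion candidate at the end of BB, and
   it is actually inserted only if it reaches a deleted occurrence.  */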
/* Copy the result of INSN to REG.
   EXPR is the expression carrying the reaching register; its
   `bitmap_index' is used only for the debug dump.  */

static void
pre_insert_copy_insn (expr, insn)
     struct expr *expr;
     rtx insn;
{
  rtx reg = expr->reaching_reg;
  int regno = REGNO (reg);
  int indx = expr->bitmap_index;
  rtx set = single_set (insn);
  rtx new_insn;

  if (!set)
    abort ();
  new_insn = emit_insn_after (gen_rtx_SET (VOIDmode, reg, SET_DEST (set)),
                              insn);
  /* Keep block number table up to date.  */
  set_block_num (new_insn, BLOCK_NUM (insn));
  /* Keep register set table up to date.  */
  record_one_set (regno, new_insn);

  gcse_create_count++;

  if (gcse_file)
    {
      fprintf (gcse_file, "PRE: bb %d, insn %d, copying expression %d in insn %d to reg %d\n",
               BLOCK_NUM (insn), INSN_UID (new_insn), indx, INSN_UID (insn), regno);
    }
}
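/* Illustratively (register numbers invented): if INSN is

        (set (reg 60) (plus (reg 61) (reg 62)))

   and EXPR's reaching register is (reg 70), the insn emitted after INSN
   is

        (set (reg 70) (reg 60))

   making the expression's value available in the register that the
   redundant occurrences were rewritten to read.  */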
/* Copy available expressions that reach the redundant expression
   to `reaching_reg'.  */

static void
pre_insert_copies ()
{
  int i, bb;

  for (bb = 0; bb < n_basic_blocks; bb++)
    {
      sbitmap_a_and_b (temp_bitmap[bb], pre_optimal[bb], pre_redundant[bb]);
    }

  /* For each available expression in the table, copy the result to
     `reaching_reg' if the expression reaches a deleted one.

     ??? The current algorithm is rather brute force.
     Need to do some profiling.  */

  for (i = 0; i < expr_hash_table_size; i++)
    {
      struct expr *expr;

      for (expr = expr_hash_table[i]; expr != NULL; expr = expr->next_same_hash)
        {
          struct occr *occr;

          /* If the basic block isn't reachable, PPOUT will be TRUE.
             However, we don't want to insert a copy here because the
             expression may not really be redundant.  So only insert
             an insn if the expression was deleted.
             This test also avoids further processing if the expression
             wasn't deleted anywhere.  */
          if (expr->reaching_reg == NULL)
            continue;

          for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
            {
              struct occr *avail;

              if (! occr->deleted_p)
                continue;

              for (avail = expr->avail_occr; avail != NULL; avail = avail->next)
                {
                  rtx insn = avail->insn;
                  int bb = BLOCK_NUM (insn);

                  if (!TEST_BIT (temp_bitmap[bb], expr->bitmap_index))
                    continue;

                  /* No need to handle this one if handled already.  */
                  if (avail->copied_p)
                    continue;
                  /* Don't handle this one if it's a redundant one.  */
                  if (TEST_BIT (pre_redundant_insns, INSN_CUID (insn)))
                    continue;
                  /* Or if the expression doesn't reach the deleted one.  */
                  if (! pre_expr_reaches_here_p (BLOCK_NUM (avail->insn), expr,
                                                 BLOCK_NUM (occr->insn),
                                                 1, NULL))
                    continue;

                  /* Copy the result of avail to reaching_reg.  */
                  pre_insert_copy_insn (expr, insn);
                  avail->copied_p = 1;
                }
            }
        }
    }
}
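/* Illustratively, reusing the 2->3->4 example from above: if the
   expression is computed in blocks 2 and 3 and its occurrence in block
   4 was deleted by pre_delete, the loops above emit a copy to
   expr->reaching_reg after the surviving computations in blocks 2 and 3
   that reach block 4, so the rewritten insn in block 4 reads a register
   that is defined on every incoming path.  */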
/* Delete redundant computations.
   Deletion is done by changing the insn to copy the `reaching_reg' of
   the expression into the result of the SET.  It is left to later passes
   (cprop, cse2, flow, combine, regmove) to propagate the copy or eliminate it.

   Returns non-zero if a change is made.  */

static int
pre_delete ()
{
  int i, bb, changed;

  /* Compute the expressions which are redundant and need to be replaced by
     copies from the reaching reg to the target reg.  */
  for (bb = 0; bb < n_basic_blocks; bb++)
    {
      sbitmap_not (temp_bitmap[bb], pre_optimal[bb]);
      sbitmap_a_and_b (temp_bitmap[bb], temp_bitmap[bb], pre_redundant[bb]);
    }

  changed = 0;
  for (i = 0; i < expr_hash_table_size; i++)
    {
      struct expr *expr;

      for (expr = expr_hash_table[i]; expr != NULL; expr = expr->next_same_hash)
        {
          struct occr *occr;
          int indx = expr->bitmap_index;

          /* We only need to search antic_occr since we require
             ANTLOC != 0.  */

          for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
            {
              rtx insn = occr->insn;
              rtx set;
              int bb = BLOCK_NUM (insn);

              if (TEST_BIT (temp_bitmap[bb], indx))
                {
                  set = single_set (insn);
                  if (! set)
                    abort ();

                  /* Create a pseudo-reg to store the result of reaching
                     expressions into.  Get the mode for the new pseudo
                     from the mode of the original destination pseudo.  */
                  if (expr->reaching_reg == NULL)
                    expr->reaching_reg
                      = gen_reg_rtx (GET_MODE (SET_DEST (set)));

                  /* In theory this should never fail since we're creating
                     a reg->reg copy.

                     However, on the x86 some of the movXX patterns actually
                     contain clobbers of scratch regs.  This may cause the
                     insn created by validate_change to not match any pattern
                     and thus cause validate_change to fail.  */
                  if (validate_change (insn, &SET_SRC (set),
                                       expr->reaching_reg, 0))
                    {
                      occr->deleted_p = 1;
                      SET_BIT (pre_redundant_insns, INSN_CUID (insn));
                      changed = 1;
                      gcse_subst_count++;
                    }

                  if (gcse_file)
                    {
                      fprintf (gcse_file,
                               "PRE: redundant insn %d (expression %d) in bb %d, reaching reg is %d\n",
                               INSN_UID (insn), indx, bb, REGNO (expr->reaching_reg));
                    }
                }
            }
        }
    }

  return changed;
}
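/* Illustratively (register numbers invented): a redundant

        (set (reg 65) (mult (reg 61) (reg 62)))

   becomes, when validate_change succeeds,

        (set (reg 65) (reg 70))

   where (reg 70) is the new pseudo in expr->reaching_reg; as noted
   above, propagating or eliminating the copy is left to cprop, cse2,
   flow, combine and regmove.  */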
/* Perform GCSE optimizations using PRE.
   This is called by one_pre_gcse_pass after all the dataflow analysis
   has been done.

   This is based on the original Morel-Renvoise paper, Fred Chow's thesis,
   and lazy code motion from Knoop, Ruthing and Steffen as described in
   Advanced Compiler Design and Implementation.

   ??? A new pseudo reg is created to hold the reaching expression.
   The nice thing about the classical approach is that it would try to
   use an existing reg.  If the register can't be adequately optimized
   [i.e. we introduce reload problems], one could add a pass here to
   propagate the new register through the block.

   ??? We don't handle single sets in PARALLELs because we're [currently]
   not able to copy the rest of the parallel when we insert copies to create
   full redundancies from partial redundancies.  However, there's no reason
   why we can't handle PARALLELs in the cases where there are no partial
   redundancies.  */

static int
pre_gcse ()
{
  int i;
  int changed;
  struct expr **index_map;

  /* Compute a mapping from expression number (`bitmap_index') to
     hash table entry.  */

  index_map = (struct expr **) alloca (n_exprs * sizeof (struct expr *));
  bzero ((char *) index_map, n_exprs * sizeof (struct expr *));
  for (i = 0; i < expr_hash_table_size; i++)
    {
      struct expr *expr;

      for (expr = expr_hash_table[i]; expr != NULL; expr = expr->next_same_hash)
        index_map[expr->bitmap_index] = expr;
    }

  /* Reset bitmap used to track which insns are redundant.  */
  pre_redundant_insns = sbitmap_alloc (max_cuid);
  sbitmap_zero (pre_redundant_insns);

  /* Delete the redundant insns first so that
     - we know what register to use for the new insns and for the other
       ones with reaching expressions
     - we know which insns are redundant when we go to create copies  */
  changed = pre_delete ();

  /* Insert insns in places that make partially redundant expressions
     fully redundant.  */
  pre_insert (index_map);

  /* In other places with reaching expressions, copy the expression to the
     specially allocated pseudo-reg that reaches the redundant expression.  */
  pre_insert_copies ();

  free (pre_redundant_insns);

  return changed;
}
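/* The index_map built above simply inverts the hash table's
   `bitmap_index' assignment; e.g. if an expression was numbered 3 when
   the hash table was computed, index_map[3] points at its struct expr,
   which is what lets the sbitmap-driven loop in pre_insert translate
   bit positions back into expressions.  */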
/* Top level routine to perform one PRE GCSE pass.

   Return non-zero if a change was made.  */

static int
one_pre_gcse_pass (pass)
     int pass;
{
  int changed = 0;

  gcse_subst_count = 0;
  gcse_create_count = 0;

  alloc_expr_hash_table (max_cuid);
  compute_expr_hash_table ();
  if (gcse_file)
    dump_hash_table (gcse_file, "Expression", expr_hash_table,
                     expr_hash_table_size, n_exprs);
  if (n_exprs > 0)
    {
      alloc_pre_mem (n_basic_blocks, n_exprs);
      compute_pre_data ();
      changed |= pre_gcse ();
      free_pre_mem ();
    }
  free_expr_hash_table ();

  if (gcse_file)
    {
      fprintf (gcse_file, "\n");
      fprintf (gcse_file, "PRE GCSE of %s, pass %d: %d bytes needed, %d substs, %d insns created\n",
               current_function_name, pass,
               bytes_used, gcse_subst_count, gcse_create_count);
    }

  return changed;
}
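/* A caller would typically run this once per pass; a minimal sketch
   (the real driver lives elsewhere in this file, and the pass-count
   limit shown here is invented for illustration):

        pass = 0;
        changed = 1;
        while (changed && pass < max_pre_passes)
          {
            changed = one_pre_gcse_pass (pass + 1);
            pass++;
          }
  */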
/* If X contains any LABEL_REF's, add REG_LABEL notes for them to INSN.
   We have to add REG_LABEL notes, because the following loop optimization
   pass requires them.  */

/* ??? This is very similar to the loop.c add_label_notes function.  We
   could probably share code here.  */

/* ??? If there was a jump optimization pass after gcse and before loop,
   then we would not need to do this here, because jump would add the
   necessary REG_LABEL notes.  */

static void
add_label_notes (x, insn)
     rtx x;
     rtx insn;
{
  enum rtx_code code = GET_CODE (x);
  int i, j;
  char *fmt;

  if (code == LABEL_REF && !LABEL_REF_NONLOCAL_P (x))
    {
      /* This code used to ignore labels that referred to dispatch tables to
         avoid flow generating (slightly) worse code.

         We no longer ignore such label references (see LABEL_REF handling in
         mark_jump_label for additional information).  */
      REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_LABEL, XEXP (x, 0),
                                            REG_NOTES (insn));
      return;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        add_label_notes (XEXP (x, i), insn);
      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          add_label_notes (XVECEXP (x, i, j), insn);
    }
}
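/* For instance (made-up rtl): given

        (set (reg 60) (label_ref L14))

   the walk over the `e' format operands finds the LABEL_REF and
   attaches a (REG_LABEL L14) note to INSN, which is what the following
   loop pass needs in order to see the label use.  */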
/* Compute transparent outgoing information for each block.

   An expression is transparent to an edge unless it is killed by
   the edge itself.  This can only happen with abnormal control flow,
   when the edge is traversed through a call.  This happens with
   non-local labels and exceptions.

   This would not be necessary if we split the edge.  While this is
   normally impossible for abnormal critical edges, with some effort
   it should be possible with exception handling, since we still have
   control over which handler should be invoked.  But due to increased
   EH table sizes, this may not be worthwhile.  */

static void
compute_transpout ()
{
  int bb;

  sbitmap_vector_ones (transpout, n_basic_blocks);

  for (bb = 0; bb < n_basic_blocks; ++bb)
    {
      int i;

      /* Note that flow inserted a nop at the end of basic blocks that
         end in call instructions for reasons other than abnormal
         control flow.  */
      if (GET_CODE (BLOCK_END (bb)) != CALL_INSN)
        continue;

      for (i = 0; i < expr_hash_table_size; i++)
        {
          struct expr *expr;
          for (expr = expr_hash_table[i]; expr ; expr = expr->next_same_hash)
            if (GET_CODE (expr->expr) == MEM)
              {
                rtx addr = XEXP (expr->expr, 0);

                if (GET_CODE (addr) == SYMBOL_REF
                    && CONSTANT_POOL_ADDRESS_P (addr))
                  continue;

                /* ??? Optimally, we would use interprocedural alias
                   analysis to determine if this mem is actually killed
                   by this call.  */
                RESET_BIT (transpout[bb], expr->bitmap_index);