/* Common subexpression elimination for GNU compiler.
   Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

#include "config.h"
/* stdio.h must precede rtl.h for FFS.  */
#include "system.h"
#include "coretypes.h"
#include "tm.h"

#include "rtl.h"
#include "tm_p.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "flags.h"
#include "real.h"
#include "insn-config.h"
#include "recog.h"
#include "function.h"
#include "expr.h"
#include "toplev.h"
#include "output.h"
#include "ggc.h"
#include "timevar.h"
#include "except.h"
#include "target.h"
#include "params.h"
#include "rtlhooks-def.h"

/* The basic idea of common subexpression elimination is to go
   through the code, keeping a record of expressions that would
   have the same value at the current scan point, and replacing
   expressions encountered with the cheapest equivalent expression.

   It is too complicated to keep track of the different possibilities
   when control paths merge in this code; so, at each label, we forget all
   that is known and start fresh.  This can be described as processing each
   extended basic block separately.  We have a separate pass to perform
   global CSE.

   Note CSE can turn a conditional or computed jump into a nop or
   an unconditional jump.  When this occurs we arrange to run the jump
   optimizer after CSE to delete the unreachable code.

   We use two data structures to record the equivalent expressions:
   a hash table for most expressions, and a vector of "quantity
   numbers" to record equivalent (pseudo) registers.

   The use of the special data structure for registers is desirable
   because it is faster.  It is possible because register references
   contain a fairly small number, the register number, taken from
   a contiguously allocated series, and two register references are
   identical if they have the same number.  General expressions
   do not have any such thing, so the only way to retrieve the
   information recorded on an expression other than a register
   is to keep it in a hash table.

   Registers and "quantity numbers":

   At the start of each basic block, all of the (hardware and pseudo)
   registers used in the function are given distinct quantity
   numbers to indicate their contents.  During scan, when the code
   copies one register into another, we copy the quantity number.
   When a register is loaded in any other way, we allocate a new
   quantity number to describe the value generated by this operation.
   `reg_qty' records what quantity a register is currently thought
   of as containing.

   All real quantity numbers are greater than or equal to `max_reg'.
   If register N has not been assigned a quantity, reg_qty[N] will equal N.

   Quantity numbers below `max_reg' do not exist and none of the `qty_table'
   entries should be referenced with an index below `max_reg'.

   We also maintain a bidirectional chain of registers for each
   quantity number.  The `qty_table' members `first_reg' and `last_reg',
   and `reg_eqv_table' members `next' and `prev' hold these chains.

   The first register in a chain is the one whose lifespan is least local.
   Among equals, it is the one that was seen first.
   We replace any equivalent register with that one.

   If two registers have the same quantity number, then REG expressions
   with the quantity's `mode' must be in the hash table for both
   registers and must be in the same class.

   The converse is not true.  Since hard registers may be referenced in
   any mode, two REG expressions might be equivalent in the hash table
   but not have the same quantity number if the quantity of one of the
   registers does not have the same mode as those expressions.

   Constants and quantity numbers:

   When a quantity has a known constant value, that value is stored
   in the appropriate qty_table `const_rtx'.  This is in addition to
   putting the constant in the hash table as is usual for non-regs.

   Whether a reg or a constant is preferred is determined by the
   configuration macro CONST_COSTS and will often depend on the constant
   value.  In any event, expressions containing constants can be
   simplified, by fold_rtx.

   When a quantity has a known nearly constant value (such as an address
   of a stack slot), that value is stored in the appropriate qty_table
   `const_rtx'.

   Integer constants don't have a machine mode.  However, cse
   determines the intended machine mode from the destination
   of the instruction that moves the constant.  The machine mode
   is recorded in the hash table along with the actual RTL
   constant expression so that different modes are kept separate.

   Other expressions:

   To record known equivalences among expressions in general
   we use a hash table called `table'.  It has a fixed number of buckets
   that contain chains of `struct table_elt' elements for expressions.
   These chains connect the elements whose expressions have the same
   hash codes.

   Other chains through the same elements connect the elements which
   currently have equivalent values.

   Register references in an expression are canonicalized before hashing
   the expression.  This is done using `reg_qty' and qty_table `first_reg'.
   The hash code of a register reference is computed using the quantity
   number, not the register number.

   When the value of an expression changes, it is necessary to remove from the
   hash table not just that expression but all expressions whose values
   could be different as a result.

     1. If the value changing is in memory, except in special cases
     ANYTHING referring to memory could be changed.  That is because
     nobody knows where a pointer does not point.
     The function `invalidate_memory' removes what is necessary.

     The special cases are when the address is constant or is
     a constant plus a fixed register such as the frame pointer
     or a static chain pointer.  When such addresses are stored in,
     we can tell exactly which other such addresses must be invalidated
     due to overlap.  `invalidate' does this.
     All expressions that refer to non-constant
     memory addresses are also invalidated.  `invalidate_memory' does this.

     2. If the value changing is a register, all expressions
     containing references to that register, and only those,
     must be removed.

   Because searching the entire hash table for expressions that contain
   a register is very slow, we try to figure out when it isn't necessary.
   Precisely, this is necessary only when expressions have been
   entered in the hash table using this register, and then the value has
   changed, and then another expression wants to be added to refer to
   the register's new value.  This sequence of circumstances is rare
   within any one basic block.

   The vectors `reg_tick' and `reg_in_table' are used to detect this case.
   reg_tick[i] is incremented whenever a value is stored in register i.
   reg_in_table[i] holds -1 if no references to register i have been
   entered in the table; otherwise, it contains the value reg_tick[i] had
   when the references were entered.  If we want to enter a reference
   and reg_in_table[i] != reg_tick[i], we must scan and remove old references.
   Until we want to enter a new entry, the mere fact that the two vectors
   don't match causes the entries to be ignored if anyone tries to match them.
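
   For illustration (an assumed scenario, not actual code in this file):
   suppose pseudo 70 is first mentioned inside (plus (reg 70) (const_int 4));
   entering that PLUS sets reg_in_table[70] = reg_tick[70], say 1.  A later
   store into reg 70 bumps reg_tick[70] to 2 but leaves the stale PLUS in
   the table; the mismatch reg_in_table[70] != reg_tick[70] keeps lookups
   from matching it.  Only when a new expression mentioning reg 70 is about
   to be entered do we pay for the scan that removes the stale entry.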

   Registers themselves are entered in the hash table as well as in
   the equivalent-register chains.  However, the vectors `reg_tick'
   and `reg_in_table' do not apply to expressions which are simple
   register references.  These expressions are removed from the table
   immediately when they become invalid, and this can be done even if
   we do not immediately search for all the expressions that refer to
   the register.

   A CLOBBER rtx in an instruction invalidates its operand for further
   reuse.  A CLOBBER or SET rtx whose operand is a MEM:BLK
   invalidates everything that resides in memory.

   Related expressions:

   Constant expressions that differ only by an additive integer
   are called related.  When a constant expression is put in
   the table, the related expression with no constant term
   is also entered.  These are made to point at each other
   so that it is possible to find out if there exists any
   register equivalent to an expression related to a given expression.  */
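
/* For illustration (example values only): when
   (const (plus (symbol_ref "buf") (const_int 4))) is entered in the
   table, the integer-free form (symbol_ref "buf") is entered too, and
   the two table_elts point at each other through `related_value'.  A
   later lookup for (const (plus (symbol_ref "buf") (const_int 8))) can
   then walk the circular chain to find a register holding a related
   address and derive the wanted value by addition.  */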

/* One plus largest register number used in this function.  */

static int max_reg;

/* One plus largest instruction UID used in this function at time of
   cse_main call.  */

static int max_insn_uid;

/* Length of qty_table vector.  We know in advance we will not need
   a quantity number this big.  */

static int max_qty;

/* Next quantity number to be allocated.
   This is 1 + the largest number needed so far.  */

static int next_qty;

/* Per-qty information tracking.

   `first_reg' and `last_reg' track the head and tail of the
   chain of registers which currently contain this quantity.

   `mode' contains the machine mode of this quantity.

   `const_rtx' holds the rtx of the constant value of this
   quantity, if known.  A summation of the frame/arg pointer
   and a constant can also be entered here.  When this holds
   a known value, `const_insn' is the insn which stored the
   constant value.

   `comparison_{code,const,qty}' are used to track when a
   comparison between a quantity and some constant or register has
   been passed.  In such a case, we know the results of the comparison
   in case we see it again.  These members record a comparison that
   is known to be true.  `comparison_code' holds the rtx code of such
   a comparison, else it is set to UNKNOWN and the other two
   comparison members are undefined.  `comparison_const' holds
   the constant being compared against, or zero if the comparison
   is not against a constant.  `comparison_qty' holds the quantity
   being compared against when the result is known.  If the comparison
   is not with a register, `comparison_qty' is -1.  */

struct qty_table_elem
{
  rtx const_rtx;
  rtx const_insn;
  rtx comparison_const;
  int comparison_qty;
  unsigned int first_reg, last_reg;
  /* The sizes of these fields should match the sizes of the
     code and mode fields of struct rtx_def (see rtl.h).  */
  ENUM_BITFIELD(rtx_code) comparison_code : 16;
  ENUM_BITFIELD(machine_mode) mode : 8;
};

/* The table of all qtys, indexed by qty number.  */
static struct qty_table_elem *qty_table;

#ifdef HAVE_cc0
/* For machines that have a CC0, we do not record its value in the hash
   table since its use is guaranteed to be the insn immediately following
   its definition and any other insn is presumed to invalidate it.

   Instead, we store below the value last assigned to CC0.  If it should
   happen to be a constant, it is stored in preference to the actual
   assigned value.  In case it is a constant, we store the mode in which
   the constant should be interpreted.  */

static rtx prev_insn_cc0;
static enum machine_mode prev_insn_cc0_mode;

/* Previous actual insn.  0 if at first insn of basic block.  */

static rtx prev_insn;
#endif

/* Insn being scanned.  */

static rtx this_insn;

/* Index by register number, gives the number of the next (or
   previous) register in the chain of registers sharing the same
   value.

   Or -1 if this register is at the end of the chain.

   If reg_qty[N] == N, reg_eqv_table[N].next is undefined.  */

/* Per-register equivalence chain.  */
struct reg_eqv_elem
{
  int next, prev;
};

/* The table of all register equivalence chains.  */
static struct reg_eqv_elem *reg_eqv_table;

struct cse_reg_info
{
  /* Next in hash chain.  */
  struct cse_reg_info *hash_next;

  /* The next cse_reg_info structure in the free or used list.  */
  struct cse_reg_info *next;

  /* Search key.  */
  unsigned int regno;

  /* The quantity number of the register's current contents.  */
  int reg_qty;

  /* The number of times the register has been altered in the current
     basic block.  */
  int reg_tick;

  /* The REG_TICK value at which rtx's containing this register are
     valid in the hash table.  If this does not equal the current
     reg_tick value, such expressions existing in the hash table are
     invalid.  */
  int reg_in_table;

  /* The SUBREG that was set when REG_TICK was last incremented.  Set
     to -1 if the last store was to the whole register, not a subreg.  */
  unsigned int subreg_ticked;
};

/* A free list of cse_reg_info entries.  */
static struct cse_reg_info *cse_reg_info_free_list;

/* A used list of cse_reg_info entries.  */
static struct cse_reg_info *cse_reg_info_used_list;
static struct cse_reg_info *cse_reg_info_used_list_end;

/* A mapping from registers to cse_reg_info data structures.  */
#define REGHASH_SHIFT 7
#define REGHASH_SIZE (1 << REGHASH_SHIFT)
#define REGHASH_MASK (REGHASH_SIZE - 1)
static struct cse_reg_info *reg_hash[REGHASH_SIZE];

#define REGHASH_FN(REGNO) \
  (((REGNO) ^ ((REGNO) >> REGHASH_SHIFT)) & REGHASH_MASK)
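
/* For illustration (the register number is hypothetical): with
   REGHASH_SHIFT of 7, register 130 lands in bucket

     REGHASH_FN (130) == ((130 ^ (130 >> 7)) & 127) == (130 ^ 1) & 127 == 3

   so XORing the high bits in spreads large pseudo numbers across all
   REGHASH_SIZE buckets instead of only the low-numbered ones.  */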

/* The last lookup we did into the cse_reg_info hash table (reg_hash).
   This allows us to cache repeated lookups.  */
static unsigned int cached_regno;
static struct cse_reg_info *cached_cse_reg_info;

/* A HARD_REG_SET containing all the hard registers for which there is
   currently a REG expression in the hash table.  Note the difference
   from the above variables, which indicate if the REG is mentioned in some
   expression in the table.  */

static HARD_REG_SET hard_regs_in_table;

/* CUID of insn that starts the basic block currently being cse-processed.  */

static int cse_basic_block_start;

/* CUID of insn that ends the basic block currently being cse-processed.  */

static int cse_basic_block_end;

/* Vector mapping INSN_UIDs to cuids.
   The cuids are like uids but increase monotonically always.
   We use them to see whether a reg is used outside a given basic block.  */

static int *uid_cuid;

/* Highest UID in UID_CUID.  */
static int max_uid;

/* Get the cuid of an insn.  */

#define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])

/* Nonzero if this pass has made changes, and therefore it's
   worthwhile to run the garbage collector.  */

static int cse_altered;

/* Nonzero if cse has altered conditional jump insns
   in such a way that jump optimization should be redone.  */

static int cse_jumps_altered;

/* Nonzero if we put a LABEL_REF into the hash table for an INSN without a
   REG_LABEL note; in that case we must rerun jump after CSE to put in
   the note.  */
static int recorded_label_ref;

/* canon_hash stores 1 in do_not_record
   if it notices a reference to CC0, PC, or some other volatile
   subexpression.  */

static int do_not_record;

#ifdef LOAD_EXTEND_OP

/* Scratch rtl used when looking for load-extended copy of a MEM.  */
static rtx memory_extend_rtx;
#endif

/* canon_hash stores 1 in hash_arg_in_memory
   if it notices a reference to memory within the expression being hashed.  */

static int hash_arg_in_memory;

/* The hash table contains buckets which are chains of `struct table_elt's,
   each recording one expression's information.
   That expression is in the `exp' field.

   The `canon_exp' field contains a canonical (from the point of view of
   alias analysis) version of the `exp' field.

   Those elements with the same hash code are chained in both directions
   through the `next_same_hash' and `prev_same_hash' fields.

   Each set of expressions with equivalent values
   are on a two-way chain through the `next_same_value'
   and `prev_same_value' fields, and all point with
   the `first_same_value' field at the first element in
   that chain.  The chain is in order of increasing cost.
   Each element's cost value is in its `cost' field.

   The `in_memory' field is nonzero for elements that
   involve any reference to memory.  These elements are removed
   whenever a write is done to an unidentified location in memory.
   To be safe, we assume that a memory address is unidentified unless
   the address is either a symbol constant or a constant plus
   the frame pointer or argument pointer.

   The `related_value' field is used to connect related expressions
   (that differ by adding an integer).
   The related expressions are chained in a circular fashion.
   `related_value' is zero for expressions for which this
   chain is not useful.

   The `cost' field stores the cost of this element's expression.
   The `regcost' field stores the value returned by approx_reg_cost for
   this element's expression.

   The `is_const' flag is set if the element is a constant (including
   a fixed address).

   The `flag' field is used as a temporary during some search routines.

   The `mode' field is usually the same as GET_MODE (`exp'), but
   if `exp' is a CONST_INT and has no machine mode then the `mode'
   field is the mode it was being used as.  Each constant is
   recorded separately for each mode it is used with.  */

struct table_elt
{
  rtx exp;
  rtx canon_exp;
  struct table_elt *next_same_hash;
  struct table_elt *prev_same_hash;
  struct table_elt *next_same_value;
  struct table_elt *prev_same_value;
  struct table_elt *first_same_value;
  struct table_elt *related_value;
  int cost;
  int regcost;
  /* The size of this field should match the size
     of the mode field of struct rtx_def (see rtl.h).  */
  ENUM_BITFIELD(machine_mode) mode : 8;
  char in_memory;
  char is_const;
  char flag;
};

/* We don't want a lot of buckets, because we rarely have very many
   things stored in the hash table, and a lot of buckets slows
   down a lot of loops that happen frequently.  */
#define HASH_SHIFT 5
#define HASH_SIZE (1 << HASH_SHIFT)
#define HASH_MASK (HASH_SIZE - 1)

/* Compute hash code of X in mode M.  Special-case the case where X is a
   pseudo register (hard registers may require `do_not_record' to be set).  */

#define HASH(X, M)						\
 ((REG_P (X) && REGNO (X) >= FIRST_PSEUDO_REGISTER		\
  ? (((unsigned) REG << 7) + (unsigned) REG_QTY (REGNO (X)))	\
  : canon_hash (X, M)) & HASH_MASK)
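
/* For illustration (hypothetical numbers): a pseudo register whose
   quantity number is 200 hashes to

     (((unsigned) REG << 7) + 200) & HASH_MASK

   i.e. the bucket depends on the quantity, not the register number, so
   two pseudos currently known to hold the same value hash identically,
   matching the canonicalization described in the header comment.  */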

/* Determine whether register number N is considered a fixed register for the
   purpose of approximating register costs.
   It is desirable to replace other regs with fixed regs, to reduce need for
   non-fixed hard regs.
   A reg wins if it is either the frame pointer or designated as fixed.  */
#define FIXED_REGNO_P(N)  \
  ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
   || fixed_regs[N] || global_regs[N])

/* Compute cost of X, as stored in the `cost' field of a table_elt.  Fixed
   hard registers and pointers into the frame are the cheapest with a cost
   of 0.  Next come pseudos with a cost of one and other hard registers with
   a cost of 2.  Aside from these special cases, call `rtx_cost'.  */

#define CHEAP_REGNO(N) \
  ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM	\
   || (N) == STACK_POINTER_REGNUM || (N) == ARG_POINTER_REGNUM		\
   || ((N) >= FIRST_VIRTUAL_REGISTER && (N) <= LAST_VIRTUAL_REGISTER)	\
   || ((N) < FIRST_PSEUDO_REGISTER					\
       && FIXED_REGNO_P (N) && REGNO_REG_CLASS (N) != NO_REGS))

#define COST(X) (REG_P (X) ? 0 : notreg_cost (X, SET))
#define COST_IN(X,OUTER) (REG_P (X) ? 0 : notreg_cost (X, OUTER))

/* Get the info associated with register N.  */

#define GET_CSE_REG_INFO(N)			\
  (((N) == cached_regno && cached_cse_reg_info)	\
   ? cached_cse_reg_info : get_cse_reg_info ((N)))

/* Get the number of times this register has been updated in this
   basic block.  */

#define REG_TICK(N) ((GET_CSE_REG_INFO (N))->reg_tick)

/* Get the point at which REG was recorded in the table.  */

#define REG_IN_TABLE(N) ((GET_CSE_REG_INFO (N))->reg_in_table)

/* Get the SUBREG set at the last increment to REG_TICK (-1 if not a
   SUBREG).  */

#define SUBREG_TICKED(N) ((GET_CSE_REG_INFO (N))->subreg_ticked)

/* Get the quantity number for REG.  */

#define REG_QTY(N) ((GET_CSE_REG_INFO (N))->reg_qty)

/* Determine if the quantity number for register X represents a valid index
   into the qty_table.  */

#define REGNO_QTY_VALID_P(N) (REG_QTY (N) != (int) (N))
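
/* For illustration: get_cse_reg_info initializes reg_qty of register N
   to N itself, so right after new_basic_block REGNO_QTY_VALID_P (N) is
   false for every register; it becomes true only once make_new_qty
   assigns N a real quantity number, which is always >= max_reg.  */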

static struct table_elt *table[HASH_SIZE];

/* Chain of `struct table_elt's made so far for this function
   but currently removed from the table.  */

static struct table_elt *free_element_chain;

/* Number of `struct table_elt' structures made so far for this function.  */

static int n_elements_made;

/* Maximum value `n_elements_made' has had so far in this compilation
   for functions previously processed.  */

static int max_elements_made;

/* Surviving equivalence class when two equivalence classes are merged
   by recording the effects of a jump in the last insn.  Zero if the
   last insn was not a conditional jump.  */

static struct table_elt *last_jump_equiv_class;

/* Set to the cost of a constant pool reference if one was found for a
   symbolic constant.  If this was found, it means we should try to
   convert constants into constant pool entries if they don't fit in
   the insn.  */

static int constant_pool_entries_cost;
static int constant_pool_entries_regcost;

/* This data describes a block that will be processed by cse_basic_block.  */

struct cse_basic_block_data
{
  /* Lowest CUID value of insns in block.  */
  int low_cuid;
  /* Highest CUID value of insns in block.  */
  int high_cuid;
  /* Total number of SETs in block.  */
  int nsets;
  /* Last insn in the block.  */
  rtx last;
  /* Size of current branch path, if any.  */
  int path_size;
  /* Current branch path, indicating which branches will be taken.  */
  struct branch_path
    {
      /* The branch insn.  */
      rtx branch;
      /* Whether it should be taken or not.  AROUND is the same as taken
	 except that it is used when the destination label is not preceded
	 by a BARRIER.  */
      enum taken {PATH_TAKEN, PATH_NOT_TAKEN, PATH_AROUND} status;
    } *path;
};

static bool fixed_base_plus_p (rtx x);
static int notreg_cost (rtx, enum rtx_code);
static int approx_reg_cost_1 (rtx *, void *);
static int approx_reg_cost (rtx);
static int preferable (int, int, int, int);
static void new_basic_block (void);
static void make_new_qty (unsigned int, enum machine_mode);
static void make_regs_eqv (unsigned int, unsigned int);
static void delete_reg_equiv (unsigned int);
static int mention_regs (rtx);
static int insert_regs (rtx, struct table_elt *, int);
static void remove_from_table (struct table_elt *, unsigned);
static struct table_elt *lookup (rtx, unsigned, enum machine_mode);
static struct table_elt *lookup_for_remove (rtx, unsigned, enum machine_mode);
static rtx lookup_as_function (rtx, enum rtx_code);
static struct table_elt *insert (rtx, struct table_elt *, unsigned,
				 enum machine_mode);
static void merge_equiv_classes (struct table_elt *, struct table_elt *);
static void invalidate (rtx, enum machine_mode);
static int cse_rtx_varies_p (rtx, int);
static void remove_invalid_refs (unsigned int);
static void remove_invalid_subreg_refs (unsigned int, unsigned int,
					enum machine_mode);
static void rehash_using_reg (rtx);
static void invalidate_memory (void);
static void invalidate_for_call (void);
static rtx use_related_value (rtx, struct table_elt *);
static unsigned canon_hash (rtx, enum machine_mode);
static unsigned canon_hash_string (const char *);
static unsigned safe_hash (rtx, enum machine_mode);
static int exp_equiv_p (rtx, rtx, int, int);
static rtx canon_reg (rtx, rtx);
static void find_best_addr (rtx, rtx *, enum machine_mode);
static enum rtx_code find_comparison_args (enum rtx_code, rtx *, rtx *,
					   enum machine_mode *,
					   enum machine_mode *);
static rtx fold_rtx (rtx, rtx);
static rtx equiv_constant (rtx);
static void record_jump_equiv (rtx, int);
static void record_jump_cond (enum rtx_code, enum machine_mode, rtx, rtx,
			      int);
static void cse_insn (rtx, rtx);
static void cse_end_of_basic_block (rtx, struct cse_basic_block_data *,
				    int, int, int);
static int addr_affects_sp_p (rtx);
static void invalidate_from_clobbers (rtx);
static rtx cse_process_notes (rtx, rtx);
static void cse_around_loop (rtx);
static void invalidate_skipped_set (rtx, rtx, void *);
static void invalidate_skipped_block (rtx);
static void cse_check_loop_start (rtx, rtx, void *);
static void cse_set_around_loop (rtx, rtx, rtx);
static rtx cse_basic_block (rtx, rtx, struct branch_path *, int);
static void count_reg_usage (rtx, int *, int);
static int check_for_label_ref (rtx *, void *);
extern void dump_class (struct table_elt*);
static struct cse_reg_info * get_cse_reg_info (unsigned int);
static int check_dependence (rtx *, void *);

static void flush_hash_table (void);
static bool insn_live_p (rtx, int *);
static bool set_live_p (rtx, rtx, int *);
static bool dead_libcall_p (rtx, int *);
static int cse_change_cc_mode (rtx *, void *);
static void cse_change_cc_mode_insns (rtx, rtx, rtx);
static enum machine_mode cse_cc_succs (basic_block, rtx, rtx, bool);

#undef RTL_HOOKS_GEN_LOWPART
#define RTL_HOOKS_GEN_LOWPART gen_lowpart_if_possible

static const struct rtl_hooks cse_rtl_hooks = RTL_HOOKS_INITIALIZER;

/* Nonzero if X has the form (PLUS frame-pointer integer).  We check for
   virtual regs here because the simplify_*_operation routines are called
   by integrate.c, which is called before virtual register instantiation.  */

static bool
fixed_base_plus_p (rtx x)
{
  switch (GET_CODE (x))
    {
    case REG:
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx)
	return true;
      if (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])
	return true;
      if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
	  && REGNO (x) <= LAST_VIRTUAL_REGISTER)
	return true;
      return false;

    case PLUS:
      if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	return false;
      return fixed_base_plus_p (XEXP (x, 0));

    default:
      return false;
    }
}

/* Dump the expressions in the equivalence class indicated by CLASSP.
   This function is used only for debugging.  */
void
dump_class (struct table_elt *classp)
{
  struct table_elt *elt;

  fprintf (stderr, "Equivalence chain for ");
  print_rtl (stderr, classp->exp);
  fprintf (stderr, ": \n");

  for (elt = classp->first_same_value; elt; elt = elt->next_same_value)
    {
      print_rtl (stderr, elt->exp);
      fprintf (stderr, "\n");
    }
}

/* Subroutine of approx_reg_cost; called through for_each_rtx.  */

static int
approx_reg_cost_1 (rtx *xp, void *data)
{
  rtx x = *xp;
  int *cost_p = data;

  if (x && REG_P (x))
    {
      unsigned int regno = REGNO (x);

      if (! CHEAP_REGNO (regno))
	{
	  if (regno < FIRST_PSEUDO_REGISTER)
	    {
	      if (SMALL_REGISTER_CLASSES)
		return 1;
	      *cost_p += 2;
	    }
	  else
	    *cost_p += 1;
	}
    }

  return 0;
}

/* Return an estimate of the cost of the registers used in an rtx.
   This is mostly the number of different REG expressions in the rtx;
   however for some exceptions like fixed registers we use a cost of
   0.  If any other hard register reference occurs, return MAX_COST.  */

static int
approx_reg_cost (rtx x)
{
  int cost = 0;

  if (for_each_rtx (&x, approx_reg_cost_1, (void *) &cost))
    return MAX_COST;

  return cost;
}

/* Return a negative value if an rtx A, whose costs are given by COST_A
   and REGCOST_A, is more desirable than an rtx B.
   Return a positive value if A is less desirable, or 0 if the two are
   equally good.  */
static int
preferable (int cost_a, int regcost_a, int cost_b, int regcost_b)
{
  /* First, get rid of cases involving expressions that are entirely
     unwanted.  */
  if (cost_a != cost_b)
    {
      if (cost_a == MAX_COST)
	return 1;
      if (cost_b == MAX_COST)
	return -1;
    }

  /* Avoid extending lifetimes of hardregs.  */
  if (regcost_a != regcost_b)
    {
      if (regcost_a == MAX_COST)
	return 1;
      if (regcost_b == MAX_COST)
	return -1;
    }

  /* Normal operation costs take precedence.  */
  if (cost_a != cost_b)
    return cost_a - cost_b;
  /* Only if these are identical consider effects on register pressure.  */
  if (regcost_a != regcost_b)
    return regcost_a - regcost_b;
  return 0;
}
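
/* For illustration (hypothetical cost values): preferable (4, 1, 4, 3)
   is negative, so of two expressions with equal rtx cost the one
   touching fewer non-fixed registers wins; preferable (MAX_COST, 0, 10,
   MAX_COST) is positive, since a MAX_COST expression cost disqualifies
   A outright before register pressure is even considered.  */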

/* Internal function, to compute cost when X is not a register; called
   from COST macro to keep it simple.  */

static int
notreg_cost (rtx x, enum rtx_code outer)
{
  return ((GET_CODE (x) == SUBREG
	   && REG_P (SUBREG_REG (x))
	   && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
	   && GET_MODE_CLASS (GET_MODE (SUBREG_REG (x))) == MODE_INT
	   && (GET_MODE_SIZE (GET_MODE (x))
	       < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
	   && subreg_lowpart_p (x)
	   && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (GET_MODE (x)),
				     GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))))
	  ? 0
	  : rtx_cost (x, outer) * 2);
}

static struct cse_reg_info *
get_cse_reg_info (unsigned int regno)
{
  struct cse_reg_info **hash_head = &reg_hash[REGHASH_FN (regno)];
  struct cse_reg_info *p;

  for (p = *hash_head; p != NULL; p = p->hash_next)
    if (p->regno == regno)
      break;

  if (p == NULL)
    {
      /* Get a new cse_reg_info structure.  */
      if (cse_reg_info_free_list)
	{
	  p = cse_reg_info_free_list;
	  cse_reg_info_free_list = p->next;
	}
      else
	p = xmalloc (sizeof (struct cse_reg_info));

      /* Insert into hash table.  */
      p->hash_next = *hash_head;
      *hash_head = p;

      /* Initialize it.  */
      p->reg_tick = 1;
      p->reg_in_table = -1;
      p->subreg_ticked = -1;
      p->reg_qty = regno;
      p->regno = regno;
      p->next = cse_reg_info_used_list;
      cse_reg_info_used_list = p;
      if (!cse_reg_info_used_list_end)
	cse_reg_info_used_list_end = p;
    }

  /* Cache this lookup; we tend to be looking up information about the
     same register several times in a row.  */
  cached_regno = regno;
  cached_cse_reg_info = p;

  return p;
}

/* Clear the hash table and initialize each register with its own quantity,
   for a new basic block.  */

static void
new_basic_block (void)
{
  int i;

  next_qty = max_reg;

  /* Clear out hash table state for this pass.  */

  memset (reg_hash, 0, sizeof reg_hash);

  if (cse_reg_info_used_list)
    {
      cse_reg_info_used_list_end->next = cse_reg_info_free_list;
      cse_reg_info_free_list = cse_reg_info_used_list;
      cse_reg_info_used_list = cse_reg_info_used_list_end = 0;
    }
  cached_cse_reg_info = 0;

  CLEAR_HARD_REG_SET (hard_regs_in_table);

  /* The per-quantity values used to be initialized here, but it is
     much faster to initialize each as it is made in `make_new_qty'.  */

  for (i = 0; i < HASH_SIZE; i++)
    {
      struct table_elt *first;

      first = table[i];
      if (first != NULL)
	{
	  struct table_elt *last = first;

	  table[i] = NULL;

	  while (last->next_same_hash != NULL)
	    last = last->next_same_hash;

	  /* Now relink this entire hash chain into
	     the free element list.  */

	  last->next_same_hash = free_element_chain;
	  free_element_chain = first;
	}
    }

#ifdef HAVE_cc0
  prev_insn = 0;
  prev_insn_cc0 = 0;
#endif
}

/* Say that register REG contains a quantity in mode MODE not in any
   register before and initialize that quantity.  */

static void
make_new_qty (unsigned int reg, enum machine_mode mode)
{
  int q;
  struct qty_table_elem *ent;
  struct reg_eqv_elem *eqv;

  if (next_qty >= max_qty)
    abort ();

  q = REG_QTY (reg) = next_qty++;
  ent = &qty_table[q];
  ent->first_reg = reg;
  ent->last_reg = reg;
  ent->mode = mode;
  ent->const_rtx = ent->const_insn = NULL_RTX;
  ent->comparison_code = UNKNOWN;

  eqv = &reg_eqv_table[reg];
  eqv->next = eqv->prev = -1;
}

/* Make reg NEW equivalent to reg OLD.
   OLD is not changing; NEW is.  */

static void
make_regs_eqv (unsigned int new, unsigned int old)
{
  unsigned int lastr, firstr;
  int q = REG_QTY (old);
  struct qty_table_elem *ent;

  ent = &qty_table[q];

  /* Nothing should become eqv until it has a "non-invalid" qty number.  */
  if (! REGNO_QTY_VALID_P (old))
    abort ();

  REG_QTY (new) = q;
  firstr = ent->first_reg;
  lastr = ent->last_reg;

  /* Prefer fixed hard registers to anything.  Prefer pseudo regs to other
     hard regs.  Among pseudos, if NEW will live longer than any other reg
     of the same qty, and that is beyond the current basic block,
     make it the new canonical replacement for this qty.  */
  if (! (firstr < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (firstr))
      /* Certain fixed registers might be of the class NO_REGS.  This means
	 that not only can they not be allocated by the compiler, but
	 they cannot be used in substitutions or canonicalizations
	 either.  */
      && (new >= FIRST_PSEUDO_REGISTER || REGNO_REG_CLASS (new) != NO_REGS)
      && ((new < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (new))
	  || (new >= FIRST_PSEUDO_REGISTER
	      && (firstr < FIRST_PSEUDO_REGISTER
		  || ((uid_cuid[REGNO_LAST_UID (new)] > cse_basic_block_end
		       || (uid_cuid[REGNO_FIRST_UID (new)]
			   < cse_basic_block_start))
		      && (uid_cuid[REGNO_LAST_UID (new)]
			  > uid_cuid[REGNO_LAST_UID (firstr)]))))))
    {
      reg_eqv_table[firstr].prev = new;
      reg_eqv_table[new].next = firstr;
      reg_eqv_table[new].prev = -1;
      ent->first_reg = new;
    }
  else
    {
      /* If NEW is a hard reg (known to be non-fixed), insert at end.
	 Otherwise, insert before any non-fixed hard regs that are at the
	 end.  Registers of class NO_REGS cannot be used as an
	 equivalent for anything.  */
      while (lastr < FIRST_PSEUDO_REGISTER && reg_eqv_table[lastr].prev >= 0
	     && (REGNO_REG_CLASS (lastr) == NO_REGS || ! FIXED_REGNO_P (lastr))
	     && new >= FIRST_PSEUDO_REGISTER)
	lastr = reg_eqv_table[lastr].prev;
      reg_eqv_table[new].next = reg_eqv_table[lastr].next;
      if (reg_eqv_table[lastr].next >= 0)
	reg_eqv_table[reg_eqv_table[lastr].next].prev = new;
      else
	qty_table[q].last_reg = new;
      reg_eqv_table[lastr].next = new;
      reg_eqv_table[new].prev = lastr;
    }
}

/* Remove REG from its equivalence class.  */

static void
delete_reg_equiv (unsigned int reg)
{
  struct qty_table_elem *ent;
  int q = REG_QTY (reg);
  int p, n;

  /* If invalid, do nothing.  */
  if (q == (int) reg)
    return;

  ent = &qty_table[q];

  p = reg_eqv_table[reg].prev;
  n = reg_eqv_table[reg].next;

  if (n != -1)
    reg_eqv_table[n].prev = p;
  else
    ent->last_reg = p;
  if (p != -1)
    reg_eqv_table[p].next = n;
  else
    ent->first_reg = n;

  REG_QTY (reg) = reg;
}

/* Remove any invalid expressions from the hash table
   that refer to any of the registers contained in expression X.

   Make sure that newly inserted references to those registers
   as subexpressions will be considered valid.

   mention_regs is not called when a register itself
   is being stored in the table.

   Return 1 if we have done something that may have changed the hash code
   of X.  */

static int
mention_regs (rtx x)
{
  enum rtx_code code;
  int i, j;
  const char *fmt;
  int changed = 0;

  if (x == 0)
    return 0;

  code = GET_CODE (x);
  if (code == REG)
    {
      unsigned int regno = REGNO (x);
      unsigned int endregno
	= regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
		   : hard_regno_nregs[regno][GET_MODE (x)]);
      unsigned int i;

      for (i = regno; i < endregno; i++)
	{
	  if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
	    remove_invalid_refs (i);

	  REG_IN_TABLE (i) = REG_TICK (i);
	  SUBREG_TICKED (i) = -1;
	}

      return 0;
    }

  /* If this is a SUBREG, we don't want to discard other SUBREGs of the same
     pseudo if they don't use overlapping words.  We handle only pseudos
     here for simplicity.  */
  if (code == SUBREG && REG_P (SUBREG_REG (x))
      && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER)
    {
      unsigned int i = REGNO (SUBREG_REG (x));

      if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
	{
	  /* If REG_IN_TABLE (i) differs from REG_TICK (i) by one, and
	     the last store to this register really stored into this
	     subreg, then remove the memory of this subreg.
	     Otherwise, remove any memory of the entire register and
	     all its subregs from the table.  */
	  if (REG_TICK (i) - REG_IN_TABLE (i) > 1
	      || SUBREG_TICKED (i) != REGNO (SUBREG_REG (x)))
	    remove_invalid_refs (i);
	  else
	    remove_invalid_subreg_refs (i, SUBREG_BYTE (x), GET_MODE (x));
	}

      REG_IN_TABLE (i) = REG_TICK (i);
      SUBREG_TICKED (i) = REGNO (SUBREG_REG (x));
      return 0;
    }

  /* If X is a comparison or a COMPARE and either operand is a register
     that does not have a quantity, give it one.  This is so that a later
     call to record_jump_equiv won't cause X to be assigned a different
     hash code and not found in the table after that call.

     It is not necessary to do this here, since rehash_using_reg can
     fix up the table later, but doing this here eliminates the need to
     call that expensive function in the most common case where the only
     use of the register is in the comparison.  */

  if (code == COMPARE || COMPARISON_P (x))
    {
      if (REG_P (XEXP (x, 0))
	  && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
	if (insert_regs (XEXP (x, 0), NULL, 0))
	  {
	    rehash_using_reg (XEXP (x, 0));
	    changed = 1;
	  }

      if (REG_P (XEXP (x, 1))
	  && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
	if (insert_regs (XEXP (x, 1), NULL, 0))
	  {
	    rehash_using_reg (XEXP (x, 1));
	    changed = 1;
	  }
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      changed |= mention_regs (XEXP (x, i));
    else if (fmt[i] == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
	changed |= mention_regs (XVECEXP (x, i, j));

  return changed;
}

/* Update the register quantities for inserting X into the hash table
   with a value equivalent to CLASSP.
   (If the class does not contain a REG, it is irrelevant.)
   If MODIFIED is nonzero, X is a destination; it is being modified.
   Note that delete_reg_equiv should be called on a register
   before insert_regs is done on that register with MODIFIED != 0.

   Nonzero value means that elements of reg_qty have changed
   so X's hash code may be different.  */

static int
insert_regs (rtx x, struct table_elt *classp, int modified)
{
  if (REG_P (x))
    {
      unsigned int regno = REGNO (x);
      int qty_valid;

      /* If REGNO is in the equivalence table already but is of the
	 wrong mode for that equivalence, don't do anything here.  */

      qty_valid = REGNO_QTY_VALID_P (regno);
      if (qty_valid)
	{
	  struct qty_table_elem *ent = &qty_table[REG_QTY (regno)];

	  if (ent->mode != GET_MODE (x))
	    return 0;
	}

      if (modified || ! qty_valid)
	{
	  if (classp)
	    for (classp = classp->first_same_value;
		 classp != 0;
		 classp = classp->next_same_value)
	      if (REG_P (classp->exp)
		  && GET_MODE (classp->exp) == GET_MODE (x))
		{
		  make_regs_eqv (regno, REGNO (classp->exp));
		  return 1;
		}

	  /* Mention_regs for a SUBREG checks if REG_TICK is exactly one larger
	     than REG_IN_TABLE to find out if there was only a single preceding
	     invalidation - for the SUBREG - or another one, which would be
	     for the full register.  However, if we find here that REG_TICK
	     indicates that the register is invalid, it means that it has
	     been invalidated in a separate operation.  The SUBREG might be used
	     now (then this is a recursive call), or we might use the full REG
	     now and a SUBREG of it later.  So bump up REG_TICK so that
	     mention_regs will do the right thing.  */
	  if (! modified
	      && REG_IN_TABLE (regno) >= 0
	      && REG_TICK (regno) == REG_IN_TABLE (regno) + 1)
	    REG_TICK (regno)++;
	  make_new_qty (regno, GET_MODE (x));
	  return 1;
	}

      return 0;
    }

  /* If X is a SUBREG, we will likely be inserting the inner register in the
     table.  If that register doesn't have an assigned quantity number at
     this point but does later, the insertion that we will be doing now will
     not be accessible because its hash code will have changed.  So assign
     a quantity number now.  */

  else if (GET_CODE (x) == SUBREG && REG_P (SUBREG_REG (x))
	   && ! REGNO_QTY_VALID_P (REGNO (SUBREG_REG (x))))
    {
      insert_regs (SUBREG_REG (x), NULL, 0);
      mention_regs (x);
      return 1;
    }
  else
    return mention_regs (x);
}

/* Look in or update the hash table.  */

/* Remove table element ELT from use in the table.
   HASH is its hash code, made using the HASH macro.
   It's an argument because often that is known in advance
   and we save much time not recomputing it.  */

static void
remove_from_table (struct table_elt *elt, unsigned int hash)
{
  if (elt == 0)
    return;

  /* Mark this element as removed.  See cse_insn.  */
  elt->first_same_value = 0;

  /* Remove the table element from its equivalence class.  */

  {
    struct table_elt *prev = elt->prev_same_value;
    struct table_elt *next = elt->next_same_value;

    if (next)
      next->prev_same_value = prev;

    if (prev)
      prev->next_same_value = next;
    else
      {
	struct table_elt *newfirst = next;
	while (next)
	  {
	    next->first_same_value = newfirst;
	    next = next->next_same_value;
	  }
      }
  }

  /* Remove the table element from its hash bucket.  */

  {
    struct table_elt *prev = elt->prev_same_hash;
    struct table_elt *next = elt->next_same_hash;

    if (next)
      next->prev_same_hash = prev;

    if (prev)
      prev->next_same_hash = next;
    else if (table[hash] == elt)
      table[hash] = next;
    else
      {
	/* This entry is not in the proper hash bucket.  This can happen
	   when two classes were merged by `merge_equiv_classes'.  Search
	   for the hash bucket that it heads.  This happens only very
	   rarely, so the cost is acceptable.  */
	for (hash = 0; hash < HASH_SIZE; hash++)
	  if (table[hash] == elt)
	    table[hash] = next;
      }
  }

  /* Remove the table element from its related-value circular chain.  */

  if (elt->related_value != 0 && elt->related_value != elt)
    {
      struct table_elt *p = elt->related_value;

      while (p->related_value != elt)
	p = p->related_value;
      p->related_value = elt->related_value;
      if (p->related_value == p)
	p->related_value = 0;
    }

  /* Now add it to the free element chain.  */
  elt->next_same_hash = free_element_chain;
  free_element_chain = elt;
}

/* Look up X in the hash table and return its table element,
   or 0 if X is not in the table.

   MODE is the machine-mode of X, or if X is an integer constant
   with VOIDmode then MODE is the mode with which X will be used.

   Here we are satisfied to find an expression whose tree structure
   looks like X.  */

static struct table_elt *
lookup (rtx x, unsigned int hash, enum machine_mode mode)
{
  struct table_elt *p;

  for (p = table[hash]; p; p = p->next_same_hash)
    if (mode == p->mode && ((x == p->exp && REG_P (x))
			    || exp_equiv_p (x, p->exp, !REG_P (x), 0)))
      return p;

  return 0;
}

/* Like `lookup' but don't care whether the table element uses invalid regs.
   Also ignore discrepancies in the machine mode of a register.  */

static struct table_elt *
lookup_for_remove (rtx x, unsigned int hash, enum machine_mode mode)
{
  struct table_elt *p;

  if (REG_P (x))
    {
      unsigned int regno = REGNO (x);

      /* Don't check the machine mode when comparing registers;
	 invalidating (REG:SI 0) also invalidates (REG:DF 0).  */
      for (p = table[hash]; p; p = p->next_same_hash)
	if (REG_P (p->exp)
	    && REGNO (p->exp) == regno)
	  return p;
    }
  else
    {
      for (p = table[hash]; p; p = p->next_same_hash)
	if (mode == p->mode && (x == p->exp || exp_equiv_p (x, p->exp, 0, 0)))
	  return p;
    }

  return 0;
}

/* Look for an expression equivalent to X and with code CODE.
   If one is found, return that expression.  */

static rtx
lookup_as_function (rtx x, enum rtx_code code)
{
  struct table_elt *p
    = lookup (x, safe_hash (x, VOIDmode) & HASH_MASK, GET_MODE (x));

  /* If we are looking for a CONST_INT, the mode doesn't really matter, as
     long as we are narrowing.  So if we looked in vain for a mode narrower
     than word_mode before, look for word_mode now.  */
  if (p == 0 && code == CONST_INT
      && GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (word_mode))
    {
      x = copy_rtx (x);
      PUT_MODE (x, word_mode);
      p = lookup (x, safe_hash (x, VOIDmode) & HASH_MASK, word_mode);
    }

  if (p == 0)
    return 0;

  for (p = p->first_same_value; p; p = p->next_same_value)
    if (GET_CODE (p->exp) == code
	/* Make sure this is a valid entry in the table.  */
	&& exp_equiv_p (p->exp, p->exp, 1, 0))
      return p->exp;

  return 0;
}

/* Insert X in the hash table, assuming HASH is its hash code
   and CLASSP is an element of the class it should go in
   (or 0 if a new class should be made).
   It is inserted at the proper position to keep the class in
   the order cheapest first.

   MODE is the machine-mode of X, or if X is an integer constant
   with VOIDmode then MODE is the mode with which X will be used.

   For elements of equal cheapness, the most recent one
   goes in front, except that the first element in the list
   remains first unless a cheaper element is added.  The order of
   pseudo-registers does not matter, as canon_reg will be called to
   find the cheapest when a register is retrieved from the table.

   The in_memory field in the hash table element is set to 0.
   The caller must set it nonzero if appropriate.

   You should call insert_regs (X, CLASSP, MODIFY) before calling here,
   and if insert_regs returns a nonzero value
   you must then recompute its hash code before calling here.

   If necessary, update table showing constant values of quantities.  */

#define CHEAPER(X, Y) \
 (preferable ((X)->cost, (X)->regcost, (Y)->cost, (Y)->regcost) < 0)
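
/* For illustration (hypothetical elements): if X has cost 1 (say, a
   pseudo) and Y has cost 4 with equal regcost, CHEAPER (X, Y) is true
   because preferable (1, r, 4, r) == 1 - 4 < 0, so X would be placed
   ahead of Y in the same-value chain below.  */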

static struct table_elt *
insert (rtx x, struct table_elt *classp, unsigned int hash, enum machine_mode mode)
{
  struct table_elt *elt;

  /* If X is a register and we haven't made a quantity for it,
     something is wrong.  */
  if (REG_P (x) && ! REGNO_QTY_VALID_P (REGNO (x)))
    abort ();

  /* If X is a hard register, show it is being put in the table.  */
  if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
    {
      unsigned int regno = REGNO (x);
      unsigned int endregno = regno + hard_regno_nregs[regno][GET_MODE (x)];
      unsigned int i;

      for (i = regno; i < endregno; i++)
	SET_HARD_REG_BIT (hard_regs_in_table, i);
    }

  /* Put an element for X into the right hash bucket.  */

  elt = free_element_chain;
  if (elt)
    free_element_chain = elt->next_same_hash;
  else
    {
      n_elements_made++;
      elt = xmalloc (sizeof (struct table_elt));
    }

  elt->exp = x;
  elt->canon_exp = NULL_RTX;
  elt->cost = COST (x);
  elt->regcost = approx_reg_cost (x);
  elt->next_same_value = 0;
  elt->prev_same_value = 0;
  elt->next_same_hash = table[hash];
  elt->prev_same_hash = 0;
  elt->related_value = 0;
  elt->in_memory = 0;
  elt->mode = mode;
  elt->is_const = (CONSTANT_P (x) || fixed_base_plus_p (x));

  if (table[hash])
    table[hash]->prev_same_hash = elt;
  table[hash] = elt;

  /* Put it into the proper value-class.  */
  if (classp)
    {
      classp = classp->first_same_value;
      if (CHEAPER (elt, classp))
	/* Insert at the head of the class.  */
	{
	  struct table_elt *p;
	  elt->next_same_value = classp;
	  classp->prev_same_value = elt;
	  elt->first_same_value = elt;

	  for (p = classp; p; p = p->next_same_value)
	    p->first_same_value = elt;
	}
      else
	{
	  /* Insert not at head of the class.  */
	  /* Put it after the last element cheaper than X.  */
	  struct table_elt *p, *next;

	  for (p = classp; (next = p->next_same_value) && CHEAPER (next, elt);
	       p = next);

	  /* Put it after P and before NEXT.  */
	  elt->next_same_value = next;
	  if (next)
	    next->prev_same_value = elt;

	  elt->prev_same_value = p;
	  p->next_same_value = elt;
	  elt->first_same_value = classp;
	}
    }
  else
    elt->first_same_value = elt;

  /* If this is a constant being set equivalent to a register or a register
     being set equivalent to a constant, note the constant equivalence.

     If this is a constant, it cannot be equivalent to a different constant,
     and a constant is the only thing that can be cheaper than a register.  So
     we know the register is the head of the class (before the constant was
     inserted).

     If this is a register that is not already known equivalent to a
     constant, we must check the entire class.

     If this is a register that is already known equivalent to an insn,
     update the qtys `const_insn' to show that `this_insn' is the latest
     insn making that quantity equivalent to the constant.  */

  if (elt->is_const && classp && REG_P (classp->exp)
      && !REG_P (x))
    {
      int exp_q = REG_QTY (REGNO (classp->exp));
      struct qty_table_elem *exp_ent = &qty_table[exp_q];

      exp_ent->const_rtx = gen_lowpart (exp_ent->mode, x);
      exp_ent->const_insn = this_insn;
    }

  else if (REG_P (x)
	   && classp
	   && ! qty_table[REG_QTY (REGNO (x))].const_rtx
	   && ! elt->is_const)
    {
      struct table_elt *p;

      for (p = classp; p != 0; p = p->next_same_value)
	{
	  if (p->is_const && !REG_P (p->exp))
	    {
	      int x_q = REG_QTY (REGNO (x));
	      struct qty_table_elem *x_ent = &qty_table[x_q];

	      x_ent->const_rtx
		= gen_lowpart (GET_MODE (x), p->exp);
	      x_ent->const_insn = this_insn;
	      break;
	    }
	}
    }

  else if (REG_P (x)
	   && qty_table[REG_QTY (REGNO (x))].const_rtx
	   && GET_MODE (x) == qty_table[REG_QTY (REGNO (x))].mode)
    qty_table[REG_QTY (REGNO (x))].const_insn = this_insn;

  /* If this is a constant with symbolic value,
     and it has a term with an explicit integer value,
     link it up with related expressions.  */
  if (GET_CODE (x) == CONST)
    {
      rtx subexp = get_related_value (x);
      unsigned subhash;
      struct table_elt *subelt, *subelt_prev;

      if (subexp != 0)
	{
	  /* Get the integer-free subexpression in the hash table.  */
	  subhash = safe_hash (subexp, mode) & HASH_MASK;
	  subelt = lookup (subexp, subhash, mode);
	  if (subelt == 0)
	    subelt = insert (subexp, NULL, subhash, mode);
	  /* Initialize SUBELT's circular chain if it has none.  */
	  if (subelt->related_value == 0)
	    subelt->related_value = subelt;
	  /* Find the element in the circular chain that precedes SUBELT.  */
	  subelt_prev = subelt;
	  while (subelt_prev->related_value != subelt)
	    subelt_prev = subelt_prev->related_value;
	  /* Put new ELT into SUBELT's circular chain just before SUBELT.
	     This way the element that follows SUBELT is the oldest one.  */
	  elt->related_value = subelt_prev->related_value;
	  subelt_prev->related_value = elt;
	}
    }

  return elt;
}

/* Given two equivalence classes, CLASS1 and CLASS2, put all the entries from
   CLASS2 into CLASS1.  This is done when we have reached an insn which makes
   the two classes equivalent.

   CLASS1 will be the surviving class; CLASS2 should not be used after this
   call.

   Any invalid entries in CLASS2 will not be copied.  */

static void
merge_equiv_classes (struct table_elt *class1, struct table_elt *class2)
{
  struct table_elt *elt, *next, *new;

  /* Ensure we start with the head of the classes.  */
  class1 = class1->first_same_value;
  class2 = class2->first_same_value;

  /* If they were already equal, forget it.  */
  if (class1 == class2)
    return;

  for (elt = class2; elt; elt = next)
    {
      unsigned int hash;
      rtx exp = elt->exp;
      enum machine_mode mode = elt->mode;

      next = elt->next_same_value;

      /* Remove old entry, make a new one in CLASS1's class.
	 Don't do this for invalid entries as we cannot find their
	 hash code (it also isn't necessary).  */
      if (REG_P (exp) || exp_equiv_p (exp, exp, 1, 0))
	{
	  bool need_rehash = false;

	  hash_arg_in_memory = 0;
	  hash = HASH (exp, mode);

	  if (REG_P (exp))
	    {
	      need_rehash = (unsigned) REG_QTY (REGNO (exp)) != REGNO (exp);
	      delete_reg_equiv (REGNO (exp));
	    }

	  remove_from_table (elt, hash);

	  if (insert_regs (exp, class1, 0) || need_rehash)
	    {
	      rehash_using_reg (exp);
	      hash = HASH (exp, mode);
	    }
	  new = insert (exp, class1, hash, mode);
	  new->in_memory = hash_arg_in_memory;
	}
    }
}

/* Flush the entire hash table.  */

static void
flush_hash_table (void)
{
  int i;
  struct table_elt *p;

  for (i = 0; i < HASH_SIZE; i++)
    for (p = table[i]; p; p = table[i])
      {
	/* Note that invalidate can remove elements
	   after P in the current hash chain.  */
	if (REG_P (p->exp))
	  invalidate (p->exp, p->mode);
	else
	  remove_from_table (p, i);
      }
}

/* Function called for each rtx to check whether a true dependence exists.  */
struct check_dependence_data
{
  enum machine_mode mode;
  rtx exp;
  rtx addr;
};

static int
check_dependence (rtx *x, void *data)
{
  struct check_dependence_data *d = (struct check_dependence_data *) data;
  if (*x && MEM_P (*x))
    return canon_true_dependence (d->exp, d->mode, d->addr, *x,
				  cse_rtx_varies_p);
  else
    return 0;
}
1690 /* Remove from the hash table, or mark as invalid, all expressions whose
1691 values could be altered by storing in X. X is a register, a subreg, or
1692 a memory reference with nonvarying address (because, when a memory
1693 reference with a varying address is stored in, all memory references are
1694 removed by invalidate_memory so specific invalidation is superfluous).
1695 FULL_MODE, if not VOIDmode, indicates that this much should be
1696 invalidated instead of just the amount indicated by the mode of X. This
1697 is only used for bitfield stores into memory.
1699 A nonvarying address may be just a register or just a symbol reference,
1700 or it may be either of those plus a numeric offset. */
1702 static void
1703 invalidate (rtx x, enum machine_mode full_mode)
1705 int i;
1706 struct table_elt *p;
1707 rtx addr;
1709 switch (GET_CODE (x))
1711 case REG:
1713 /* If X is a register, dependencies on its contents are recorded
1714 through the qty number mechanism. Just change the qty number of
1715 the register, mark it as invalid for expressions that refer to it,
1716 and remove it itself. */
1717 unsigned int regno = REGNO (x);
1718 unsigned int hash = HASH (x, GET_MODE (x));
1720 /* Remove REGNO from any quantity list it might be on and indicate
1721 that its value might have changed. If it is a pseudo, remove its
1722 entry from the hash table.
1724 For a hard register, we do the first two actions above for any
1725 additional hard registers corresponding to X. Then, if any of these
1726 registers are in the table, we must remove any REG entries that
1727 overlap these registers. */
1729 delete_reg_equiv (regno);
1730 REG_TICK (regno)++;
1731 SUBREG_TICKED (regno) = -1;
1733 if (regno >= FIRST_PSEUDO_REGISTER)
1735 /* Because a register can be referenced in more than one mode,
1736 we might have to remove more than one table entry. */
1737 struct table_elt *elt;
1739 while ((elt = lookup_for_remove (x, hash, GET_MODE (x))))
1740 remove_from_table (elt, hash);
1742 else
1744 HOST_WIDE_INT in_table
1745 = TEST_HARD_REG_BIT (hard_regs_in_table, regno);
1746 unsigned int endregno
1747 = regno + hard_regno_nregs[regno][GET_MODE (x)];
1748 unsigned int tregno, tendregno, rn;
1749 struct table_elt *p, *next;
1751 CLEAR_HARD_REG_BIT (hard_regs_in_table, regno);
1753 for (rn = regno + 1; rn < endregno; rn++)
1755 in_table |= TEST_HARD_REG_BIT (hard_regs_in_table, rn);
1756 CLEAR_HARD_REG_BIT (hard_regs_in_table, rn);
1757 delete_reg_equiv (rn);
1758 REG_TICK (rn)++;
1759 SUBREG_TICKED (rn) = -1;
1762 if (in_table)
1763 for (hash = 0; hash < HASH_SIZE; hash++)
1764 for (p = table[hash]; p; p = next)
1766 next = p->next_same_hash;
1768 if (!REG_P (p->exp)
1769 || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
1770 continue;
1772 tregno = REGNO (p->exp);
1773 tendregno
1774 = tregno + hard_regno_nregs[tregno][GET_MODE (p->exp)];
1775 if (tendregno > regno && tregno < endregno)
1776 remove_from_table (p, hash);
1780 return;
1782 case SUBREG:
1783 invalidate (SUBREG_REG (x), VOIDmode);
1784 return;
1786 case PARALLEL:
1787 for (i = XVECLEN (x, 0) - 1; i >= 0; --i)
1788 invalidate (XVECEXP (x, 0, i), VOIDmode);
1789 return;
1791 case EXPR_LIST:
1792 /* This is part of a disjoint return value; extract the location in
1793 question ignoring the offset. */
1794 invalidate (XEXP (x, 0), VOIDmode);
1795 return;
1797 case MEM:
1798 addr = canon_rtx (get_addr (XEXP (x, 0)));
1799 /* Calculate the canonical version of X here so that
1800 true_dependence doesn't generate new RTL for X on each call. */
1801 x = canon_rtx (x);
1803 /* Remove all hash table elements that refer to overlapping pieces of
1804 memory. */
1805 if (full_mode == VOIDmode)
1806 full_mode = GET_MODE (x);
1808 for (i = 0; i < HASH_SIZE; i++)
1810 struct table_elt *next;
1812 for (p = table[i]; p; p = next)
1814 next = p->next_same_hash;
1815 if (p->in_memory)
1817 struct check_dependence_data d;
1819 /* Just canonicalize the expression once;
1820 otherwise each time we call invalidate
1821 true_dependence will canonicalize the
1822 expression again. */
1823 if (!p->canon_exp)
1824 p->canon_exp = canon_rtx (p->exp);
1825 d.exp = x;
1826 d.addr = addr;
1827 d.mode = full_mode;
1828 if (for_each_rtx (&p->canon_exp, check_dependence, &d))
1829 remove_from_table (p, i);
1833 return;
1835 default:
1836 abort ();
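/* Editor's annotation -- an illustrative sketch, not part of the original
   source; the register numbers and modes are hypothetical.  On a 32-bit
   target where DImode needs two hard registers, invalidating (reg:DI 4)
   bumps REG_TICK for regnos 4 and 5, and if either was in the table we
   sweep every hash chain, removing any hard-REG entry whose registers
   overlap [4, 6) -- e.g. an old entry for (reg:SI 5) -- exactly the
   overlap test `tendregno > regno && tregno < endregno' above.  */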
1840 /* Remove all expressions that refer to register REGNO,
1841 since they are already invalid, and we are about to
1842 mark that register valid again and don't want the old
1843 expressions to reappear as valid. */
1845 static void
1846 remove_invalid_refs (unsigned int regno)
1848 unsigned int i;
1849 struct table_elt *p, *next;
1851 for (i = 0; i < HASH_SIZE; i++)
1852 for (p = table[i]; p; p = next)
1854 next = p->next_same_hash;
1855 if (!REG_P (p->exp)
1856 && refers_to_regno_p (regno, regno + 1, p->exp, (rtx *) 0))
1857 remove_from_table (p, i);
1861 /* Likewise for a subreg with subreg_reg REGNO, subreg_byte OFFSET,
1862 and mode MODE. */
1863 static void
1864 remove_invalid_subreg_refs (unsigned int regno, unsigned int offset,
1865 enum machine_mode mode)
1867 unsigned int i;
1868 struct table_elt *p, *next;
1869 unsigned int end = offset + (GET_MODE_SIZE (mode) - 1);
1871 for (i = 0; i < HASH_SIZE; i++)
1872 for (p = table[i]; p; p = next)
1874 rtx exp = p->exp;
1875 next = p->next_same_hash;
1877 if (!REG_P (exp)
1878 && (GET_CODE (exp) != SUBREG
1879 || !REG_P (SUBREG_REG (exp))
1880 || REGNO (SUBREG_REG (exp)) != regno
1881 || (((SUBREG_BYTE (exp)
1882 + (GET_MODE_SIZE (GET_MODE (exp)) - 1)) >= offset)
1883 && SUBREG_BYTE (exp) <= end))
1884 && refers_to_regno_p (regno, regno + 1, p->exp, (rtx *) 0))
1885 remove_from_table (p, i);
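/* Editor's annotation -- a worked example, not part of the original
   source; the numbers are hypothetical.  With REGNO 70, OFFSET 4 and
   MODE SImode, END is 7.  An entry mentioning (subreg:SI (reg:DI 70) 0)
   covers bytes 0-3, misses [4, 7] and is kept; one mentioning
   (subreg:SI (reg:DI 70) 4) covers bytes 4-7, overlaps and is removed,
   as is any non-SUBREG expression that refers to register 70.  */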
1889 /* Recompute the hash codes of any valid entries in the hash table that
1890 reference X, if X is a register, or SUBREG_REG (X) if X is a SUBREG.
1892 This is called when we make a jump equivalence. */
1894 static void
1895 rehash_using_reg (rtx x)
1897 unsigned int i;
1898 struct table_elt *p, *next;
1899 unsigned hash;
1901 if (GET_CODE (x) == SUBREG)
1902 x = SUBREG_REG (x);
1904 /* If X is not a register or if the register is known not to be in any
1905 valid entries in the table, we have no work to do. */
1907 if (!REG_P (x)
1908 || REG_IN_TABLE (REGNO (x)) < 0
1909 || REG_IN_TABLE (REGNO (x)) != REG_TICK (REGNO (x)))
1910 return;
1912 /* Scan all hash chains looking for valid entries that mention X.
1913 If we find one and it is in the wrong hash chain, move it. */
1915 for (i = 0; i < HASH_SIZE; i++)
1916 for (p = table[i]; p; p = next)
1918 next = p->next_same_hash;
1919 if (reg_mentioned_p (x, p->exp)
1920 && exp_equiv_p (p->exp, p->exp, 1, 0)
1921 && i != (hash = safe_hash (p->exp, p->mode) & HASH_MASK))
1923 if (p->next_same_hash)
1924 p->next_same_hash->prev_same_hash = p->prev_same_hash;
1926 if (p->prev_same_hash)
1927 p->prev_same_hash->next_same_hash = p->next_same_hash;
1928 else
1929 table[i] = p->next_same_hash;
1931 p->next_same_hash = table[hash];
1932 p->prev_same_hash = 0;
1933 if (table[hash])
1934 table[hash]->prev_same_hash = p;
1935 table[hash] = p;
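/* Editor's annotation -- an illustrative sketch, not part of the
   original source; register numbers are hypothetical.  After a jump
   equivalence changes REG_QTY (70), an entry such as
   (plus:SI (reg 70) (reg 71)) no longer hashes to the chain it sits
   in, because REG hash codes are computed from quantity numbers.  The
   loop above recomputes the hash and splices the entry into the right
   bucket instead of discarding it.  */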
1940 /* Remove from the hash table any expression that is a call-clobbered
1941 register. Also update their TICK values. */
1943 static void
1944 invalidate_for_call (void)
1946 unsigned int regno, endregno;
1947 unsigned int i;
1948 unsigned hash;
1949 struct table_elt *p, *next;
1950 int in_table = 0;
1952 /* Go through all the hard registers. For each that is clobbered in
1953 a CALL_INSN, remove the register from quantity chains and update
1954 reg_tick if defined. Also see if any of these registers is currently
1955 in the table. */
1957 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1958 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
1960 delete_reg_equiv (regno);
1961 if (REG_TICK (regno) >= 0)
1963 REG_TICK (regno)++;
1964 SUBREG_TICKED (regno) = -1;
1967 in_table |= (TEST_HARD_REG_BIT (hard_regs_in_table, regno) != 0);
1970 /* In the case where we have no call-clobbered hard registers in the
1971 table, we are done. Otherwise, scan the table and remove any
1972 entry that overlaps a call-clobbered register. */
1974 if (in_table)
1975 for (hash = 0; hash < HASH_SIZE; hash++)
1976 for (p = table[hash]; p; p = next)
1978 next = p->next_same_hash;
1980 if (!REG_P (p->exp)
1981 || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
1982 continue;
1984 regno = REGNO (p->exp);
1985 endregno = regno + hard_regno_nregs[regno][GET_MODE (p->exp)];
1987 for (i = regno; i < endregno; i++)
1988 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
1990 remove_from_table (p, hash);
1991 break;
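/* Editor's annotation -- an illustrative sketch, not part of the
   original source; which registers are call-clobbered is target
   dependent.  Across a CALL_INSN, every register in
   regs_invalidated_by_call has its quantity chain broken and its
   REG_TICK bumped, so a cached equivalence involving, say, a value
   register is swept from the table, while entries that use only
   call-saved hard registers survive the call.  */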
1996 /* Given an expression X of type CONST,
1997 and ELT which is its table entry (or 0 if it
1998 is not in the hash table),
1999 return an alternate expression for X as a register plus integer.
2000 If none can be found, return 0. */
2002 static rtx
2003 use_related_value (rtx x, struct table_elt *elt)
2005 struct table_elt *relt = 0;
2006 struct table_elt *p, *q;
2007 HOST_WIDE_INT offset;
2009 /* First, is there anything related known?
2010 If we have a table element, we can tell from that.
2011 Otherwise, we must look it up. */
2013 if (elt != 0 && elt->related_value != 0)
2014 relt = elt;
2015 else if (elt == 0 && GET_CODE (x) == CONST)
2017 rtx subexp = get_related_value (x);
2018 if (subexp != 0)
2019 relt = lookup (subexp,
2020 safe_hash (subexp, GET_MODE (subexp)) & HASH_MASK,
2021 GET_MODE (subexp));
2024 if (relt == 0)
2025 return 0;
2027 /* Search all related table entries for one that has an
2028 equivalent register. */
2030 p = relt;
2031 while (1)
2033 /* This loop is strange in that it is executed in two different cases.
2034 The first is when X is already in the table. Then it is searching
2035 the RELATED_VALUE list of X's class (RELT). The second case is when
2036 X is not in the table. Then RELT points to a class for the related
2037 value.
2039 Ensure that, whatever case we are in, we ignore classes that have
2040 the same value as X. */
2042 if (rtx_equal_p (x, p->exp))
2043 q = 0;
2044 else
2045 for (q = p->first_same_value; q; q = q->next_same_value)
2046 if (REG_P (q->exp))
2047 break;
2049 if (q)
2050 break;
2052 p = p->related_value;
2054 /* We went all the way around, so there is nothing to be found.
2055 Alternatively, perhaps RELT was in the table for some other reason
2056 and it has no related values recorded. */
2057 if (p == relt || p == 0)
2058 break;
2061 if (q == 0)
2062 return 0;
2064 offset = (get_integer_term (x) - get_integer_term (p->exp));
2065 /* Note: OFFSET may be 0 if P->exp and X are related by commutativity. */
2066 return plus_constant (q->exp, offset);
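/* Editor's annotation -- an illustrative sketch, not part of the
   original source; the symbol and register numbers are hypothetical.
   If X is (const (plus (symbol_ref "tab") (const_int 12))) and the
   table knows that (const (plus (symbol_ref "tab") (const_int 4)))
   lives in (reg 60), the related-value chain gives OFFSET = 12 - 4 = 8
   and we return (plus (reg 60) (const_int 8)), trading a constant
   address for cheap register-plus-offset arithmetic.  */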
2069 /* Hash a string. Just add its bytes up. */
2070 static inline unsigned
2071 canon_hash_string (const char *ps)
2073 unsigned hash = 0;
2074 const unsigned char *p = (const unsigned char *) ps;
2076 if (p)
2077 while (*p)
2078 hash += *p++;
2080 return hash;
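/* Editor's annotation -- a worked example, not part of the original
   source.  On an ASCII host, canon_hash_string ("ab") is
   'a' + 'b' = 97 + 98 = 195; both a null pointer and "" hash to 0,
   so the hash deliberately does not distinguish the two.  */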
2083 /* Hash an rtx. We are careful to make sure the value is never negative.
2084 Equivalent registers hash identically.
2085 MODE is used in hashing for CONST_INTs only;
2086 otherwise the mode of X is used.
2088 Store 1 in do_not_record if any subexpression is volatile.
2090 Store 1 in hash_arg_in_memory if X contains a MEM rtx
2091 which does not have the MEM_READONLY_P bit set.
2093 Note that cse_insn knows that the hash code of a MEM expression
2094 is just (int) MEM plus the hash code of the address. */
2096 static unsigned
2097 canon_hash (rtx x, enum machine_mode mode)
2099 int i, j;
2100 unsigned hash = 0;
2101 enum rtx_code code;
2102 const char *fmt;
2104 /* repeat is used to turn tail-recursion into iteration. */
2105 repeat:
2106 if (x == 0)
2107 return hash;
2109 code = GET_CODE (x);
2110 switch (code)
2112 case REG:
2114 unsigned int regno = REGNO (x);
2115 bool record;
2117 /* On some machines, we can't record any non-fixed hard register,
2118 because extending its life will cause reload problems. We
2119 consider ap, fp, sp, gp to be fixed for this purpose.
2121 We also consider CCmode registers to be fixed for this purpose;
2122 failure to do so leads to a failure to simplify conditionals
2123 of the 0<100 type.
2125 On all machines, we can't record any global registers.
2126 Nor should we record any register that is in a small
2127 class, as defined by CLASS_LIKELY_SPILLED_P. */
2129 if (regno >= FIRST_PSEUDO_REGISTER)
2130 record = true;
2131 else if (x == frame_pointer_rtx
2132 || x == hard_frame_pointer_rtx
2133 || x == arg_pointer_rtx
2134 || x == stack_pointer_rtx
2135 || x == pic_offset_table_rtx)
2136 record = true;
2137 else if (global_regs[regno])
2138 record = false;
2139 else if (fixed_regs[regno])
2140 record = true;
2141 else if (GET_MODE_CLASS (GET_MODE (x)) == MODE_CC)
2142 record = true;
2143 else if (SMALL_REGISTER_CLASSES)
2144 record = false;
2145 else if (CLASS_LIKELY_SPILLED_P (REGNO_REG_CLASS (regno)))
2146 record = false;
2147 else
2148 record = true;
2150 if (!record)
2152 do_not_record = 1;
2153 return 0;
2156 hash += ((unsigned) REG << 7) + (unsigned) REG_QTY (regno);
2157 return hash;
2160 /* We handle SUBREG of a REG specially because the underlying
2161 reg changes its hash value with every value change; we don't
2162 want to have to forget unrelated subregs when one subreg changes. */
2163 case SUBREG:
2165 if (REG_P (SUBREG_REG (x)))
2167 hash += (((unsigned) SUBREG << 7)
2168 + REGNO (SUBREG_REG (x))
2169 + (SUBREG_BYTE (x) / UNITS_PER_WORD));
2170 return hash;
2172 break;
2175 case CONST_INT:
2177 unsigned HOST_WIDE_INT tem = INTVAL (x);
2178 hash += ((unsigned) CONST_INT << 7) + (unsigned) mode + tem;
2179 return hash;
2182 case CONST_DOUBLE:
2183 /* This is like the general case, except that it only counts
2184 the integers representing the constant. */
2185 hash += (unsigned) code + (unsigned) GET_MODE (x);
2186 if (GET_MODE (x) != VOIDmode)
2187 hash += real_hash (CONST_DOUBLE_REAL_VALUE (x));
2188 else
2189 hash += ((unsigned) CONST_DOUBLE_LOW (x)
2190 + (unsigned) CONST_DOUBLE_HIGH (x));
2191 return hash;
2193 case CONST_VECTOR:
2195 int units;
2196 rtx elt;
2198 units = CONST_VECTOR_NUNITS (x);
2200 for (i = 0; i < units; ++i)
2202 elt = CONST_VECTOR_ELT (x, i);
2203 hash += canon_hash (elt, GET_MODE (elt));
2206 return hash;
2209 /* Assume there is only one rtx object for any given label. */
2210 case LABEL_REF:
2211 hash += ((unsigned) LABEL_REF << 7) + (unsigned long) XEXP (x, 0);
2212 return hash;
2214 case SYMBOL_REF:
2215 hash += ((unsigned) SYMBOL_REF << 7) + (unsigned long) XSTR (x, 0);
2216 return hash;
2218 case MEM:
2219 /* We don't record if marked volatile or if BLKmode since we don't
2220 know the size of the move. */
2221 if (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode)
2223 do_not_record = 1;
2224 return 0;
2226 if (!MEM_READONLY_P (x))
2227 hash_arg_in_memory = 1;
2229 /* Now that we have already found this special case,
2230 might as well speed it up as much as possible. */
2231 hash += (unsigned) MEM;
2232 x = XEXP (x, 0);
2233 goto repeat;
2235 case USE:
2236 /* A USE that mentions non-volatile memory needs special
2237 handling since the MEM may be BLKmode which normally
2238 prevents an entry from being made. Pure calls are
2239 marked by a USE which mentions BLKmode memory. */
2240 if (MEM_P (XEXP (x, 0))
2241 && ! MEM_VOLATILE_P (XEXP (x, 0)))
2243 hash += (unsigned) USE;
2244 x = XEXP (x, 0);
2246 if (!MEM_READONLY_P (x))
2247 hash_arg_in_memory = 1;
2249 /* Now that we have already found this special case,
2250 might as well speed it up as much as possible. */
2251 hash += (unsigned) MEM;
2252 x = XEXP (x, 0);
2253 goto repeat;
2255 break;
2257 case PRE_DEC:
2258 case PRE_INC:
2259 case POST_DEC:
2260 case POST_INC:
2261 case PRE_MODIFY:
2262 case POST_MODIFY:
2263 case PC:
2264 case CC0:
2265 case CALL:
2266 case UNSPEC_VOLATILE:
2267 do_not_record = 1;
2268 return 0;
2270 case ASM_OPERANDS:
2271 if (MEM_VOLATILE_P (x))
2273 do_not_record = 1;
2274 return 0;
2276 else
2278 /* We don't want to take the filename and line into account. */
2279 hash += (unsigned) code + (unsigned) GET_MODE (x)
2280 + canon_hash_string (ASM_OPERANDS_TEMPLATE (x))
2281 + canon_hash_string (ASM_OPERANDS_OUTPUT_CONSTRAINT (x))
2282 + (unsigned) ASM_OPERANDS_OUTPUT_IDX (x);
2284 if (ASM_OPERANDS_INPUT_LENGTH (x))
2286 for (i = 1; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
2288 hash += (canon_hash (ASM_OPERANDS_INPUT (x, i),
2289 GET_MODE (ASM_OPERANDS_INPUT (x, i)))
2290 + canon_hash_string (ASM_OPERANDS_INPUT_CONSTRAINT
2291 (x, i)));
2294 hash += canon_hash_string (ASM_OPERANDS_INPUT_CONSTRAINT (x, 0));
2295 x = ASM_OPERANDS_INPUT (x, 0);
2296 mode = GET_MODE (x);
2297 goto repeat;
2300 return hash;
2302 break;
2304 default:
2305 break;
2308 i = GET_RTX_LENGTH (code) - 1;
2309 hash += (unsigned) code + (unsigned) GET_MODE (x);
2310 fmt = GET_RTX_FORMAT (code);
2311 for (; i >= 0; i--)
2313 if (fmt[i] == 'e')
2315 rtx tem = XEXP (x, i);
2317 /* If we are about to do the last recursive call
2318 needed at this level, change it into iteration.
2319 This function is called enough to be worth it. */
2320 if (i == 0)
2322 x = tem;
2323 goto repeat;
2325 hash += canon_hash (tem, 0);
2327 else if (fmt[i] == 'E')
2328 for (j = 0; j < XVECLEN (x, i); j++)
2329 hash += canon_hash (XVECEXP (x, i, j), 0);
2330 else if (fmt[i] == 's')
2331 hash += canon_hash_string (XSTR (x, i));
2332 else if (fmt[i] == 'i')
2334 unsigned tem = XINT (x, i);
2335 hash += tem;
2337 else if (fmt[i] == '0' || fmt[i] == 't')
2338 /* Unused. */
2340 else
2341 abort ();
2343 return hash;
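/* Editor's annotation -- an illustrative sketch, not part of the
   original source; register numbers are hypothetical.  A REG is hashed
   through REG_QTY rather than REGNO, so after `(set (reg 67) (reg 66))'
   the two registers share a quantity and hash identically; an
   expression looked up under either spelling lands in the same chain,
   which is what makes "equivalent registers hash identically" true.  */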
2346 /* Like canon_hash but with no side effects. */
2348 static unsigned
2349 safe_hash (rtx x, enum machine_mode mode)
2351 int save_do_not_record = do_not_record;
2352 int save_hash_arg_in_memory = hash_arg_in_memory;
2353 unsigned hash = canon_hash (x, mode);
2354 hash_arg_in_memory = save_hash_arg_in_memory;
2355 do_not_record = save_do_not_record;
2356 return hash;
2359 /* Return 1 iff X and Y would canonicalize into the same thing,
2360 without actually constructing the canonicalization of either one.
2361 If VALIDATE is nonzero,
2362 we assume X is an expression being processed from the rtl
2363 and Y was found in the hash table. We check register refs
2364 in Y for being marked as valid.
2366 If EQUAL_VALUES is nonzero, we allow a register to match a constant value
2367 that is known to be in the register. Ordinarily, we don't allow them
2368 to match, because letting them match would cause unpredictable results
2369 in all the places that search a hash table chain for an equivalent
2370 for a given value. A possible equivalent that has different structure
2371 has its hash code computed from different data. Whether the hash code
2372 is the same as that of the given value is pure luck. */
2374 static int
2375 exp_equiv_p (rtx x, rtx y, int validate, int equal_values)
2377 int i, j;
2378 enum rtx_code code;
2379 const char *fmt;
2381 /* Note: it is incorrect to assume an expression is equivalent to itself
2382 if VALIDATE is nonzero. */
2383 if (x == y && !validate)
2384 return 1;
2385 if (x == 0 || y == 0)
2386 return x == y;
2388 code = GET_CODE (x);
2389 if (code != GET_CODE (y))
2391 if (!equal_values)
2392 return 0;
2394 /* If X is a constant and Y is a register or vice versa, they may be
2395 equivalent. We only have to validate if Y is a register. */
2396 if (CONSTANT_P (x) && REG_P (y)
2397 && REGNO_QTY_VALID_P (REGNO (y)))
2399 int y_q = REG_QTY (REGNO (y));
2400 struct qty_table_elem *y_ent = &qty_table[y_q];
2402 if (GET_MODE (y) == y_ent->mode
2403 && rtx_equal_p (x, y_ent->const_rtx)
2404 && (! validate || REG_IN_TABLE (REGNO (y)) == REG_TICK (REGNO (y))))
2405 return 1;
2408 if (CONSTANT_P (y) && code == REG
2409 && REGNO_QTY_VALID_P (REGNO (x)))
2411 int x_q = REG_QTY (REGNO (x));
2412 struct qty_table_elem *x_ent = &qty_table[x_q];
2414 if (GET_MODE (x) == x_ent->mode
2415 && rtx_equal_p (y, x_ent->const_rtx))
2416 return 1;
2419 return 0;
2422 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
2423 if (GET_MODE (x) != GET_MODE (y))
2424 return 0;
2426 switch (code)
2428 case PC:
2429 case CC0:
2430 case CONST_INT:
2431 return x == y;
2433 case LABEL_REF:
2434 return XEXP (x, 0) == XEXP (y, 0);
2436 case SYMBOL_REF:
2437 return XSTR (x, 0) == XSTR (y, 0);
2439 case REG:
2441 unsigned int regno = REGNO (y);
2442 unsigned int endregno
2443 = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
2444 : hard_regno_nregs[regno][GET_MODE (y)]);
2445 unsigned int i;
2447 /* If the quantities are not the same, the expressions are not
2448 equivalent. If they are and we are not to validate, they
2449 are equivalent. Otherwise, ensure all regs are up-to-date. */
2451 if (REG_QTY (REGNO (x)) != REG_QTY (regno))
2452 return 0;
2454 if (! validate)
2455 return 1;
2457 for (i = regno; i < endregno; i++)
2458 if (REG_IN_TABLE (i) != REG_TICK (i))
2459 return 0;
2461 return 1;
2464 /* For commutative operations, check both orders. */
2465 case PLUS:
2466 case MULT:
2467 case AND:
2468 case IOR:
2469 case XOR:
2470 case NE:
2471 case EQ:
2472 return ((exp_equiv_p (XEXP (x, 0), XEXP (y, 0), validate, equal_values)
2473 && exp_equiv_p (XEXP (x, 1), XEXP (y, 1),
2474 validate, equal_values))
2475 || (exp_equiv_p (XEXP (x, 0), XEXP (y, 1),
2476 validate, equal_values)
2477 && exp_equiv_p (XEXP (x, 1), XEXP (y, 0),
2478 validate, equal_values)));
2480 case ASM_OPERANDS:
2481 /* We don't use the generic code below because we want to
2482 disregard filename and line numbers. */
2484 /* A volatile asm isn't equivalent to any other. */
2485 if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
2486 return 0;
2488 if (GET_MODE (x) != GET_MODE (y)
2489 || strcmp (ASM_OPERANDS_TEMPLATE (x), ASM_OPERANDS_TEMPLATE (y))
2490 || strcmp (ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
2491 ASM_OPERANDS_OUTPUT_CONSTRAINT (y))
2492 || ASM_OPERANDS_OUTPUT_IDX (x) != ASM_OPERANDS_OUTPUT_IDX (y)
2493 || ASM_OPERANDS_INPUT_LENGTH (x) != ASM_OPERANDS_INPUT_LENGTH (y))
2494 return 0;
2496 if (ASM_OPERANDS_INPUT_LENGTH (x))
2498 for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
2499 if (! exp_equiv_p (ASM_OPERANDS_INPUT (x, i),
2500 ASM_OPERANDS_INPUT (y, i),
2501 validate, equal_values)
2502 || strcmp (ASM_OPERANDS_INPUT_CONSTRAINT (x, i),
2503 ASM_OPERANDS_INPUT_CONSTRAINT (y, i)))
2504 return 0;
2507 return 1;
2509 default:
2510 break;
2513 /* Compare the elements. If any pair of corresponding elements
2514 fails to match, return 0 for the whole thing. */
2516 fmt = GET_RTX_FORMAT (code);
2517 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2519 switch (fmt[i])
2521 case 'e':
2522 if (! exp_equiv_p (XEXP (x, i), XEXP (y, i), validate, equal_values))
2523 return 0;
2524 break;
2526 case 'E':
2527 if (XVECLEN (x, i) != XVECLEN (y, i))
2528 return 0;
2529 for (j = 0; j < XVECLEN (x, i); j++)
2530 if (! exp_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j),
2531 validate, equal_values))
2532 return 0;
2533 break;
2535 case 's':
2536 if (strcmp (XSTR (x, i), XSTR (y, i)))
2537 return 0;
2538 break;
2540 case 'i':
2541 if (XINT (x, i) != XINT (y, i))
2542 return 0;
2543 break;
2545 case 'w':
2546 if (XWINT (x, i) != XWINT (y, i))
2547 return 0;
2548 break;
2550 case '0':
2551 case 't':
2552 break;
2554 default:
2555 abort ();
2559 return 1;
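/* Editor's annotation -- an illustrative sketch, not part of the
   original source; register numbers are hypothetical.  With
   EQUAL_VALUES zero, (plus:SI (reg 60) (reg 61)) matches
   (plus:SI (reg 61) (reg 60)) via the commutative case above, while
   (mult:SI x y) against (mult:HI x y) fails the early mode check.
   With EQUAL_VALUES nonzero, (const_int 4) may match (reg 62) when
   reg 62's quantity records const_rtx (const_int 4) in its mode.  */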
2562 /* Return 1 if X has a value that can vary even between two
2563 executions of the program. 0 means X can be compared reliably
2564 against certain constants or near-constants. */
2566 static int
2567 cse_rtx_varies_p (rtx x, int from_alias)
2569 /* We need not check for X and the equivalence class being of the same
2570 mode because if X is equivalent to a constant in some mode, it
2571 doesn't vary in any mode. */
2573 if (REG_P (x)
2574 && REGNO_QTY_VALID_P (REGNO (x)))
2576 int x_q = REG_QTY (REGNO (x));
2577 struct qty_table_elem *x_ent = &qty_table[x_q];
2579 if (GET_MODE (x) == x_ent->mode
2580 && x_ent->const_rtx != NULL_RTX)
2581 return 0;
2584 if (GET_CODE (x) == PLUS
2585 && GET_CODE (XEXP (x, 1)) == CONST_INT
2586 && REG_P (XEXP (x, 0))
2587 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
2589 int x0_q = REG_QTY (REGNO (XEXP (x, 0)));
2590 struct qty_table_elem *x0_ent = &qty_table[x0_q];
2592 if ((GET_MODE (XEXP (x, 0)) == x0_ent->mode)
2593 && x0_ent->const_rtx != NULL_RTX)
2594 return 0;
2597 /* This can happen as the result of virtual register instantiation, if
2598 the initial constant is too large to be a valid address. This gives
2599 us a three-instruction sequence: load the large offset into a register,
2600 load fp minus a constant into a register, then form a MEM which is the
2601 sum of the two `constant' registers. */
2602 if (GET_CODE (x) == PLUS
2603 && REG_P (XEXP (x, 0))
2604 && REG_P (XEXP (x, 1))
2605 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0)))
2606 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
2608 int x0_q = REG_QTY (REGNO (XEXP (x, 0)));
2609 int x1_q = REG_QTY (REGNO (XEXP (x, 1)));
2610 struct qty_table_elem *x0_ent = &qty_table[x0_q];
2611 struct qty_table_elem *x1_ent = &qty_table[x1_q];
2613 if ((GET_MODE (XEXP (x, 0)) == x0_ent->mode)
2614 && x0_ent->const_rtx != NULL_RTX
2615 && (GET_MODE (XEXP (x, 1)) == x1_ent->mode)
2616 && x1_ent->const_rtx != NULL_RTX)
2617 return 0;
2620 return rtx_varies_p (x, from_alias);
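/* Editor's annotation -- an illustrative sketch, not part of the
   original source; register numbers are hypothetical.  An address
   such as (plus (reg 80) (reg 81)) where both registers' quantities
   carry constant equivalents -- the large-offset-plus-fp pattern from
   virtual register instantiation described above -- is reported as
   nonvarying, so MEMs using it can still be compared reliably.  */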
2623 /* Subroutine of canon_reg. Pass *XLOC through canon_reg, and validate
2624 the result if necessary. INSN is as for canon_reg. */
2626 static void
2627 validate_canon_reg (rtx *xloc, rtx insn)
2629 rtx new = canon_reg (*xloc, insn);
2630 int insn_code;
2632 /* If replacing pseudo with hard reg or vice versa, ensure the
2633 insn remains valid. Likewise if the insn has MATCH_DUPs. */
2634 if (insn != 0 && new != 0
2635 && REG_P (new) && REG_P (*xloc)
2636 && (((REGNO (new) < FIRST_PSEUDO_REGISTER)
2637 != (REGNO (*xloc) < FIRST_PSEUDO_REGISTER))
2638 || GET_MODE (new) != GET_MODE (*xloc)
2639 || (insn_code = recog_memoized (insn)) < 0
2640 || insn_data[insn_code].n_dups > 0))
2641 validate_change (insn, xloc, new, 1);
2642 else
2643 *xloc = new;
2646 /* Canonicalize an expression:
2647 replace each register reference inside it
2648 with the "oldest" equivalent register.
2650 If INSN is nonzero and we are replacing a pseudo with a hard register
2651 or vice versa, validate_change is used to ensure that INSN remains valid
2652 after we make our substitution. The calls are made with IN_GROUP nonzero
2653 so apply_change_group must be called upon the outermost return from this
2654 function (unless INSN is zero). The result of apply_change_group can
2655 generally be discarded since the changes we are making are optional. */
2657 static rtx
2658 canon_reg (rtx x, rtx insn)
2660 int i;
2661 enum rtx_code code;
2662 const char *fmt;
2664 if (x == 0)
2665 return x;
2667 code = GET_CODE (x);
2668 switch (code)
2670 case PC:
2671 case CC0:
2672 case CONST:
2673 case CONST_INT:
2674 case CONST_DOUBLE:
2675 case CONST_VECTOR:
2676 case SYMBOL_REF:
2677 case LABEL_REF:
2678 case ADDR_VEC:
2679 case ADDR_DIFF_VEC:
2680 return x;
2682 case REG:
2684 int first;
2685 int q;
2686 struct qty_table_elem *ent;
2688 /* Never replace a hard reg, because hard regs can appear
2689 in more than one machine mode, and we must preserve the mode
2690 of each occurrence. Also, some hard regs appear in
2691 MEMs that are shared and mustn't be altered. Don't try to
2692 replace any reg that maps to a reg of class NO_REGS. */
2693 if (REGNO (x) < FIRST_PSEUDO_REGISTER
2694 || ! REGNO_QTY_VALID_P (REGNO (x)))
2695 return x;
2697 q = REG_QTY (REGNO (x));
2698 ent = &qty_table[q];
2699 first = ent->first_reg;
2700 return (first >= FIRST_PSEUDO_REGISTER ? regno_reg_rtx[first]
2701 : REGNO_REG_CLASS (first) == NO_REGS ? x
2702 : gen_rtx_REG (ent->mode, first));
2705 default:
2706 break;
2709 fmt = GET_RTX_FORMAT (code);
2710 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2712 int j;
2714 if (fmt[i] == 'e')
2715 validate_canon_reg (&XEXP (x, i), insn);
2716 else if (fmt[i] == 'E')
2717 for (j = 0; j < XVECLEN (x, i); j++)
2718 validate_canon_reg (&XVECEXP (x, i, j), insn);
2721 return x;
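/* Editor's annotation -- an illustrative sketch, not part of the
   original source; register numbers are hypothetical.  If (reg 105)
   was copied from (reg 100) earlier in the extended basic block, both
   share a quantity whose first_reg is 100, so canon_reg rewrites
   (plus (reg 105) (const_int 4)) as (plus (reg 100) (const_int 4)),
   letting it match other expressions phrased in terms of the older
   register.  */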
2724 /* LOC is a location within INSN that is an operand address (the contents of
2725 a MEM). Find the best equivalent address to use that is valid for this
2726 insn.
2728 On most CISC machines, complicated address modes are costly, and rtx_cost
2729 is a good approximation for that cost. However, most RISC machines have
2730 only a few (usually only one) memory reference formats. If an address is
2731 valid at all, it is often just as cheap as any other address. Hence, for
2732 RISC machines, we use `address_cost' to compare the costs of various
2733 addresses. For two addresses of equal cost, choose the one with the
2734 highest `rtx_cost' value as that has the potential of eliminating the
2735 most insns. For equal costs, we choose the first in the equivalence
2736 class. Note that we ignore the fact that pseudo registers are cheaper than
2737 hard registers here because we would also prefer the pseudo registers. */
2739 static void
2740 find_best_addr (rtx insn, rtx *loc, enum machine_mode mode)
2742 struct table_elt *elt;
2743 rtx addr = *loc;
2744 struct table_elt *p;
2745 int found_better = 1;
2746 int save_do_not_record = do_not_record;
2747 int save_hash_arg_in_memory = hash_arg_in_memory;
2748 int addr_volatile;
2749 int regno;
2750 unsigned hash;
2752 /* Do not try to replace constant addresses or addresses of local and
2753 argument slots. These MEM expressions are made only once and inserted
2754 in many instructions, as well as being used to control symbol table
2755 output. It is not safe to clobber them.
2757 There are some uncommon cases where the address is already in a register
2758 for some reason, but we cannot take advantage of that because we have
2759 no easy way to unshare the MEM. In addition, looking up all stack
2760 addresses is costly. */
2761 if ((GET_CODE (addr) == PLUS
2762 && REG_P (XEXP (addr, 0))
2763 && GET_CODE (XEXP (addr, 1)) == CONST_INT
2764 && (regno = REGNO (XEXP (addr, 0)),
2765 regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM
2766 || regno == ARG_POINTER_REGNUM))
2767 || (REG_P (addr)
2768 && (regno = REGNO (addr), regno == FRAME_POINTER_REGNUM
2769 || regno == HARD_FRAME_POINTER_REGNUM
2770 || regno == ARG_POINTER_REGNUM))
2771 || CONSTANT_ADDRESS_P (addr))
2772 return;
2774 /* If this address is not simply a register, try to fold it. This will
2775 sometimes simplify the expression. Many simplifications
2776 will not be valid, but some, usually applying the associative rule, will
2777 be valid and produce better code. */
2778 if (!REG_P (addr))
2780 rtx folded = fold_rtx (copy_rtx (addr), NULL_RTX);
2781 int addr_folded_cost = address_cost (folded, mode);
2782 int addr_cost = address_cost (addr, mode);
2784 if ((addr_folded_cost < addr_cost
2785 || (addr_folded_cost == addr_cost
2786 /* ??? The rtx_cost comparison is left over from an older
2787 version of this code. It is probably no longer helpful. */
2788 && (rtx_cost (folded, MEM) > rtx_cost (addr, MEM)
2789 || approx_reg_cost (folded) < approx_reg_cost (addr))))
2790 && validate_change (insn, loc, folded, 0))
2791 addr = folded;
2794 /* If this address is not in the hash table, we can't look for equivalences
2795 of the whole address. Also, ignore if volatile. */
2797 do_not_record = 0;
2798 hash = HASH (addr, Pmode);
2799 addr_volatile = do_not_record;
2800 do_not_record = save_do_not_record;
2801 hash_arg_in_memory = save_hash_arg_in_memory;
2803 if (addr_volatile)
2804 return;
2806 elt = lookup (addr, hash, Pmode);
2808 if (elt)
2810 /* We need to find the best (under the criteria documented above) entry
2811 in the class that is valid. We use the `flag' field to indicate
2812 choices that were invalid and iterate until we can't find a better
2813 one that hasn't already been tried. */
2815 for (p = elt->first_same_value; p; p = p->next_same_value)
2816 p->flag = 0;
2818 while (found_better)
2820 int best_addr_cost = address_cost (*loc, mode);
2821 int best_rtx_cost = (elt->cost + 1) >> 1;
2822 int exp_cost;
2823 struct table_elt *best_elt = elt;
2825 found_better = 0;
2826 for (p = elt->first_same_value; p; p = p->next_same_value)
2827 if (! p->flag)
2829 if ((REG_P (p->exp)
2830 || exp_equiv_p (p->exp, p->exp, 1, 0))
2831 && ((exp_cost = address_cost (p->exp, mode)) < best_addr_cost
2832 || (exp_cost == best_addr_cost
2833 && ((p->cost + 1) >> 1) > best_rtx_cost)))
2835 found_better = 1;
2836 best_addr_cost = exp_cost;
2837 best_rtx_cost = (p->cost + 1) >> 1;
2838 best_elt = p;
2842 if (found_better)
2844 if (validate_change (insn, loc,
2845 canon_reg (copy_rtx (best_elt->exp),
2846 NULL_RTX), 0))
2847 return;
2848 else
2849 best_elt->flag = 1;
2854 /* If the address is a binary operation with the first operand a register
2855 and the second a constant, do the same as above, but looking for
2856 equivalences of the register. Then try to simplify before checking for
2857 the best address to use. This catches a few cases: the first is when we
2858 have REG+const where the register is equivalent to another REG+const. We can often merge
2859 the constants and eliminate one insn and one register. It may also be
2860 that a machine has a cheap REG+REG+const. Finally, this improves the
2861 code on the Alpha for unaligned byte stores. */
2863 if (flag_expensive_optimizations
2864 && ARITHMETIC_P (*loc)
2865 && REG_P (XEXP (*loc, 0)))
2867 rtx op1 = XEXP (*loc, 1);
2869 do_not_record = 0;
2870 hash = HASH (XEXP (*loc, 0), Pmode);
2871 do_not_record = save_do_not_record;
2872 hash_arg_in_memory = save_hash_arg_in_memory;
2874 elt = lookup (XEXP (*loc, 0), hash, Pmode);
2875 if (elt == 0)
2876 return;
2878 /* We need to find the best (under the criteria documented above) entry
2879 in the class that is valid. We use the `flag' field to indicate
2880 choices that were invalid and iterate until we can't find a better
2881 one that hasn't already been tried. */
2883 for (p = elt->first_same_value; p; p = p->next_same_value)
2884 p->flag = 0;
2886 while (found_better)
2888 int best_addr_cost = address_cost (*loc, mode);
2889 int best_rtx_cost = (COST (*loc) + 1) >> 1;
2890 struct table_elt *best_elt = elt;
2891 rtx best_rtx = *loc;
2892 int count;
2894 /* This is in the worst case an O(n^2) algorithm, so limit our search
2895 to the first 32 elements on the list. This avoids trouble
2896 compiling code with very long basic blocks that can easily
2897 call simplify_gen_binary so many times that we run out of
2898 memory. */
2900 found_better = 0;
2901 for (p = elt->first_same_value, count = 0;
2902 p && count < 32;
2903 p = p->next_same_value, count++)
2904 if (! p->flag
2905 && (REG_P (p->exp)
2906 || exp_equiv_p (p->exp, p->exp, 1, 0)))
2908 rtx new = simplify_gen_binary (GET_CODE (*loc), Pmode,
2909 p->exp, op1);
2910 int new_cost;
2911 new_cost = address_cost (new, mode);
2913 if (new_cost < best_addr_cost
2914 || (new_cost == best_addr_cost
2915 && (COST (new) + 1) >> 1 > best_rtx_cost))
2917 found_better = 1;
2918 best_addr_cost = new_cost;
2919 best_rtx_cost = (COST (new) + 1) >> 1;
2920 best_elt = p;
2921 best_rtx = new;
2925 if (found_better)
2927 if (validate_change (insn, loc,
2928 canon_reg (copy_rtx (best_rtx),
2929 NULL_RTX), 0))
2930 return;
2931 else
2932 best_elt->flag = 1;
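/* Editor's annotation -- an illustrative sketch, not part of the
   original source; register numbers are hypothetical.  Given
   *LOC = (plus (reg 101) (const_int 4)) where (reg 101) is known
   equivalent to (plus (reg 99) (const_int 8)), the second loop above
   asks simplify_gen_binary for (plus (reg 99) (const_int 12)) and
   keeps it if address_cost says it is no worse -- the REG+const
   merging case promised in the comment before that loop.  */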
2938 /* Given an operation (CODE, *PARG1, *PARG2), where CODE is a comparison
2939 operation (EQ, NE, GT, etc.), follow it back through the hash table and
2940 find what values are being compared.
2942 *PARG1 and *PARG2 are updated to contain the rtx representing the values
2943 actually being compared. For example, if *PARG1 was (cc0) and *PARG2
2944 was (const_int 0), *PARG1 and *PARG2 will be set to the objects that were
2945 compared to produce cc0.
2947 The return value is the comparison operator and is either the code of
2948 A or the code corresponding to the inverse of the comparison. */
2950 static enum rtx_code
2951 find_comparison_args (enum rtx_code code, rtx *parg1, rtx *parg2,
2952 enum machine_mode *pmode1, enum machine_mode *pmode2)
2954 rtx arg1, arg2;
2956 arg1 = *parg1, arg2 = *parg2;
2958 /* If ARG2 is const0_rtx, see what ARG1 is equivalent to. */
2960 while (arg2 == CONST0_RTX (GET_MODE (arg1)))
2962 /* Set nonzero when we find something of interest. */
2963 rtx x = 0;
2964 int reverse_code = 0;
2965 struct table_elt *p = 0;
2967 /* If arg1 is a COMPARE, extract the comparison arguments from it.
2968 On machines with CC0, this is the only case that can occur, since
2969 fold_rtx will return the COMPARE or item being compared with zero
2970 when given CC0. */
2972 if (GET_CODE (arg1) == COMPARE && arg2 == const0_rtx)
2973 x = arg1;
2975 /* If ARG1 is a comparison operator and CODE is testing for
2976 STORE_FLAG_VALUE, get the inner arguments. */
2978 else if (COMPARISON_P (arg1))
2980 #ifdef FLOAT_STORE_FLAG_VALUE
2981 REAL_VALUE_TYPE fsfv;
2982 #endif
2984 if (code == NE
2985 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
2986 && code == LT && STORE_FLAG_VALUE == -1)
2987 #ifdef FLOAT_STORE_FLAG_VALUE
2988 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_FLOAT
2989 && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
2990 REAL_VALUE_NEGATIVE (fsfv)))
2991 #endif
2993 x = arg1;
2994 else if (code == EQ
2995 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
2996 && code == GE && STORE_FLAG_VALUE == -1)
2997 #ifdef FLOAT_STORE_FLAG_VALUE
2998 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_FLOAT
2999 && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
3000 REAL_VALUE_NEGATIVE (fsfv)))
3001 #endif
3003 x = arg1, reverse_code = 1;
3006 /* ??? We could also check for
3008 (ne (and (eq (...) (const_int 1))) (const_int 0))
3010 and related forms, but let's wait until we see them occurring. */
3012 if (x == 0)
3013 /* Look up ARG1 in the hash table and see if it has an equivalence
3014 that lets us see what is being compared. */
3015 p = lookup (arg1, safe_hash (arg1, GET_MODE (arg1)) & HASH_MASK,
3016 GET_MODE (arg1));
3017 if (p)
3019 p = p->first_same_value;
3021 /* If what we compare is already known to be constant, that is as
3022 good as it gets.
3023 We need to break the loop in this case, because otherwise we
3024 can have an infinite loop when looking at a reg that is known
3025 to be a constant which is the same as a comparison of a reg
3026 against zero which appears later in the insn stream, which in
3027 turn is constant and the same as the comparison of the first reg
3028 against zero... */
3029 if (p->is_const)
3030 break;
3033 for (; p; p = p->next_same_value)
3035 enum machine_mode inner_mode = GET_MODE (p->exp);
3036 #ifdef FLOAT_STORE_FLAG_VALUE
3037 REAL_VALUE_TYPE fsfv;
3038 #endif
3040 /* If the entry isn't valid, skip it. */
3041 if (! exp_equiv_p (p->exp, p->exp, 1, 0))
3042 continue;
3044 if (GET_CODE (p->exp) == COMPARE
3045 /* Another possibility is that this machine has a compare insn
3046 that includes the comparison code. In that case, ARG1 would
3047 be equivalent to a comparison operation that would set ARG1 to
3048 either STORE_FLAG_VALUE or zero. If this is an NE operation,
3049 ORIG_CODE is the actual comparison being done; if it is an EQ,
3050 we must reverse ORIG_CODE. On machines with a negative value
3051 for STORE_FLAG_VALUE, also look at LT and GE operations. */
3052 || ((code == NE
3053 || (code == LT
3054 && GET_MODE_CLASS (inner_mode) == MODE_INT
3055 && (GET_MODE_BITSIZE (inner_mode)
3056 <= HOST_BITS_PER_WIDE_INT)
3057 && (STORE_FLAG_VALUE
3058 & ((HOST_WIDE_INT) 1
3059 << (GET_MODE_BITSIZE (inner_mode) - 1))))
3060 #ifdef FLOAT_STORE_FLAG_VALUE
3061 || (code == LT
3062 && GET_MODE_CLASS (inner_mode) == MODE_FLOAT
3063 && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
3064 REAL_VALUE_NEGATIVE (fsfv)))
3065 #endif
3067 && COMPARISON_P (p->exp)))
3069 x = p->exp;
3070 break;
3072 else if ((code == EQ
3073 || (code == GE
3074 && GET_MODE_CLASS (inner_mode) == MODE_INT
3075 && (GET_MODE_BITSIZE (inner_mode)
3076 <= HOST_BITS_PER_WIDE_INT)
3077 && (STORE_FLAG_VALUE
3078 & ((HOST_WIDE_INT) 1
3079 << (GET_MODE_BITSIZE (inner_mode) - 1))))
3080 #ifdef FLOAT_STORE_FLAG_VALUE
3081 || (code == GE
3082 && GET_MODE_CLASS (inner_mode) == MODE_FLOAT
3083 && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
3084 REAL_VALUE_NEGATIVE (fsfv)))
3085 #endif
3087 && COMPARISON_P (p->exp))
3089 reverse_code = 1;
3090 x = p->exp;
3091 break;
3094 /* If this is a non-trapping address, e.g. fp + constant, the
3095 equivalent is a better operand since it may let us predict
3096 the value of the comparison. */
3097 else if (!rtx_addr_can_trap_p (p->exp))
3099 arg1 = p->exp;
3100 continue;
3104 /* If we didn't find a useful equivalence for ARG1, we are done.
3105 Otherwise, set up for the next iteration. */
3106 if (x == 0)
3107 break;
3109 /* If we need to reverse the comparison, make sure that that is
3110 possible -- we can't necessarily infer the value of GE from LT
3111 with floating-point operands. */
3112 if (reverse_code)
3114 enum rtx_code reversed = reversed_comparison_code (x, NULL_RTX);
3115 if (reversed == UNKNOWN)
3116 break;
3117 else
3118 code = reversed;
3120 else if (COMPARISON_P (x))
3121 code = GET_CODE (x);
3122 arg1 = XEXP (x, 0), arg2 = XEXP (x, 1);
3125 /* Return our results. Return the modes from before fold_rtx
3126 because fold_rtx might produce const_int, and then it's too late. */
3127 *pmode1 = GET_MODE (arg1), *pmode2 = GET_MODE (arg2);
3128 *parg1 = fold_rtx (arg1, 0), *parg2 = fold_rtx (arg2, 0);
3130 return code;
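/* Editor's annotation -- an illustrative sketch, not part of the
   original source; register numbers are hypothetical.  If CODE is NE
   with *PARG1 = (reg 70), *PARG2 = (const_int 0), and the table knows
   (reg 70) is equivalent to (lt:SI (reg 65) (reg 66)), the loop above
   rewrites the test as LT on (reg 65) and (reg 66).  Had CODE been
   EQ, reverse_code would have flipped it to GE -- but only when
   reversed_comparison_code says the reversal is well defined.  */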
3133 /* If X is a nontrivial arithmetic operation on an argument
3134 for which a constant value can be determined, return
3135 the result of operating on that value, as a constant.
3136 Otherwise, return X, possibly with one or more operands
3137 modified by recursive calls to this function.
3139 If X is a register whose contents are known, we do NOT
3140 return those contents here. equiv_constant is called to
3141 perform that task.
3143 INSN is the insn that we may be modifying. If it is 0, make a copy
3144 of X before modifying it. */
3146 static rtx
3147 fold_rtx (rtx x, rtx insn)
3149 enum rtx_code code;
3150 enum machine_mode mode;
3151 const char *fmt;
3152 int i;
3153 rtx new = 0;
3154 int copied = 0;
3155 int must_swap = 0;
3157 /* Folded equivalents of first two operands of X. */
3158 rtx folded_arg0;
3159 rtx folded_arg1;
3161 /* Constant equivalents of first three operands of X;
3162 0 when no such equivalent is known. */
3163 rtx const_arg0;
3164 rtx const_arg1;
3165 rtx const_arg2;
3167 /* The mode of the first operand of X. We need this for sign and zero
3168 extends. */
3169 enum machine_mode mode_arg0;
3171 if (x == 0)
3172 return x;
3174 mode = GET_MODE (x);
3175 code = GET_CODE (x);
3176 switch (code)
3178 case CONST:
3179 case CONST_INT:
3180 case CONST_DOUBLE:
3181 case CONST_VECTOR:
3182 case SYMBOL_REF:
3183 case LABEL_REF:
3184 case REG:
3185 /* No use simplifying an EXPR_LIST,
3186 since EXPR_LISTs are used only for lists of args
3187 in a function call's REG_EQUAL note. */
3188 case EXPR_LIST:
3189 return x;
3191 #ifdef HAVE_cc0
3192 case CC0:
3193 return prev_insn_cc0;
3194 #endif
3196 case PC:
3197 /* If the next insn is a CODE_LABEL followed by a jump table,
3198 PC's value is a LABEL_REF pointing to that label. That
3199 lets us fold switch statements on the VAX. */
3201 rtx next;
3202 if (insn && tablejump_p (insn, &next, NULL))
3203 return gen_rtx_LABEL_REF (Pmode, next);
3205 break;
3207 case SUBREG:
3208 /* See if we previously assigned a constant value to this SUBREG. */
3209 if ((new = lookup_as_function (x, CONST_INT)) != 0
3210 || (new = lookup_as_function (x, CONST_DOUBLE)) != 0)
3211 return new;
3213 /* If this is a paradoxical SUBREG, we have no idea what value the
3214 extra bits would have. However, if the operand is equivalent
3215 to a SUBREG whose operand is the same as our mode, and all the
3216 modes are within a word, we can just use the inner operand
3217 because these SUBREGs just say how to treat the register.
3219 Similarly if we find an integer constant. */
3221 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3223 enum machine_mode imode = GET_MODE (SUBREG_REG (x));
3224 struct table_elt *elt;
3226 if (GET_MODE_SIZE (mode) <= UNITS_PER_WORD
3227 && GET_MODE_SIZE (imode) <= UNITS_PER_WORD
3228 && (elt = lookup (SUBREG_REG (x), HASH (SUBREG_REG (x), imode),
3229 imode)) != 0)
3230 for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
3232 if (CONSTANT_P (elt->exp)
3233 && GET_MODE (elt->exp) == VOIDmode)
3234 return elt->exp;
3236 if (GET_CODE (elt->exp) == SUBREG
3237 && GET_MODE (SUBREG_REG (elt->exp)) == mode
3238 && exp_equiv_p (elt->exp, elt->exp, 1, 0))
3239 return copy_rtx (SUBREG_REG (elt->exp));
3242 return x;
3245 /* Fold SUBREG_REG. If it changed, see if we can simplify the SUBREG.
3246 We might be able to if the SUBREG is extracting a single word in an
3247 integral mode or extracting the low part. */
3249 folded_arg0 = fold_rtx (SUBREG_REG (x), insn);
3250 const_arg0 = equiv_constant (folded_arg0);
3251 if (const_arg0)
3252 folded_arg0 = const_arg0;
3254 if (folded_arg0 != SUBREG_REG (x))
3256 new = simplify_subreg (mode, folded_arg0,
3257 GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
3258 if (new)
3259 return new;
3262 if (REG_P (folded_arg0)
3263 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (folded_arg0)))
3265 struct table_elt *elt;
3267 /* We can use HASH here since we know that canon_hash won't be
3268 called. */
3269 elt = lookup (folded_arg0,
3270 HASH (folded_arg0, GET_MODE (folded_arg0)),
3271 GET_MODE (folded_arg0));
3273 if (elt)
3274 elt = elt->first_same_value;
3276 if (subreg_lowpart_p (x))
3277 /* If this is a narrowing SUBREG and our operand is a REG, see
3278 if we can find an equivalence for REG that is an arithmetic
3279 operation in a wider mode where both operands are paradoxical
3280 SUBREGs from objects of our result mode. In that case, we
3281 couldn't report an equivalent value for that operation, since we
3282 don't know what the extra bits will be. But we can find an
3283 equivalence for this SUBREG by folding that operation in the
3284 narrow mode. This allows us to fold arithmetic in narrow modes
3285 when the machine only supports word-sized arithmetic.
3287 Also look for a case where we have a SUBREG whose operand
3288 is the same as our result. If both modes are smaller
3289 than a word, we are simply interpreting a register in
3290 different modes and we can use the inner value. */
3292 for (; elt; elt = elt->next_same_value)
3294 enum rtx_code eltcode = GET_CODE (elt->exp);
3296 /* Just check for unary and binary operations. */
3297 if (UNARY_P (elt->exp)
3298 && eltcode != SIGN_EXTEND
3299 && eltcode != ZERO_EXTEND
3300 && GET_CODE (XEXP (elt->exp, 0)) == SUBREG
3301 && GET_MODE (SUBREG_REG (XEXP (elt->exp, 0))) == mode
3302 && (GET_MODE_CLASS (mode)
3303 == GET_MODE_CLASS (GET_MODE (XEXP (elt->exp, 0)))))
3305 rtx op0 = SUBREG_REG (XEXP (elt->exp, 0));
3307 if (!REG_P (op0) && ! CONSTANT_P (op0))
3308 op0 = fold_rtx (op0, NULL_RTX);
3310 op0 = equiv_constant (op0);
3311 if (op0)
3312 new = simplify_unary_operation (GET_CODE (elt->exp), mode,
3313 op0, mode);
3315 else if (ARITHMETIC_P (elt->exp)
3316 && eltcode != DIV && eltcode != MOD
3317 && eltcode != UDIV && eltcode != UMOD
3318 && eltcode != ASHIFTRT && eltcode != LSHIFTRT
3319 && eltcode != ROTATE && eltcode != ROTATERT
3320 && ((GET_CODE (XEXP (elt->exp, 0)) == SUBREG
3321 && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 0)))
3322 == mode))
3323 || CONSTANT_P (XEXP (elt->exp, 0)))
3324 && ((GET_CODE (XEXP (elt->exp, 1)) == SUBREG
3325 && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 1)))
3326 == mode))
3327 || CONSTANT_P (XEXP (elt->exp, 1))))
3329 rtx op0 = gen_lowpart_common (mode, XEXP (elt->exp, 0));
3330 rtx op1 = gen_lowpart_common (mode, XEXP (elt->exp, 1));
3332 if (op0 && !REG_P (op0) && ! CONSTANT_P (op0))
3333 op0 = fold_rtx (op0, NULL_RTX);
3335 if (op0)
3336 op0 = equiv_constant (op0);
3338 if (op1 && !REG_P (op1) && ! CONSTANT_P (op1))
3339 op1 = fold_rtx (op1, NULL_RTX);
3341 if (op1)
3342 op1 = equiv_constant (op1);
3344 /* If we are looking for the low SImode part of
3345 (ashift:DI c (const_int 32)), it doesn't work
3346 to compute that in SImode, because a 32-bit shift
3347 in SImode is unpredictable. We know the value is 0. */
3348 if (op0 && op1
3349 && GET_CODE (elt->exp) == ASHIFT
3350 && GET_CODE (op1) == CONST_INT
3351 && INTVAL (op1) >= GET_MODE_BITSIZE (mode))
3353 if (INTVAL (op1)
3354 < GET_MODE_BITSIZE (GET_MODE (elt->exp)))
3355 /* If the count fits in the inner mode's width,
3356 but exceeds the outer mode's width,
3357 the value will get truncated to 0
3358 by the subreg. */
3359 new = CONST0_RTX (mode);
3360 else
3361 /* If the count exceeds even the inner mode's width,
3362 don't fold this expression. */
3363 new = 0;
3365 else if (op0 && op1)
3366 new = simplify_binary_operation (GET_CODE (elt->exp), mode, op0, op1);
3369 else if (GET_CODE (elt->exp) == SUBREG
3370 && GET_MODE (SUBREG_REG (elt->exp)) == mode
3371 && (GET_MODE_SIZE (GET_MODE (folded_arg0))
3372 <= UNITS_PER_WORD)
3373 && exp_equiv_p (elt->exp, elt->exp, 1, 0))
3374 new = copy_rtx (SUBREG_REG (elt->exp));
3376 if (new)
3377 return new;
3379 else
3380 /* A SUBREG resulting from a zero extension may fold to zero if
3381 it extracts higher bits than the ZERO_EXTEND's source bits.
3382 FIXME: if combine tried to, er, combine these instructions,
3383 this transformation may be moved to simplify_subreg. */
3384 for (; elt; elt = elt->next_same_value)
3386 if (GET_CODE (elt->exp) == ZERO_EXTEND
3387 && subreg_lsb (x)
3388 >= GET_MODE_BITSIZE (GET_MODE (XEXP (elt->exp, 0))))
3389 return CONST0_RTX (mode);
3393 return x;
3395 case NOT:
3396 case NEG:
3397 /* If we have (NOT Y), see if Y is known to be (NOT Z).
3398 If so, (NOT Y) simplifies to Z. Similarly for NEG. */
3399 new = lookup_as_function (XEXP (x, 0), code);
3400 if (new)
3401 return fold_rtx (copy_rtx (XEXP (new, 0)), insn);
3402 break;
3404 case MEM:
3405 /* If we are not actually processing an insn, don't try to find the
3406 best address. Not only don't we care, but we could modify the
3407 MEM in an invalid way since we have no insn to validate against. */
3408 if (insn != 0)
3409 find_best_addr (insn, &XEXP (x, 0), GET_MODE (x));
3412 /* Even if we don't fold in the insn itself,
3413 we can safely do so here, in hopes of getting a constant. */
3414 rtx addr = fold_rtx (XEXP (x, 0), NULL_RTX);
3415 rtx base = 0;
3416 HOST_WIDE_INT offset = 0;
3418 if (REG_P (addr)
3419 && REGNO_QTY_VALID_P (REGNO (addr)))
3421 int addr_q = REG_QTY (REGNO (addr));
3422 struct qty_table_elem *addr_ent = &qty_table[addr_q];
3424 if (GET_MODE (addr) == addr_ent->mode
3425 && addr_ent->const_rtx != NULL_RTX)
3426 addr = addr_ent->const_rtx;
3429 /* If address is constant, split it into a base and integer offset. */
3430 if (GET_CODE (addr) == SYMBOL_REF || GET_CODE (addr) == LABEL_REF)
3431 base = addr;
3432 else if (GET_CODE (addr) == CONST && GET_CODE (XEXP (addr, 0)) == PLUS
3433 && GET_CODE (XEXP (XEXP (addr, 0), 1)) == CONST_INT)
3435 base = XEXP (XEXP (addr, 0), 0);
3436 offset = INTVAL (XEXP (XEXP (addr, 0), 1));
3438 else if (GET_CODE (addr) == LO_SUM
3439 && GET_CODE (XEXP (addr, 1)) == SYMBOL_REF)
3440 base = XEXP (addr, 1);
3442 /* If this is a constant pool reference, we can fold it into its
3443 constant to allow better value tracking. */
3444 if (base && GET_CODE (base) == SYMBOL_REF
3445 && CONSTANT_POOL_ADDRESS_P (base))
3447 rtx constant = get_pool_constant (base);
3448 enum machine_mode const_mode = get_pool_mode (base);
3449 rtx new;
3451 if (CONSTANT_P (constant) && GET_CODE (constant) != CONST_INT)
3453 constant_pool_entries_cost = COST (constant);
3454 constant_pool_entries_regcost = approx_reg_cost (constant);
3457 /* If we are loading the full constant, we have an equivalence. */
3458 if (offset == 0 && mode == const_mode)
3459 return constant;
3461 /* If this actually isn't a constant (weird!), we can't do
3462 anything. Otherwise, handle the two most common cases:
3463 extracting a word from a multi-word constant, and extracting
3464 the low-order bits. Other cases don't seem common enough to
3465 worry about. */
3466 if (! CONSTANT_P (constant))
3467 return x;
3469 if (GET_MODE_CLASS (mode) == MODE_INT
3470 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
3471 && offset % UNITS_PER_WORD == 0
3472 && (new = operand_subword (constant,
3473 offset / UNITS_PER_WORD,
3474 0, const_mode)) != 0)
3475 return new;
3477 if (((BYTES_BIG_ENDIAN
3478 && offset == GET_MODE_SIZE (GET_MODE (constant)) - 1)
3479 || (! BYTES_BIG_ENDIAN && offset == 0))
3480 && (new = gen_lowpart (mode, constant)) != 0)
3481 return new;
3484 /* If this is a reference to a label at a known position in a jump
3485 table, we also know its value. */
3486 if (base && GET_CODE (base) == LABEL_REF)
3488 rtx label = XEXP (base, 0);
3489 rtx table_insn = NEXT_INSN (label);
3491 if (table_insn && JUMP_P (table_insn)
3492 && GET_CODE (PATTERN (table_insn)) == ADDR_VEC)
3494 rtx table = PATTERN (table_insn);
3496 if (offset >= 0
3497 && (offset / GET_MODE_SIZE (GET_MODE (table))
3498 < XVECLEN (table, 0)))
3499 return XVECEXP (table, 0,
3500 offset / GET_MODE_SIZE (GET_MODE (table)));
3502 if (table_insn && JUMP_P (table_insn)
3503 && GET_CODE (PATTERN (table_insn)) == ADDR_DIFF_VEC)
3505 rtx table = PATTERN (table_insn);
3507 if (offset >= 0
3508 && (offset / GET_MODE_SIZE (GET_MODE (table))
3509 < XVECLEN (table, 1)))
3511 offset /= GET_MODE_SIZE (GET_MODE (table));
3512 new = gen_rtx_MINUS (Pmode, XVECEXP (table, 1, offset),
3513 XEXP (table, 0));
3515 if (GET_MODE (table) != Pmode)
3516 new = gen_rtx_TRUNCATE (GET_MODE (table), new);
3518 /* Indicate this is a constant. This isn't a
3519 valid form of CONST, but it will only be used
3520 to fold the next insns and then discarded, so
3521 it should be safe.
3523 Note this expression must be explicitly discarded
3524 by cse_insn, else it may end up in a REG_EQUAL note
3525 and "escape" to cause problems elsewhere. */
3526 return gen_rtx_CONST (GET_MODE (new), new);
3531 return x;
3534 #ifdef NO_FUNCTION_CSE
3535 case CALL:
3536 if (CONSTANT_P (XEXP (XEXP (x, 0), 0)))
3537 return x;
3538 break;
3539 #endif
3541 case ASM_OPERANDS:
3542 for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
3543 validate_change (insn, &ASM_OPERANDS_INPUT (x, i),
3544 fold_rtx (ASM_OPERANDS_INPUT (x, i), insn), 0);
3545 break;
3547 default:
3548 break;
3551 const_arg0 = 0;
3552 const_arg1 = 0;
3553 const_arg2 = 0;
3554 mode_arg0 = VOIDmode;
3556 /* Try folding our operands.
3557 Then see which ones have constant values known. */
3559 fmt = GET_RTX_FORMAT (code);
3560 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3561 if (fmt[i] == 'e')
3563 rtx arg = XEXP (x, i);
3564 rtx folded_arg = arg, const_arg = 0;
3565 enum machine_mode mode_arg = GET_MODE (arg);
3566 rtx cheap_arg, expensive_arg;
3567 rtx replacements[2];
3568 int j;
3569 int old_cost = COST_IN (XEXP (x, i), code);
3571 /* Most arguments are cheap, so handle them specially. */
3572 switch (GET_CODE (arg))
3574 case REG:
3575 /* This is the same as calling equiv_constant; it is duplicated
3576 here for speed. */
3577 if (REGNO_QTY_VALID_P (REGNO (arg)))
3579 int arg_q = REG_QTY (REGNO (arg));
3580 struct qty_table_elem *arg_ent = &qty_table[arg_q];
3582 if (arg_ent->const_rtx != NULL_RTX
3583 && !REG_P (arg_ent->const_rtx)
3584 && GET_CODE (arg_ent->const_rtx) != PLUS)
3585 const_arg
3586 = gen_lowpart (GET_MODE (arg),
3587 arg_ent->const_rtx);
3589 break;
3591 case CONST:
3592 case CONST_INT:
3593 case SYMBOL_REF:
3594 case LABEL_REF:
3595 case CONST_DOUBLE:
3596 case CONST_VECTOR:
3597 const_arg = arg;
3598 break;
3600 #ifdef HAVE_cc0
3601 case CC0:
3602 folded_arg = prev_insn_cc0;
3603 mode_arg = prev_insn_cc0_mode;
3604 const_arg = equiv_constant (folded_arg);
3605 break;
3606 #endif
3608 default:
3609 folded_arg = fold_rtx (arg, insn);
3610 const_arg = equiv_constant (folded_arg);
3613 /* For the first three operands, see if the operand
3614 is constant or equivalent to a constant. */
3615 switch (i)
3617 case 0:
3618 folded_arg0 = folded_arg;
3619 const_arg0 = const_arg;
3620 mode_arg0 = mode_arg;
3621 break;
3622 case 1:
3623 folded_arg1 = folded_arg;
3624 const_arg1 = const_arg;
3625 break;
3626 case 2:
3627 const_arg2 = const_arg;
3628 break;
3631 /* Pick the least expensive of the folded argument and an
3632 equivalent constant argument. */
3633 if (const_arg == 0 || const_arg == folded_arg
3634 || COST_IN (const_arg, code) > COST_IN (folded_arg, code))
3635 cheap_arg = folded_arg, expensive_arg = const_arg;
3636 else
3637 cheap_arg = const_arg, expensive_arg = folded_arg;
3639 /* Try to replace the operand with the cheaper of the two
3640 possibilities. If it doesn't work and this is either of the first
3641 two operands of a commutative operation, try swapping them.
3642 If THAT fails, try the more expensive, provided it is cheaper
3643 than what is already there. */
3645 if (cheap_arg == XEXP (x, i))
3646 continue;
3648 if (insn == 0 && ! copied)
3650 x = copy_rtx (x);
3651 copied = 1;
3654 /* Order the replacements from cheapest to most expensive. */
3655 replacements[0] = cheap_arg;
3656 replacements[1] = expensive_arg;
3658 for (j = 0; j < 2 && replacements[j]; j++)
3660 int new_cost = COST_IN (replacements[j], code);
3662 /* Stop if what existed before was cheaper. Prefer constants
3663 in the case of a tie. */
3664 if (new_cost > old_cost
3665 || (new_cost == old_cost && CONSTANT_P (XEXP (x, i))))
3666 break;
3668 /* It's not safe to substitute the operand of a conversion
3669 operator with a constant, as the conversion's identity
3670 depends upon the mode of its operand. This optimization
3671 is handled by the call to simplify_unary_operation. */
3672 if (GET_RTX_CLASS (code) == RTX_UNARY
3673 && GET_MODE (replacements[j]) != mode_arg0
3674 && (code == ZERO_EXTEND
3675 || code == SIGN_EXTEND
3676 || code == TRUNCATE
3677 || code == FLOAT_TRUNCATE
3678 || code == FLOAT_EXTEND
3679 || code == FLOAT
3680 || code == FIX
3681 || code == UNSIGNED_FLOAT
3682 || code == UNSIGNED_FIX))
3683 continue;
3685 if (validate_change (insn, &XEXP (x, i), replacements[j], 0))
3686 break;
3688 if (GET_RTX_CLASS (code) == RTX_COMM_COMPARE
3689 || GET_RTX_CLASS (code) == RTX_COMM_ARITH)
3691 validate_change (insn, &XEXP (x, i), XEXP (x, 1 - i), 1);
3692 validate_change (insn, &XEXP (x, 1 - i), replacements[j], 1);
3694 if (apply_change_group ())
3696 /* Swap them back to be invalid so that this loop can
3697 continue and flag them to be swapped back later. */
3698 rtx tem;
3700 tem = XEXP (x, 0); XEXP (x, 0) = XEXP (x, 1);
3701 XEXP (x, 1) = tem;
3702 must_swap = 1;
3703 break;
3709 else
3711 if (fmt[i] == 'E')
3712 /* Don't try to fold inside a vector of expressions.
3713 Doing nothing is harmless. */
3717 /* If a commutative operation, place a constant integer as the second
3718 operand unless the first operand is also a constant integer. Otherwise,
3719 place any constant second unless the first operand is also a constant. */
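  /* For example, a hypothetical (plus:SI (const_int 4) (reg:SI 60))
     would be rewritten here as (plus:SI (reg:SI 60) (const_int 4)),
     so that later hash lookups see every commutative expression in
     one canonical operand order.  */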
3721 if (COMMUTATIVE_P (x))
3723 if (must_swap
3724 || swap_commutative_operands_p (const_arg0 ? const_arg0
3725 : XEXP (x, 0),
3726 const_arg1 ? const_arg1
3727 : XEXP (x, 1)))
3729 rtx tem = XEXP (x, 0);
3731 if (insn == 0 && ! copied)
3733 x = copy_rtx (x);
3734 copied = 1;
3737 validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
3738 validate_change (insn, &XEXP (x, 1), tem, 1);
3739 if (apply_change_group ())
3741 tem = const_arg0, const_arg0 = const_arg1, const_arg1 = tem;
3742 tem = folded_arg0, folded_arg0 = folded_arg1, folded_arg1 = tem;
3747 /* If X is an arithmetic operation, see if we can simplify it. */
3749 switch (GET_RTX_CLASS (code))
3751 case RTX_UNARY:
3753 int is_const = 0;
3755 /* We can't simplify extension ops unless we know the
3756 original mode. */
3757 if ((code == ZERO_EXTEND || code == SIGN_EXTEND)
3758 && mode_arg0 == VOIDmode)
3759 break;
3761 /* If we had a CONST, strip it off and put it back later if we
3762 fold. */
3763 if (const_arg0 != 0 && GET_CODE (const_arg0) == CONST)
3764 is_const = 1, const_arg0 = XEXP (const_arg0, 0);
3766 new = simplify_unary_operation (code, mode,
3767 const_arg0 ? const_arg0 : folded_arg0,
3768 mode_arg0);
3769 if (new != 0 && is_const)
3770 new = gen_rtx_CONST (mode, new);
3772 break;
3774 case RTX_COMPARE:
3775 case RTX_COMM_COMPARE:
3776 /* See what items are actually being compared and set FOLDED_ARG[01]
3777 to those values and CODE to the actual comparison code. If any are
3778 constant, set CONST_ARG0 and CONST_ARG1 appropriately. We needn't
3779 do anything if both operands are already known to be constant. */
3781 if (const_arg0 == 0 || const_arg1 == 0)
3783 struct table_elt *p0, *p1;
3784 rtx true_rtx = const_true_rtx, false_rtx = const0_rtx;
3785 enum machine_mode mode_arg1;
3787 #ifdef FLOAT_STORE_FLAG_VALUE
3788 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
3790 true_rtx = (CONST_DOUBLE_FROM_REAL_VALUE
3791 (FLOAT_STORE_FLAG_VALUE (mode), mode));
3792 false_rtx = CONST0_RTX (mode);
3794 #endif
3796 code = find_comparison_args (code, &folded_arg0, &folded_arg1,
3797 &mode_arg0, &mode_arg1);
3798 const_arg0 = equiv_constant (folded_arg0);
3799 const_arg1 = equiv_constant (folded_arg1);
3801 /* If the mode is VOIDmode or a MODE_CC mode, we don't know
3802 what kinds of things are being compared, so we can't do
3803 anything with this comparison. */
3805 if (mode_arg0 == VOIDmode || GET_MODE_CLASS (mode_arg0) == MODE_CC)
3806 break;
3808 /* If we do not now have two constants being compared, see
3809 if we can nevertheless deduce some things about the
3810 comparison. */
3811 if (const_arg0 == 0 || const_arg1 == 0)
3813 /* Some addresses are known to be nonzero. We don't know
3814 their sign, but equality comparisons are known. */
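	      /* For example, (eq (symbol_ref "x") (const_int 0)) can be
		 folded to false here, since the address of a (non-weak)
		 symbol is never zero, even though its sign is unknown.  */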
3815 if (const_arg1 == const0_rtx
3816 && nonzero_address_p (folded_arg0))
3818 if (code == EQ)
3819 return false_rtx;
3820 else if (code == NE)
3821 return true_rtx;
3824 /* See if the two operands are the same. */
3826 if (folded_arg0 == folded_arg1
3827 || (REG_P (folded_arg0)
3828 && REG_P (folded_arg1)
3829 && (REG_QTY (REGNO (folded_arg0))
3830 == REG_QTY (REGNO (folded_arg1))))
3831 || ((p0 = lookup (folded_arg0,
3832 (safe_hash (folded_arg0, mode_arg0)
3833 & HASH_MASK), mode_arg0))
3834 && (p1 = lookup (folded_arg1,
3835 (safe_hash (folded_arg1, mode_arg0)
3836 & HASH_MASK), mode_arg0))
3837 && p0->first_same_value == p1->first_same_value))
3839 /* Sadly two equal NaNs are not equivalent. */
3840 if (!HONOR_NANS (mode_arg0))
3841 return ((code == EQ || code == LE || code == GE
3842 || code == LEU || code == GEU || code == UNEQ
3843 || code == UNLE || code == UNGE
3844 || code == ORDERED)
3845 ? true_rtx : false_rtx);
3846 /* Take care for the FP compares we can resolve. */
3847 if (code == UNEQ || code == UNLE || code == UNGE)
3848 return true_rtx;
3849 if (code == LTGT || code == LT || code == GT)
3850 return false_rtx;
3853 /* If FOLDED_ARG0 is a register, see if the comparison we are
3854 doing now is either the same as we did before or the reverse
3855 (we only check the reverse if not floating-point). */
3856 else if (REG_P (folded_arg0))
3858 int qty = REG_QTY (REGNO (folded_arg0));
3860 if (REGNO_QTY_VALID_P (REGNO (folded_arg0)))
3862 struct qty_table_elem *ent = &qty_table[qty];
3864 if ((comparison_dominates_p (ent->comparison_code, code)
3865 || (! FLOAT_MODE_P (mode_arg0)
3866 && comparison_dominates_p (ent->comparison_code,
3867 reverse_condition (code))))
3868 && (rtx_equal_p (ent->comparison_const, folded_arg1)
3869 || (const_arg1
3870 && rtx_equal_p (ent->comparison_const,
3871 const_arg1))
3872 || (REG_P (folded_arg1)
3873 && (REG_QTY (REGNO (folded_arg1)) == ent->comparison_qty))))
3874 return (comparison_dominates_p (ent->comparison_code, code)
3875 ? true_rtx : false_rtx);
3881 /* If we are comparing against zero, see if the first operand is
3882 equivalent to an IOR with a constant. If so, we may be able to
3883 determine the result of this comparison. */
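	  /* Suppose folded_arg0 is known to equal (ior:SI (reg:SI 61)
	     (const_int 4)).  Then it has bit 2 set and cannot be zero,
	     so (ne ... (const_int 0)) folds to true and (eq ...) to
	     false; if the constant instead had the sign bit set, signed
	     (lt ...) and (ge ...) against zero could be resolved too.  */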
3885 if (const_arg1 == const0_rtx)
3887 rtx y = lookup_as_function (folded_arg0, IOR);
3888 rtx inner_const;
3890 if (y != 0
3891 && (inner_const = equiv_constant (XEXP (y, 1))) != 0
3892 && GET_CODE (inner_const) == CONST_INT
3893 && INTVAL (inner_const) != 0)
3895 int sign_bitnum = GET_MODE_BITSIZE (mode_arg0) - 1;
3896 int has_sign = (HOST_BITS_PER_WIDE_INT >= sign_bitnum
3897 && (INTVAL (inner_const)
3898 & ((HOST_WIDE_INT) 1 << sign_bitnum)));
3899 rtx true_rtx = const_true_rtx, false_rtx = const0_rtx;
3901 #ifdef FLOAT_STORE_FLAG_VALUE
3902 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
3904 true_rtx = (CONST_DOUBLE_FROM_REAL_VALUE
3905 (FLOAT_STORE_FLAG_VALUE (mode), mode));
3906 false_rtx = CONST0_RTX (mode);
3908 #endif
3910 switch (code)
3912 case EQ:
3913 return false_rtx;
3914 case NE:
3915 return true_rtx;
3916 case LT: case LE:
3917 if (has_sign)
3918 return true_rtx;
3919 break;
3920 case GT: case GE:
3921 if (has_sign)
3922 return false_rtx;
3923 break;
3924 default:
3925 break;
3931 rtx op0 = const_arg0 ? const_arg0 : folded_arg0;
3932 rtx op1 = const_arg1 ? const_arg1 : folded_arg1;
3933 new = simplify_relational_operation (code, mode, mode_arg0, op0, op1);
3935 break;
3937 case RTX_BIN_ARITH:
3938 case RTX_COMM_ARITH:
3939 switch (code)
3941 case PLUS:
3942 /* If the second operand is a LABEL_REF, see if the first is a MINUS
3943 with that LABEL_REF as its second operand. If so, the result is
3944 the first operand of that MINUS. This handles switches with an
3945 ADDR_DIFF_VEC table. */
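	  /* That is, a hypothetical
	     (plus (minus (label_ref L2) (label_ref L1)) (label_ref L1))
	     folds to just (label_ref L2).  */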
3946 if (const_arg1 && GET_CODE (const_arg1) == LABEL_REF)
3948 rtx y
3949 = GET_CODE (folded_arg0) == MINUS ? folded_arg0
3950 : lookup_as_function (folded_arg0, MINUS);
3952 if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
3953 && XEXP (XEXP (y, 1), 0) == XEXP (const_arg1, 0))
3954 return XEXP (y, 0);
3956 /* Now try for a CONST of a MINUS like the above. */
3957 if ((y = (GET_CODE (folded_arg0) == CONST ? folded_arg0
3958 : lookup_as_function (folded_arg0, CONST))) != 0
3959 && GET_CODE (XEXP (y, 0)) == MINUS
3960 && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
3961 && XEXP (XEXP (XEXP (y, 0), 1), 0) == XEXP (const_arg1, 0))
3962 return XEXP (XEXP (y, 0), 0);
3965 /* Likewise if the operands are in the other order. */
3966 if (const_arg0 && GET_CODE (const_arg0) == LABEL_REF)
3968 rtx y
3969 = GET_CODE (folded_arg1) == MINUS ? folded_arg1
3970 : lookup_as_function (folded_arg1, MINUS);
3972 if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
3973 && XEXP (XEXP (y, 1), 0) == XEXP (const_arg0, 0))
3974 return XEXP (y, 0);
3976 /* Now try for a CONST of a MINUS like the above. */
3977 if ((y = (GET_CODE (folded_arg1) == CONST ? folded_arg1
3978 : lookup_as_function (folded_arg1, CONST))) != 0
3979 && GET_CODE (XEXP (y, 0)) == MINUS
3980 && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
3981 && XEXP (XEXP (XEXP (y, 0), 1), 0) == XEXP (const_arg0, 0))
3982 return XEXP (XEXP (y, 0), 0);
3985 /* If second operand is a register equivalent to a negative
3986 CONST_INT, see if we can find a register equivalent to the
3987 positive constant. Make a MINUS if so. Don't do this for
3988 a non-negative constant since we might then alternate between
3989 choosing positive and negative constants. Having the positive
3990 constant previously-used is the more common case. Be sure
3991 the resulting constant is non-negative; if const_arg1 were
3992 the smallest negative number this would overflow: depending
3993 on the mode, this would either just be the same value (and
3994 hence not save anything) or be incorrect. */
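	  /* For example, if (reg:SI 70) is known to equal (const_int -4)
	     and some (reg:SI 71) is known to equal (const_int 4), then a
	     hypothetical (plus:SI (reg:SI 69) (reg:SI 70)) can become
	     (minus:SI (reg:SI 69) (reg:SI 71)), reusing the register
	     that holds the positive constant.  */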
3995 if (const_arg1 != 0 && GET_CODE (const_arg1) == CONST_INT
3996 && INTVAL (const_arg1) < 0
3997 /* This used to test
3999 -INTVAL (const_arg1) >= 0
4001 But the Sun V5.0 compilers mis-compiled that test. So
4002 instead we test for the problematic value in a more direct
4003 manner and hope the Sun compilers get it correct. */
4004 && INTVAL (const_arg1) !=
4005 ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1))
4006 && REG_P (folded_arg1))
4008 rtx new_const = GEN_INT (-INTVAL (const_arg1));
4009 struct table_elt *p
4010 = lookup (new_const, safe_hash (new_const, mode) & HASH_MASK,
4011 mode);
4013 if (p)
4014 for (p = p->first_same_value; p; p = p->next_same_value)
4015 if (REG_P (p->exp))
4016 return simplify_gen_binary (MINUS, mode, folded_arg0,
4017 canon_reg (p->exp, NULL_RTX));
4019 goto from_plus;
4021 case MINUS:
4022 /* If we have (MINUS Y C), see if Y is known to be (PLUS Z C2).
4023 If so, produce (PLUS Z C2-C). */
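	  /* For example, if Y is known to be
	     (plus:SI (reg:SI 65) (const_int 10)), then
	     (minus:SI Y (const_int 3)) folds here to
	     (plus:SI (reg:SI 65) (const_int 7)).  */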
4024 if (const_arg1 != 0 && GET_CODE (const_arg1) == CONST_INT)
4026 rtx y = lookup_as_function (XEXP (x, 0), PLUS);
4027 if (y && GET_CODE (XEXP (y, 1)) == CONST_INT)
4028 return fold_rtx (plus_constant (copy_rtx (y),
4029 -INTVAL (const_arg1)),
4030 NULL_RTX);
4033 /* Fall through. */
4035 from_plus:
4036 case SMIN: case SMAX: case UMIN: case UMAX:
4037 case IOR: case AND: case XOR:
4038 case MULT:
4039 case ASHIFT: case LSHIFTRT: case ASHIFTRT:
4040 /* If we have (<op> <reg> <const_int>) for an associative OP and REG
4041 is known to be of similar form, we may be able to replace the
4042 operation with a combined operation. This may eliminate the
4043 intermediate operation if every use is simplified in this way.
4044 Note that the similar optimization done by combine.c only works
4045 if the intermediate operation's result has only one reference. */
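	  /* For example, if (reg:SI 66) is known to be
	     (ashift:SI (reg:SI 65) (const_int 2)), then
	     (ashift:SI (reg:SI 66) (const_int 3)) can be rewritten as
	     (ashift:SI (reg:SI 65) (const_int 5)), and the intermediate
	     shift may become dead.  */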
4047 if (REG_P (folded_arg0)
4048 && const_arg1 && GET_CODE (const_arg1) == CONST_INT)
4050 int is_shift
4051 = (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT);
4052 rtx y = lookup_as_function (folded_arg0, code);
4053 rtx inner_const;
4054 enum rtx_code associate_code;
4055 rtx new_const;
4057 if (y == 0
4058 || 0 == (inner_const
4059 = equiv_constant (fold_rtx (XEXP (y, 1), 0)))
4060 || GET_CODE (inner_const) != CONST_INT
4061 /* If we have compiled a statement like
4062 "if (x == (x & mask1))", and now are looking at
4063 "x & mask2", we will have a case where the first operand
4064 of Y is the same as our first operand. Unless we detect
4065 this case, an infinite loop will result. */
4066 || XEXP (y, 0) == folded_arg0)
4067 break;
4069 /* Don't associate these operations if they are a PLUS with the
4070 same constant and it is a power of two. These might be doable
4071 with a pre- or post-increment. Similarly for two subtracts of
4072 identical powers of two with post decrement. */
4074 if (code == PLUS && const_arg1 == inner_const
4075 && ((HAVE_PRE_INCREMENT
4076 && exact_log2 (INTVAL (const_arg1)) >= 0)
4077 || (HAVE_POST_INCREMENT
4078 && exact_log2 (INTVAL (const_arg1)) >= 0)
4079 || (HAVE_PRE_DECREMENT
4080 && exact_log2 (- INTVAL (const_arg1)) >= 0)
4081 || (HAVE_POST_DECREMENT
4082 && exact_log2 (- INTVAL (const_arg1)) >= 0)))
4083 break;
4085 /* Compute the code used to compose the constants. For example,
4086 A-C1-C2 is A-(C1 + C2), so if CODE == MINUS, we want PLUS. */
4088 associate_code = (is_shift || code == MINUS ? PLUS : code);
4090 new_const = simplify_binary_operation (associate_code, mode,
4091 const_arg1, inner_const);
4093 if (new_const == 0)
4094 break;
4096 /* If we are associating shift operations, don't let this
4097 produce a shift of the size of the object or larger.
4098 This could occur when we follow a sign-extend by a right
4099 shift on a machine that does a sign-extend as a pair
4100 of shifts. */
4102 if (is_shift && GET_CODE (new_const) == CONST_INT
4103 && INTVAL (new_const) >= GET_MODE_BITSIZE (mode))
4105 /* As an exception, we can turn an ASHIFTRT of this
4106 form into a shift of the number of bits - 1. */
4107 if (code == ASHIFTRT)
4108 new_const = GEN_INT (GET_MODE_BITSIZE (mode) - 1);
4109 else
4110 break;
4113 y = copy_rtx (XEXP (y, 0));
4115 /* If Y contains our first operand (the most common way this
4116 can happen is if Y is a MEM), we would go into an infinite
4117 loop if we tried to fold it. So don't in that case. */
4119 if (! reg_mentioned_p (folded_arg0, y))
4120 y = fold_rtx (y, insn);
4122 return simplify_gen_binary (code, mode, y, new_const);
4124 break;
4126 case DIV: case UDIV:
4127 /* ??? The associative optimization performed immediately above is
4128 also possible for DIV and UDIV using associate_code of MULT.
4129 However, we would need extra code to verify that the
4130 multiplication does not overflow, that is, there is no overflow
4131 in the calculation of new_const. */
4132 break;
4134 default:
4135 break;
4138 new = simplify_binary_operation (code, mode,
4139 const_arg0 ? const_arg0 : folded_arg0,
4140 const_arg1 ? const_arg1 : folded_arg1);
4141 break;
4143 case RTX_OBJ:
4144 /* (lo_sum (high X) X) is simply X. */
4145 if (code == LO_SUM && const_arg0 != 0
4146 && GET_CODE (const_arg0) == HIGH
4147 && rtx_equal_p (XEXP (const_arg0, 0), const_arg1))
4148 return const_arg1;
4149 break;
4151 case RTX_TERNARY:
4152 case RTX_BITFIELD_OPS:
4153 new = simplify_ternary_operation (code, mode, mode_arg0,
4154 const_arg0 ? const_arg0 : folded_arg0,
4155 const_arg1 ? const_arg1 : folded_arg1,
4156 const_arg2 ? const_arg2 : XEXP (x, 2));
4157 break;
4159 default:
4160 break;
4163 return new ? new : x;
4166 /* Return a constant value currently equivalent to X.
4167 Return 0 if we don't know one. */
4169 static rtx
4170 equiv_constant (rtx x)
4172 if (REG_P (x)
4173 && REGNO_QTY_VALID_P (REGNO (x)))
4175 int x_q = REG_QTY (REGNO (x));
4176 struct qty_table_elem *x_ent = &qty_table[x_q];
4178 if (x_ent->const_rtx)
4179 x = gen_lowpart (GET_MODE (x), x_ent->const_rtx);
4182 if (x == 0 || CONSTANT_P (x))
4183 return x;
4185 /* If X is a MEM, try to fold it outside the context of any insn to see if
4186 it might be equivalent to a constant. That handles the case where it
4187 is a constant-pool reference. Then try to look it up in the hash table
4188 in case it is something whose value we have seen before. */
4190 if (MEM_P (x))
4192 struct table_elt *elt;
4194 x = fold_rtx (x, NULL_RTX);
4195 if (CONSTANT_P (x))
4196 return x;
4198 elt = lookup (x, safe_hash (x, GET_MODE (x)) & HASH_MASK, GET_MODE (x));
4199 if (elt == 0)
4200 return 0;
4202 for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
4203 if (elt->is_const && CONSTANT_P (elt->exp))
4204 return elt->exp;
4207 return 0;
4210 /* Assuming that X is an rtx (e.g., MEM, REG or SUBREG) for a fixed-point
4211 number, return an rtx (MEM, SUBREG, or CONST_INT) that refers to the
4212 least-significant part of X.
4213 MODE specifies how big a part of X to return.
4215 If the requested operation cannot be done, 0 is returned.
4217 This is similar to gen_lowpart_general in emit-rtl.c. */
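/* As a worked example (assuming a 4-byte UNITS_PER_WORD): taking the
   QImode lowpart of an SImode MEM at address A uses offset 0, i.e. the
   byte at A, on a little-endian target, but the byte at A + 3 when
   BYTES_BIG_ENDIAN, since the least-significant byte then sits at the
   highest address within the word.  */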
rtx
4220 gen_lowpart_if_possible (enum machine_mode mode, rtx x)
4222 rtx result = gen_lowpart_common (mode, x);
4224 if (result)
4225 return result;
4226 else if (MEM_P (x))
4228 /* This is the only other case we handle. */
4229 int offset = 0;
4230 rtx new;
4232 if (WORDS_BIG_ENDIAN)
4233 offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
4234 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
4235 if (BYTES_BIG_ENDIAN)
4236 /* Adjust the address so that the address-after-the-data is
4237 unchanged. */
4238 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
4239 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
4241 new = adjust_address_nv (x, mode, offset);
4242 if (! memory_address_p (mode, XEXP (new, 0)))
4243 return 0;
4245 return new;
4247 else
4248 return 0;
4251 /* Given INSN, a jump insn, TAKEN indicates if we are following the "taken"
4252 branch. It will be zero if not.
4254 In certain cases, this can cause us to add an equivalence. For example,
4255 if we are following the taken case of
4256 if (i == 2)
4257 we can add the fact that `i' and `2' are now equivalent.
4259 In any case, we can record that this comparison was passed. If the same
4260 comparison is seen later, we will know its value. */
4262 static void
4263 record_jump_equiv (rtx insn, int taken)
4265 int cond_known_true;
4266 rtx op0, op1;
4267 rtx set;
4268 enum machine_mode mode, mode0, mode1;
4269 int reversed_nonequality = 0;
4270 enum rtx_code code;
4272 /* Ensure this is the right kind of insn. */
4273 if (! any_condjump_p (insn))
4274 return;
4275 set = pc_set (insn);
4277 /* See if this jump condition is known true or false. */
4278 if (taken)
4279 cond_known_true = (XEXP (SET_SRC (set), 2) == pc_rtx);
4280 else
4281 cond_known_true = (XEXP (SET_SRC (set), 1) == pc_rtx);
4283 /* Get the type of comparison being done and the operands being compared.
4284 If we had to reverse a non-equality condition, record that fact so we
4285 know that it isn't valid for floating-point. */
4286 code = GET_CODE (XEXP (SET_SRC (set), 0));
4287 op0 = fold_rtx (XEXP (XEXP (SET_SRC (set), 0), 0), insn);
4288 op1 = fold_rtx (XEXP (XEXP (SET_SRC (set), 0), 1), insn);
4290 code = find_comparison_args (code, &op0, &op1, &mode0, &mode1);
4291 if (! cond_known_true)
4293 code = reversed_comparison_code_parts (code, op0, op1, insn);
4295 /* Don't remember if we can't find the inverse. */
4296 if (code == UNKNOWN)
4297 return;
4300 /* The mode is the mode of the non-constant. */
4301 mode = mode0;
4302 if (mode1 != VOIDmode)
4303 mode = mode1;
4305 record_jump_cond (code, mode, op0, op1, reversed_nonequality);
4308 /* We know that comparison CODE applied to OP0 and OP1 in MODE is true.
4309 REVERSED_NONEQUALITY is nonzero if CODE had to be swapped.
4310 Make any useful entries we can with that information. Called from
4311 above function and called recursively. */
4313 static void
4314 record_jump_cond (enum rtx_code code, enum machine_mode mode, rtx op0,
4315 rtx op1, int reversed_nonequality)
4317 unsigned op0_hash, op1_hash;
4318 int op0_in_memory, op1_in_memory;
4319 struct table_elt *op0_elt, *op1_elt;
4321 /* If OP0 and OP1 are known equal, and either is a paradoxical SUBREG,
4322 we know that they are also equal in the smaller mode (this is also
4323 true for all smaller modes whether or not there is a SUBREG, but
4324 is not worth testing for with no SUBREG). */
4326 /* Note that GET_MODE (op0) may not equal MODE. */
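  /* For instance, if (subreg:SI (reg:QI 72) 0) is found equal to
     (reg:SI 73), the QImode register is also recorded as equal to the
     QImode lowpart of (reg:SI 73).  */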
4327 if (code == EQ && GET_CODE (op0) == SUBREG
4328 && (GET_MODE_SIZE (GET_MODE (op0))
4329 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
4331 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
4332 rtx tem = gen_lowpart (inner_mode, op1);
4334 record_jump_cond (code, mode, SUBREG_REG (op0),
4335 tem ? tem : gen_rtx_SUBREG (inner_mode, op1, 0),
4336 reversed_nonequality);
4339 if (code == EQ && GET_CODE (op1) == SUBREG
4340 && (GET_MODE_SIZE (GET_MODE (op1))
4341 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
4343 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
4344 rtx tem = gen_lowpart (inner_mode, op0);
4346 record_jump_cond (code, mode, SUBREG_REG (op1),
4347 tem ? tem : gen_rtx_SUBREG (inner_mode, op0, 0),
4348 reversed_nonequality);
4351 /* Similarly, if this is an NE comparison, and either is a SUBREG
4352 making a smaller mode, we know the whole thing is also NE. */
4354 /* Note that GET_MODE (op0) may not equal MODE;
4355 if we test MODE instead, we can get an infinite recursion
4356 alternating between two modes each wider than MODE. */
4358 if (code == NE && GET_CODE (op0) == SUBREG
4359 && subreg_lowpart_p (op0)
4360 && (GET_MODE_SIZE (GET_MODE (op0))
4361 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
4363 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
4364 rtx tem = gen_lowpart (inner_mode, op1);
4366 record_jump_cond (code, mode, SUBREG_REG (op0),
4367 tem ? tem : gen_rtx_SUBREG (inner_mode, op1, 0),
4368 reversed_nonequality);
4371 if (code == NE && GET_CODE (op1) == SUBREG
4372 && subreg_lowpart_p (op1)
4373 && (GET_MODE_SIZE (GET_MODE (op1))
4374 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
4376 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
4377 rtx tem = gen_lowpart (inner_mode, op0);
4379 record_jump_cond (code, mode, SUBREG_REG (op1),
4380 tem ? tem : gen_rtx_SUBREG (inner_mode, op0, 0),
4381 reversed_nonequality);
4384 /* Hash both operands. */
4386 do_not_record = 0;
4387 hash_arg_in_memory = 0;
4388 op0_hash = HASH (op0, mode);
4389 op0_in_memory = hash_arg_in_memory;
4391 if (do_not_record)
4392 return;
4394 do_not_record = 0;
4395 hash_arg_in_memory = 0;
4396 op1_hash = HASH (op1, mode);
4397 op1_in_memory = hash_arg_in_memory;
4399 if (do_not_record)
4400 return;
4402 /* Look up both operands. */
4403 op0_elt = lookup (op0, op0_hash, mode);
4404 op1_elt = lookup (op1, op1_hash, mode);
4406 /* If both operands are already equivalent or if they are not in the
4407 table but are identical, do nothing. */
4408 if ((op0_elt != 0 && op1_elt != 0
4409 && op0_elt->first_same_value == op1_elt->first_same_value)
4410 || op0 == op1 || rtx_equal_p (op0, op1))
4411 return;
4413 /* If we aren't setting two things equal all we can do is save this
4414 comparison. Similarly if this is floating-point. In the latter
4415 case, OP1 might be zero and both -0.0 and 0.0 are equal to it.
4416 If we record the equality, we might inadvertently delete code
4417 whose intent was to change -0 to +0. */
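  /* For example, after the taken branch of a comparison with 0.0, the
     tested register might still hold -0.0; recording it as equal to
     +0.0 could let a later store of +0.0 into it be deleted, changing
     the sign of the value.  */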
4419 if (code != EQ || FLOAT_MODE_P (GET_MODE (op0)))
4421 struct qty_table_elem *ent;
4422 int qty;
4424 /* If we reversed a floating-point comparison, if OP0 is not a
4425 register, or if OP1 is neither a register nor a constant, we can't
4426 do anything. */
4428 if (!REG_P (op1))
4429 op1 = equiv_constant (op1);
4431 if ((reversed_nonequality && FLOAT_MODE_P (mode))
4432 || !REG_P (op0) || op1 == 0)
4433 return;
4435 /* Put OP0 in the hash table if it isn't already. This gives it a
4436 new quantity number. */
4437 if (op0_elt == 0)
4439 if (insert_regs (op0, NULL, 0))
4441 rehash_using_reg (op0);
4442 op0_hash = HASH (op0, mode);
4444 /* If OP0 is contained in OP1, this changes its hash code
4445 as well. Faster to rehash than to check, except
4446 for the simple case of a constant. */
4447 if (! CONSTANT_P (op1))
4448 op1_hash = HASH (op1,mode);
4451 op0_elt = insert (op0, NULL, op0_hash, mode);
4452 op0_elt->in_memory = op0_in_memory;
4455 qty = REG_QTY (REGNO (op0));
4456 ent = &qty_table[qty];
4458 ent->comparison_code = code;
4459 if (REG_P (op1))
4461 /* Look it up again--in case op0 and op1 are the same. */
4462 op1_elt = lookup (op1, op1_hash, mode);
4464 /* Put OP1 in the hash table so it gets a new quantity number. */
4465 if (op1_elt == 0)
4467 if (insert_regs (op1, NULL, 0))
4469 rehash_using_reg (op1);
4470 op1_hash = HASH (op1, mode);
4473 op1_elt = insert (op1, NULL, op1_hash, mode);
4474 op1_elt->in_memory = op1_in_memory;
4477 ent->comparison_const = NULL_RTX;
4478 ent->comparison_qty = REG_QTY (REGNO (op1));
4480 else
4482 ent->comparison_const = op1;
4483 ent->comparison_qty = -1;
4486 return;
4489 /* If either side is still missing an equivalence, make it now,
4490 then merge the equivalences. */
4492 if (op0_elt == 0)
4494 if (insert_regs (op0, NULL, 0))
4496 rehash_using_reg (op0);
4497 op0_hash = HASH (op0, mode);
4500 op0_elt = insert (op0, NULL, op0_hash, mode);
4501 op0_elt->in_memory = op0_in_memory;
4504 if (op1_elt == 0)
4506 if (insert_regs (op1, NULL, 0))
4508 rehash_using_reg (op1);
4509 op1_hash = HASH (op1, mode);
4512 op1_elt = insert (op1, NULL, op1_hash, mode);
4513 op1_elt->in_memory = op1_in_memory;
4516 merge_equiv_classes (op0_elt, op1_elt);
4517 last_jump_equiv_class = op0_elt;
4520 /* CSE processing for one instruction.
4521 First simplify sources and addresses of all assignments
4522 in the instruction, using previously-computed equivalent values.
4523 Then install the new sources and destinations in the table
4524 of available values.
4526 If LIBCALL_INSN is nonzero, don't record any equivalence made in
4527 the insn. It means that INSN is inside libcall block. In this
4528 case LIBCALL_INSN is the corresponding insn with REG_LIBCALL. */
4530 /* Data on one SET contained in the instruction. */
4532 struct set
4534 /* The SET rtx itself. */
4535 rtx rtl;
4536 /* The SET_SRC of the rtx (the original value, if it is changing). */
4537 rtx src;
4538 /* The hash-table element for the SET_SRC of the SET. */
4539 struct table_elt *src_elt;
4540 /* Hash value for the SET_SRC. */
4541 unsigned src_hash;
4542 /* Hash value for the SET_DEST. */
4543 unsigned dest_hash;
4544 /* The SET_DEST, with SUBREG, etc., stripped. */
4545 rtx inner_dest;
4546 /* Nonzero if the SET_SRC is in memory. */
4547 char src_in_memory;
4548 /* Nonzero if the SET_SRC contains something
4549 whose value cannot be predicted and understood. */
4550 char src_volatile;
4551 /* Original machine mode, in case it becomes a CONST_INT.
4552 The size of this field should match the size of the mode
4553 field of struct rtx_def (see rtl.h). */
4554 ENUM_BITFIELD(machine_mode) mode : 8;
4555 /* A constant equivalent for SET_SRC, if any. */
4556 rtx src_const;
4557 /* Original SET_SRC value used for libcall notes. */
4558 rtx orig_src;
4559 /* Hash value of constant equivalent for SET_SRC. */
4560 unsigned src_const_hash;
4561 /* Table entry for constant equivalent for SET_SRC, if any. */
4562 struct table_elt *src_const_elt;
4565 static void
4566 cse_insn (rtx insn, rtx libcall_insn)
4568 rtx x = PATTERN (insn);
4569 int i;
4570 rtx tem;
4571 int n_sets = 0;
4573 #ifdef HAVE_cc0
4574 /* Records what this insn does to set CC0. */
4575 rtx this_insn_cc0 = 0;
4576 enum machine_mode this_insn_cc0_mode = VOIDmode;
4577 #endif
4579 rtx src_eqv = 0;
4580 struct table_elt *src_eqv_elt = 0;
4581 int src_eqv_volatile = 0;
4582 int src_eqv_in_memory = 0;
4583 unsigned src_eqv_hash = 0;
4585 struct set *sets = (struct set *) 0;
4587 this_insn = insn;
4589 /* Find all the SETs and CLOBBERs in this instruction.
4590 Record all the SETs in the array `sets' and count them.
4591 Also determine whether there is a CLOBBER that invalidates
4592 all memory references, or all references at varying addresses. */
4594 if (CALL_P (insn))
4596 for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
4598 if (GET_CODE (XEXP (tem, 0)) == CLOBBER)
4599 invalidate (SET_DEST (XEXP (tem, 0)), VOIDmode);
4600 XEXP (tem, 0) = canon_reg (XEXP (tem, 0), insn);
4604 if (GET_CODE (x) == SET)
4606 sets = alloca (sizeof (struct set));
4607 sets[0].rtl = x;
4609 /* Ignore SETs that are unconditional jumps.
4610 They never need cse processing, so this does not hurt.
4611 The reason is not efficiency but rather
4612 so that we can test at the end for instructions
4613 that have been simplified to unconditional jumps
4614 and not be misled by unchanged instructions
4615 that were unconditional jumps to begin with. */
4616 if (SET_DEST (x) == pc_rtx
4617 && GET_CODE (SET_SRC (x)) == LABEL_REF)
4620 /* Don't count call-insns, (set (reg 0) (call ...)), as a set.
4621 The hard function value register is used only once, to copy to
4622 someplace else, so it isn't worth cse'ing (and on 80386 is unsafe)!
4623 Ensure we invalidate the destination register. On the 80386 no
4624 other code would invalidate it since it is a fixed_reg.
4625 We need not check the return of apply_change_group; see canon_reg. */
4627 else if (GET_CODE (SET_SRC (x)) == CALL)
4629 canon_reg (SET_SRC (x), insn);
4630 apply_change_group ();
4631 fold_rtx (SET_SRC (x), insn);
4632 invalidate (SET_DEST (x), VOIDmode);
4634 else
4635 n_sets = 1;
4637 else if (GET_CODE (x) == PARALLEL)
4639 int lim = XVECLEN (x, 0);
4641 sets = alloca (lim * sizeof (struct set));
4643 /* Find all regs explicitly clobbered in this insn,
4644 and ensure they are not replaced with any other regs
4645 elsewhere in this insn.
4646 When a reg that is clobbered is also used for input,
4647 we should presume that that is for a reason,
4648 and we should not substitute some other register
4649 which is not supposed to be clobbered.
4650 Therefore, this loop cannot be merged into the one below
4651 because a CALL may precede a CLOBBER and refer to the
4652 value clobbered. We must not let a canonicalization do
4653 anything in that case. */
4654 for (i = 0; i < lim; i++)
4656 rtx y = XVECEXP (x, 0, i);
4657 if (GET_CODE (y) == CLOBBER)
4659 rtx clobbered = XEXP (y, 0);
4661 if (REG_P (clobbered)
4662 || GET_CODE (clobbered) == SUBREG)
4663 invalidate (clobbered, VOIDmode);
4664 else if (GET_CODE (clobbered) == STRICT_LOW_PART
4665 || GET_CODE (clobbered) == ZERO_EXTRACT)
4666 invalidate (XEXP (clobbered, 0), GET_MODE (clobbered));
4670 for (i = 0; i < lim; i++)
4672 rtx y = XVECEXP (x, 0, i);
4673 if (GET_CODE (y) == SET)
4675 /* As above, we ignore unconditional jumps and call-insns and
4676 ignore the result of apply_change_group. */
4677 if (GET_CODE (SET_SRC (y)) == CALL)
4679 canon_reg (SET_SRC (y), insn);
4680 apply_change_group ();
4681 fold_rtx (SET_SRC (y), insn);
4682 invalidate (SET_DEST (y), VOIDmode);
4684 else if (SET_DEST (y) == pc_rtx
4685 && GET_CODE (SET_SRC (y)) == LABEL_REF)
4687 else
4688 sets[n_sets++].rtl = y;
4690 else if (GET_CODE (y) == CLOBBER)
4692 /* If we clobber memory, canon the address.
4693 This does nothing when a register is clobbered
4694 because we have already invalidated the reg. */
4695 if (MEM_P (XEXP (y, 0)))
4696 canon_reg (XEXP (y, 0), NULL_RTX);
4698 else if (GET_CODE (y) == USE
4699 && ! (REG_P (XEXP (y, 0))
4700 && REGNO (XEXP (y, 0)) < FIRST_PSEUDO_REGISTER))
4701 canon_reg (y, NULL_RTX);
4702 else if (GET_CODE (y) == CALL)
4704 /* The result of apply_change_group can be ignored; see
4705 canon_reg. */
4706 canon_reg (y, insn);
4707 apply_change_group ();
4708 fold_rtx (y, insn);
4712 else if (GET_CODE (x) == CLOBBER)
4714 if (MEM_P (XEXP (x, 0)))
4715 canon_reg (XEXP (x, 0), NULL_RTX);
4718 /* Canonicalize a USE of a pseudo register or memory location. */
4719 else if (GET_CODE (x) == USE
4720 && ! (REG_P (XEXP (x, 0))
4721 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER))
4722 canon_reg (XEXP (x, 0), NULL_RTX);
4723 else if (GET_CODE (x) == CALL)
4725 /* The result of apply_change_group can be ignored; see canon_reg. */
4726 canon_reg (x, insn);
4727 apply_change_group ();
4728 fold_rtx (x, insn);
4731 /* Store the equivalent value in SRC_EQV, if different, or if the DEST
4732 is a STRICT_LOW_PART. The latter condition is necessary because SRC_EQV
4733 is handled specially for this case, and if it isn't set, then there will
4734 be no equivalence for the destination. */
4735 if (n_sets == 1 && REG_NOTES (insn) != 0
4736 && (tem = find_reg_note (insn, REG_EQUAL, NULL_RTX)) != 0
4737 && (! rtx_equal_p (XEXP (tem, 0), SET_SRC (sets[0].rtl))
4738 || GET_CODE (SET_DEST (sets[0].rtl)) == STRICT_LOW_PART))
4740 src_eqv = fold_rtx (canon_reg (XEXP (tem, 0), NULL_RTX), insn);
4741 XEXP (tem, 0) = src_eqv;
4744 /* Canonicalize sources and addresses of destinations.
4745 We do this in a separate pass to avoid problems when a MATCH_DUP is
4746 present in the insn pattern. In that case, we want to ensure that
4747 we don't break the duplicate nature of the pattern. So we will replace
4748 both operands at the same time. Otherwise, we would fail to find an
4749 equivalent substitution in the loop calling validate_change below.
4751 We used to suppress canonicalization of DEST if it appears in SRC,
4752 but we don't do this any more. */
4754 for (i = 0; i < n_sets; i++)
4756 rtx dest = SET_DEST (sets[i].rtl);
4757 rtx src = SET_SRC (sets[i].rtl);
4758 rtx new = canon_reg (src, insn);
4759 int insn_code;
4761 sets[i].orig_src = src;
4762 if ((REG_P (new) && REG_P (src)
4763 && ((REGNO (new) < FIRST_PSEUDO_REGISTER)
4764 != (REGNO (src) < FIRST_PSEUDO_REGISTER)))
4765 || (insn_code = recog_memoized (insn)) < 0
4766 || insn_data[insn_code].n_dups > 0)
4767 validate_change (insn, &SET_SRC (sets[i].rtl), new, 1);
4768 else
4769 SET_SRC (sets[i].rtl) = new;
4771 if (GET_CODE (dest) == ZERO_EXTRACT || GET_CODE (dest) == SIGN_EXTRACT)
4773 validate_change (insn, &XEXP (dest, 1),
4774 canon_reg (XEXP (dest, 1), insn), 1);
4775 validate_change (insn, &XEXP (dest, 2),
4776 canon_reg (XEXP (dest, 2), insn), 1);
4779 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
4780 || GET_CODE (dest) == ZERO_EXTRACT
4781 || GET_CODE (dest) == SIGN_EXTRACT)
4782 dest = XEXP (dest, 0);
4784 if (MEM_P (dest))
4785 canon_reg (dest, insn);
4788 /* Now that we have done all the replacements, we can apply the change
4789 group and see if they all work. Note that this will cause some
4790 canonicalizations that would have worked individually not to be applied
4791 because some other canonicalization didn't work, but this should not
4792 occur often.
4794 The result of apply_change_group can be ignored; see canon_reg. */
4796 apply_change_group ();
4798 /* Set sets[i].src_elt to the class each source belongs to.
4799 Detect assignments from or to volatile things
4800 and set sets[i] to zero so they will be ignored
4801 in the rest of this function.
4803 Nothing in this loop changes the hash table or the register chains. */
4805 for (i = 0; i < n_sets; i++)
4807 rtx src, dest;
4808 rtx src_folded;
4809 struct table_elt *elt = 0, *p;
4810 enum machine_mode mode;
4811 rtx src_eqv_here;
4812 rtx src_const = 0;
4813 rtx src_related = 0;
4814 struct table_elt *src_const_elt = 0;
4815 int src_cost = MAX_COST;
4816 int src_eqv_cost = MAX_COST;
4817 int src_folded_cost = MAX_COST;
4818 int src_related_cost = MAX_COST;
4819 int src_elt_cost = MAX_COST;
4820 int src_regcost = MAX_COST;
4821 int src_eqv_regcost = MAX_COST;
4822 int src_folded_regcost = MAX_COST;
4823 int src_related_regcost = MAX_COST;
4824 int src_elt_regcost = MAX_COST;
4825 /* Set nonzero if we need to call force_const_mem on the
4826 contents of src_folded before using it. */
4827 int src_folded_force_flag = 0;
4829 dest = SET_DEST (sets[i].rtl);
4830 src = SET_SRC (sets[i].rtl);
4832 /* If SRC is a constant that has no machine mode,
4833 hash it with the destination's machine mode.
4834 This way we can keep different modes separate. */
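      /* A (const_int 34) has VOIDmode, so storing it into an SImode and
	 into a QImode destination hashes it under the two different
	 modes and keeps the two equivalences apart.  */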
4836 mode = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
4837 sets[i].mode = mode;
4839 if (src_eqv)
4841 enum machine_mode eqvmode = mode;
4842 if (GET_CODE (dest) == STRICT_LOW_PART)
4843 eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
4844 do_not_record = 0;
4845 hash_arg_in_memory = 0;
4846 src_eqv_hash = HASH (src_eqv, eqvmode);
4848 /* Find the equivalence class for the equivalent expression. */
4850 if (!do_not_record)
4851 src_eqv_elt = lookup (src_eqv, src_eqv_hash, eqvmode);
4853 src_eqv_volatile = do_not_record;
4854 src_eqv_in_memory = hash_arg_in_memory;
4857 /* If this is a STRICT_LOW_PART assignment, src_eqv corresponds to the
4858 value of the INNER register, not the destination. So it is not
4859 a valid substitution for the source. But save it for later. */
4860 if (GET_CODE (dest) == STRICT_LOW_PART)
4861 src_eqv_here = 0;
4862 else
4863 src_eqv_here = src_eqv;
4865 /* Simplify any foldable subexpressions in SRC. Then get the fully-
4866 simplified result, which may not necessarily be valid. */
4867 src_folded = fold_rtx (src, insn);
4869 #if 0
4870 /* ??? This caused bad code to be generated for the m68k port with -O2.
4871 Suppose src is (CONST_INT -1), and that after truncation src_folded
4872 is (CONST_INT 3). Suppose src_folded is then used for src_const.
4873 At the end we will add src and src_const to the same equivalence
4874 class. We now have 3 and -1 on the same equivalence class. This
4875 causes later instructions to be mis-optimized. */
4876 /* If storing a constant in a bitfield, pre-truncate the constant
4877 so we will be able to record it later. */
4878 if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
4879 || GET_CODE (SET_DEST (sets[i].rtl)) == SIGN_EXTRACT)
4881 rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
4883 if (GET_CODE (src) == CONST_INT
4884 && GET_CODE (width) == CONST_INT
4885 && INTVAL (width) < HOST_BITS_PER_WIDE_INT
4886 && (INTVAL (src) & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
4887 src_folded
4888 = GEN_INT (INTVAL (src) & (((HOST_WIDE_INT) 1
4889 << INTVAL (width)) - 1));
4891 #endif
4893 /* Compute SRC's hash code, and also notice if it
4894 should not be recorded at all. In that case,
4895 prevent any further processing of this assignment. */
4896 do_not_record = 0;
4897 hash_arg_in_memory = 0;
4899 sets[i].src = src;
4900 sets[i].src_hash = HASH (src, mode);
4901 sets[i].src_volatile = do_not_record;
4902 sets[i].src_in_memory = hash_arg_in_memory;
4904 /* If SRC is a MEM, there is a REG_EQUIV note for SRC, and DEST is
4905 a pseudo, do not record SRC. Using SRC as a replacement for
4906 anything else will be incorrect in that situation. Note that
4907 this usually occurs only for stack slots, in which case all the
4908 RTL would be referring to SRC, so we don't lose any optimization
4909 opportunities by not having SRC in the hash table. */
4911 if (MEM_P (src)
4912 && find_reg_note (insn, REG_EQUIV, NULL_RTX) != 0
4913 && REG_P (dest)
4914 && REGNO (dest) >= FIRST_PSEUDO_REGISTER)
4915 sets[i].src_volatile = 1;
4917 #if 0
4918 /* It is no longer clear why we used to do this, but it doesn't
4919 appear to still be needed. So let's try without it since this
4920 code hurts cse'ing widened ops. */
4921 /* If source is a paradoxical subreg (such as QI treated as an SI),
4922 treat it as volatile. It may do the work of an SI in one context
4923 where the extra bits are not being used, but cannot replace an SI
4924 in general. */
4925 if (GET_CODE (src) == SUBREG
4926 && (GET_MODE_SIZE (GET_MODE (src))
4927 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))))
4928 sets[i].src_volatile = 1;
4929 #endif
4931 /* Locate all possible equivalent forms for SRC. Try to replace
4932 SRC in the insn with each cheaper equivalent.
4934 We have the following types of equivalents: SRC itself, a folded
4935 version, a value given in a REG_EQUAL note, or a value related
4936 to a constant.
4938 Each of these equivalents may be part of an additional class
4939 of equivalents (if more than one is in the table, they must be in
4940 the same class; we check for this).
4942 If the source is volatile, we don't do any table lookups.
4944 We note any constant equivalent for possible later use in a
4945 REG_NOTE. */
4947 if (!sets[i].src_volatile)
4948 elt = lookup (src, sets[i].src_hash, mode);
4950 sets[i].src_elt = elt;
4952 if (elt && src_eqv_here && src_eqv_elt)
4954 if (elt->first_same_value != src_eqv_elt->first_same_value)
4956 /* The REG_EQUAL is indicating that two formerly distinct
4957 classes are now equivalent. So merge them. */
4958 merge_equiv_classes (elt, src_eqv_elt);
4959 src_eqv_hash = HASH (src_eqv, elt->mode);
4960 src_eqv_elt = lookup (src_eqv, src_eqv_hash, elt->mode);
4963 src_eqv_here = 0;
4966 else if (src_eqv_elt)
4967 elt = src_eqv_elt;
4969 /* Try to find a constant somewhere and record it in `src_const'.
4970 Record its table element, if any, in `src_const_elt'. Look in
4971 any known equivalences first. (If the constant is not in the
4972 table, also set `sets[i].src_const_hash'). */
4973 if (elt)
4974 for (p = elt->first_same_value; p; p = p->next_same_value)
4975 if (p->is_const)
4977 src_const = p->exp;
4978 src_const_elt = elt;
4979 break;
4982 if (src_const == 0
4983 && (CONSTANT_P (src_folded)
4984 /* Consider (minus (label_ref L1) (label_ref L2)) as
4985 "constant" here so we will record it. This allows us
4986 to fold switch statements when an ADDR_DIFF_VEC is used. */
4987 || (GET_CODE (src_folded) == MINUS
4988 && GET_CODE (XEXP (src_folded, 0)) == LABEL_REF
4989 && GET_CODE (XEXP (src_folded, 1)) == LABEL_REF)))
4990 src_const = src_folded, src_const_elt = elt;
4991 else if (src_const == 0 && src_eqv_here && CONSTANT_P (src_eqv_here))
4992 src_const = src_eqv_here, src_const_elt = src_eqv_elt;
4994 /* If we don't know if the constant is in the table, get its
4995 hash code and look it up. */
4996 if (src_const && src_const_elt == 0)
4998 sets[i].src_const_hash = HASH (src_const, mode);
4999 src_const_elt = lookup (src_const, sets[i].src_const_hash, mode);
5002 sets[i].src_const = src_const;
5003 sets[i].src_const_elt = src_const_elt;
5005 /* If the constant and our source are both in the table, mark them as
5006 equivalent. Otherwise, if a constant is in the table but the source
5007 isn't, set ELT to it. */
5008 if (src_const_elt && elt
5009 && src_const_elt->first_same_value != elt->first_same_value)
5010 merge_equiv_classes (elt, src_const_elt);
5011 else if (src_const_elt && elt == 0)
5012 elt = src_const_elt;
5014 /* See if there is a register linearly related to a constant
5015 equivalent of SRC. */
5016 if (src_const
5017 && (GET_CODE (src_const) == CONST
5018 || (src_const_elt && src_const_elt->related_value != 0)))
5020 src_related = use_related_value (src_const, src_const_elt);
5021 if (src_related)
5023 struct table_elt *src_related_elt
5024 = lookup (src_related, HASH (src_related, mode), mode);
5025 if (src_related_elt && elt)
5027 if (elt->first_same_value
5028 != src_related_elt->first_same_value)
5029 /* This can occur when we previously saw a CONST
5030 involving a SYMBOL_REF and then see the SYMBOL_REF
5031 twice. Merge the involved classes. */
5032 merge_equiv_classes (elt, src_related_elt);
5034 src_related = 0;
5035 src_related_elt = 0;
5037 else if (src_related_elt && elt == 0)
5038 elt = src_related_elt;
5042 /* See if we have a CONST_INT that is already in a register in a
5043 wider mode. */
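      /* E.g. if (reg:SI 80) is already known to hold (const_int 5) and
	 this set needs (const_int 5) in HImode, the HImode lowpart of
	 (reg:SI 80) is a candidate source.  */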
5045 if (src_const && src_related == 0 && GET_CODE (src_const) == CONST_INT
5046 && GET_MODE_CLASS (mode) == MODE_INT
5047 && GET_MODE_BITSIZE (mode) < BITS_PER_WORD)
5049 enum machine_mode wider_mode;
5051 for (wider_mode = GET_MODE_WIDER_MODE (mode);
5052 GET_MODE_BITSIZE (wider_mode) <= BITS_PER_WORD
5053 && src_related == 0;
5054 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
5056 struct table_elt *const_elt
5057 = lookup (src_const, HASH (src_const, wider_mode), wider_mode);
5059 if (const_elt == 0)
5060 continue;
5062 for (const_elt = const_elt->first_same_value;
5063 const_elt; const_elt = const_elt->next_same_value)
5064 if (REG_P (const_elt->exp))
5066 src_related = gen_lowpart (mode,
5067 const_elt->exp);
5068 break;
5073 /* Another possibility is that we have an AND with a constant in
5074 a mode narrower than a word. If so, it might have been generated
5075 as part of an "if" which would narrow the AND. If we already
5076 have done the AND in a wider mode, we can use a SUBREG of that
5077 value. */
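      /* Suppose this insn computes (and:QI (subreg:QI (reg:SI 82) 0)
	 (const_int 255)) and (reg:SI 81) is already known to hold
	 (and:SI (reg:SI 82) (const_int 255)); then the QImode lowpart
	 of (reg:SI 81) is an equivalent, likely cheaper, source.  */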
5079 if (flag_expensive_optimizations && ! src_related
5080 && GET_CODE (src) == AND && GET_CODE (XEXP (src, 1)) == CONST_INT
5081 && GET_MODE_SIZE (mode) < UNITS_PER_WORD)
5083 enum machine_mode tmode;
5084 rtx new_and = gen_rtx_AND (VOIDmode, NULL_RTX, XEXP (src, 1));
5086 for (tmode = GET_MODE_WIDER_MODE (mode);
5087 GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
5088 tmode = GET_MODE_WIDER_MODE (tmode))
5090 rtx inner = gen_lowpart (tmode, XEXP (src, 0));
5091 struct table_elt *larger_elt;
5093 if (inner)
5095 PUT_MODE (new_and, tmode);
5096 XEXP (new_and, 0) = inner;
5097 larger_elt = lookup (new_and, HASH (new_and, tmode), tmode);
5098 if (larger_elt == 0)
5099 continue;
5101 for (larger_elt = larger_elt->first_same_value;
5102 larger_elt; larger_elt = larger_elt->next_same_value)
5103 if (REG_P (larger_elt->exp))
5105 src_related
5106 = gen_lowpart (mode, larger_elt->exp);
5107 break;
5110 if (src_related)
5111 break;
5116 #ifdef LOAD_EXTEND_OP
5117 /* See if a MEM has already been loaded with a widening operation;
5118 if it has, we can use a subreg of that. Many CISC machines
5119 also have such operations, but this is only likely to be
5120 beneficial on these machines. */
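      /* For instance, on a target whose QImode loads zero-extend, if
	 (reg:SI 83) was loaded as (zero_extend:SI (mem:QI addr)), then
	 a QImode read of the same memory can use the QImode lowpart of
	 (reg:SI 83) instead of touching memory again.  */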
5122 if (flag_expensive_optimizations && src_related == 0
5123 && (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
5124 && GET_MODE_CLASS (mode) == MODE_INT
5125 && MEM_P (src) && ! do_not_record
5126 && LOAD_EXTEND_OP (mode) != NIL)
5128 enum machine_mode tmode;
5130 /* Set what we are trying to extend and the operation it might
5131 have been extended with. */
5132 PUT_CODE (memory_extend_rtx, LOAD_EXTEND_OP (mode));
5133 XEXP (memory_extend_rtx, 0) = src;
5135 for (tmode = GET_MODE_WIDER_MODE (mode);
5136 GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
5137 tmode = GET_MODE_WIDER_MODE (tmode))
5139 struct table_elt *larger_elt;
5141 PUT_MODE (memory_extend_rtx, tmode);
5142 larger_elt = lookup (memory_extend_rtx,
5143 HASH (memory_extend_rtx, tmode), tmode);
5144 if (larger_elt == 0)
5145 continue;
5147 for (larger_elt = larger_elt->first_same_value;
5148 larger_elt; larger_elt = larger_elt->next_same_value)
5149 if (REG_P (larger_elt->exp))
5151 src_related = gen_lowpart (mode,
5152 larger_elt->exp);
5153 break;
5156 if (src_related)
5157 break;
5160 #endif /* LOAD_EXTEND_OP */
5162 if (src == src_folded)
5163 src_folded = 0;
5165 /* At this point, ELT, if nonzero, points to a class of expressions
5166 equivalent to the source of this SET and SRC, SRC_EQV, SRC_FOLDED,
5167 and SRC_RELATED, if nonzero, each contain additional equivalent
5168 expressions. Prune these latter expressions by deleting expressions
5169 already in the equivalence class.
5171 Check for an equivalent identical to the destination. If found,
5172 this is the preferred equivalent since it will likely lead to
5173 elimination of the insn. Indicate this by placing it in
5174 `src_related'. */
5176 if (elt)
5177 elt = elt->first_same_value;
5178 for (p = elt; p; p = p->next_same_value)
5180 enum rtx_code code = GET_CODE (p->exp);
5182 /* If the expression is not valid, ignore it. Then we do not
5183 have to check for validity below. In most cases, we can use
5184 `rtx_equal_p', since canonicalization has already been done. */
5185 if (code != REG && ! exp_equiv_p (p->exp, p->exp, 1, 0))
5186 continue;
5188 /* Also skip paradoxical subregs, unless that's what we're
5189 looking for. */
5190 if (code == SUBREG
5191 && (GET_MODE_SIZE (GET_MODE (p->exp))
5192 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))
5193 && ! (src != 0
5194 && GET_CODE (src) == SUBREG
5195 && GET_MODE (src) == GET_MODE (p->exp)
5196 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
5197 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))))
5198 continue;
5200 if (src && GET_CODE (src) == code && rtx_equal_p (src, p->exp))
5201 src = 0;
5202 else if (src_folded && GET_CODE (src_folded) == code
5203 && rtx_equal_p (src_folded, p->exp))
5204 src_folded = 0;
5205 else if (src_eqv_here && GET_CODE (src_eqv_here) == code
5206 && rtx_equal_p (src_eqv_here, p->exp))
5207 src_eqv_here = 0;
5208 else if (src_related && GET_CODE (src_related) == code
5209 && rtx_equal_p (src_related, p->exp))
5210 src_related = 0;
5212 /* If this is the same as the destination of the insn, we want
5213 to prefer it. Copy it to src_related. The code below will
5214 then give it a negative cost. */
5215 if (GET_CODE (dest) == code && rtx_equal_p (p->exp, dest))
5216 src_related = dest;
5219 /* Find the cheapest valid equivalent, trying all the available
5220 possibilities. Prefer items not in the hash table to ones
5221 that are when they are equal cost. Note that we can never
5222 worsen an insn as the current contents will also succeed.
5223 If we find an equivalent identical to the destination, use it as best,
5224 since this insn will probably be eliminated in that case. */
5225 if (src)
5227 if (rtx_equal_p (src, dest))
5228 src_cost = src_regcost = -1;
5229 else
5231 src_cost = COST (src);
5232 src_regcost = approx_reg_cost (src);
5236 if (src_eqv_here)
5238 if (rtx_equal_p (src_eqv_here, dest))
5239 src_eqv_cost = src_eqv_regcost = -1;
5240 else
5242 src_eqv_cost = COST (src_eqv_here);
5243 src_eqv_regcost = approx_reg_cost (src_eqv_here);
5247 if (src_folded)
5249 if (rtx_equal_p (src_folded, dest))
5250 src_folded_cost = src_folded_regcost = -1;
5251 else
5253 src_folded_cost = COST (src_folded);
5254 src_folded_regcost = approx_reg_cost (src_folded);
5258 if (src_related)
5260 if (rtx_equal_p (src_related, dest))
5261 src_related_cost = src_related_regcost = -1;
5262 else
5264 src_related_cost = COST (src_related);
5265 src_related_regcost = approx_reg_cost (src_related);
5269 /* If this was an indirect jump insn, a known label will really be
5270 cheaper even though it looks more expensive. */
5271 if (dest == pc_rtx && src_const && GET_CODE (src_const) == LABEL_REF)
5272 src_folded = src_const, src_folded_cost = src_folded_regcost = -1;
5274 /* Terminate loop when replacement made. This must terminate since
5275 the current contents will be tested and will always be valid. */
5276 while (1)
5278 rtx trial;
5280 /* Skip invalid entries. */
5281 while (elt && !REG_P (elt->exp)
5282 && ! exp_equiv_p (elt->exp, elt->exp, 1, 0))
5283 elt = elt->next_same_value;
5285 /* A paradoxical subreg would be bad here: it'll be the right
5286 size, but later may be adjusted so that the upper bits aren't
5287 what we want. So reject it. */
5288 if (elt != 0
5289 && GET_CODE (elt->exp) == SUBREG
5290 && (GET_MODE_SIZE (GET_MODE (elt->exp))
5291 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))
5292 /* It is okay, though, if the rtx we're trying to match
5293 will ignore any of the bits we can't predict. */
5294 && ! (src != 0
5295 && GET_CODE (src) == SUBREG
5296 && GET_MODE (src) == GET_MODE (elt->exp)
5297 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
5298 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))))
5300 elt = elt->next_same_value;
5301 continue;
5304 if (elt)
5306 src_elt_cost = elt->cost;
5307 src_elt_regcost = elt->regcost;
5310 /* Find cheapest and skip it for the next time. For items
5311 of equal cost, use this order:
5312 src_folded, src, src_eqv, src_related and hash table entry. */
5313 if (src_folded
5314 && preferable (src_folded_cost, src_folded_regcost,
5315 src_cost, src_regcost) <= 0
5316 && preferable (src_folded_cost, src_folded_regcost,
5317 src_eqv_cost, src_eqv_regcost) <= 0
5318 && preferable (src_folded_cost, src_folded_regcost,
5319 src_related_cost, src_related_regcost) <= 0
5320 && preferable (src_folded_cost, src_folded_regcost,
5321 src_elt_cost, src_elt_regcost) <= 0)
5323 trial = src_folded, src_folded_cost = MAX_COST;
5324 if (src_folded_force_flag)
5326 rtx forced = force_const_mem (mode, trial);
5327 if (forced)
5328 trial = forced;
5331 else if (src
5332 && preferable (src_cost, src_regcost,
5333 src_eqv_cost, src_eqv_regcost) <= 0
5334 && preferable (src_cost, src_regcost,
5335 src_related_cost, src_related_regcost) <= 0
5336 && preferable (src_cost, src_regcost,
5337 src_elt_cost, src_elt_regcost) <= 0)
5338 trial = src, src_cost = MAX_COST;
5339 else if (src_eqv_here
5340 && preferable (src_eqv_cost, src_eqv_regcost,
5341 src_related_cost, src_related_regcost) <= 0
5342 && preferable (src_eqv_cost, src_eqv_regcost,
5343 src_elt_cost, src_elt_regcost) <= 0)
5344 trial = copy_rtx (src_eqv_here), src_eqv_cost = MAX_COST;
5345 else if (src_related
5346 && preferable (src_related_cost, src_related_regcost,
5347 src_elt_cost, src_elt_regcost) <= 0)
5348 trial = copy_rtx (src_related), src_related_cost = MAX_COST;
5349 else
5351 trial = copy_rtx (elt->exp);
5352 elt = elt->next_same_value;
5353 src_elt_cost = MAX_COST;
5356 /* We don't normally have an insn matching (set (pc) (pc)), so
5357 check for this separately here. We will delete such an
5358 insn below.
5360 For other cases such as a table jump or conditional jump
5361 where we know the ultimate target, go ahead and replace the
5362 operand. While that may not make a valid insn, we will
5363 reemit the jump below (and also insert any necessary
5364 barriers). */
5365 if (n_sets == 1 && dest == pc_rtx
5366 && (trial == pc_rtx
5367 || (GET_CODE (trial) == LABEL_REF
5368 && ! condjump_p (insn))))
5370 SET_SRC (sets[i].rtl) = trial;
5371 cse_jumps_altered = 1;
5372 break;
5375 /* Look for a substitution that makes a valid insn. */
5376 else if (validate_change (insn, &SET_SRC (sets[i].rtl), trial, 0))
5378 rtx new = canon_reg (SET_SRC (sets[i].rtl), insn);
5380 /* If we just made a substitution inside a libcall, then we
5381 need to make the same substitution in any notes attached
5382 to the RETVAL insn. */
5383 if (libcall_insn
5384 && (REG_P (sets[i].orig_src)
5385 || GET_CODE (sets[i].orig_src) == SUBREG
5386 || MEM_P (sets[i].orig_src)))
5388 rtx note = find_reg_equal_equiv_note (libcall_insn);
5389 if (note != 0)
5390 XEXP (note, 0) = simplify_replace_rtx (XEXP (note, 0),
5391 sets[i].orig_src,
5392 copy_rtx (new));
5395 /* The result of apply_change_group can be ignored; see
5396 canon_reg. */
5398 validate_change (insn, &SET_SRC (sets[i].rtl), new, 1);
5399 apply_change_group ();
5400 break;
5403 /* If we previously found constant pool entries for
5404 constants and this is a constant, try making a
5405 pool entry. Put it in src_folded unless we already have done
5406 this since that is where it likely came from. */
5408 else if (constant_pool_entries_cost
5409 && CONSTANT_P (trial)
5410 /* Reject cases that will abort in decode_rtx_const.
5411 On the alpha when simplifying a switch, we get
5412 (const (truncate (minus (label_ref) (label_ref)))). */
5413 && ! (GET_CODE (trial) == CONST
5414 && GET_CODE (XEXP (trial, 0)) == TRUNCATE)
5415 /* Likewise on IA-64, except without the truncate. */
5416 && ! (GET_CODE (trial) == CONST
5417 && GET_CODE (XEXP (trial, 0)) == MINUS
5418 && GET_CODE (XEXP (XEXP (trial, 0), 0)) == LABEL_REF
5419 && GET_CODE (XEXP (XEXP (trial, 0), 1)) == LABEL_REF)
5420 && (src_folded == 0
5421 || (!MEM_P (src_folded)
5422 && ! src_folded_force_flag))
5423 && GET_MODE_CLASS (mode) != MODE_CC
5424 && mode != VOIDmode)
5426 src_folded_force_flag = 1;
5427 src_folded = trial;
5428 src_folded_cost = constant_pool_entries_cost;
5429 src_folded_regcost = constant_pool_entries_regcost;
5433 src = SET_SRC (sets[i].rtl);
5435 /* In general, it is good to have a SET with SET_SRC == SET_DEST.
5436 However, there is an important exception: If both are registers
5437 that are not the head of their equivalence class, replace SET_SRC
5438 with the head of the class. If we do not do this, we will have
5439 both registers live over a portion of the basic block. This way,
5440 their lifetimes will likely abut instead of overlapping. */
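      /* Concretely, if (reg 85) and (reg 83) are in one equivalence
	 class whose head is (reg 83), a no-op copy
	 (set (reg 85) (reg 85)) is rewritten as
	 (set (reg 85) (reg 83)), so the two registers' lifetimes tend
	 to abut rather than overlap.  */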
5441 if (REG_P (dest)
5442 && REGNO_QTY_VALID_P (REGNO (dest)))
5444 int dest_q = REG_QTY (REGNO (dest));
5445 struct qty_table_elem *dest_ent = &qty_table[dest_q];
5447 if (dest_ent->mode == GET_MODE (dest)
5448 && dest_ent->first_reg != REGNO (dest)
5449 && REG_P (src) && REGNO (src) == REGNO (dest)
5450 /* Don't do this if the original insn had a hard reg as
5451 SET_SRC or SET_DEST. */
5452 && (!REG_P (sets[i].src)
5453 || REGNO (sets[i].src) >= FIRST_PSEUDO_REGISTER)
5454 && (!REG_P (dest) || REGNO (dest) >= FIRST_PSEUDO_REGISTER))
5455 /* We can't call canon_reg here because it won't do anything if
5456 SRC is a hard register. */
5458 int src_q = REG_QTY (REGNO (src));
5459 struct qty_table_elem *src_ent = &qty_table[src_q];
5460 int first = src_ent->first_reg;
5461 rtx new_src
5462 = (first >= FIRST_PSEUDO_REGISTER
5463 ? regno_reg_rtx[first] : gen_rtx_REG (GET_MODE (src), first));
5465 /* We must use validate-change even for this, because this
5466 might be a special no-op instruction, suitable only to
5467 tag notes onto. */
5468 if (validate_change (insn, &SET_SRC (sets[i].rtl), new_src, 0))
5470 src = new_src;
5471 /* If we had a constant that is cheaper than what we are now
5472 setting SRC to, use that constant. We ignored it when we
5473 thought we could make this into a no-op. */
5474 if (src_const && COST (src_const) < COST (src)
5475 && validate_change (insn, &SET_SRC (sets[i].rtl),
5476 src_const, 0))
5477 src = src_const;
5482 /* If we made a change, recompute SRC values. */
5483 if (src != sets[i].src)
5485 cse_altered = 1;
5486 do_not_record = 0;
5487 hash_arg_in_memory = 0;
5488 sets[i].src = src;
5489 sets[i].src_hash = HASH (src, mode);
5490 sets[i].src_volatile = do_not_record;
5491 sets[i].src_in_memory = hash_arg_in_memory;
5492 sets[i].src_elt = lookup (src, sets[i].src_hash, mode);
5495 /* If this is a single SET, we are setting a register, and we have an
5496 equivalent constant, we want to add a REG_NOTE. We don't want
5497 to write a REG_EQUAL note for a constant pseudo since verifying that
5498 that pseudo hasn't been eliminated is a pain. Such a note also
5499 won't help anything.
5501 Avoid a REG_EQUAL note for (CONST (MINUS (LABEL_REF) (LABEL_REF)))
5502 which can be created for a reference to a compile time computable
5503 entry in a jump table. */
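/* Such a constant has the form
   (const (minus (label_ref L2) (label_ref L1)))
   and typically arises from an entry in an addr_diff_vec jump table. */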
5505 if (n_sets == 1 && src_const && REG_P (dest)
5506 && !REG_P (src_const)
5507 && ! (GET_CODE (src_const) == CONST
5508 && GET_CODE (XEXP (src_const, 0)) == MINUS
5509 && GET_CODE (XEXP (XEXP (src_const, 0), 0)) == LABEL_REF
5510 && GET_CODE (XEXP (XEXP (src_const, 0), 1)) == LABEL_REF))
5512 /* We only want a REG_EQUAL note if src_const != src. */
5513 if (! rtx_equal_p (src, src_const))
5515 /* Make sure that the rtx is not shared. */
5516 src_const = copy_rtx (src_const);
5518 /* Record the actual constant value in a REG_EQUAL note,
5519 making a new one if one does not already exist. */
5520 set_unique_reg_note (insn, REG_EQUAL, src_const);
5524 /* Now deal with the destination. */
5525 do_not_record = 0;
5527 /* Look within any SIGN_EXTRACT or ZERO_EXTRACT
5528 to the MEM or REG within it. */
5529 while (GET_CODE (dest) == SIGN_EXTRACT
5530 || GET_CODE (dest) == ZERO_EXTRACT
5531 || GET_CODE (dest) == SUBREG
5532 || GET_CODE (dest) == STRICT_LOW_PART)
5533 dest = XEXP (dest, 0);
5535 sets[i].inner_dest = dest;
5537 if (MEM_P (dest))
5539 #ifdef PUSH_ROUNDING
5540 /* Stack pushes invalidate the stack pointer. */
5541 rtx addr = XEXP (dest, 0);
5542 if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC
5543 && XEXP (addr, 0) == stack_pointer_rtx)
5544 invalidate (stack_pointer_rtx, Pmode);
5545 #endif
5546 dest = fold_rtx (dest, insn);
5549 /* Compute the hash code of the destination now,
5550 before the effects of this instruction are recorded,
5551 since the register values used in the address computation
5552 are those before this instruction. */
5553 sets[i].dest_hash = HASH (dest, mode);
5555 /* Don't enter a bit-field in the hash table
5556 because the value in it after the store
5557 may not equal what was stored, due to truncation. */
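/* E.g. storing (const_int 37) in a 4-bit field leaves the field
   holding 5 (37 & 15), so 37 must not be recorded as the value of
   the destination. */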
5559 if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
5560 || GET_CODE (SET_DEST (sets[i].rtl)) == SIGN_EXTRACT)
5562 rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
5564 if (src_const != 0 && GET_CODE (src_const) == CONST_INT
5565 && GET_CODE (width) == CONST_INT
5566 && INTVAL (width) < HOST_BITS_PER_WIDE_INT
5567 && ! (INTVAL (src_const)
5568 & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
5569 /* Exception: if the value is constant,
5570 and it won't be truncated, record it. */
5572 else
5574 /* This is chosen so that the destination will be invalidated
5575 but no new value will be recorded.
5576 We must invalidate because sometimes constant
5577 values can be recorded for bitfields. */
5578 sets[i].src_elt = 0;
5579 sets[i].src_volatile = 1;
5580 src_eqv = 0;
5581 src_eqv_elt = 0;
5585 /* If only one set in a JUMP_INSN and it is now a no-op, we can delete
5586 the insn. */
5587 else if (n_sets == 1 && dest == pc_rtx && src == pc_rtx)
5589 /* One less use of the label this insn used to jump to. */
5590 delete_insn (insn);
5591 cse_jumps_altered = 1;
5592 /* No more processing for this set. */
5593 sets[i].rtl = 0;
5596 /* If this SET is now setting PC to a label, we know it used to
5597 be a conditional or computed branch. */
5598 else if (dest == pc_rtx && GET_CODE (src) == LABEL_REF)
5600 /* Now emit a BARRIER after the unconditional jump. */
5601 if (NEXT_INSN (insn) == 0
5602 || !BARRIER_P (NEXT_INSN (insn)))
5603 emit_barrier_after (insn);
5605 /* We reemit the jump in as many cases as possible just in
5606 case the form of an unconditional jump is significantly
5607 different than a computed jump or conditional jump.
5609 If this insn has multiple sets, then reemitting the
5610 jump is nontrivial. So instead we just force rerecognition
5611 and hope for the best. */
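/* I.e. for a single set we replace the whole insn with the canonical
   (set (pc) (label_ref L)) built by gen_jump instead of editing the
   old pattern in place. */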
5612 if (n_sets == 1)
5614 rtx new, note;
5616 new = emit_jump_insn_after (gen_jump (XEXP (src, 0)), insn);
5617 JUMP_LABEL (new) = XEXP (src, 0);
5618 LABEL_NUSES (XEXP (src, 0))++;
5620 /* Make sure to copy over REG_NON_LOCAL_GOTO. */
5621 note = find_reg_note (insn, REG_NON_LOCAL_GOTO, 0);
5622 if (note)
5624 XEXP (note, 1) = NULL_RTX;
5625 REG_NOTES (new) = note;
5628 delete_insn (insn);
5629 insn = new;
5631 /* Now emit a BARRIER after the unconditional jump. */
5632 if (NEXT_INSN (insn) == 0
5633 || !BARRIER_P (NEXT_INSN (insn)))
5634 emit_barrier_after (insn);
5636 else
5637 INSN_CODE (insn) = -1;
5639 /* Do not bother deleting any unreachable code,
5640 let jump/flow do that. */
5642 cse_jumps_altered = 1;
5643 sets[i].rtl = 0;
5646 /* If destination is volatile, invalidate it and then do no further
5647 processing for this assignment. */
5649 else if (do_not_record)
5651 if (REG_P (dest) || GET_CODE (dest) == SUBREG)
5652 invalidate (dest, VOIDmode);
5653 else if (MEM_P (dest))
5655 /* Outgoing arguments for a libcall don't
5656 affect any recorded expressions. */
5657 if (! libcall_insn || insn == libcall_insn)
5658 invalidate (dest, VOIDmode);
5660 else if (GET_CODE (dest) == STRICT_LOW_PART
5661 || GET_CODE (dest) == ZERO_EXTRACT)
5662 invalidate (XEXP (dest, 0), GET_MODE (dest));
5663 sets[i].rtl = 0;
5666 if (sets[i].rtl != 0 && dest != SET_DEST (sets[i].rtl))
5667 sets[i].dest_hash = HASH (SET_DEST (sets[i].rtl), mode);
5669 #ifdef HAVE_cc0
5670 /* If setting CC0, record what it was set to, or a constant, if it
5671 is equivalent to a constant. If it is being set to a floating-point
5672 value, make a COMPARE with the appropriate constant of 0. If we
5673 don't do this, later code can interpret this as a test against
5674 const0_rtx, which can cause problems if we try to put it into an
5675 insn as a floating-point operand. */
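/* E.g. for (set (cc0) (reg:DF 100)) we record
   (compare (reg:DF 100) (const_double 0.0))
   rather than the bare register. */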
5676 if (dest == cc0_rtx)
5678 this_insn_cc0 = src_const && mode != VOIDmode ? src_const : src;
5679 this_insn_cc0_mode = mode;
5680 if (FLOAT_MODE_P (mode))
5681 this_insn_cc0 = gen_rtx_COMPARE (VOIDmode, this_insn_cc0,
5682 CONST0_RTX (mode));
5684 #endif
5687 /* Now enter all non-volatile source expressions in the hash table
5688 if they are not already present.
5689 Record their equivalence classes in src_elt.
5690 This way we can insert the corresponding destinations into
5691 the same classes even if the actual sources are no longer in them
5692 (having been invalidated). */
5694 if (src_eqv && src_eqv_elt == 0 && sets[0].rtl != 0 && ! src_eqv_volatile
5695 && ! rtx_equal_p (src_eqv, SET_DEST (sets[0].rtl)))
5697 struct table_elt *elt;
5698 struct table_elt *classp = sets[0].src_elt;
5699 rtx dest = SET_DEST (sets[0].rtl);
5700 enum machine_mode eqvmode = GET_MODE (dest);
5702 if (GET_CODE (dest) == STRICT_LOW_PART)
5704 eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
5705 classp = 0;
5707 if (insert_regs (src_eqv, classp, 0))
5709 rehash_using_reg (src_eqv);
5710 src_eqv_hash = HASH (src_eqv, eqvmode);
5712 elt = insert (src_eqv, classp, src_eqv_hash, eqvmode);
5713 elt->in_memory = src_eqv_in_memory;
5714 src_eqv_elt = elt;
5716 /* Check to see if src_eqv_elt is the same as a set source which
5717 does not yet have an elt, and if so set the elt of the set source
5718 to src_eqv_elt. */
5719 for (i = 0; i < n_sets; i++)
5720 if (sets[i].rtl && sets[i].src_elt == 0
5721 && rtx_equal_p (SET_SRC (sets[i].rtl), src_eqv))
5722 sets[i].src_elt = src_eqv_elt;
5725 for (i = 0; i < n_sets; i++)
5726 if (sets[i].rtl && ! sets[i].src_volatile
5727 && ! rtx_equal_p (SET_SRC (sets[i].rtl), SET_DEST (sets[i].rtl)))
5729 if (GET_CODE (SET_DEST (sets[i].rtl)) == STRICT_LOW_PART)
5731 /* REG_EQUAL in setting a STRICT_LOW_PART
5732 gives an equivalent for the entire destination register,
5733 not just for the subreg being stored in now.
5734 This is a more interesting equivalence, so we arrange later
5735 to treat the entire reg as the destination. */
5736 sets[i].src_elt = src_eqv_elt;
5737 sets[i].src_hash = src_eqv_hash;
5739 else
5741 /* Insert source and constant equivalent into hash table, if not
5742 already present. */
5743 struct table_elt *classp = src_eqv_elt;
5744 rtx src = sets[i].src;
5745 rtx dest = SET_DEST (sets[i].rtl);
5746 enum machine_mode mode
5747 = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
5749 /* It's possible that we have a source value known to be
5750 constant but don't have a REG_EQUAL note on the insn.
5751 Lack of a note will mean src_eqv_elt will be NULL. This
5752 can happen where we've generated a SUBREG to access a
5753 CONST_INT that is already in a register in a wider mode.
5754 Ensure that the source expression is put in the proper
5755 constant class. */
5756 if (!classp)
5757 classp = sets[i].src_const_elt;
5759 if (sets[i].src_elt == 0)
5761 /* Don't put a hard register source into the table if this is
5762 the last insn of a libcall. In this case, we only need
5763 to put src_eqv_elt in src_elt. */
5764 if (! find_reg_note (insn, REG_RETVAL, NULL_RTX))
5766 struct table_elt *elt;
5768 /* Note that these insert_regs calls cannot remove
5769 any of the src_elt's, because they would have failed to
5770 match if not still valid. */
5771 if (insert_regs (src, classp, 0))
5773 rehash_using_reg (src);
5774 sets[i].src_hash = HASH (src, mode);
5776 elt = insert (src, classp, sets[i].src_hash, mode);
5777 elt->in_memory = sets[i].src_in_memory;
5778 sets[i].src_elt = classp = elt;
5780 else
5781 sets[i].src_elt = classp;
5783 if (sets[i].src_const && sets[i].src_const_elt == 0
5784 && src != sets[i].src_const
5785 && ! rtx_equal_p (sets[i].src_const, src))
5786 sets[i].src_elt = insert (sets[i].src_const, classp,
5787 sets[i].src_const_hash, mode);
5790 else if (sets[i].src_elt == 0)
5791 /* If we did not insert the source into the hash table (e.g., it was
5792 volatile), note the equivalence class for the REG_EQUAL value, if any,
5793 so that the destination goes into that class. */
5794 sets[i].src_elt = src_eqv_elt;
5796 invalidate_from_clobbers (x);
5798 /* Some registers are invalidated by subroutine calls. Memory is
5799 invalidated by non-constant calls. */
5801 if (CALL_P (insn))
5803 if (! CONST_OR_PURE_CALL_P (insn))
5804 invalidate_memory ();
5805 invalidate_for_call ();
5808 /* Now invalidate everything set by this instruction.
5809 If a SUBREG or other funny destination is being set,
5810 sets[i].rtl is still nonzero, so here we invalidate the reg
5811 a part of which is being set. */
5813 for (i = 0; i < n_sets; i++)
5814 if (sets[i].rtl)
5816 /* We can't use the inner dest, because the mode associated with
5817 a ZERO_EXTRACT is significant. */
5818 rtx dest = SET_DEST (sets[i].rtl);
5820 /* Needed for registers to remove the register from its
5821 previous quantity's chain.
5822 Needed for memory if this is a nonvarying address, unless
5823 we have just done an invalidate_memory that covers even those. */
5824 if (REG_P (dest) || GET_CODE (dest) == SUBREG)
5825 invalidate (dest, VOIDmode);
5826 else if (MEM_P (dest))
5828 /* Outgoing arguments for a libcall don't
5829 affect any recorded expressions. */
5830 if (! libcall_insn || insn == libcall_insn)
5831 invalidate (dest, VOIDmode);
5833 else if (GET_CODE (dest) == STRICT_LOW_PART
5834 || GET_CODE (dest) == ZERO_EXTRACT)
5835 invalidate (XEXP (dest, 0), GET_MODE (dest));
5838 /* A volatile ASM invalidates everything. */
5839 if (NONJUMP_INSN_P (insn)
5840 && GET_CODE (PATTERN (insn)) == ASM_OPERANDS
5841 && MEM_VOLATILE_P (PATTERN (insn)))
5842 flush_hash_table ();
5844 /* Make sure registers mentioned in destinations
5845 are safe for use in an expression to be inserted.
5846 This removes from the hash table
5847 any invalid entry that refers to one of these registers.
5849 We don't care about the return value from mention_regs because
5850 we are going to hash the SET_DEST values unconditionally. */
5852 for (i = 0; i < n_sets; i++)
5854 if (sets[i].rtl)
5856 rtx x = SET_DEST (sets[i].rtl);
5858 if (!REG_P (x))
5859 mention_regs (x);
5860 else
5862 /* We used to rely on all references to a register becoming
5863 inaccessible when a register changes to a new quantity,
5864 since that changes the hash code. However, that is not
5865 safe, since after HASH_SIZE new quantities we get a
5866 hash 'collision' of a register with its own invalid
5867 entries. And since SUBREGs have been changed not to
5868 change their hash code with the hash code of the register,
5869 it wouldn't work any longer at all. So we have to check
5870 for any invalid references lying around now.
5871 This code is similar to the REG case in mention_regs,
5872 but it knows that reg_tick has been incremented, and
6873 it leaves reg_in_table as -1. */
5874 unsigned int regno = REGNO (x);
5875 unsigned int endregno
5876 = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
5877 : hard_regno_nregs[regno][GET_MODE (x)]);
5878 unsigned int i;
5880 for (i = regno; i < endregno; i++)
5882 if (REG_IN_TABLE (i) >= 0)
5884 remove_invalid_refs (i);
5885 REG_IN_TABLE (i) = -1;
5892 /* We may have just removed some of the src_elt's from the hash table.
5893 So replace each one with the current head of the same class. */
5895 for (i = 0; i < n_sets; i++)
5896 if (sets[i].rtl)
5898 if (sets[i].src_elt && sets[i].src_elt->first_same_value == 0)
5899 /* If elt was removed, find current head of same class,
5900 or 0 if nothing remains of that class. */
5902 struct table_elt *elt = sets[i].src_elt;
5904 while (elt && elt->prev_same_value)
5905 elt = elt->prev_same_value;
5907 while (elt && elt->first_same_value == 0)
5908 elt = elt->next_same_value;
5909 sets[i].src_elt = elt ? elt->first_same_value : 0;
5913 /* Now insert the destinations into their equivalence classes. */
5915 for (i = 0; i < n_sets; i++)
5916 if (sets[i].rtl)
5918 rtx dest = SET_DEST (sets[i].rtl);
5919 struct table_elt *elt;
5921 /* Don't record value if we are not supposed to risk allocating
5922 floating-point values in registers that might be wider than
5923 memory. */
5924 if ((flag_float_store
5925 && MEM_P (dest)
5926 && FLOAT_MODE_P (GET_MODE (dest)))
5927 /* Don't record BLKmode values, because we don't know their
5928 size, and can't be sure that other BLKmode values
5929 have the same or smaller size. */
5930 || GET_MODE (dest) == BLKmode
5931 /* Don't record values of destinations set inside a libcall block
5932 since we might delete the libcall. Things should have been set
5933 up so we won't want to reuse such a value, but we play it safe
5934 here. */
5935 || libcall_insn
5936 /* If we didn't put a REG_EQUAL value or a source into the hash
5937 table, there is no point in recording DEST. */
5938 || sets[i].src_elt == 0
5939 /* If DEST is a paradoxical SUBREG and SRC is a ZERO_EXTEND
5940 or SIGN_EXTEND, don't record DEST since it can cause
5941 some tracking to be wrong.
5943 ??? Think about this more later. */
5944 || (GET_CODE (dest) == SUBREG
5945 && (GET_MODE_SIZE (GET_MODE (dest))
5946 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
5947 && (GET_CODE (sets[i].src) == SIGN_EXTEND
5948 || GET_CODE (sets[i].src) == ZERO_EXTEND)))
5949 continue;
5951 /* STRICT_LOW_PART isn't part of the value BEING set,
5952 and neither is the SUBREG inside it.
5953 Note that in this case SETS[I].SRC_ELT is really SRC_EQV_ELT. */
5954 if (GET_CODE (dest) == STRICT_LOW_PART)
5955 dest = SUBREG_REG (XEXP (dest, 0));
5957 if (REG_P (dest) || GET_CODE (dest) == SUBREG)
5958 /* Registers must also be inserted into chains for quantities. */
5959 if (insert_regs (dest, sets[i].src_elt, 1))
5961 /* If `insert_regs' changes something, the hash code must be
5962 recalculated. */
5963 rehash_using_reg (dest);
5964 sets[i].dest_hash = HASH (dest, GET_MODE (dest));
5967 elt = insert (dest, sets[i].src_elt,
5968 sets[i].dest_hash, GET_MODE (dest));
5970 elt->in_memory = (MEM_P (sets[i].inner_dest)
5971 && !MEM_READONLY_P (sets[i].inner_dest));
5973 /* If we have (set (subreg:m1 (reg:m2 foo) 0) (bar:m1)), M1 is no
5974 narrower than M2, and both M1 and M2 are the same number of words,
5975 we are also doing (set (reg:m2 foo) (subreg:m2 (bar:m1) 0)) so
5976 make that equivalence as well.
5978 However, BAR may have equivalences for which gen_lowpart
5979 will produce a simpler value than gen_lowpart applied to
5980 BAR (e.g., if BAR was ZERO_EXTENDed from M2), so we will scan all
5981 BAR's equivalences. If we don't get a simplified form, make
5982 the SUBREG. It will not be used in an equivalence, but will
5983 cause two similar assignments to be detected.
5985 Note the loop below will find SUBREG_REG (DEST) since we have
5986 already entered SRC and DEST of the SET in the table. */
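/* E.g. on a 32-bit target, M1 == SImode and M2 == HImode occupy one
   word each, so (set (subreg:SI (reg:HI 100) 0) (reg:SI 101)) also
   tells us (set (reg:HI 100) (subreg:HI (reg:SI 101) 0)). */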
5988 if (GET_CODE (dest) == SUBREG
5989 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) - 1)
5990 / UNITS_PER_WORD)
5991 == (GET_MODE_SIZE (GET_MODE (dest)) - 1) / UNITS_PER_WORD)
5992 && (GET_MODE_SIZE (GET_MODE (dest))
5993 >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
5994 && sets[i].src_elt != 0)
5996 enum machine_mode new_mode = GET_MODE (SUBREG_REG (dest));
5997 struct table_elt *elt, *classp = 0;
5999 for (elt = sets[i].src_elt->first_same_value; elt;
6000 elt = elt->next_same_value)
6002 rtx new_src = 0;
6003 unsigned src_hash;
6004 struct table_elt *src_elt;
6005 int byte = 0;
6007 /* Ignore invalid entries. */
6008 if (!REG_P (elt->exp)
6009 && ! exp_equiv_p (elt->exp, elt->exp, 1, 0))
6010 continue;
6012 /* We may have already been playing subreg games. If the
6013 mode is already correct for the destination, use it. */
6014 if (GET_MODE (elt->exp) == new_mode)
6015 new_src = elt->exp;
6016 else
6018 /* Calculate big endian correction for the SUBREG_BYTE.
6019 We have already checked that M1 (GET_MODE (dest))
6020 is not narrower than M2 (new_mode). */
6021 if (BYTES_BIG_ENDIAN)
6022 byte = (GET_MODE_SIZE (GET_MODE (dest))
6023 - GET_MODE_SIZE (new_mode));
6025 new_src = simplify_gen_subreg (new_mode, elt->exp,
6026 GET_MODE (dest), byte);
6029 /* The call to simplify_gen_subreg fails if the value
6030 is VOIDmode and we can't do any simplification, e.g.
6031 for EXPR_LISTs denoting function call results.
6032 It is invalid to construct a SUBREG with a VOIDmode
6033 SUBREG_REG, hence a zero new_src means we can't do
6034 this substitution. */
6035 if (! new_src)
6036 continue;
6038 src_hash = HASH (new_src, new_mode);
6039 src_elt = lookup (new_src, src_hash, new_mode);
6041 /* Put the new source in the hash table if it isn't
6042 there already. */
6043 if (src_elt == 0)
6045 if (insert_regs (new_src, classp, 0))
6047 rehash_using_reg (new_src);
6048 src_hash = HASH (new_src, new_mode);
6050 src_elt = insert (new_src, classp, src_hash, new_mode);
6051 src_elt->in_memory = elt->in_memory;
6053 else if (classp && classp != src_elt->first_same_value)
6054 /* Show that two things that we've seen before are
6055 actually the same. */
6056 merge_equiv_classes (src_elt, classp);
6058 classp = src_elt->first_same_value;
6059 /* Ignore invalid entries. */
6060 while (classp
6061 && !REG_P (classp->exp)
6062 && ! exp_equiv_p (classp->exp, classp->exp, 1, 0))
6063 classp = classp->next_same_value;
6068 /* Special handling for (set REG0 REG1) where REG0 is the
6069 "cheapest", cheaper than REG1. After cse, REG1 will probably not
6070 be used in the sequel, so (if easily done) change this insn to
6071 (set REG1 REG0) and replace REG1 with REG0 in the previous insn
6072 that computed their value. Then REG1 will become a dead store
6073 and won't cloud the situation for later optimizations.
6075 Do not make this change if REG1 is a hard register, because it will
6076 then be used in the sequel and we may be changing a two-operand insn
6077 into a three-operand insn.
6079 Also do not do this if we are operating on a copy of INSN.
6081 Also don't do this if INSN ends a libcall; this would cause an unrelated
6082 register to be set in the middle of a libcall, and we then get bad code
6083 if the libcall is deleted. */
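/* Concretely, the pair
   (set (reg 101) (plus ...))     previous insn
   (set (reg 100) (reg 101))      this insn
   becomes
   (set (reg 100) (plus ...))
   (set (reg 101) (reg 100))
   and the second set becomes a dead store when r101 is unused. */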
6085 if (n_sets == 1 && sets[0].rtl && REG_P (SET_DEST (sets[0].rtl))
6086 && NEXT_INSN (PREV_INSN (insn)) == insn
6087 && REG_P (SET_SRC (sets[0].rtl))
6088 && REGNO (SET_SRC (sets[0].rtl)) >= FIRST_PSEUDO_REGISTER
6089 && REGNO_QTY_VALID_P (REGNO (SET_SRC (sets[0].rtl))))
6091 int src_q = REG_QTY (REGNO (SET_SRC (sets[0].rtl)));
6092 struct qty_table_elem *src_ent = &qty_table[src_q];
6094 if ((src_ent->first_reg == REGNO (SET_DEST (sets[0].rtl)))
6095 && ! find_reg_note (insn, REG_RETVAL, NULL_RTX))
6097 rtx prev = insn;
6098 /* Scan for the previous nonnote insn, but stop at a basic
6099 block boundary. */
6102 prev = PREV_INSN (prev);
6104 while (prev && NOTE_P (prev)
6105 && NOTE_LINE_NUMBER (prev) != NOTE_INSN_BASIC_BLOCK);
6107 /* Do not swap the registers around if the previous instruction
6108 attaches a REG_EQUIV note to REG1.
6110 ??? It's not entirely clear whether we can transfer a REG_EQUIV
6111 from the pseudo that originally shadowed an incoming argument
6112 to another register. Some uses of REG_EQUIV might rely on it
6113 being attached to REG1 rather than REG2.
6115 This section previously turned the REG_EQUIV into a REG_EQUAL
6116 note. We cannot do that because REG_EQUIV may provide an
6117 uninitialized stack slot when REG_PARM_STACK_SPACE is used. */
6119 if (prev != 0 && NONJUMP_INSN_P (prev)
6120 && GET_CODE (PATTERN (prev)) == SET
6121 && SET_DEST (PATTERN (prev)) == SET_SRC (sets[0].rtl)
6122 && ! find_reg_note (prev, REG_EQUIV, NULL_RTX))
6124 rtx dest = SET_DEST (sets[0].rtl);
6125 rtx src = SET_SRC (sets[0].rtl);
6126 rtx note;
6128 validate_change (prev, &SET_DEST (PATTERN (prev)), dest, 1);
6129 validate_change (insn, &SET_DEST (sets[0].rtl), src, 1);
6130 validate_change (insn, &SET_SRC (sets[0].rtl), dest, 1);
6131 apply_change_group ();
6133 /* If INSN has a REG_EQUAL note, and this note mentions
6134 REG0, then we must delete it, because the value in
6135 REG0 has changed. If the note's value is REG1, we must
6136 also delete it because that is now this insn's dest. */
6137 note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
6138 if (note != 0
6139 && (reg_mentioned_p (dest, XEXP (note, 0))
6140 || rtx_equal_p (src, XEXP (note, 0))))
6141 remove_note (insn, note);
6146 /* If this is a conditional jump insn, record any known equivalences due to
6147 the condition being tested. */
6149 last_jump_equiv_class = 0;
6150 if (JUMP_P (insn)
6151 && n_sets == 1 && GET_CODE (x) == SET
6152 && GET_CODE (SET_SRC (x)) == IF_THEN_ELSE)
6153 record_jump_equiv (insn, 0);
6155 #ifdef HAVE_cc0
6156 /* If the previous insn set CC0 and this insn no longer references CC0,
6157 delete the previous insn. Here we use the fact that nothing expects CC0
6158 to be valid over an insn, which is true until the final pass. */
6159 if (prev_insn && NONJUMP_INSN_P (prev_insn)
6160 && (tem = single_set (prev_insn)) != 0
6161 && SET_DEST (tem) == cc0_rtx
6162 && ! reg_mentioned_p (cc0_rtx, x))
6163 delete_insn (prev_insn);
6165 prev_insn_cc0 = this_insn_cc0;
6166 prev_insn_cc0_mode = this_insn_cc0_mode;
6167 prev_insn = insn;
6168 #endif
6171 /* Remove from the hash table all expressions that reference memory. */
6173 static void
6174 invalidate_memory (void)
6176 int i;
6177 struct table_elt *p, *next;
6179 for (i = 0; i < HASH_SIZE; i++)
6180 for (p = table[i]; p; p = next)
6182 next = p->next_same_hash;
6183 if (p->in_memory)
6184 remove_from_table (p, i);
6188 /* If ADDR is an address that implicitly affects the stack pointer, return
6189 1 and update the register tables to show the effect. Else, return 0. */
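/* E.g. the address in (mem:SI (pre_dec:SI (reg:SI sp))) produced by a
   stack push matches here: we bump REG_TICK for the stack pointer so
   that stale equivalences involving it are dropped. */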
6191 static int
6192 addr_affects_sp_p (rtx addr)
6194 if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC
6195 && REG_P (XEXP (addr, 0))
6196 && REGNO (XEXP (addr, 0)) == STACK_POINTER_REGNUM)
6198 if (REG_TICK (STACK_POINTER_REGNUM) >= 0)
6200 REG_TICK (STACK_POINTER_REGNUM)++;
6201 /* Is it possible to use a subreg of SP? */
6202 SUBREG_TICKED (STACK_POINTER_REGNUM) = -1;
6205 /* This should be *very* rare. */
6206 if (TEST_HARD_REG_BIT (hard_regs_in_table, STACK_POINTER_REGNUM))
6207 invalidate (stack_pointer_rtx, VOIDmode);
6209 return 1;
6212 return 0;
6215 /* Perform invalidation on the basis of everything about an insn
6216 except for invalidating the actual places that are SET in it.
6217 This includes the places CLOBBERed, and anything that might
6218 alias with something that is SET or CLOBBERed.
6220 X is the pattern of the insn. */
6222 static void
6223 invalidate_from_clobbers (rtx x)
6225 if (GET_CODE (x) == CLOBBER)
6227 rtx ref = XEXP (x, 0);
6228 if (ref)
6230 if (REG_P (ref) || GET_CODE (ref) == SUBREG
6231 || MEM_P (ref))
6232 invalidate (ref, VOIDmode);
6233 else if (GET_CODE (ref) == STRICT_LOW_PART
6234 || GET_CODE (ref) == ZERO_EXTRACT)
6235 invalidate (XEXP (ref, 0), GET_MODE (ref));
6238 else if (GET_CODE (x) == PARALLEL)
6240 int i;
6241 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
6243 rtx y = XVECEXP (x, 0, i);
6244 if (GET_CODE (y) == CLOBBER)
6246 rtx ref = XEXP (y, 0);
6247 if (REG_P (ref) || GET_CODE (ref) == SUBREG
6248 || MEM_P (ref))
6249 invalidate (ref, VOIDmode);
6250 else if (GET_CODE (ref) == STRICT_LOW_PART
6251 || GET_CODE (ref) == ZERO_EXTRACT)
6252 invalidate (XEXP (ref, 0), GET_MODE (ref));
6258 /* Process X, part of the REG_NOTES of an insn. Look at any REG_EQUAL notes
6259 and replace any registers in them with either an equivalent constant
6260 or the canonical form of the register. If we are inside an address,
6261 only do this if the address remains valid.
6263 OBJECT is 0 except when within a MEM in which case it is the MEM.
6265 Return the replacement for X. */
6267 static rtx
6268 cse_process_notes (rtx x, rtx object)
6270 enum rtx_code code = GET_CODE (x);
6271 const char *fmt = GET_RTX_FORMAT (code);
6272 int i;
6274 switch (code)
6276 case CONST_INT:
6277 case CONST:
6278 case SYMBOL_REF:
6279 case LABEL_REF:
6280 case CONST_DOUBLE:
6281 case CONST_VECTOR:
6282 case PC:
6283 case CC0:
6284 case LO_SUM:
6285 return x;
6287 case MEM:
6288 validate_change (x, &XEXP (x, 0),
6289 cse_process_notes (XEXP (x, 0), x), 0);
6290 return x;
6292 case EXPR_LIST:
6293 case INSN_LIST:
6294 if (REG_NOTE_KIND (x) == REG_EQUAL)
6295 XEXP (x, 0) = cse_process_notes (XEXP (x, 0), NULL_RTX);
6296 if (XEXP (x, 1))
6297 XEXP (x, 1) = cse_process_notes (XEXP (x, 1), NULL_RTX);
6298 return x;
6300 case SIGN_EXTEND:
6301 case ZERO_EXTEND:
6302 case SUBREG:
6304 rtx new = cse_process_notes (XEXP (x, 0), object);
6305 /* We don't substitute VOIDmode constants into these rtx,
6306 since they would impede folding. */
6307 if (GET_MODE (new) != VOIDmode)
6308 validate_change (object, &XEXP (x, 0), new, 0);
6309 return x;
6312 case REG:
6313 i = REG_QTY (REGNO (x));
6315 /* Return a constant or a constant register. */
6316 if (REGNO_QTY_VALID_P (REGNO (x)))
6318 struct qty_table_elem *ent = &qty_table[i];
6320 if (ent->const_rtx != NULL_RTX
6321 && (CONSTANT_P (ent->const_rtx)
6322 || REG_P (ent->const_rtx)))
6324 rtx new = gen_lowpart (GET_MODE (x), ent->const_rtx);
6325 if (new)
6326 return new;
6330 /* Otherwise, canonicalize this register. */
6331 return canon_reg (x, NULL_RTX);
6333 default:
6334 break;
6337 for (i = 0; i < GET_RTX_LENGTH (code); i++)
6338 if (fmt[i] == 'e')
6339 validate_change (object, &XEXP (x, i),
6340 cse_process_notes (XEXP (x, i), object), 0);
6342 return x;
6345 /* Find common subexpressions between the end test of a loop and the beginning
6346 of the loop. LOOP_START is the CODE_LABEL at the start of a loop.
6348 Often we have a loop where an expression in the exit test is used
6349 in the body of the loop. For example "while (*p) *q++ = *p++;".
6350 Because of the way we duplicate the loop exit test in front of the loop,
6351 however, we don't detect that common subexpression. This will be caught
6352 when global cse is implemented, but this is a quite common case.
6354 This function handles the most common cases of these common expressions.
6355 It is called after we have processed the basic block ending with the
6356 NOTE_INSN_LOOP_END note that ends a loop and the previous JUMP_INSN
6357 jumps to a label used only once. */
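/* With the exit test duplicated in front of the loop, the code is
   laid out as

   if (! *p) goto end;
   loop: *q++ = *p++;
   if (*p) goto loop;
   end:

   and the *p loaded by the bottom test is loaded again at the top of
   the next iteration; that reuse is what this function tries to catch. */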
6359 static void
6360 cse_around_loop (rtx loop_start)
6362 rtx insn;
6363 int i;
6364 struct table_elt *p;
6366 /* If the jump at the end of the loop doesn't go to the start, we don't
6367 do anything. */
6368 for (insn = PREV_INSN (loop_start);
6369 insn && (NOTE_P (insn) && NOTE_LINE_NUMBER (insn) >= 0);
6370 insn = PREV_INSN (insn))
6373 if (insn == 0
6374 || !NOTE_P (insn)
6375 || NOTE_LINE_NUMBER (insn) != NOTE_INSN_LOOP_BEG)
6376 return;
6378 /* If the last insn of the loop (the end test) was an NE comparison,
6379 we will interpret it as an EQ comparison, since we fell through
6380 the loop. Any equivalences resulting from that comparison are
6381 therefore not valid and must be invalidated. */
6382 if (last_jump_equiv_class)
6383 for (p = last_jump_equiv_class->first_same_value; p;
6384 p = p->next_same_value)
6386 if (MEM_P (p->exp) || REG_P (p->exp)
6387 || (GET_CODE (p->exp) == SUBREG
6388 && REG_P (SUBREG_REG (p->exp))))
6389 invalidate (p->exp, VOIDmode);
6390 else if (GET_CODE (p->exp) == STRICT_LOW_PART
6391 || GET_CODE (p->exp) == ZERO_EXTRACT)
6392 invalidate (XEXP (p->exp, 0), GET_MODE (p->exp));
6395 /* Process insns starting after LOOP_START until we hit a CALL_INSN or
6396 a CODE_LABEL (we could handle a CALL_INSN, but it isn't worth it).
6398 The only thing we do with SET_DEST is invalidate entries, so we
6399 can safely process each SET in order. It is slightly less efficient
6400 to do so, but we only want to handle the most common cases.
6402 The gen_move_insn call in cse_set_around_loop may create new pseudos.
6403 These pseudos won't have valid entries in any of the tables indexed
6404 by register number, such as reg_qty. We avoid out-of-range array
6405 accesses by not processing any instructions created after cse started. */
6407 for (insn = NEXT_INSN (loop_start);
6408 !CALL_P (insn) && !LABEL_P (insn)
6409 && INSN_UID (insn) < max_insn_uid
6410 && ! (NOTE_P (insn)
6411 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END);
6412 insn = NEXT_INSN (insn))
6414 if (INSN_P (insn)
6415 && (GET_CODE (PATTERN (insn)) == SET
6416 || GET_CODE (PATTERN (insn)) == CLOBBER))
6417 cse_set_around_loop (PATTERN (insn), insn, loop_start);
6418 else if (INSN_P (insn) && GET_CODE (PATTERN (insn)) == PARALLEL)
6419 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
6420 if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET
6421 || GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == CLOBBER)
6422 cse_set_around_loop (XVECEXP (PATTERN (insn), 0, i), insn,
6423 loop_start);
6427 /* Process one SET of an insn that was skipped. We ignore CLOBBERs
6428 since they are done elsewhere. This function is called via note_stores. */
6430 static void
6431 invalidate_skipped_set (rtx dest, rtx set, void *data ATTRIBUTE_UNUSED)
6433 enum rtx_code code = GET_CODE (dest);
6435 if (code == MEM
6436 && ! addr_affects_sp_p (dest) /* If this is not a stack push ... */
6437 /* There are times when an address can appear varying and be a PLUS
6438 during this scan when it would be a fixed address were we to know
6439 the proper equivalences. So invalidate all memory if there is
6440 a BLKmode or nonscalar memory reference or a reference to a
6441 variable address. */
6442 && (MEM_IN_STRUCT_P (dest) || GET_MODE (dest) == BLKmode
6443 || cse_rtx_varies_p (XEXP (dest, 0), 0)))
6445 invalidate_memory ();
6446 return;
6449 if (GET_CODE (set) == CLOBBER
6450 || CC0_P (dest)
6451 || dest == pc_rtx)
6452 return;
6454 if (code == STRICT_LOW_PART || code == ZERO_EXTRACT)
6455 invalidate (XEXP (dest, 0), GET_MODE (dest));
6456 else if (code == REG || code == SUBREG || code == MEM)
6457 invalidate (dest, VOIDmode);
6460 /* Invalidate all insns from START up to the end of the function or the
6461 next label. This is called when we wish to CSE around a block that is
6462 conditionally executed. */
6464 static void
6465 invalidate_skipped_block (rtx start)
6467 rtx insn;
6469 for (insn = start; insn && !LABEL_P (insn);
6470 insn = NEXT_INSN (insn))
6472 if (! INSN_P (insn))
6473 continue;
6475 if (CALL_P (insn))
6477 if (! CONST_OR_PURE_CALL_P (insn))
6478 invalidate_memory ();
6479 invalidate_for_call ();
6482 invalidate_from_clobbers (PATTERN (insn));
6483 note_stores (PATTERN (insn), invalidate_skipped_set, NULL);
6487 /* If modifying X will modify the value in *DATA (which is really an
6488 `rtx *'), indicate that fact by setting the pointed to value to
6489 NULL_RTX. */
6491 static void
6492 cse_check_loop_start (rtx x, rtx set ATTRIBUTE_UNUSED, void *data)
6494 rtx *cse_check_loop_start_value = (rtx *) data;
6496 if (*cse_check_loop_start_value == NULL_RTX
6497 || GET_CODE (x) == CC0 || GET_CODE (x) == PC)
6498 return;
6500 if ((MEM_P (x) && MEM_P (*cse_check_loop_start_value))
6501 || reg_overlap_mentioned_p (x, *cse_check_loop_start_value))
6502 *cse_check_loop_start_value = NULL_RTX;
6505 /* X is a SET or CLOBBER contained in INSN that was found near the start of
6506 a loop that starts with the label at LOOP_START.
6508 If X is a SET, we see if its SET_SRC is currently in our hash table.
6509 If so, we see if it has a value equal to some register used only in the
6510 loop exit code (as marked by jump.c).
6512 If those two conditions are true, we search backwards from the start of
6513 the loop to see if that same value was loaded into a register that still
6514 retains its value at the start of the loop.
6516 If so, we insert an insn after the load to copy the destination of that
6517 load into the equivalent register and (try to) replace our SET_SRC with that
6518 register.
6520 In any event, we invalidate whatever this SET or CLOBBER modifies. */
6522 static void
6523 cse_set_around_loop (rtx x, rtx insn, rtx loop_start)
6525 struct table_elt *src_elt;
6527 /* If this is a SET, see if we can replace SET_SRC, but ignore SETs that
6528 are setting PC or CC0 or whose SET_SRC is already a register. */
6529 if (GET_CODE (x) == SET
6530 && GET_CODE (SET_DEST (x)) != PC && GET_CODE (SET_DEST (x)) != CC0
6531 && !REG_P (SET_SRC (x)))
6533 src_elt = lookup (SET_SRC (x),
6534 HASH (SET_SRC (x), GET_MODE (SET_DEST (x))),
6535 GET_MODE (SET_DEST (x)));
6537 if (src_elt)
6538 for (src_elt = src_elt->first_same_value; src_elt;
6539 src_elt = src_elt->next_same_value)
6540 if (REG_P (src_elt->exp) && REG_LOOP_TEST_P (src_elt->exp)
6541 && COST (src_elt->exp) < COST (SET_SRC (x)))
6543 rtx p, set;
6545 /* Look for an insn in front of LOOP_START that sets
6546 something in the desired mode to SET_SRC (x) before we hit
6547 a label or CALL_INSN. */
6549 for (p = prev_nonnote_insn (loop_start);
6550 p && !CALL_P (p)
6551 && !LABEL_P (p);
6552 p = prev_nonnote_insn (p))
6553 if ((set = single_set (p)) != 0
6554 && REG_P (SET_DEST (set))
6555 && GET_MODE (SET_DEST (set)) == src_elt->mode
6556 && rtx_equal_p (SET_SRC (set), SET_SRC (x)))
6558 /* We now have to ensure that nothing between P
6559 and LOOP_START modified anything referenced in
6560 SET_SRC (x). We know that nothing within the loop
6561 can modify it, or we would have invalidated it in
6562 the hash table. */
6563 rtx q;
6564 rtx cse_check_loop_start_value = SET_SRC (x);
6565 for (q = p; q != loop_start; q = NEXT_INSN (q))
6566 if (INSN_P (q))
6567 note_stores (PATTERN (q),
6568 cse_check_loop_start,
6569 &cse_check_loop_start_value);
6571 /* If nothing was changed and we can replace our
6572 SET_SRC, add an insn after P to copy its destination
6573 to what we will be replacing SET_SRC with. */
6574 if (cse_check_loop_start_value
6575 && single_set (p)
6576 && !can_throw_internal (insn)
6577 && validate_change (insn, &SET_SRC (x),
6578 src_elt->exp, 0))
6580 /* If this creates new pseudos, this is unsafe,
6581 because the regno of new pseudo is unsuitable
6582 to index into reg_qty when cse_insn processes
6583 the new insn. Therefore, if a new pseudo was
6584 created, discard this optimization. */
6585 int nregs = max_reg_num ();
6586 rtx move
6587 = gen_move_insn (src_elt->exp, SET_DEST (set));
6588 if (nregs != max_reg_num ())
6590 if (! validate_change (insn, &SET_SRC (x),
6591 SET_SRC (set), 0))
6592 abort ();
6594 else
6596 if (CONSTANT_P (SET_SRC (set))
6597 && ! find_reg_equal_equiv_note (insn))
6598 set_unique_reg_note (insn, REG_EQUAL,
6599 SET_SRC (set));
6600 if (control_flow_insn_p (p))
6601 /* p can cause a control flow transfer so it
6602 is the last insn of a basic block. We therefore
6603 can't use emit_insn_after. */
6604 emit_insn_before (move, next_nonnote_insn (p));
6605 else
6606 emit_insn_after (move, p);
6609 break;
6614 /* Deal with the destination of X affecting the stack pointer. */
6615 addr_affects_sp_p (SET_DEST (x));
6617 /* See comment on similar code in cse_insn for explanation of these
6618 tests. */
6619 if (REG_P (SET_DEST (x)) || GET_CODE (SET_DEST (x)) == SUBREG
6620 || MEM_P (SET_DEST (x)))
6621 invalidate (SET_DEST (x), VOIDmode);
6622 else if (GET_CODE (SET_DEST (x)) == STRICT_LOW_PART
6623 || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
6624 invalidate (XEXP (SET_DEST (x), 0), GET_MODE (SET_DEST (x)));
6627 /* Find the end of INSN's basic block and return its range,
6628 the total number of SETs in all the insns of the block, the last insn of the
6629 block, and the branch path.
6631 The branch path indicates which branches should be followed. If a nonzero
6632 path size is specified, the block should be rescanned and a different set
6633 of branches will be taken. The branch path is only used if
6634 FLAG_CSE_FOLLOW_JUMPS or FLAG_CSE_SKIP_BLOCKS is nonzero.
6636 DATA is a pointer to a struct cse_basic_block_data, defined below, that is
6637 used to describe the block. It is filled in with the information about
6638 the current block. The incoming structure's branch path, if any, is used
6639 to construct the output branch path. */
6641 static void
6642 cse_end_of_basic_block (rtx insn, struct cse_basic_block_data *data,
6643 int follow_jumps, int after_loop, int skip_blocks)
6645 rtx p = insn, q;
6646 int nsets = 0;
6647 int low_cuid = INSN_CUID (insn), high_cuid = INSN_CUID (insn);
6648 rtx next = INSN_P (insn) ? insn : next_real_insn (insn);
6649 int path_size = data->path_size;
6650 int path_entry = 0;
6651 int i;
6653 /* Update the previous branch path, if any. If the last branch was
6654 previously PATH_TAKEN, mark it PATH_NOT_TAKEN.
6655 If it was previously PATH_NOT_TAKEN,
6656 shorten the path by one and look at the previous branch. We know that
6657 at least one branch must have been taken if PATH_SIZE is nonzero. */
6658 while (path_size > 0)
6660 if (data->path[path_size - 1].status != PATH_NOT_TAKEN)
6662 data->path[path_size - 1].status = PATH_NOT_TAKEN;
6663 break;
6665 else
6666 path_size--;
6669 /* If the first instruction is marked with QImode, that means we've
6670 already processed this block. Our caller will look at DATA->LAST
6671 to figure out where to go next. We want to return the next block
6672 in the instruction stream, not some branched-to block somewhere
6673 else. We accomplish this by pretending our caller forbade us to
6674 follow jumps or skip blocks. */
6675 if (GET_MODE (insn) == QImode)
6676 follow_jumps = skip_blocks = 0;
6678 /* Scan to end of this basic block. */
6679 while (p && !LABEL_P (p))
6681 /* Don't cse out the end of a loop. This makes a difference
6682 only for the unusual loops that always execute at least once;
6683 all other loops have labels there so we will stop in any case.
6684 Cse'ing out the end of the loop is dangerous because it
6685 might cause an invariant expression inside the loop
6686 to be reused after the end of the loop. This would make it
6687 hard to move the expression out of the loop in loop.c,
6688 especially if it is one of several equivalent expressions
6689 and loop.c would like to eliminate it.
6691 If we are running after loop.c has finished, we can ignore
6692 the NOTE_INSN_LOOP_END. */
6694 if (! after_loop && NOTE_P (p)
6695 && NOTE_LINE_NUMBER (p) == NOTE_INSN_LOOP_END)
6696 break;
6698 /* Don't cse over a call to setjmp; on some machines (eg VAX)
6699 the regs restored by the longjmp come from
6700 a later time than the setjmp. */
6701 if (PREV_INSN (p) && CALL_P (PREV_INSN (p))
6702 && find_reg_note (PREV_INSN (p), REG_SETJMP, NULL))
6703 break;
6705 /* A PARALLEL can have lots of SETs in it,
6706 especially if it is really an ASM_OPERANDS. */
6707 if (INSN_P (p) && GET_CODE (PATTERN (p)) == PARALLEL)
6708 nsets += XVECLEN (PATTERN (p), 0);
6709 else if (!NOTE_P (p))
6710 nsets += 1;
6712 /* Ignore insns made by CSE; they cannot affect the boundaries of
6713 the basic block. */
6715 if (INSN_UID (p) <= max_uid && INSN_CUID (p) > high_cuid)
6716 high_cuid = INSN_CUID (p);
6717 if (INSN_UID (p) <= max_uid && INSN_CUID (p) < low_cuid)
6718 low_cuid = INSN_CUID (p);
6720 /* See if this insn is in our branch path. If it is and we are to
6721 take it, do so. */
6722 if (path_entry < path_size && data->path[path_entry].branch == p)
6724 if (data->path[path_entry].status != PATH_NOT_TAKEN)
6725 p = JUMP_LABEL (p);
6727 /* Point to next entry in path, if any. */
6728 path_entry++;
6731 /* If this is a conditional jump, we can follow it if -fcse-follow-jumps
6732 was specified, we haven't reached our maximum path length, there are
6733 insns following the target of the jump, this is the only use of the
6734 jump label, and the target label is preceded by a BARRIER.
6736 Alternatively, we can follow the jump if it branches around a
6737 block of code and there are no other branches into the block.
6738 In this case invalidate_skipped_block will be called to invalidate any
6739 registers set in the block when following the jump. */
6741 else if ((follow_jumps || skip_blocks) && path_size < PARAM_VALUE (PARAM_MAX_CSE_PATH_LENGTH) - 1
6742 && JUMP_P (p)
6743 && GET_CODE (PATTERN (p)) == SET
6744 && GET_CODE (SET_SRC (PATTERN (p))) == IF_THEN_ELSE
6745 && JUMP_LABEL (p) != 0
6746 && LABEL_NUSES (JUMP_LABEL (p)) == 1
6747 && NEXT_INSN (JUMP_LABEL (p)) != 0)
6749 for (q = PREV_INSN (JUMP_LABEL (p)); q; q = PREV_INSN (q))
6750 if ((!NOTE_P (q)
6751 || NOTE_LINE_NUMBER (q) == NOTE_INSN_LOOP_END
6752 || (PREV_INSN (q) && CALL_P (PREV_INSN (q))
6753 && find_reg_note (PREV_INSN (q), REG_SETJMP, NULL)))
6754 && (!LABEL_P (q) || LABEL_NUSES (q) != 0))
6755 break;
6757 /* If we ran into a BARRIER, this code is an extension of the
6758 basic block when the branch is taken. */
6759 if (follow_jumps && q != 0 && BARRIER_P (q))
6761 /* Don't allow ourselves to keep walking around an
6762 always-executed loop. */
6763 if (next_real_insn (q) == next)
6765 p = NEXT_INSN (p);
6766 continue;
6769 /* Similarly, don't put a branch in our path more than once. */
6770 for (i = 0; i < path_entry; i++)
6771 if (data->path[i].branch == p)
6772 break;
6774 if (i != path_entry)
6775 break;
6777 data->path[path_entry].branch = p;
6778 data->path[path_entry++].status = PATH_TAKEN;
6780 /* This branch now ends our path. It was possible that we
6781 didn't see this branch the last time around (when the
6782 insn in front of the target was a JUMP_INSN that was
6783 turned into a no-op). */
6784 path_size = path_entry;
6786 p = JUMP_LABEL (p);
6787 /* Mark block so we won't scan it again later. */
6788 PUT_MODE (NEXT_INSN (p), QImode);
6790 /* Detect a branch around a block of code. */
6791 else if (skip_blocks && q != 0 && !LABEL_P (q))
6793 rtx tmp;
6795 if (next_real_insn (q) == next)
6797 p = NEXT_INSN (p);
6798 continue;
6801 for (i = 0; i < path_entry; i++)
6802 if (data->path[i].branch == p)
6803 break;
6805 if (i != path_entry)
6806 break;
6808 /* This is no_labels_between_p (p, q) with an added check for
6809 reaching the end of a function (in case Q precedes P). */
6810 for (tmp = NEXT_INSN (p); tmp && tmp != q; tmp = NEXT_INSN (tmp))
6811 if (LABEL_P (tmp))
6812 break;
6814 if (tmp == q)
6816 data->path[path_entry].branch = p;
6817 data->path[path_entry++].status = PATH_AROUND;
6819 path_size = path_entry;
6821 p = JUMP_LABEL (p);
6822 /* Mark block so we won't scan it again later. */
6823 PUT_MODE (NEXT_INSN (p), QImode);
6827 p = NEXT_INSN (p);
6830 data->low_cuid = low_cuid;
6831 data->high_cuid = high_cuid;
6832 data->nsets = nsets;
6833 data->last = p;
6835 /* If none of the jumps in the path were taken, set our path length to zero
6836 so a rescan won't be done. */
6837 for (i = path_size - 1; i >= 0; i--)
6838 if (data->path[i].status != PATH_NOT_TAKEN)
6839 break;
6841 if (i == -1)
6842 data->path_size = 0;
6843 else
6844 data->path_size = path_size;
6846 /* End the current branch path. */
6847 data->path[path_size].branch = 0;
6850 /* Perform cse on the instructions of a function.
6851 F is the first instruction.
6852 NREGS is one plus the highest pseudo-reg number used in the function.
6854 AFTER_LOOP is 1 if this is the cse call done after loop optimization
6855 (only if -frerun-cse-after-loop).
6857 Returns 1 if jump_optimize should be redone due to simplifications
6858 in conditional jump instructions. */
6861 cse_main (rtx f, int nregs, int after_loop, FILE *file)
6863 struct cse_basic_block_data val;
6864 rtx insn = f;
6865 int i;
6867 val.path = xmalloc (sizeof (struct branch_path)
6868 * PARAM_VALUE (PARAM_MAX_CSE_PATH_LENGTH));
6870 cse_jumps_altered = 0;
6871 recorded_label_ref = 0;
6872 constant_pool_entries_cost = 0;
6873 constant_pool_entries_regcost = 0;
6874 val.path_size = 0;
6875 rtl_hooks = cse_rtl_hooks;
6877 init_recog ();
6878 init_alias_analysis ();
6880 max_reg = nregs;
6882 max_insn_uid = get_max_uid ();
6884 reg_eqv_table = xmalloc (nregs * sizeof (struct reg_eqv_elem));
6886 #ifdef LOAD_EXTEND_OP
6888 /* Allocate scratch rtl here. cse_insn will fill in the memory reference
6889 and change the code and mode as appropriate. */
6890 memory_extend_rtx = gen_rtx_ZERO_EXTEND (VOIDmode, NULL_RTX);
6891 #endif
6893 /* Reset the counter indicating how many elements have been made
6894 thus far. */
6895 n_elements_made = 0;
6897 /* Find the largest uid. */
6899 max_uid = get_max_uid ();
6900 uid_cuid = xcalloc (max_uid + 1, sizeof (int));
6902 /* Compute the mapping from uids to cuids.
6903 CUIDs are numbers assigned to insns, like uids,
6904 except that cuids increase monotonically through the code.
6905 Don't assign cuids to line-number NOTEs, so that the distance in cuids
6906 between two insns is not affected by -g. */
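/* E.g. insns with uids 4, 7 (a line-number note) and 12 receive cuids
   1, 1 and 2: the note inherits the cuid of the preceding insn, so
   cuid distances are the same with and without -g. */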
6908 for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
6910 if (!NOTE_P (insn)
6911 || NOTE_LINE_NUMBER (insn) < 0)
6912 INSN_CUID (insn) = ++i;
6913 else
6914 /* Give a line number note the same cuid as the preceding insn. */
6915 INSN_CUID (insn) = i;
6918 ggc_push_context ();
6920 /* Loop over basic blocks.
6921 Compute the maximum number of qty's needed for each basic block
6922 (which is 2 for each SET). */
6923 insn = f;
6924 while (insn)
6926 cse_altered = 0;
6927 cse_end_of_basic_block (insn, &val, flag_cse_follow_jumps, after_loop,
6928 flag_cse_skip_blocks);
6930 /* If this basic block was already processed or has no sets, skip it. */
6931 if (val.nsets == 0 || GET_MODE (insn) == QImode)
6933 PUT_MODE (insn, VOIDmode);
6934 insn = (val.last ? NEXT_INSN (val.last) : 0);
6935 val.path_size = 0;
6936 continue;
6939 cse_basic_block_start = val.low_cuid;
6940 cse_basic_block_end = val.high_cuid;
6941 max_qty = val.nsets * 2;
6943 if (file)
6944 fnotice (file, ";; Processing block from %d to %d, %d sets.\n",
6945 INSN_UID (insn), val.last ? INSN_UID (val.last) : 0,
6946 val.nsets);
6948 /* Make MAX_QTY bigger to give us room to optimize
6949 past the end of this basic block, if that should prove useful. */
6950 if (max_qty < 500)
6951 max_qty = 500;
6953 max_qty += max_reg;
6955 /* If this basic block is being extended by following certain jumps,
6956 (see `cse_end_of_basic_block'), we reprocess the code from the start.
6957 Otherwise, we start after this basic block. */
6958 if (val.path_size > 0)
6959 cse_basic_block (insn, val.last, val.path, 0);
6960 else
6962 int old_cse_jumps_altered = cse_jumps_altered;
6963 rtx temp;
6965 /* When cse changes a conditional jump to an unconditional
6966 jump, we want to reprocess the block, since it will give
6967 us a new branch path to investigate. */
6968 cse_jumps_altered = 0;
6969 temp = cse_basic_block (insn, val.last, val.path, ! after_loop);
6970 if (cse_jumps_altered == 0
6971 || (flag_cse_follow_jumps == 0 && flag_cse_skip_blocks == 0))
6972 insn = temp;
6974 cse_jumps_altered |= old_cse_jumps_altered;
6977 if (cse_altered)
6978 ggc_collect ();
6980 #ifdef USE_C_ALLOCA
6981 alloca (0);
6982 #endif
6985 ggc_pop_context ();
6987 if (max_elements_made < n_elements_made)
6988 max_elements_made = n_elements_made;
6990 /* Clean up. */
6991 end_alias_analysis ();
6992 free (uid_cuid);
6993 free (reg_eqv_table);
6994 free (val.path);
6995 rtl_hooks = general_rtl_hooks;
6997 return cse_jumps_altered || recorded_label_ref;
7000 /* Process a single basic block. FROM and TO are the limits of the basic
7001 block. NEXT_BRANCH points to the branch path when following jumps or
7002 a null path when not following jumps.
7004 AROUND_LOOP is nonzero if we are to try to cse around to the start of a
7005 loop. This is true when we are being called for the last time on a
7006 block and this CSE pass is before loop.c. */
7008 static rtx
7009 cse_basic_block (rtx from, rtx to, struct branch_path *next_branch,
7010 int around_loop)
7012 rtx insn;
7013 int to_usage = 0;
7014 rtx libcall_insn = NULL_RTX;
7015 int num_insns = 0;
7016 int no_conflict = 0;
7018 /* This array is undefined before max_reg, so only allocate
7019 the space actually needed and adjust the start. */
7021 qty_table = xmalloc ((max_qty - max_reg) * sizeof (struct qty_table_elem));
7022 qty_table -= max_reg;
7024 new_basic_block ();
7026 /* TO might be a label. If so, protect it from being deleted. */
7027 if (to != 0 && LABEL_P (to))
7028 ++LABEL_NUSES (to);
7030 for (insn = from; insn != to; insn = NEXT_INSN (insn))
7032 enum rtx_code code = GET_CODE (insn);
7034 /* If we have processed 1,000 insns, flush the hash table to
7035 avoid extreme quadratic behavior. We must not include NOTEs
7036 in the count since there may be more of them when generating
7037 debugging information. If we clear the table at different
7038 times, code generated with -g -O might be different than code
7039 generated with -O but not -g.
7041 ??? This is a real kludge and needs to be done some other way.
7042 Perhaps for 2.9. */
7043 if (code != NOTE && num_insns++ > 1000)
7045 flush_hash_table ();
7046 num_insns = 0;
7049 /* See if this is a branch that is part of the path. If so, and it is
7050 to be taken, do so. */
7051 if (next_branch->branch == insn)
7053 enum taken status = next_branch++->status;
7054 if (status != PATH_NOT_TAKEN)
7056 if (status == PATH_TAKEN)
7057 record_jump_equiv (insn, 1);
7058 else
7059 invalidate_skipped_block (NEXT_INSN (insn));
7061 /* Set the last insn as the jump insn; it doesn't affect cc0.
7062 Then follow this branch. */
7063 #ifdef HAVE_cc0
7064 prev_insn_cc0 = 0;
7065 prev_insn = insn;
7066 #endif
7067 insn = JUMP_LABEL (insn);
7068 continue;
7072 if (GET_MODE (insn) == QImode)
7073 PUT_MODE (insn, VOIDmode);
7075 if (GET_RTX_CLASS (code) == RTX_INSN)
7077 rtx p;
7079 /* Process notes first so we have all notes in canonical forms when
7080 looking for duplicate operations. */
7082 if (REG_NOTES (insn))
7083 REG_NOTES (insn) = cse_process_notes (REG_NOTES (insn), NULL_RTX);
7085 /* Track when we are inside a LIBCALL block. Inside such a block,
7086 we do not want to record destinations. The last insn of a
7087 LIBCALL block is not considered to be part of the block, since
7088 its destination is the result of the block and hence should be
7089 recorded. */
7091 if (REG_NOTES (insn) != 0)
7093 if ((p = find_reg_note (insn, REG_LIBCALL, NULL_RTX)))
7094 libcall_insn = XEXP (p, 0);
7095 else if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
7097 /* Keep libcall_insn for the last SET insn of a no-conflict
7098 block to prevent changing the destination. */
7099 if (! no_conflict)
7100 libcall_insn = 0;
7101 else
7102 no_conflict = -1;
7104 else if (find_reg_note (insn, REG_NO_CONFLICT, NULL_RTX))
7105 no_conflict = 1;
7108 cse_insn (insn, libcall_insn);
7110 if (no_conflict == -1)
7112 libcall_insn = 0;
7113 no_conflict = 0;
7116 /* If we haven't already found an insn where we added a LABEL_REF,
7117 check this one. */
7118 if (NONJUMP_INSN_P (insn) && ! recorded_label_ref
7119 && for_each_rtx (&PATTERN (insn), check_for_label_ref,
7120 (void *) insn))
7121 recorded_label_ref = 1;
7124 /* If INSN is now an unconditional jump, skip to the end of our
7125 basic block by pretending that we just did the last insn in the
7126 basic block. If we are jumping to the end of our block, show
7127 that we can have one usage of TO. */
7129 if (any_uncondjump_p (insn))
7131 if (to == 0)
7133 free (qty_table + max_reg);
7134 return 0;
7137 if (JUMP_LABEL (insn) == to)
7138 to_usage = 1;
7140 /* Maybe TO was deleted because the jump is unconditional.
7141 If so, there is nothing left in this basic block. */
7142 /* ??? Perhaps it would be smarter to set TO
7143 to whatever follows this insn,
7144 and pretend the basic block had always ended here. */
7145 if (INSN_DELETED_P (to))
7146 break;
7148 insn = PREV_INSN (to);
7151 /* See if it is ok to keep on going past the label
7152 which used to end our basic block. Remember that we incremented
7153 the count of that label, so we decrement it here. If we made
7154 a jump unconditional, TO_USAGE will be one; in that case, we don't
7155 want to count the use in that jump. */
7157 if (to != 0 && NEXT_INSN (insn) == to
7158 && LABEL_P (to) && --LABEL_NUSES (to) == to_usage)
7160 struct cse_basic_block_data val;
7161 rtx prev;
7163 insn = NEXT_INSN (to);
7165 /* If TO was the last insn in the function, we are done. */
7166 if (insn == 0)
7168 free (qty_table + max_reg);
7169 return 0;
7172 /* If TO was preceded by a BARRIER we are done with this block
7173 because it has no continuation. */
7174 prev = prev_nonnote_insn (to);
7175 if (prev && BARRIER_P (prev))
7177 free (qty_table + max_reg);
7178 return insn;
7181 /* Find the end of the following block. Note that we won't be
7182 following branches in this case. */
7183 to_usage = 0;
7184 val.path_size = 0;
7185 val.path = xmalloc (sizeof (struct branch_path)
7186 * PARAM_VALUE (PARAM_MAX_CSE_PATH_LENGTH));
7187 cse_end_of_basic_block (insn, &val, 0, 0, 0);
7188 free (val.path);
7190 /* If the tables we allocated have enough space left
7191 to handle all the SETs in the next basic block,
7192 continue through it. Otherwise, return,
7193 and that block will be scanned individually. */
7194 if (val.nsets * 2 + next_qty > max_qty)
7195 break;
7197 cse_basic_block_start = val.low_cuid;
7198 cse_basic_block_end = val.high_cuid;
7199 to = val.last;
7201 /* Prevent TO from being deleted if it is a label. */
7202 if (to != 0 && LABEL_P (to))
7203 ++LABEL_NUSES (to);
7205 /* Back up so we process the first insn in the extension. */
7206 insn = PREV_INSN (insn);
7210 if (next_qty > max_qty)
7211 abort ();
7213 /* If we are running before loop.c, we stopped on a NOTE_INSN_LOOP_END, and
7214 the previous insn is the only insn that branches to the head of a loop,
7215 we can cse into the loop. Don't do this if we changed the jump
7216 structure of a loop unless we aren't going to be following jumps. */
7218 insn = prev_nonnote_insn (to);
7219 if ((cse_jumps_altered == 0
7220 || (flag_cse_follow_jumps == 0 && flag_cse_skip_blocks == 0))
7221 && around_loop && to != 0
7222 && NOTE_P (to) && NOTE_LINE_NUMBER (to) == NOTE_INSN_LOOP_END
7223 && JUMP_P (insn)
7224 && JUMP_LABEL (insn) != 0
7225 && LABEL_NUSES (JUMP_LABEL (insn)) == 1)
7226 cse_around_loop (JUMP_LABEL (insn));
7228 free (qty_table + max_reg);
7230 return to ? NEXT_INSN (to) : 0;
7233 /* Called via for_each_rtx to see if an insn is using a LABEL_REF for which
7234 there isn't a REG_LABEL note. Return one if so. DATA is the insn. */
7236 static int
7237 check_for_label_ref (rtx *rtl, void *data)
7239 rtx insn = (rtx) data;
7241 /* If this insn uses a LABEL_REF and there isn't a REG_LABEL note for it,
7242 we must rerun jump since it needs to place the note. If this is a
7243 LABEL_REF for a CODE_LABEL that isn't in the insn chain, don't do this
7244 since no REG_LABEL will be added. */
7245 return (GET_CODE (*rtl) == LABEL_REF
7246 && ! LABEL_REF_NONLOCAL_P (*rtl)
7247 && LABEL_P (XEXP (*rtl, 0))
7248 && INSN_UID (XEXP (*rtl, 0)) != 0
7249 && ! find_reg_note (insn, REG_LABEL, XEXP (*rtl, 0)));
/* Count the number of times registers are used (not set) in X.
   COUNTS is an array in which we accumulate the count; INCR is the
   amount added for each usage.  */
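/* For instance (illustrative register numbers): for the pattern
   (set (reg 100) (plus (reg 101) (reg 101))), COUNTS[101] grows by
   2 * INCR while COUNTS[100] is unchanged, since a plain REG
   destination is a set rather than a use.  Callers below pass
   INCR == 1 when scanning and INCR == -1 when deleting an insn.  */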
static void
count_reg_usage (rtx x, int *counts, int incr)
{
  enum rtx_code code;
  rtx note;
  const char *fmt;
  int i, j;

  if (x == 0)
    return;

  switch (code = GET_CODE (x))
    {
    case REG:
      counts[REGNO (x)] += incr;
      return;

    case PC:
    case CC0:
    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
      return;

    case CLOBBER:
      /* If we are clobbering a MEM, mark any registers inside the address
         as being used.  */
      if (MEM_P (XEXP (x, 0)))
        count_reg_usage (XEXP (XEXP (x, 0), 0), counts, incr);
      return;

    case SET:
      /* Unless we are setting a REG, count everything in SET_DEST.  */
      if (!REG_P (SET_DEST (x)))
        count_reg_usage (SET_DEST (x), counts, incr);
      count_reg_usage (SET_SRC (x), counts, incr);
      return;

    case CALL_INSN:
      count_reg_usage (CALL_INSN_FUNCTION_USAGE (x), counts, incr);
      /* Fall through.  */

    case INSN:
    case JUMP_INSN:
      count_reg_usage (PATTERN (x), counts, incr);

      /* Things used in a REG_EQUAL note aren't dead since the loop pass
         may try to use them.  */

      note = find_reg_equal_equiv_note (x);
      if (note)
        {
          rtx eqv = XEXP (note, 0);

          if (GET_CODE (eqv) == EXPR_LIST)
            /* This REG_EQUAL note describes the result of a function call.
               Process all the arguments.  */
            do
              {
                count_reg_usage (XEXP (eqv, 0), counts, incr);
                eqv = XEXP (eqv, 1);
              }
            while (eqv && GET_CODE (eqv) == EXPR_LIST);
          else
            count_reg_usage (eqv, counts, incr);
        }
      return;

    case EXPR_LIST:
      if (REG_NOTE_KIND (x) == REG_EQUAL
          || (REG_NOTE_KIND (x) != REG_NONNEG && GET_CODE (XEXP (x, 0)) == USE)
          /* FUNCTION_USAGE expression lists may include (CLOBBER (mem /u)),
             involving registers in the address.  */
          || GET_CODE (XEXP (x, 0)) == CLOBBER)
        count_reg_usage (XEXP (x, 0), counts, incr);

      count_reg_usage (XEXP (x, 1), counts, incr);
      return;

    case ASM_OPERANDS:
      /* Iterate over just the inputs, not the constraints as well.  */
      for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
        count_reg_usage (ASM_OPERANDS_INPUT (x, i), counts, incr);
      return;

    case INSN_LIST:
      abort ();

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        count_reg_usage (XEXP (x, i), counts, incr);
      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          count_reg_usage (XVECEXP (x, i, j), counts, incr);
    }
}
/* Return true if SET is live.  */
static bool
set_live_p (rtx set, rtx insn ATTRIBUTE_UNUSED, /* Only used with HAVE_cc0.  */
            int *counts)
{
#ifdef HAVE_cc0
  rtx tem;
#endif

  if (set_noop_p (set))
    ;           /* A no-op set is dead; fall through to the final return.  */

#ifdef HAVE_cc0
  else if (GET_CODE (SET_DEST (set)) == CC0
           && !side_effects_p (SET_SRC (set))
           && ((tem = next_nonnote_insn (insn)) == 0
               || !INSN_P (tem)
               || !reg_referenced_p (cc0_rtx, PATTERN (tem))))
    return false;
#endif
  else if (!REG_P (SET_DEST (set))
           || REGNO (SET_DEST (set)) < FIRST_PSEUDO_REGISTER
           || counts[REGNO (SET_DEST (set))] != 0
           || side_effects_p (SET_SRC (set)))
    return true;
  return false;
}
/* Return true if INSN is live.  */

static bool
insn_live_p (rtx insn, int *counts)
{
  int i;

  if (flag_non_call_exceptions && may_trap_p (PATTERN (insn)))
    return true;
  else if (GET_CODE (PATTERN (insn)) == SET)
    return set_live_p (PATTERN (insn), insn, counts);
  else if (GET_CODE (PATTERN (insn)) == PARALLEL)
    {
      for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
        {
          rtx elt = XVECEXP (PATTERN (insn), 0, i);

          if (GET_CODE (elt) == SET)
            {
              if (set_live_p (elt, insn, counts))
                return true;
            }
          else if (GET_CODE (elt) != CLOBBER && GET_CODE (elt) != USE)
            return true;
        }
      return false;
    }
  else
    return true;
}
/* Return true if the libcall block ending in INSN is dead as a whole.  */
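/* An illustrative note: a libcall block is a sequence of insns
   bracketed by REG_LIBCALL and REG_RETVAL notes, whose last insn
   carries a REG_EQUAL note giving the value computed by the whole
   sequence.  If that insn's SET_SRC can be replaced by the REG_EQUAL
   expression (possibly forced into memory), the intermediate insns
   become dead and the block can be removed as a whole.  */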
static bool
dead_libcall_p (rtx insn, int *counts)
{
  rtx note, set, new;

  /* See if there's a REG_EQUAL note on this insn and try to
     replace the source with the REG_EQUAL expression.

     We assume that insns with REG_RETVALs can only be reg->reg
     copies at this point.  */
  note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
  if (!note)
    return false;

  set = single_set (insn);
  if (!set)
    return false;

  new = simplify_rtx (XEXP (note, 0));
  if (!new)
    new = XEXP (note, 0);

  /* While changing insn, we must update the counts accordingly.  */
  count_reg_usage (insn, counts, -1);

  if (validate_change (insn, &SET_SRC (set), new, 0))
    {
      count_reg_usage (insn, counts, 1);
      remove_note (insn, find_reg_note (insn, REG_RETVAL, NULL_RTX));
      remove_note (insn, note);
      return true;
    }

  if (CONSTANT_P (new))
    {
      new = force_const_mem (GET_MODE (SET_DEST (set)), new);
      if (new && validate_change (insn, &SET_SRC (set), new, 0))
        {
          count_reg_usage (insn, counts, 1);
          remove_note (insn, find_reg_note (insn, REG_RETVAL, NULL_RTX));
          remove_note (insn, note);
          return true;
        }
    }

  count_reg_usage (insn, counts, 1);
  return false;
}
/* Scan all the insns and delete any that are dead, i.e., that store to a
   register that is never used or copy a register to itself.

   This is used to remove insns made obviously dead by cse, loop, or other
   optimizations.  It improves the heuristics in loop since it won't try to
   move dead invariants out of loops or make givs for dead quantities.  The
   remaining passes of the compilation are also sped up.  */
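/* Deletion is iterative because removing one dead insn can expose
   another: given (set (reg 117) (reg 118)) where reg 117 is unused,
   deleting it decrements the count for reg 118, which may in turn make
   the insn computing reg 118 dead on the next pass.  (Register numbers
   here are illustrative only.)  Returns the number of insns deleted.  */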
int
delete_trivially_dead_insns (rtx insns, int nreg)
{
  int *counts;
  rtx insn, prev;
  int in_libcall = 0, dead_libcall = 0;
  int ndead = 0, nlastdead, niterations = 0;

  timevar_push (TV_DELETE_TRIVIALLY_DEAD);
  /* First count the number of times each register is used.  */
  counts = xcalloc (nreg, sizeof (int));
  for (insn = next_real_insn (insns); insn; insn = next_real_insn (insn))
    count_reg_usage (insn, counts, 1);

  do
    {
      nlastdead = ndead;
      niterations++;
      /* Go from the last insn to the first and delete insns that only set
         unused registers or copy a register to itself.  As we delete an
         insn, remove usage counts for registers it uses.

         The first jump optimization pass may leave a real insn as the last
         insn in the function.  We must not skip that insn or we may end
         up deleting code that is not really dead.  */
      insn = get_last_insn ();
      if (! INSN_P (insn))
        insn = prev_real_insn (insn);

      for (; insn; insn = prev)
        {
          int live_insn = 0;

          prev = prev_real_insn (insn);

          /* Don't delete any insns that are part of a libcall block unless
             we can delete the whole libcall block.

             Flow or loop might get confused if we did that.  Remember
             that we are scanning backwards.  */
          if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
            {
              in_libcall = 1;
              live_insn = 1;
              dead_libcall = dead_libcall_p (insn, counts);
            }
          else if (in_libcall)
            live_insn = ! dead_libcall;
          else
            live_insn = insn_live_p (insn, counts);

          /* If this is a dead insn, delete it and show registers in it
             aren't being used.  */

          if (! live_insn)
            {
              count_reg_usage (insn, counts, -1);
              delete_insn_and_edges (insn);
              ndead++;
            }

          if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
            {
              in_libcall = 0;
              dead_libcall = 0;
            }
        }
    }
  while (ndead != nlastdead);

  if (dump_file && ndead)
    fprintf (dump_file, "Deleted %i trivially dead insns; %i iterations\n",
             ndead, niterations);
  /* Clean up.  */
  free (counts);
  timevar_pop (TV_DELETE_TRIVIALLY_DEAD);
  return ndead;
}
/* This function is called via for_each_rtx.  The argument, NEWREG, is
   a condition code register with the desired mode.  If we are looking
   at the same register in a different mode, replace it with
   NEWREG.  */
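/* Note that returning -1 from a for_each_rtx callback tells it to skip
   the subexpressions of the current rtx but continue the traversal;
   returning 0 continues normally.  */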
static int
cse_change_cc_mode (rtx *loc, void *data)
{
  rtx newreg = (rtx) data;

  if (*loc
      && REG_P (*loc)
      && REGNO (*loc) == REGNO (newreg)
      && GET_MODE (*loc) != GET_MODE (newreg))
    {
      *loc = newreg;
      return -1;
    }
  return 0;
}
/* Change the mode of any reference to the register REGNO (NEWREG) to
   GET_MODE (NEWREG), starting at START.  Stop before END.  Stop at
   any instruction which modifies NEWREG.  */
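/* Stopping at an instruction which modifies NEWREG is safe: from that
   point on the register holds a new value, so later references do not
   see the value whose mode is being changed.  */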
static void
cse_change_cc_mode_insns (rtx start, rtx end, rtx newreg)
{
  rtx insn;

  for (insn = start; insn != end; insn = NEXT_INSN (insn))
    {
      if (! INSN_P (insn))
        continue;

      if (reg_set_p (newreg, insn))
        return;

      for_each_rtx (&PATTERN (insn), cse_change_cc_mode, newreg);
      for_each_rtx (&REG_NOTES (insn), cse_change_cc_mode, newreg);
    }
}
/* BB is a basic block which finishes with the condition code register
   CC_REG set to CC_SRC.  Look through the successors of BB to find
   blocks which have a single predecessor (i.e., this one), and look
   through those blocks for an assignment to CC_REG which is
   equivalent to CC_SRC.  CAN_CHANGE_MODE indicates whether we are
   permitted to change the mode of CC_SRC to a compatible mode.  This
   returns VOIDmode if no equivalent assignments were found.
   Otherwise it returns the mode which CC_SRC should wind up with.

   The main complexity in this function is handling the mode issues.
   We may have more than one duplicate which we can eliminate, and we
   try to find a mode which will work for multiple duplicates.  */
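/* An illustrative sketch (register numbers and CC modes are made up):
   if BB ends after
     (set (reg:CCZ flags) (compare:CCZ (reg:SI 100) (const_int 0)))
   and each successor recomputes the same comparison, perhaps in
   another mode such as CCGCmode, the duplicates can be removed once
   targetm.cc_modes_compatible picks a mode valid for every use.  */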
static enum machine_mode
cse_cc_succs (basic_block bb, rtx cc_reg, rtx cc_src, bool can_change_mode)
{
  bool found_equiv;
  enum machine_mode mode;
  unsigned int insn_count;
  edge e;
  rtx insns[2];
  enum machine_mode modes[2];
  rtx last_insns[2];
  unsigned int i;
  rtx newreg;

  /* We expect to have two successors.  Look at both before picking
     the final mode for the comparison.  If we have more successors
     (i.e., some sort of table jump, although that seems unlikely),
     then we require all beyond the first two to use the same
     mode.  */

  found_equiv = false;
  mode = GET_MODE (cc_src);
  insn_count = 0;
  for (e = bb->succ; e; e = e->succ_next)
    {
      rtx insn;
      rtx end;

      if (e->flags & EDGE_COMPLEX)
        continue;

      if (! e->dest->pred
          || e->dest->pred->pred_next
          || e->dest == EXIT_BLOCK_PTR)
        continue;

      end = NEXT_INSN (BB_END (e->dest));
      for (insn = BB_HEAD (e->dest); insn != end; insn = NEXT_INSN (insn))
        {
          rtx set;

          if (! INSN_P (insn))
            continue;

          /* If CC_SRC is modified, we have to stop looking for
             something which uses it.  */
          if (modified_in_p (cc_src, insn))
            break;

          /* Check whether INSN sets CC_REG to CC_SRC.  */
          set = single_set (insn);
          if (set
              && REG_P (SET_DEST (set))
              && REGNO (SET_DEST (set)) == REGNO (cc_reg))
            {
              bool found;
              enum machine_mode set_mode;
              enum machine_mode comp_mode;

              found = false;
              set_mode = GET_MODE (SET_SRC (set));
              comp_mode = set_mode;
              if (rtx_equal_p (cc_src, SET_SRC (set)))
                found = true;
              else if (GET_CODE (cc_src) == COMPARE
                       && GET_CODE (SET_SRC (set)) == COMPARE
                       && mode != set_mode
                       && rtx_equal_p (XEXP (cc_src, 0),
                                       XEXP (SET_SRC (set), 0))
                       && rtx_equal_p (XEXP (cc_src, 1),
                                       XEXP (SET_SRC (set), 1)))
                {
                  comp_mode = targetm.cc_modes_compatible (mode, set_mode);
                  if (comp_mode != VOIDmode
                      && (can_change_mode || comp_mode == mode))
                    found = true;
                }

              if (found)
                {
                  found_equiv = true;
                  if (insn_count < ARRAY_SIZE (insns))
                    {
                      insns[insn_count] = insn;
                      modes[insn_count] = set_mode;
                      last_insns[insn_count] = end;
                      ++insn_count;

                      if (mode != comp_mode)
                        {
                          if (! can_change_mode)
                            abort ();
                          mode = comp_mode;
                          PUT_MODE (cc_src, mode);
                        }
                    }
                  else
                    {
                      if (set_mode != mode)
                        {
                          /* We found a matching expression in the
                             wrong mode, but we don't have room to
                             store it in the array.  Punt.  This case
                             should be rare.  */
                          break;
                        }
                      /* INSN sets CC_REG to a value equal to CC_SRC
                         with the right mode.  We can simply delete
                         it.  */
                      delete_insn (insn);
                    }

                  /* We found an instruction to delete.  Keep looking,
                     in the hopes of finding a three-way jump.  */
                  continue;
                }

              /* We found an instruction which sets the condition
                 code, so don't look any farther.  */
              break;
            }

          /* If INSN sets CC_REG in some other way, don't look any
             farther.  */
          if (reg_set_p (cc_reg, insn))
            break;
        }

      /* If we fell off the bottom of the block, we can keep looking
         through successors.  We pass CAN_CHANGE_MODE as false because
         we aren't prepared to handle compatibility between the
         further blocks and this block.  */
      if (insn == end)
        {
          enum machine_mode submode;

          submode = cse_cc_succs (e->dest, cc_reg, cc_src, false);
          if (submode != VOIDmode)
            {
              if (submode != mode)
                abort ();
              found_equiv = true;
              can_change_mode = false;
            }
        }
    }

  if (! found_equiv)
    return VOIDmode;

  /* Now INSN_COUNT is the number of instructions we found which set
     CC_REG to a value equivalent to CC_SRC.  The instructions are in
     INSNS.  The modes used by those instructions are in MODES.  */

  newreg = NULL_RTX;
  for (i = 0; i < insn_count; ++i)
    {
      if (modes[i] != mode)
        {
          /* We need to change the mode of CC_REG in INSNS[i] and
             subsequent instructions.  */
          if (! newreg)
            {
              if (GET_MODE (cc_reg) == mode)
                newreg = cc_reg;
              else
                newreg = gen_rtx_REG (mode, REGNO (cc_reg));
            }
          cse_change_cc_mode_insns (NEXT_INSN (insns[i]), last_insns[i],
                                    newreg);
        }

      delete_insn (insns[i]);
    }

  return mode;
}
/* If we have a fixed condition code register (or two), walk through
   the instructions and try to eliminate duplicate assignments.  */
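/* This optimization applies only to targets which report fixed
   condition code registers via targetm.fixed_condition_code_regs
   (a flags register, for example); on other targets the hook fails
   and this pass exits immediately.  */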
void
cse_condition_code_reg (void)
{
  unsigned int cc_regno_1;
  unsigned int cc_regno_2;
  rtx cc_reg_1;
  rtx cc_reg_2;
  basic_block bb;

  if (! targetm.fixed_condition_code_regs (&cc_regno_1, &cc_regno_2))
    return;

  cc_reg_1 = gen_rtx_REG (CCmode, cc_regno_1);
  if (cc_regno_2 != INVALID_REGNUM)
    cc_reg_2 = gen_rtx_REG (CCmode, cc_regno_2);
  else
    cc_reg_2 = NULL_RTX;

  FOR_EACH_BB (bb)
    {
      rtx last_insn;
      rtx cc_reg;
      rtx insn;
      rtx cc_src_insn;
      rtx cc_src;
      enum machine_mode mode;
      enum machine_mode orig_mode;

      /* Look for blocks which end with a conditional jump based on a
         condition code register.  Then look for the instruction which
         sets the condition code register.  Then look through the
         successor blocks for instructions which set the condition
         code register to the same value.  There are other possible
         uses of the condition code register, but these are by far the
         most common and the ones which we are most likely to be able
         to optimize.  */

      last_insn = BB_END (bb);
      if (!JUMP_P (last_insn))
        continue;

      if (reg_referenced_p (cc_reg_1, PATTERN (last_insn)))
        cc_reg = cc_reg_1;
      else if (cc_reg_2 && reg_referenced_p (cc_reg_2, PATTERN (last_insn)))
        cc_reg = cc_reg_2;
      else
        continue;

      cc_src_insn = NULL_RTX;
      cc_src = NULL_RTX;
      for (insn = PREV_INSN (last_insn);
           insn && insn != PREV_INSN (BB_HEAD (bb));
           insn = PREV_INSN (insn))
        {
          rtx set;

          if (! INSN_P (insn))
            continue;
          set = single_set (insn);
          if (set
              && REG_P (SET_DEST (set))
              && REGNO (SET_DEST (set)) == REGNO (cc_reg))
            {
              cc_src_insn = insn;
              cc_src = SET_SRC (set);
              break;
            }
          else if (reg_set_p (cc_reg, insn))
            break;
        }

      if (! cc_src_insn)
        continue;

      if (modified_between_p (cc_src, cc_src_insn, NEXT_INSN (last_insn)))
        continue;

      /* Now CC_REG is a condition code register used for a
         conditional jump at the end of the block, and CC_SRC, in
         CC_SRC_INSN, is the value to which that condition code
         register is set, and CC_SRC is still meaningful at the end of
         the basic block.  */

      orig_mode = GET_MODE (cc_src);
      mode = cse_cc_succs (bb, cc_reg, cc_src, true);
      if (mode != VOIDmode)
        {
          if (mode != GET_MODE (cc_src))
            abort ();
          if (mode != orig_mode)
            {
              rtx newreg = gen_rtx_REG (mode, REGNO (cc_reg));

              /* Change the mode of CC_REG in CC_SRC_INSN to
                 GET_MODE (NEWREG).  */
              for_each_rtx (&PATTERN (cc_src_insn), cse_change_cc_mode,
                            newreg);
              for_each_rtx (&REG_NOTES (cc_src_insn), cse_change_cc_mode,
                            newreg);

              /* Do the same in the following insns that use the
                 current value of CC_REG within BB.  */
              cse_change_cc_mode_insns (NEXT_INSN (cc_src_insn),
                                        NEXT_INSN (last_insn),
                                        newreg);