/* Common subexpression elimination for GNU compiler.
   Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
#include "config.h"
/* stdio.h must precede rtl.h for FFS.  */
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tm_p.h"
#include "hard-reg-set.h"
#include "regs.h"
#include "basic-block.h"
#include "flags.h"
#include "real.h"
#include "insn-config.h"
#include "recog.h"
#include "function.h"
#include "expr.h"
#include "toplev.h"
#include "output.h"
#include "ggc.h"
#include "timevar.h"
#include "except.h"
#include "target.h"
#include "params.h"
#include "rtlhooks-def.h"
/* The basic idea of common subexpression elimination is to go
   through the code, keeping a record of expressions that would
   have the same value at the current scan point, and replacing
   expressions encountered with the cheapest equivalent expression.

   It is too complicated to keep track of the different possibilities
   when control paths merge in this code; so, at each label, we forget all
   that is known and start fresh.  This can be described as processing each
   extended basic block separately.  We have a separate pass to perform
   global CSE.

   Note CSE can turn a conditional or computed jump into a nop or
   an unconditional jump.  When this occurs we arrange to run the jump
   optimizer after CSE to delete the unreachable code.

   We use two data structures to record the equivalent expressions:
   a hash table for most expressions, and a vector of "quantity
   numbers" to record equivalent (pseudo) registers.

   The use of the special data structure for registers is desirable
   because it is faster.  It is possible because register references
   contain a fairly small number, the register number, taken from
   a contiguously allocated series, and two register references are
   identical if they have the same number.  General expressions
   do not have any such thing, so the only way to retrieve the
   information recorded on an expression other than a register
   is to keep it in a hash table.

   Registers and "quantity numbers":

   At the start of each basic block, all of the (hardware and pseudo)
   registers used in the function are given distinct quantity
   numbers to indicate their contents.  During scan, when the code
   copies one register into another, we copy the quantity number.
   When a register is loaded in any other way, we allocate a new
   quantity number to describe the value generated by this operation.
   `reg_qty' records what quantity a register is currently thought
   of as containing.

   All real quantity numbers are greater than or equal to zero.
   If register N has not been assigned a quantity, reg_qty[N] will
   equal -N - 1, which is always negative.

   Quantity numbers below zero do not exist and none of the `qty_table'
   entries should be referenced with a negative index.

   We also maintain a bidirectional chain of registers for each
   quantity number.  The `qty_table' members `first_reg' and `last_reg',
   and `reg_eqv_table' members `next' and `prev' hold these chains.

   The first register in a chain is the one whose lifespan is least local.
   Among equals, it is the one that was seen first.
   We replace any equivalent register with that one.

   If two registers have the same quantity number, it must be true that
   REG expressions with the qty_table `mode' must be in the hash table
   for both registers and must be in the same class.

   The converse is not true.  Since hard registers may be referenced in
   any mode, two REG expressions might be equivalent in the hash table
   but not have the same quantity number if the quantity of one of the
   registers does not have the same mode as those expressions.

   Constants and quantity numbers

   When a quantity has a known constant value, that value is stored
   in the appropriate qty_table `const_rtx'.  This is in addition to
   putting the constant in the hash table as is usual for non-regs.

   Whether a reg or a constant is preferred is determined by the configuration
   macro CONST_COSTS and will often depend on the constant value.  In any
   event, expressions containing constants can be simplified, by fold_rtx.

   When a quantity has a known nearly constant value (such as an address
   of a stack slot), that value is stored in the appropriate qty_table
   `const_rtx'.

   Integer constants don't have a machine mode.  However, cse
   determines the intended machine mode from the destination
   of the instruction that moves the constant.  The machine mode
   is recorded in the hash table along with the actual RTL
   constant expression so that different modes are kept separate.

   Other expressions:

   To record known equivalences among expressions in general
   we use a hash table called `table'.  It has a fixed number of buckets
   that contain chains of `struct table_elt' elements for expressions.
   These chains connect the elements whose expressions have the same
   hash codes.

   Other chains through the same elements connect the elements which
   currently have equivalent values.

   Register references in an expression are canonicalized before hashing
   the expression.  This is done using `reg_qty' and qty_table `first_reg'.
   The hash code of a register reference is computed using the quantity
   number, not the register number.

   When the value of an expression changes, it is necessary to remove from the
   hash table not just that expression but all expressions whose values
   could be different as a result.

     1. If the value changing is in memory, except in special cases
     ANYTHING referring to memory could be changed.  That is because
     nobody knows where a pointer does not point.
     The function `invalidate_memory' removes what is necessary.

     The special cases are when the address is constant or is
     a constant plus a fixed register such as the frame pointer
     or a static chain pointer.  When such addresses are stored in,
     we can tell exactly which other such addresses must be invalidated
     due to overlap.  `invalidate' does this.
     All expressions that refer to non-constant
     memory addresses are also invalidated.  `invalidate_memory' does this.

     2. If the value changing is a register, all expressions
     containing references to that register, and only those,
     must be removed.

   Because searching the entire hash table for expressions that contain
   a register is very slow, we try to figure out when it isn't necessary.
   Precisely, this is necessary only when expressions have been
   entered in the hash table using this register, and then the value has
   changed, and then another expression wants to be added to refer to
   the register's new value.  This sequence of circumstances is rare
   within any one basic block.

   The vectors `reg_tick' and `reg_in_table' are used to detect this case.
   reg_tick[i] is incremented whenever a value is stored in register i.
   reg_in_table[i] holds -1 if no references to register i have been
   entered in the table; otherwise, it contains the value reg_tick[i] had
   when the references were entered.  If we want to enter a reference
   and reg_in_table[i] != reg_tick[i], we must scan and remove old references.
   Until we want to enter a new entry, the mere fact that the two vectors
   don't match causes the entries to be ignored if anyone tries to match them.

   Registers themselves are entered in the hash table as well as in
   the equivalent-register chains.  However, the vectors `reg_tick'
   and `reg_in_table' do not apply to expressions which are simple
   register references.  These expressions are removed from the table
   immediately when they become invalid, and this can be done even if
   we do not immediately search for all the expressions that refer to
   the register.

   A CLOBBER rtx in an instruction invalidates its operand for further
   reuse.  A CLOBBER or SET rtx whose operand is a MEM:BLK
   invalidates everything that resides in memory.

   Related expressions:

   Constant expressions that differ only by an additive integer
   are called related.  When a constant expression is put in
   the table, the related expression with no constant term
   is also entered.  These are made to point at each other
   so that it is possible to find out if there exists any
   register equivalent to an expression related to a given expression.  */
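
/* For illustration only, consider a hypothetical insn sequence (the
   register numbers are invented):

       (set (reg 101) (reg 100))                      ; a copy
       (set (reg 102) (plus (reg 100) (const_int 4))) ; any other load

   After the first insn, reg 101 is given reg 100's quantity number, so
   REG_QTY (100) == REG_QTY (101) and later uses of (reg 101) may be
   replaced by (reg 100), the head of that equivalence chain.  The second
   insn loads reg 102 in some other way, so reg 102 receives a freshly
   allocated quantity number, and the PLUS expression goes into the hash
   table keyed on reg 100's quantity number.  */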
/* Length of qty_table vector.  We know in advance we will not need
   a quantity number this big.  */

static int max_qty;

/* Next quantity number to be allocated.
   This is 1 + the largest number needed so far.  */

static int next_qty;

/* Per-qty information tracking.

   `first_reg' and `last_reg' track the head and tail of the
   chain of registers which currently contain this quantity.

   `mode' contains the machine mode of this quantity.

   `const_rtx' holds the rtx of the constant value of this
   quantity, if known.  A summation of the frame/arg pointer
   and a constant can also be entered here.  When this holds
   a known value, `const_insn' is the insn which stored the
   constant value.

   `comparison_{code,const,qty}' are used to track when a
   comparison between a quantity and some constant or register has
   been passed.  In such a case, we know the results of the comparison
   in case we see it again.  These members record a comparison that
   is known to be true.  `comparison_code' holds the rtx code of such
   a comparison, else it is set to UNKNOWN and the other two
   comparison members are undefined.  `comparison_const' holds
   the constant being compared against, or zero if the comparison
   is not against a constant.  `comparison_qty' holds the quantity
   being compared against when the result is known.  If the comparison
   is not with a register, `comparison_qty' is -1.  */

struct qty_table_elem
{
  rtx const_rtx;
  rtx const_insn;
  rtx comparison_const;
  int comparison_qty;
  unsigned int first_reg, last_reg;
  /* The sizes of these fields should match the sizes of the
     code and mode fields of struct rtx_def (see rtl.h).  */
  ENUM_BITFIELD(rtx_code) comparison_code : 16;
  ENUM_BITFIELD(machine_mode) mode : 8;
};
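
/* For illustration only (register and constant values invented): after a
   conditional branch on (gt (reg 100) (const_int 10)) is known to be
   taken, the quantity for reg 100 can record comparison_code == GT and
   comparison_const == (const_int 10), so a later identical test against
   the same quantity can be resolved without re-evaluating it.  */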
/* The table of all qtys, indexed by qty number.  */
static struct qty_table_elem *qty_table;

/* Structure used to pass arguments via for_each_rtx to function
   cse_change_cc_mode.  */
struct change_cc_mode_args
{
  rtx insn;
  rtx newreg;
};

#ifdef HAVE_cc0
/* For machines that have a CC0, we do not record its value in the hash
   table since its use is guaranteed to be the insn immediately following
   its definition and any other insn is presumed to invalidate it.

   Instead, we store below the value last assigned to CC0.  If it should
   happen to be a constant, it is stored in preference to the actual
   assigned value.  In case it is a constant, we store the mode in which
   the constant should be interpreted.  */

static rtx prev_insn_cc0;
static enum machine_mode prev_insn_cc0_mode;

/* Previous actual insn.  0 if at first insn of basic block.  */

static rtx prev_insn;
#endif

/* Insn being scanned.  */

static rtx this_insn;

/* Indexed by register number, gives the number of the next (or
   previous) register in the chain of registers sharing the same
   value.

   Or -1 if this register is at the end of the chain.

   If reg_qty[N] == N, reg_eqv_table[N].next is undefined.  */

/* Per-register equivalence chain.  */
struct reg_eqv_elem
{
  int next, prev;
};

/* The table of all register equivalence chains.  */
static struct reg_eqv_elem *reg_eqv_table;

struct cse_reg_info
{
  /* The timestamp at which this register is initialized.  */
  unsigned int timestamp;

  /* The quantity number of the register's current contents.  */
  int reg_qty;

  /* The number of times the register has been altered in the current
     basic block.  */
  int reg_tick;

  /* The REG_TICK value at which rtx's containing this register are
     valid in the hash table.  If this does not equal the current
     reg_tick value, such expressions existing in the hash table are
     invalid.  */
  int reg_in_table;

  /* The SUBREG that was set when REG_TICK was last incremented.  Set
     to -1 if the last store was to the whole register, not a subreg.  */
  unsigned int subreg_ticked;
};

/* A table of cse_reg_info indexed by register numbers.  */
static struct cse_reg_info *cse_reg_info_table;

/* The size of the above table.  */
static unsigned int cse_reg_info_table_size;

/* The index of the first entry that has not been initialized.  */
static unsigned int cse_reg_info_table_first_uninitialized;

/* The timestamp at the beginning of the current run of
   cse_basic_block.  We increment this variable at the beginning of
   the current run of cse_basic_block.  The timestamp field of a
   cse_reg_info entry matches the value of this variable if and only
   if the entry has been initialized during the current run of
   cse_basic_block.  */
static unsigned int cse_reg_info_timestamp;

/* A HARD_REG_SET containing all the hard registers for which there is
   currently a REG expression in the hash table.  Note the difference
   from the above variables, which indicate if the REG is mentioned in some
   expression in the table.  */

static HARD_REG_SET hard_regs_in_table;

/* CUID of insn that starts the basic block currently being cse-processed.  */

static int cse_basic_block_start;

/* CUID of insn that ends the basic block currently being cse-processed.  */

static int cse_basic_block_end;

/* Vector mapping INSN_UIDs to cuids.
   The cuids are like uids but increase monotonically always.
   We use them to see whether a reg is used outside a given basic block.  */

static int *uid_cuid;

/* Highest UID in UID_CUID.  */
static int max_uid;

/* Get the cuid of an insn.  */

#define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])

/* Nonzero if this pass has made changes, and therefore it's
   worthwhile to run the garbage collector.  */

static int cse_altered;

/* Nonzero if cse has altered conditional jump insns
   in such a way that jump optimization should be redone.  */

static int cse_jumps_altered;

/* Nonzero if we have put a LABEL_REF into the hash table for an INSN
   without a REG_LABEL note; we then have to rerun jump after CSE to put
   in the note.  */
static int recorded_label_ref;

/* canon_hash stores 1 in do_not_record
   if it notices a reference to CC0, PC, or some other volatile
   subexpression.  */

static int do_not_record;

/* canon_hash stores 1 in hash_arg_in_memory
   if it notices a reference to memory within the expression being hashed.  */

static int hash_arg_in_memory;

/* The hash table contains buckets which are chains of `struct table_elt's,
   each recording one expression's information.
   That expression is in the `exp' field.

   The canon_exp field contains a canonical (from the point of view of
   alias analysis) version of the `exp' field.

   Those elements with the same hash code are chained in both directions
   through the `next_same_hash' and `prev_same_hash' fields.

   Each set of expressions with equivalent values
   are on a two-way chain through the `next_same_value'
   and `prev_same_value' fields, and all point with
   the `first_same_value' field at the first element in
   that chain.  The chain is in order of increasing cost.
   Each element's cost value is in its `cost' field.

   The `in_memory' field is nonzero for elements that
   involve any reference to memory.  These elements are removed
   whenever a write is done to an unidentified location in memory.
   To be safe, we assume that a memory address is unidentified unless
   the address is either a symbol constant or a constant plus
   the frame pointer or argument pointer.

   The `related_value' field is used to connect related expressions
   (that differ by adding an integer).
   The related expressions are chained in a circular fashion.
   `related_value' is zero for expressions for which this
   chain is not useful.

   The `cost' field stores the cost of this element's expression.
   The `regcost' field stores the value returned by approx_reg_cost for
   this element's expression.

   The `is_const' flag is set if the element is a constant (including
   a fixed address).

   The `flag' field is used as a temporary during some search routines.

   The `mode' field is usually the same as GET_MODE (`exp'), but
   if `exp' is a CONST_INT and has no machine mode then the `mode'
   field is the mode it was being used as.  Each constant is
   recorded separately for each mode it is used with.  */

struct table_elt
{
  rtx exp;
  rtx canon_exp;
  struct table_elt *next_same_hash;
  struct table_elt *prev_same_hash;
  struct table_elt *next_same_value;
  struct table_elt *prev_same_value;
  struct table_elt *first_same_value;
  struct table_elt *related_value;
  int cost;
  int regcost;
  /* The size of this field should match the size
     of the mode field of struct rtx_def (see rtl.h).  */
  ENUM_BITFIELD(machine_mode) mode : 8;
  char in_memory;
  char is_const;
  char flag;
};
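
/* For illustration only, with invented costs: a value class might chain
   (const_int 4) -> (reg 100) -> (plus (reg 101) (reg 102)), cheapest
   first, with every element's `first_same_value' pointing at the
   (const_int 4) element.  Replacing an occurrence of the PLUS with the
   head of its class is what actually eliminates the common
   subexpression.  */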
/* We don't want a lot of buckets, because we rarely have very many
   things stored in the hash table, and a lot of buckets slows
   down a lot of loops that happen frequently.  */
#define HASH_SHIFT 5
#define HASH_SIZE (1 << HASH_SHIFT)
#define HASH_MASK (HASH_SIZE - 1)

/* Compute hash code of X in mode M.  Special-case the case where X is a
   pseudo register (hard registers may require `do_not_record' to be set).  */

#define HASH(X, M)						\
 ((REG_P (X) && REGNO (X) >= FIRST_PSEUDO_REGISTER		\
  ? (((unsigned) REG << 7) + (unsigned) REG_QTY (REGNO (X)))	\
  : canon_hash (X, M)) & HASH_MASK)

/* Like HASH, but without side-effects.  */
#define SAFE_HASH(X, M)						\
 ((REG_P (X) && REGNO (X) >= FIRST_PSEUDO_REGISTER		\
  ? (((unsigned) REG << 7) + (unsigned) REG_QTY (REGNO (X)))	\
  : safe_hash (X, M)) & HASH_MASK)
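
/* For illustration only: with HASH_SHIFT == 5 there are 32 buckets, and a
   pseudo such as (reg:SI 100) with, say, REG_QTY (100) == 7 hashes to
   (((unsigned) REG << 7) + 7) & HASH_MASK -- a value computed from its
   quantity number, not its register number, so registers known to be
   equivalent land in the same bucket.  (The register and quantity
   numbers here are invented.)  */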
/* Determine whether register number N is considered a fixed register for the
   purpose of approximating register costs.
   It is desirable to replace other regs with fixed regs, to reduce need for
   non-fixed hard regs.
   A reg wins if it is either the frame pointer or designated as fixed.  */
#define FIXED_REGNO_P(N)  \
  ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
   || fixed_regs[N] || global_regs[N])

/* Compute cost of X, as stored in the `cost' field of a table_elt.  Fixed
   hard registers and pointers into the frame are the cheapest with a cost
   of 0.  Next come pseudos with a cost of one and other hard registers with
   a cost of 2.  Aside from these special cases, call `rtx_cost'.  */

#define CHEAP_REGNO(N)						\
  (REGNO_PTR_FRAME_P(N)						\
   || (HARD_REGISTER_NUM_P (N)					\
       && FIXED_REGNO_P (N) && REGNO_REG_CLASS (N) != NO_REGS))

#define COST(X) (REG_P (X) ? 0 : notreg_cost (X, SET))
#define COST_IN(X,OUTER) (REG_P (X) ? 0 : notreg_cost (X, OUTER))

/* Get the number of times this register has been updated in this
   basic block.  */

#define REG_TICK(N) (get_cse_reg_info (N)->reg_tick)

/* Get the point at which REG was recorded in the table.  */

#define REG_IN_TABLE(N) (get_cse_reg_info (N)->reg_in_table)

/* Get the SUBREG set at the last increment to REG_TICK (-1 if not a
   SUBREG).  */

#define SUBREG_TICKED(N) (get_cse_reg_info (N)->subreg_ticked)

/* Get the quantity number for REG.  */

#define REG_QTY(N) (get_cse_reg_info (N)->reg_qty)

/* Determine if the quantity number for register X represents a valid index
   into the qty_table.  */

#define REGNO_QTY_VALID_P(N) (REG_QTY (N) >= 0)

static struct table_elt *table[HASH_SIZE];

/* Chain of `struct table_elt's made so far for this function
   but currently removed from the table.  */

static struct table_elt *free_element_chain;

/* Set to the cost of a constant pool reference if one was found for a
   symbolic constant.  If this was found, it means we should try to
   convert constants into constant pool entries if they don't fit in
   the insn.  */

static int constant_pool_entries_cost;
static int constant_pool_entries_regcost;

/* This data describes a block that will be processed by cse_basic_block.  */

struct cse_basic_block_data
{
  /* Lowest CUID value of insns in block.  */
  int low_cuid;
  /* Highest CUID value of insns in block.  */
  int high_cuid;
  /* Total number of SETs in block.  */
  int nsets;
  /* Last insn in the block.  */
  rtx last;
  /* Size of current branch path, if any.  */
  int path_size;
  /* Current branch path, indicating which branches will be taken.  */
  struct branch_path
    {
      /* The branch insn.  */
      rtx branch;
      /* Whether it should be taken or not.  AROUND is the same as taken
	 except that it is used when the destination label is not preceded
	 by a BARRIER.  */
      enum taken {PATH_TAKEN, PATH_NOT_TAKEN, PATH_AROUND} status;
    } *path;
};

static bool fixed_base_plus_p (rtx x);
static int notreg_cost (rtx, enum rtx_code);
static int approx_reg_cost_1 (rtx *, void *);
static int approx_reg_cost (rtx);
static int preferable (int, int, int, int);
static void new_basic_block (void);
static void make_new_qty (unsigned int, enum machine_mode);
static void make_regs_eqv (unsigned int, unsigned int);
static void delete_reg_equiv (unsigned int);
static int mention_regs (rtx);
static int insert_regs (rtx, struct table_elt *, int);
static void remove_from_table (struct table_elt *, unsigned);
static struct table_elt *lookup (rtx, unsigned, enum machine_mode);
static struct table_elt *lookup_for_remove (rtx, unsigned, enum machine_mode);
static rtx lookup_as_function (rtx, enum rtx_code);
static struct table_elt *insert (rtx, struct table_elt *, unsigned,
				 enum machine_mode);
static void merge_equiv_classes (struct table_elt *, struct table_elt *);
static void invalidate (rtx, enum machine_mode);
static int cse_rtx_varies_p (rtx, int);
static void remove_invalid_refs (unsigned int);
static void remove_invalid_subreg_refs (unsigned int, unsigned int,
					enum machine_mode);
static void rehash_using_reg (rtx);
static void invalidate_memory (void);
static void invalidate_for_call (void);
static rtx use_related_value (rtx, struct table_elt *);

static inline unsigned canon_hash (rtx, enum machine_mode);
static inline unsigned safe_hash (rtx, enum machine_mode);
static unsigned hash_rtx_string (const char *);

static rtx canon_reg (rtx, rtx);
static void find_best_addr (rtx, rtx *, enum machine_mode);
static enum rtx_code find_comparison_args (enum rtx_code, rtx *, rtx *,
					   enum machine_mode *,
					   enum machine_mode *);
static rtx fold_rtx (rtx, rtx);
static rtx equiv_constant (rtx);
static void record_jump_equiv (rtx, int);
static void record_jump_cond (enum rtx_code, enum machine_mode, rtx, rtx,
			      int);
static void cse_insn (rtx, rtx);
static void cse_end_of_basic_block (rtx, struct cse_basic_block_data *,
				    int, int);
static int addr_affects_sp_p (rtx);
static void invalidate_from_clobbers (rtx);
static rtx cse_process_notes (rtx, rtx);
static void invalidate_skipped_set (rtx, rtx, void *);
static void invalidate_skipped_block (rtx);
static rtx cse_basic_block (rtx, rtx, struct branch_path *);
static void count_reg_usage (rtx, int *, int);
static int check_for_label_ref (rtx *, void *);
extern void dump_class (struct table_elt*);
static void get_cse_reg_info_1 (unsigned int regno);
static struct cse_reg_info * get_cse_reg_info (unsigned int regno);
static int check_dependence (rtx *, void *);

static void flush_hash_table (void);
static bool insn_live_p (rtx, int *);
static bool set_live_p (rtx, rtx, int *);
static bool dead_libcall_p (rtx, int *);
static int cse_change_cc_mode (rtx *, void *);
static void cse_change_cc_mode_insn (rtx, rtx);
static void cse_change_cc_mode_insns (rtx, rtx, rtx);
static enum machine_mode cse_cc_succs (basic_block, rtx, rtx, bool);

#undef RTL_HOOKS_GEN_LOWPART
#define RTL_HOOKS_GEN_LOWPART gen_lowpart_if_possible

static const struct rtl_hooks cse_rtl_hooks = RTL_HOOKS_INITIALIZER;
/* Nonzero if X has the form (PLUS frame-pointer integer).  We check for
   virtual regs here because the simplify_*_operation routines are called
   by integrate.c, which is called before virtual register instantiation.  */

static bool
fixed_base_plus_p (rtx x)
{
  switch (GET_CODE (x))
    {
    case REG:
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx)
	return true;
      if (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])
	return true;
      if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
	  && REGNO (x) <= LAST_VIRTUAL_REGISTER)
	return true;
      return false;

    case PLUS:
      if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	return false;
      return fixed_base_plus_p (XEXP (x, 0));

    default:
      return false;
    }
}
/* Dump the expressions in the equivalence class indicated by CLASSP.
   This function is used only for debugging.  */
void
dump_class (struct table_elt *classp)
{
  struct table_elt *elt;

  fprintf (stderr, "Equivalence chain for ");
  print_rtl (stderr, classp->exp);
  fprintf (stderr, ": \n");

  for (elt = classp->first_same_value; elt; elt = elt->next_same_value)
    {
      print_rtl (stderr, elt->exp);
      fprintf (stderr, "\n");
    }
}
/* Subroutine of approx_reg_cost; called through for_each_rtx.  */

static int
approx_reg_cost_1 (rtx *xp, void *data)
{
  rtx x = *xp;
  int *cost_p = data;

  if (x && REG_P (x))
    {
      unsigned int regno = REGNO (x);

      if (! CHEAP_REGNO (regno))
	{
	  if (regno < FIRST_PSEUDO_REGISTER)
	    {
	      if (SMALL_REGISTER_CLASSES)
		return 1;
	      *cost_p += 2;
	    }
	  else
	    *cost_p += 1;
	}
    }

  return 0;
}

/* Return an estimate of the cost of the registers used in an rtx.
   This is mostly the number of different REG expressions in the rtx;
   however for some exceptions like fixed registers we use a cost of
   0.  If any other hard register reference occurs, return MAX_COST.  */

static int
approx_reg_cost (rtx x)
{
  int cost = 0;

  if (for_each_rtx (&x, approx_reg_cost_1, (void *) &cost))
    return MAX_COST;

  return cost;
}
/* Returns a canonical version of X for the address, from the point of view
   that all multiplications are represented as MULT instead of the multiply
   by a power of 2 being represented as ASHIFT.  */

static rtx
canon_for_address (rtx x)
{
  enum rtx_code code;
  enum machine_mode mode;
  rtx new = 0;
  int i;
  const char *fmt;

  if (!x)
    return x;

  code = GET_CODE (x);
  mode = GET_MODE (x);

  switch (code)
    {
    case ASHIFT:
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
	  && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (mode)
	  && INTVAL (XEXP (x, 1)) >= 0)
	{
	  new = canon_for_address (XEXP (x, 0));
	  new = gen_rtx_MULT (mode, new,
			      gen_int_mode ((HOST_WIDE_INT) 1
					    << INTVAL (XEXP (x, 1)),
					    mode));
	}
      break;
    default:
      break;
    }
  if (new)
    return new;

  /* Now recursively process each operand of this operation.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    if (fmt[i] == 'e')
      {
	new = canon_for_address (XEXP (x, i));
	XEXP (x, i) = new;
      }
  return x;
}
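
/* For illustration only: a shift used as a scaled index, such as

       (ashift (reg 100) (const_int 2))

   is rewritten as (mult (reg 100) (const_int 4)), so that both spellings
   of multiply-by-4 hash and compare as one expression.  (The register
   number is invented.)  */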
/* Return a negative value if an rtx A, whose costs are given by COST_A
   and REGCOST_A, is more desirable than an rtx B.
   Return a positive value if A is less desirable, or 0 if the two are
   equally good.  */
static int
preferable (int cost_a, int regcost_a, int cost_b, int regcost_b)
{
  /* First, get rid of cases involving expressions that are entirely
     unwanted.  */
  if (cost_a != cost_b)
    {
      if (cost_a == MAX_COST)
	return 1;
      if (cost_b == MAX_COST)
	return -1;
    }

  /* Avoid extending lifetimes of hardregs.  */
  if (regcost_a != regcost_b)
    {
      if (regcost_a == MAX_COST)
	return 1;
      if (regcost_b == MAX_COST)
	return -1;
    }

  /* Normal operation costs take precedence.  */
  if (cost_a != cost_b)
    return cost_a - cost_b;
  /* Only if these are identical consider effects on register pressure.  */
  if (regcost_a != regcost_b)
    return regcost_a - regcost_b;
  return 0;
}
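
/* For illustration only, with invented numbers: preferable (4, 1, 4, 3)
   returns -2, since the operation costs tie at 4 and the lower register
   cost wins.  But preferable (2, MAX_COST, 4, 0) returns 1: an rtx whose
   registers carry a MAX_COST regcost (a hard register whose lifetime we
   must not extend) loses even though its operation cost is lower.  */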
/* Internal function, to compute cost when X is not a register; called
   from COST macro to keep it simple.  */

static int
notreg_cost (rtx x, enum rtx_code outer)
{
  return ((GET_CODE (x) == SUBREG
	   && REG_P (SUBREG_REG (x))
	   && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
	   && GET_MODE_CLASS (GET_MODE (SUBREG_REG (x))) == MODE_INT
	   && (GET_MODE_SIZE (GET_MODE (x))
	       < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
	   && subreg_lowpart_p (x)
	   && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (GET_MODE (x)),
				     GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))))
	  ? 0
	  : rtx_cost (x, outer) * 2);
}


/* Initialize CSE_REG_INFO_TABLE.  */

static void
init_cse_reg_info (unsigned int nregs)
{
  /* Do we need to grow the table?  */
  if (nregs > cse_reg_info_table_size)
    {
      unsigned int new_size;

      if (cse_reg_info_table_size < 2048)
	{
	  /* Compute a new size that is a power of 2 and no smaller
	     than the larger of NREGS and 64.  */
	  new_size = (cse_reg_info_table_size
		      ? cse_reg_info_table_size : 64);

	  while (new_size < nregs)
	    new_size *= 2;
	}
      else
	{
	  /* If we need a big table, allocate just enough to hold
	     NREGS registers.  */
	  new_size = nregs;
	}

      /* Reallocate the table with NEW_SIZE entries.  */
      cse_reg_info_table = xrealloc (cse_reg_info_table,
				     (sizeof (struct cse_reg_info)
				      * new_size));
      cse_reg_info_table_size = new_size;
    }

  /* Do we have all of the first NREGS entries initialized?  */
  if (cse_reg_info_table_first_uninitialized < nregs)
    {
      unsigned int old_timestamp = cse_reg_info_timestamp - 1;
      unsigned int i;

      /* Put the old timestamp on newly allocated entries so that they
	 will all be considered out of date.  We do not touch those
	 entries beyond the first NREGS entries to be nice to the
	 virtual memory.  */
      for (i = cse_reg_info_table_first_uninitialized; i < nregs; i++)
	cse_reg_info_table[i].timestamp = old_timestamp;

      cse_reg_info_table_first_uninitialized = nregs;
    }
}
/* Given REGNO, initialize the cse_reg_info entry for REGNO.  */

static void
get_cse_reg_info_1 (unsigned int regno)
{
  /* Set TIMESTAMP field to CSE_REG_INFO_TIMESTAMP so that this
     entry will be considered to have been initialized.  */
  cse_reg_info_table[regno].timestamp = cse_reg_info_timestamp;

  /* Initialize the rest of the entry.  */
  cse_reg_info_table[regno].reg_tick = 1;
  cse_reg_info_table[regno].reg_in_table = -1;
  cse_reg_info_table[regno].subreg_ticked = -1;
  cse_reg_info_table[regno].reg_qty = -regno - 1;
}

/* Find a cse_reg_info entry for REGNO.  */

static inline struct cse_reg_info *
get_cse_reg_info (unsigned int regno)
{
  struct cse_reg_info *p = &cse_reg_info_table[regno];

  /* If this entry has not been initialized, go ahead and initialize
     it.  */
  if (p->timestamp != cse_reg_info_timestamp)
    get_cse_reg_info_1 (regno);

  return p;
}

/* Clear the hash table and initialize each register with its own quantity,
   for a new basic block.  */

static void
new_basic_block (void)
{
  int i;

  next_qty = 0;

  /* Invalidate cse_reg_info_table.  */
  cse_reg_info_timestamp++;

  /* Clear out hash table state for this pass.  */
  CLEAR_HARD_REG_SET (hard_regs_in_table);

  /* The per-quantity values used to be initialized here, but it is
     much faster to initialize each as it is made in `make_new_qty'.  */

  for (i = 0; i < HASH_SIZE; i++)
    {
      struct table_elt *first;

      first = table[i];
      if (first != NULL)
	{
	  struct table_elt *last = first;

	  table[i] = NULL;

	  while (last->next_same_hash != NULL)
	    last = last->next_same_hash;

	  /* Now relink this entire hash chain into
	     the free element list.  */

	  last->next_same_hash = free_element_chain;
	  free_element_chain = first;
	}
    }

#ifdef HAVE_cc0
  prev_insn = 0;
  prev_insn_cc0 = 0;
#endif
}
/* Say that register REG contains a quantity in mode MODE not in any
   register before and initialize that quantity.  */

static void
make_new_qty (unsigned int reg, enum machine_mode mode)
{
  int q;
  struct qty_table_elem *ent;
  struct reg_eqv_elem *eqv;

  gcc_assert (next_qty < max_qty);

  q = REG_QTY (reg) = next_qty++;
  ent = &qty_table[q];
  ent->first_reg = reg;
  ent->last_reg = reg;
  ent->mode = mode;
  ent->const_rtx = ent->const_insn = NULL_RTX;
  ent->comparison_code = UNKNOWN;

  eqv = &reg_eqv_table[reg];
  eqv->next = eqv->prev = -1;
}

/* Make reg NEW equivalent to reg OLD.
   OLD is not changing; NEW is.  */

static void
make_regs_eqv (unsigned int new, unsigned int old)
{
  unsigned int lastr, firstr;
  int q = REG_QTY (old);
  struct qty_table_elem *ent;

  ent = &qty_table[q];

  /* Nothing should become eqv until it has a "non-invalid" qty number.  */
  gcc_assert (REGNO_QTY_VALID_P (old));

  REG_QTY (new) = q;
  firstr = ent->first_reg;
  lastr = ent->last_reg;

  /* Prefer fixed hard registers to anything.  Prefer pseudo regs to other
     hard regs.  Among pseudos, if NEW will live longer than any other reg
     of the same qty, and that is beyond the current basic block,
     make it the new canonical replacement for this qty.  */
  if (! (firstr < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (firstr))
      /* Certain fixed registers might be of the class NO_REGS.  This means
	 that not only can they not be allocated by the compiler, but
	 they cannot be used in substitutions or canonicalizations
	 either.  */
      && (new >= FIRST_PSEUDO_REGISTER || REGNO_REG_CLASS (new) != NO_REGS)
      && ((new < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (new))
	  || (new >= FIRST_PSEUDO_REGISTER
	      && (firstr < FIRST_PSEUDO_REGISTER
		  || ((uid_cuid[REGNO_LAST_UID (new)] > cse_basic_block_end
		       || (uid_cuid[REGNO_FIRST_UID (new)]
			   < cse_basic_block_start))
		      && (uid_cuid[REGNO_LAST_UID (new)]
			  > uid_cuid[REGNO_LAST_UID (firstr)]))))))
    {
      reg_eqv_table[firstr].prev = new;
      reg_eqv_table[new].next = firstr;
      reg_eqv_table[new].prev = -1;
      ent->first_reg = new;
    }
  else
    {
      /* If NEW is a hard reg (known to be non-fixed), insert at end.
	 Otherwise, insert before any non-fixed hard regs that are at the
	 end.  Registers of class NO_REGS cannot be used as an
	 equivalent for anything.  */
      while (lastr < FIRST_PSEUDO_REGISTER && reg_eqv_table[lastr].prev >= 0
	     && (REGNO_REG_CLASS (lastr) == NO_REGS || ! FIXED_REGNO_P (lastr))
	     && new >= FIRST_PSEUDO_REGISTER)
	lastr = reg_eqv_table[lastr].prev;
      reg_eqv_table[new].next = reg_eqv_table[lastr].next;
      if (reg_eqv_table[lastr].next >= 0)
	reg_eqv_table[reg_eqv_table[lastr].next].prev = new;
      else
	qty_table[q].last_reg = new;
      reg_eqv_table[lastr].next = new;
      reg_eqv_table[new].prev = lastr;
    }
}

/* Remove REG from its equivalence class.  */

static void
delete_reg_equiv (unsigned int reg)
{
  struct qty_table_elem *ent;
  int q = REG_QTY (reg);
  int p, n;

  /* If invalid, do nothing.  */
  if (! REGNO_QTY_VALID_P (reg))
    return;

  ent = &qty_table[q];

  p = reg_eqv_table[reg].prev;
  n = reg_eqv_table[reg].next;

  if (n != -1)
    reg_eqv_table[n].prev = p;
  else
    ent->last_reg = p;
  if (p != -1)
    reg_eqv_table[p].next = n;
  else
    ent->first_reg = n;

  REG_QTY (reg) = -reg - 1;
}
/* Remove any invalid expressions from the hash table
   that refer to any of the registers contained in expression X.

   Make sure that newly inserted references to those registers
   as subexpressions will be considered valid.

   mention_regs is not called when a register itself
   is being stored in the table.

   Return 1 if we have done something that may have changed the hash code
   of X.  */

static int
mention_regs (rtx x)
{
  enum rtx_code code;
  int i, j;
  const char *fmt;
  int changed = 0;

  if (x == 0)
    return 0;

  code = GET_CODE (x);
  if (code == REG)
    {
      unsigned int regno = REGNO (x);
      unsigned int endregno
	= regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
		   : hard_regno_nregs[regno][GET_MODE (x)]);
      unsigned int i;

      for (i = regno; i < endregno; i++)
	{
	  if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
	    remove_invalid_refs (i);

	  REG_IN_TABLE (i) = REG_TICK (i);
	  SUBREG_TICKED (i) = -1;
	}

      return 0;
    }

  /* If this is a SUBREG, we don't want to discard other SUBREGs of the same
     pseudo if they don't use overlapping words.  We handle only pseudos
     here for simplicity.  */
  if (code == SUBREG && REG_P (SUBREG_REG (x))
      && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER)
    {
      unsigned int i = REGNO (SUBREG_REG (x));

      if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
	{
	  /* If REG_IN_TABLE (i) differs from REG_TICK (i) by one, and
	     the last store to this register really stored into this
	     subreg, then remove the memory of this subreg.
	     Otherwise, remove any memory of the entire register and
	     all its subregs from the table.  */
	  if (REG_TICK (i) - REG_IN_TABLE (i) > 1
	      || SUBREG_TICKED (i) != REGNO (SUBREG_REG (x)))
	    remove_invalid_refs (i);
	  else
	    remove_invalid_subreg_refs (i, SUBREG_BYTE (x), GET_MODE (x));
	}

      REG_IN_TABLE (i) = REG_TICK (i);
      SUBREG_TICKED (i) = REGNO (SUBREG_REG (x));
      return 0;
    }

  /* If X is a comparison or a COMPARE and either operand is a register
     that does not have a quantity, give it one.  This is so that a later
     call to record_jump_equiv won't cause X to be assigned a different
     hash code and not found in the table after that call.

     It is not necessary to do this here, since rehash_using_reg can
     fix up the table later, but doing this here eliminates the need to
     call that expensive function in the most common case where the only
     use of the register is in the comparison.  */

  if (code == COMPARE || COMPARISON_P (x))
    {
      if (REG_P (XEXP (x, 0))
	  && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
	if (insert_regs (XEXP (x, 0), NULL, 0))
	  {
	    rehash_using_reg (XEXP (x, 0));
	    changed = 1;
	  }

      if (REG_P (XEXP (x, 1))
	  && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
	if (insert_regs (XEXP (x, 1), NULL, 0))
	  {
	    rehash_using_reg (XEXP (x, 1));
	    changed = 1;
	  }
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      changed |= mention_regs (XEXP (x, i));
    else if (fmt[i] == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
	changed |= mention_regs (XVECEXP (x, i, j));

  return changed;
}

/* Update the register quantities for inserting X into the hash table
   with a value equivalent to CLASSP.
   (If the class does not contain a REG, it is irrelevant.)
   If MODIFIED is nonzero, X is a destination; it is being modified.
   Note that delete_reg_equiv should be called on a register
   before insert_regs is done on that register with MODIFIED != 0.

   Nonzero value means that elements of reg_qty have changed
   so X's hash code may be different.  */

static int
insert_regs (rtx x, struct table_elt *classp, int modified)
{
  if (REG_P (x))
    {
      unsigned int regno = REGNO (x);
      int qty_valid;

      /* If REGNO is in the equivalence table already but is of the
	 wrong mode for that equivalence, don't do anything here.  */

      qty_valid = REGNO_QTY_VALID_P (regno);
      if (qty_valid)
	{
	  struct qty_table_elem *ent = &qty_table[REG_QTY (regno)];

	  if (ent->mode != GET_MODE (x))
	    return 0;
	}

      if (modified || ! qty_valid)
	{
	  if (classp)
	    for (classp = classp->first_same_value;
		 classp != 0;
		 classp = classp->next_same_value)
	      if (REG_P (classp->exp)
		  && GET_MODE (classp->exp) == GET_MODE (x))
		{
		  make_regs_eqv (regno, REGNO (classp->exp));
		  return 1;
		}

	  /* Mention_regs for a SUBREG checks if REG_TICK is exactly one larger
	     than REG_IN_TABLE to find out if there was only a single preceding
	     invalidation - for the SUBREG - or another one, which would be
	     for the full register.  However, if we find here that REG_TICK
	     indicates that the register is invalid, it means that it has
	     been invalidated in a separate operation.  The SUBREG might be used
	     now (then this is a recursive call), or we might use the full REG
	     now and a SUBREG of it later.  So bump up REG_TICK so that
	     mention_regs will do the right thing.  */
	  if (! modified
	      && REG_IN_TABLE (regno) >= 0
	      && REG_TICK (regno) == REG_IN_TABLE (regno) + 1)
	    REG_TICK (regno)++;
	  make_new_qty (regno, GET_MODE (x));
	  return 1;
	}

      return 0;
    }

  /* If X is a SUBREG, we will likely be inserting the inner register in the
     table.  If that register doesn't have an assigned quantity number at
     this point but does later, the insertion that we will be doing now will
     not be accessible because its hash code will have changed.  So assign
     a quantity number now.  */

  else if (GET_CODE (x) == SUBREG && REG_P (SUBREG_REG (x))
	   && ! REGNO_QTY_VALID_P (REGNO (SUBREG_REG (x))))
    {
      insert_regs (SUBREG_REG (x), NULL, 0);
      mention_regs (x);
      return 1;
    }
  else
    return mention_regs (x);
}
/* Look in or update the hash table.  */

/* Remove table element ELT from use in the table.
   HASH is its hash code, made using the HASH macro.
   It's an argument because often that is known in advance
   and we save much time not recomputing it.  */

static void
remove_from_table (struct table_elt *elt, unsigned int hash)
{
  if (elt == 0)
    return;

  /* Mark this element as removed.  See cse_insn.  */
  elt->first_same_value = 0;

  /* Remove the table element from its equivalence class.  */

  {
    struct table_elt *prev = elt->prev_same_value;
    struct table_elt *next = elt->next_same_value;

    if (next)
      next->prev_same_value = prev;

    if (prev)
      prev->next_same_value = next;
    else
      {
	struct table_elt *newfirst = next;
	while (next)
	  {
	    next->first_same_value = newfirst;
	    next = next->next_same_value;
	  }
      }
  }

  /* Remove the table element from its hash bucket.  */

  {
    struct table_elt *prev = elt->prev_same_hash;
    struct table_elt *next = elt->next_same_hash;

    if (next)
      next->prev_same_hash = prev;

    if (prev)
      prev->next_same_hash = next;
    else if (table[hash] == elt)
      table[hash] = next;
    else
      {
	/* This entry is not in the proper hash bucket.  This can happen
	   when two classes were merged by `merge_equiv_classes'.  Search
	   for the hash bucket that it heads.  This happens only very
	   rarely, so the cost is acceptable.  */
	for (hash = 0; hash < HASH_SIZE; hash++)
	  if (table[hash] == elt)
	    table[hash] = next;
      }
  }

  /* Remove the table element from its related-value circular chain.  */

  if (elt->related_value != 0 && elt->related_value != elt)
    {
      struct table_elt *p = elt->related_value;

      while (p->related_value != elt)
	p = p->related_value;
      p->related_value = elt->related_value;
      if (p->related_value == p)
	p->related_value = 0;
    }

  /* Now add it to the free element chain.  */
  elt->next_same_hash = free_element_chain;
  free_element_chain = elt;
}

/* Look up X in the hash table and return its table element,
   or 0 if X is not in the table.

   MODE is the machine-mode of X, or if X is an integer constant
   with VOIDmode then MODE is the mode with which X will be used.

   Here we are satisfied to find an expression whose tree structure
   looks like X.  */

static struct table_elt *
lookup (rtx x, unsigned int hash, enum machine_mode mode)
{
  struct table_elt *p;

  for (p = table[hash]; p; p = p->next_same_hash)
    if (mode == p->mode && ((x == p->exp && REG_P (x))
			    || exp_equiv_p (x, p->exp, !REG_P (x), false)))
      return p;

  return 0;
}

/* Like `lookup' but don't care whether the table element uses invalid regs.
   Also ignore discrepancies in the machine mode of a register.  */

static struct table_elt *
lookup_for_remove (rtx x, unsigned int hash, enum machine_mode mode)
{
  struct table_elt *p;

  if (REG_P (x))
    {
      unsigned int regno = REGNO (x);

      /* Don't check the machine mode when comparing registers;
	 invalidating (REG:SI 0) also invalidates (REG:DF 0).  */
      for (p = table[hash]; p; p = p->next_same_hash)
	if (REG_P (p->exp)
	    && REGNO (p->exp) == regno)
	  return p;
    }
  else
    {
      for (p = table[hash]; p; p = p->next_same_hash)
	if (mode == p->mode
	    && (x == p->exp || exp_equiv_p (x, p->exp, 0, false)))
	  return p;
    }

  return 0;
}

/* Look for an expression equivalent to X and with code CODE.
   If one is found, return that expression.  */

static rtx
lookup_as_function (rtx x, enum rtx_code code)
{
  struct table_elt *p
    = lookup (x, SAFE_HASH (x, VOIDmode), GET_MODE (x));

  /* If we are looking for a CONST_INT, the mode doesn't really matter, as
     long as we are narrowing.  So if we looked in vain for a mode narrower
     than word_mode before, look for word_mode now.  */
  if (p == 0 && code == CONST_INT
      && GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (word_mode))
    {
      x = copy_rtx (x);
      PUT_MODE (x, word_mode);
      p = lookup (x, SAFE_HASH (x, VOIDmode), word_mode);
    }

  if (p == 0)
    return 0;

  for (p = p->first_same_value; p; p = p->next_same_value)
    if (GET_CODE (p->exp) == code
	/* Make sure this is a valid entry in the table.  */
	&& exp_equiv_p (p->exp, p->exp, 1, false))
      return p->exp;

  return 0;
}
/* Insert X in the hash table, assuming HASH is its hash code
   and CLASSP is an element of the class it should go in
   (or 0 if a new class should be made).
   It is inserted at the proper position to keep the class in
   the order cheapest first.

   MODE is the machine-mode of X, or if X is an integer constant
   with VOIDmode then MODE is the mode with which X will be used.

   For elements of equal cheapness, the most recent one
   goes in front, except that the first element in the list
   remains first unless a cheaper element is added.  The order of
   pseudo-registers does not matter, as canon_reg will be called to
   find the cheapest when a register is retrieved from the table.

   The in_memory field in the hash table element is set to 0.
   The caller must set it nonzero if appropriate.

   You should call insert_regs (X, CLASSP, MODIFY) before calling here,
   and if insert_regs returns a nonzero value
   you must then recompute its hash code before calling here.

   If necessary, update table showing constant values of quantities.  */

#define CHEAPER(X, Y) \
 (preferable ((X)->cost, (X)->regcost, (Y)->cost, (Y)->regcost) < 0)

static struct table_elt *
insert (rtx x, struct table_elt *classp, unsigned int hash, enum machine_mode mode)
{
  struct table_elt *elt;

  /* If X is a register and we haven't made a quantity for it,
     something is wrong.  */
  gcc_assert (!REG_P (x) || REGNO_QTY_VALID_P (REGNO (x)));

  /* If X is a hard register, show it is being put in the table.  */
  if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
    {
      unsigned int regno = REGNO (x);
      unsigned int endregno = regno + hard_regno_nregs[regno][GET_MODE (x)];
      unsigned int i;

      for (i = regno; i < endregno; i++)
	SET_HARD_REG_BIT (hard_regs_in_table, i);
    }

  /* Put an element for X into the right hash bucket.  */

  elt = free_element_chain;
  if (elt)
    free_element_chain = elt->next_same_hash;
  else
    elt = xmalloc (sizeof (struct table_elt));

  elt->exp = x;
  elt->canon_exp = NULL_RTX;
  elt->cost = COST (x);
  elt->regcost = approx_reg_cost (x);
  elt->next_same_value = 0;
  elt->prev_same_value = 0;
  elt->next_same_hash = table[hash];
  elt->prev_same_hash = 0;
  elt->related_value = 0;
  elt->in_memory = 0;
  elt->mode = mode;
  elt->is_const = (CONSTANT_P (x) || fixed_base_plus_p (x));

  if (table[hash])
    table[hash]->prev_same_hash = elt;
  table[hash] = elt;

  /* Put it into the proper value-class.  */
  if (classp)
    {
      classp = classp->first_same_value;
      if (CHEAPER (elt, classp))
	/* Insert at the head of the class.  */
	{
	  struct table_elt *p;
	  elt->next_same_value = classp;
	  classp->prev_same_value = elt;
	  elt->first_same_value = elt;

	  for (p = classp; p; p = p->next_same_value)
	    p->first_same_value = elt;
	}
      else
	{
	  /* Insert not at head of the class.  */
	  /* Put it after the last element cheaper than X.  */
	  struct table_elt *p, *next;

	  for (p = classp; (next = p->next_same_value) && CHEAPER (next, elt);
	       p = next)
	    ;

	  /* Put it after P and before NEXT.  */
	  elt->next_same_value = next;
	  if (next)
	    next->prev_same_value = elt;

	  elt->prev_same_value = p;
	  p->next_same_value = elt;
	  elt->first_same_value = classp;
	}
    }
  else
    elt->first_same_value = elt;

  /* If this is a constant being set equivalent to a register or a register
     being set equivalent to a constant, note the constant equivalence.

     If this is a constant, it cannot be equivalent to a different constant,
     and a constant is the only thing that can be cheaper than a register.  So
     we know the register is the head of the class (before the constant was
     inserted).

     If this is a register that is not already known equivalent to a
     constant, we must check the entire class.

     If this is a register that is already known equivalent to an insn,
     update the qtys `const_insn' to show that `this_insn' is the latest
     insn making that quantity equivalent to the constant.  */

  if (elt->is_const && classp && REG_P (classp->exp)
      && !REG_P (x))
    {
      int exp_q = REG_QTY (REGNO (classp->exp));
      struct qty_table_elem *exp_ent = &qty_table[exp_q];

      exp_ent->const_rtx = gen_lowpart (exp_ent->mode, x);
      exp_ent->const_insn = this_insn;
    }

  else if (REG_P (x)
	   && classp
	   && ! qty_table[REG_QTY (REGNO (x))].const_rtx
	   && ! elt->is_const)
    {
      struct table_elt *p;

      for (p = classp; p != 0; p = p->next_same_value)
	{
	  if (p->is_const && !REG_P (p->exp))
	    {
	      int x_q = REG_QTY (REGNO (x));
	      struct qty_table_elem *x_ent = &qty_table[x_q];

	      x_ent->const_rtx
		= gen_lowpart (GET_MODE (x), p->exp);
	      x_ent->const_insn = this_insn;
	      break;
	    }
	}
    }

  else if (REG_P (x)
	   && qty_table[REG_QTY (REGNO (x))].const_rtx
	   && GET_MODE (x) == qty_table[REG_QTY (REGNO (x))].mode)
    qty_table[REG_QTY (REGNO (x))].const_insn = this_insn;

  /* If this is a constant with symbolic value,
     and it has a term with an explicit integer value,
     link it up with related expressions.  */
  if (GET_CODE (x) == CONST)
    {
      rtx subexp = get_related_value (x);
      unsigned subhash;
      struct table_elt *subelt, *subelt_prev;

      if (subexp != 0)
	{
	  /* Get the integer-free subexpression in the hash table.  */
	  subhash = SAFE_HASH (subexp, mode);
	  subelt = lookup (subexp, subhash, mode);
	  if (subelt == 0)
	    subelt = insert (subexp, NULL, subhash, mode);
	  /* Initialize SUBELT's circular chain if it has none.  */
	  if (subelt->related_value == 0)
	    subelt->related_value = subelt;
	  /* Find the element in the circular chain that precedes SUBELT.  */
	  subelt_prev = subelt;
	  while (subelt_prev->related_value != subelt)
	    subelt_prev = subelt_prev->related_value;
	  /* Put new ELT into SUBELT's circular chain just before SUBELT.
	     This way the element that follows SUBELT is the oldest one.  */
	  elt->related_value = subelt_prev->related_value;
	  subelt_prev->related_value = elt;
	}
    }

  return elt;
}
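
/* For illustration only (the symbol name is invented): inserting

       (const (plus (symbol_ref "x") (const_int 4)))

   also enters the integer-free term (symbol_ref "x") and links the two
   on a circular `related_value' chain.  If a register is later found to
   hold the related term, use_related_value can derive the offsetted
   address from that register instead of rematerializing the constant.  */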
/* Given two equivalence classes, CLASS1 and CLASS2, put all the entries from
   CLASS2 into CLASS1.  This is done when we have reached an insn which makes
   the two classes equivalent.

   CLASS1 will be the surviving class; CLASS2 should not be used after this
   call.

   Any invalid entries in CLASS2 will not be copied.  */

static void
merge_equiv_classes (struct table_elt *class1, struct table_elt *class2)
{
  struct table_elt *elt, *next, *new;

  /* Ensure we start with the head of the classes.  */
  class1 = class1->first_same_value;
  class2 = class2->first_same_value;

  /* If they were already equal, forget it.  */
  if (class1 == class2)
    return;

  for (elt = class2; elt; elt = next)
    {
      unsigned int hash;
      rtx exp = elt->exp;
      enum machine_mode mode = elt->mode;

      next = elt->next_same_value;

      /* Remove old entry, make a new one in CLASS1's class.
	 Don't do this for invalid entries as we cannot find their
	 hash code (it also isn't necessary).  */
      if (REG_P (exp) || exp_equiv_p (exp, exp, 1, false))
	{
	  bool need_rehash = false;

	  hash_arg_in_memory = 0;
	  hash = HASH (exp, mode);

	  if (REG_P (exp))
	    {
	      need_rehash = REGNO_QTY_VALID_P (REGNO (exp));
	      delete_reg_equiv (REGNO (exp));
	    }

	  remove_from_table (elt, hash);

	  if (insert_regs (exp, class1, 0) || need_rehash)
	    {
	      rehash_using_reg (exp);
	      hash = HASH (exp, mode);
	    }
	  new = insert (exp, class1, hash, mode);
	  new->in_memory = hash_arg_in_memory;
	}
    }
}

/* Flush the entire hash table.  */

static void
flush_hash_table (void)
{
  int i;
  struct table_elt *p;

  for (i = 0; i < HASH_SIZE; i++)
    for (p = table[i]; p; p = table[i])
      {
	/* Note that invalidate can remove elements
	   after P in the current hash chain.  */
	if (REG_P (p->exp))
	  invalidate (p->exp, p->mode);
	else
	  remove_from_table (p, i);
      }
}

/* Function called for each rtx to check whether a true dependence exists.  */
struct check_dependence_data
{
  enum machine_mode mode;
  rtx exp;
  rtx addr;
};

static int
check_dependence (rtx *x, void *data)
{
  struct check_dependence_data *d = (struct check_dependence_data *) data;
  if (*x && MEM_P (*x))
    return canon_true_dependence (d->exp, d->mode, d->addr, *x,
				  cse_rtx_varies_p);
  else
    return 0;
}
1731 /* Remove from the hash table, or mark as invalid, all expressions whose
1732 values could be altered by storing in X. X is a register, a subreg, or
1733 a memory reference with nonvarying address (because, when a memory
1734 reference with a varying address is stored in, all memory references are
1735 removed by invalidate_memory so specific invalidation is superfluous).
1736 FULL_MODE, if not VOIDmode, indicates that this much should be
1737 invalidated instead of just the amount indicated by the mode of X. This
1738 is only used for bitfield stores into memory.
1740 A nonvarying address may be just a register or just a symbol reference,
1741 or it may be either of those plus a numeric offset. */
1743 static void
1744 invalidate (rtx x, enum machine_mode full_mode)
1746 int i;
1747 struct table_elt *p;
1748 rtx addr;
1750 switch (GET_CODE (x))
1752 case REG:
1754 /* If X is a register, dependencies on its contents are recorded
1755 through the qty number mechanism. Just change the qty number of
1756 the register, mark it as invalid for expressions that refer to it,
1757 and remove it itself. */
1758 unsigned int regno = REGNO (x);
1759 unsigned int hash = HASH (x, GET_MODE (x));
1761 /* Remove REGNO from any quantity list it might be on and indicate
1762 that its value might have changed. If it is a pseudo, remove its
1763 entry from the hash table.
1765 For a hard register, we do the first two actions above for any
1766 additional hard registers corresponding to X. Then, if any of these
1767 registers are in the table, we must remove any REG entries that
1768 overlap these registers. */
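/* For example, on a target where DImode occupies two hard registers,
   invalidating (reg:DI 0) must also remove a table entry for
   (reg:SI 1), since the two overlap. */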
1770 delete_reg_equiv (regno);
1771 REG_TICK (regno)++;
1772 SUBREG_TICKED (regno) = -1;
1774 if (regno >= FIRST_PSEUDO_REGISTER)
1776 /* Because a register can be referenced in more than one mode,
1777 we might have to remove more than one table entry. */
1778 struct table_elt *elt;
1780 while ((elt = lookup_for_remove (x, hash, GET_MODE (x))))
1781 remove_from_table (elt, hash);
1783 else
1785 HOST_WIDE_INT in_table
1786 = TEST_HARD_REG_BIT (hard_regs_in_table, regno);
1787 unsigned int endregno
1788 = regno + hard_regno_nregs[regno][GET_MODE (x)];
1789 unsigned int tregno, tendregno, rn;
1790 struct table_elt *p, *next;
1792 CLEAR_HARD_REG_BIT (hard_regs_in_table, regno);
1794 for (rn = regno + 1; rn < endregno; rn++)
1796 in_table |= TEST_HARD_REG_BIT (hard_regs_in_table, rn);
1797 CLEAR_HARD_REG_BIT (hard_regs_in_table, rn);
1798 delete_reg_equiv (rn);
1799 REG_TICK (rn)++;
1800 SUBREG_TICKED (rn) = -1;
1803 if (in_table)
1804 for (hash = 0; hash < HASH_SIZE; hash++)
1805 for (p = table[hash]; p; p = next)
1807 next = p->next_same_hash;
1809 if (!REG_P (p->exp)
1810 || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
1811 continue;
1813 tregno = REGNO (p->exp);
1814 tendregno
1815 = tregno + hard_regno_nregs[tregno][GET_MODE (p->exp)];
1816 if (tendregno > regno && tregno < endregno)
1817 remove_from_table (p, hash);
1821 return;
1823 case SUBREG:
1824 invalidate (SUBREG_REG (x), VOIDmode);
1825 return;
1827 case PARALLEL:
1828 for (i = XVECLEN (x, 0) - 1; i >= 0; --i)
1829 invalidate (XVECEXP (x, 0, i), VOIDmode);
1830 return;
1832 case EXPR_LIST:
1833 /* This is part of a disjoint return value; extract the location in
1834 question ignoring the offset. */
1835 invalidate (XEXP (x, 0), VOIDmode);
1836 return;
1838 case MEM:
1839 addr = canon_rtx (get_addr (XEXP (x, 0)));
1840 /* Calculate the canonical version of X here so that
1841 true_dependence doesn't generate new RTL for X on each call. */
1842 x = canon_rtx (x);
1844 /* Remove all hash table elements that refer to overlapping pieces of
1845 memory. */
1846 if (full_mode == VOIDmode)
1847 full_mode = GET_MODE (x);
1849 for (i = 0; i < HASH_SIZE; i++)
1851 struct table_elt *next;
1853 for (p = table[i]; p; p = next)
1855 next = p->next_same_hash;
1856 if (p->in_memory)
1858 struct check_dependence_data d;
1860 /* Just canonicalize the expression once;
1861 otherwise each time we call invalidate
1862 true_dependence will canonicalize the
1863 expression again. */
1864 if (!p->canon_exp)
1865 p->canon_exp = canon_rtx (p->exp);
1866 d.exp = x;
1867 d.addr = addr;
1868 d.mode = full_mode;
1869 if (for_each_rtx (&p->canon_exp, check_dependence, &d))
1870 remove_from_table (p, i);
1874 return;
1876 default:
1877 gcc_unreachable ();
1881 /* Remove all expressions that refer to register REGNO,
1882 since they are already invalid, and we are about to
1883 mark that register valid again and don't want the old
1884 expressions to reappear as valid. */
1886 static void
1887 remove_invalid_refs (unsigned int regno)
1889 unsigned int i;
1890 struct table_elt *p, *next;
1892 for (i = 0; i < HASH_SIZE; i++)
1893 for (p = table[i]; p; p = next)
1895 next = p->next_same_hash;
1896 if (!REG_P (p->exp)
1897 && refers_to_regno_p (regno, regno + 1, p->exp, (rtx *) 0))
1898 remove_from_table (p, i);
1902 /* Likewise for a subreg with subreg_reg REGNO, subreg_byte OFFSET,
1903 and mode MODE. */
1904 static void
1905 remove_invalid_subreg_refs (unsigned int regno, unsigned int offset,
1906 enum machine_mode mode)
1908 unsigned int i;
1909 struct table_elt *p, *next;
1910 unsigned int end = offset + (GET_MODE_SIZE (mode) - 1);
1912 for (i = 0; i < HASH_SIZE; i++)
1913 for (p = table[i]; p; p = next)
1915 rtx exp = p->exp;
1916 next = p->next_same_hash;
1918 if (!REG_P (exp)
1919 && (GET_CODE (exp) != SUBREG
1920 || !REG_P (SUBREG_REG (exp))
1921 || REGNO (SUBREG_REG (exp)) != regno
1922 || (((SUBREG_BYTE (exp)
1923 + (GET_MODE_SIZE (GET_MODE (exp)) - 1)) >= offset)
1924 && SUBREG_BYTE (exp) <= end))
1925 && refers_to_regno_p (regno, regno + 1, p->exp, (rtx *) 0))
1926 remove_from_table (p, i);
1930 /* Recompute the hash codes of any valid entries in the hash table that
1931 reference X, if X is a register, or SUBREG_REG (X) if X is a SUBREG.
1933 This is called when we make a jump equivalence. */
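/* E.g. after a conditional jump shows that (reg 65) equals (reg 66),
   their quantities are merged; an entry mentioning (reg 65) may then
   hash differently and must be moved to its new chain. */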
1935 static void
1936 rehash_using_reg (rtx x)
1938 unsigned int i;
1939 struct table_elt *p, *next;
1940 unsigned hash;
1942 if (GET_CODE (x) == SUBREG)
1943 x = SUBREG_REG (x);
1945 /* If X is not a register or if the register is known not to be in any
1946 valid entries in the table, we have no work to do. */
1948 if (!REG_P (x)
1949 || REG_IN_TABLE (REGNO (x)) < 0
1950 || REG_IN_TABLE (REGNO (x)) != REG_TICK (REGNO (x)))
1951 return;
1953 /* Scan all hash chains looking for valid entries that mention X.
1954 If we find one and it is in the wrong hash chain, move it. */
1956 for (i = 0; i < HASH_SIZE; i++)
1957 for (p = table[i]; p; p = next)
1959 next = p->next_same_hash;
1960 if (reg_mentioned_p (x, p->exp)
1961 && exp_equiv_p (p->exp, p->exp, 1, false)
1962 && i != (hash = SAFE_HASH (p->exp, p->mode)))
1964 if (p->next_same_hash)
1965 p->next_same_hash->prev_same_hash = p->prev_same_hash;
1967 if (p->prev_same_hash)
1968 p->prev_same_hash->next_same_hash = p->next_same_hash;
1969 else
1970 table[i] = p->next_same_hash;
1972 p->next_same_hash = table[hash];
1973 p->prev_same_hash = 0;
1974 if (table[hash])
1975 table[hash]->prev_same_hash = p;
1976 table[hash] = p;
1981 /* Remove from the hash table any expression that is a call-clobbered
1982 register. Also update their TICK values. */
1984 static void
1985 invalidate_for_call (void)
1987 unsigned int regno, endregno;
1988 unsigned int i;
1989 unsigned hash;
1990 struct table_elt *p, *next;
1991 int in_table = 0;
1993 /* Go through all the hard registers. For each that is clobbered in
1994 a CALL_INSN, remove the register from quantity chains and update
1995 reg_tick if defined. Also see if any of these registers is currently
1996 in the table. */
1998 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1999 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
2001 delete_reg_equiv (regno);
2002 if (REG_TICK (regno) >= 0)
2004 REG_TICK (regno)++;
2005 SUBREG_TICKED (regno) = -1;
2008 in_table |= (TEST_HARD_REG_BIT (hard_regs_in_table, regno) != 0);
2011 /* In the case where we have no call-clobbered hard registers in the
2012 table, we are done. Otherwise, scan the table and remove any
2013 entry that overlaps a call-clobbered register. */
2015 if (in_table)
2016 for (hash = 0; hash < HASH_SIZE; hash++)
2017 for (p = table[hash]; p; p = next)
2019 next = p->next_same_hash;
2021 if (!REG_P (p->exp)
2022 || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
2023 continue;
2025 regno = REGNO (p->exp);
2026 endregno = regno + hard_regno_nregs[regno][GET_MODE (p->exp)];
2028 for (i = regno; i < endregno; i++)
2029 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
2031 remove_from_table (p, hash);
2032 break;
2037 /* Given an expression X of type CONST,
2038 and ELT which is its table entry (or 0 if it
2039 is not in the hash table),
2040 return an alternate expression for X as a register plus integer.
2041 If none can be found, return 0. */
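/* For example, if X is (const (plus (symbol_ref "s") (const_int 8)))
   and the table records some register REG as holding
   (const (plus (symbol_ref "s") (const_int 4))), we return
   (plus REG (const_int 4)). */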
2043 static rtx
2044 use_related_value (rtx x, struct table_elt *elt)
2046 struct table_elt *relt = 0;
2047 struct table_elt *p, *q;
2048 HOST_WIDE_INT offset;
2050 /* First, is there anything related known?
2051 If we have a table element, we can tell from that.
2052 Otherwise, we must look it up. */
2054 if (elt != 0 && elt->related_value != 0)
2055 relt = elt;
2056 else if (elt == 0 && GET_CODE (x) == CONST)
2058 rtx subexp = get_related_value (x);
2059 if (subexp != 0)
2060 relt = lookup (subexp,
2061 SAFE_HASH (subexp, GET_MODE (subexp)),
2062 GET_MODE (subexp));
2065 if (relt == 0)
2066 return 0;
2068 /* Search all related table entries for one that has an
2069 equivalent register. */
2071 p = relt;
2072 while (1)
2074 /* This loop is strange in that it is executed in two different cases.
2075 The first is when X is already in the table. Then it is searching
2076 the RELATED_VALUE list of X's class (RELT). The second case is when
2077 X is not in the table. Then RELT points to a class for the related
2078 value.
2080 Ensure that, whatever case we are in, we ignore classes that have
2081 the same value as X. */
2083 if (rtx_equal_p (x, p->exp))
2084 q = 0;
2085 else
2086 for (q = p->first_same_value; q; q = q->next_same_value)
2087 if (REG_P (q->exp))
2088 break;
2090 if (q)
2091 break;
2093 p = p->related_value;
2095 /* We went all the way around, so there is nothing to be found.
2096 Alternatively, perhaps RELT was in the table for some other reason
2097 and it has no related values recorded. */
2098 if (p == relt || p == 0)
2099 break;
2102 if (q == 0)
2103 return 0;
2105 offset = (get_integer_term (x) - get_integer_term (p->exp));
2106 /* Note: OFFSET may be 0 if P->exp and X are related by commutativity. */
2107 return plus_constant (q->exp, offset);
2110 /* Hash a string. Just add its bytes up. */
2111 static inline unsigned
2112 hash_rtx_string (const char *ps)
2114 unsigned hash = 0;
2115 const unsigned char *p = (const unsigned char *) ps;
2117 if (p)
2118 while (*p)
2119 hash += *p++;
2121 return hash;
2124 /* Hash an rtx. We are careful to make sure the value is never negative.
2125 Equivalent registers hash identically.
2126 MODE is used in hashing for CONST_INTs only;
2127 otherwise the mode of X is used.
2129 Store 1 in DO_NOT_RECORD_P if any subexpression is volatile.
2131 If HASH_ARG_IN_MEMORY_P is not NULL, store 1 in it if X contains
2132 a MEM rtx which is not marked MEM_READONLY_P.
2134 Note that cse_insn knows that the hash code of a MEM expression
2135 is just (int) MEM plus the hash code of the address. */
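/* A minimal sketch of a typical call; hash_rtx itself does not reduce
   the result to a table index, the HASH macro does:

     do_not_record = 0;
     hash_arg_in_memory = 0;
     hash = HASH (exp, mode);
     if (!do_not_record)
       elt = lookup (exp, hash, mode); */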
2137 unsigned
2138 hash_rtx (rtx x, enum machine_mode mode, int *do_not_record_p,
2139 int *hash_arg_in_memory_p, bool have_reg_qty)
2141 int i, j;
2142 unsigned hash = 0;
2143 enum rtx_code code;
2144 const char *fmt;
2146 /* Used to turn recursion into iteration. We can't rely on GCC's
2147 tail-recursion elimination since we need to keep accumulating values
2148 in HASH. */
2149 repeat:
2150 if (x == 0)
2151 return hash;
2153 code = GET_CODE (x);
2154 switch (code)
2156 case REG:
2158 unsigned int regno = REGNO (x);
2160 if (!reload_completed)
2162 /* On some machines, we can't record any non-fixed hard register,
2163 because extending its life will cause reload problems. We
2164 consider ap, fp, sp, gp to be fixed for this purpose.
2166 We also consider CCmode registers to be fixed for this purpose;
2167 failure to do so leads to failure to simplify conditionals of
2168 the 0<100 type.
2170 On all machines, we can't record any global registers.
2171 Nor should we record any register that is in a small
2172 class, as defined by CLASS_LIKELY_SPILLED_P. */
2173 bool record;
2175 if (regno >= FIRST_PSEUDO_REGISTER)
2176 record = true;
2177 else if (x == frame_pointer_rtx
2178 || x == hard_frame_pointer_rtx
2179 || x == arg_pointer_rtx
2180 || x == stack_pointer_rtx
2181 || x == pic_offset_table_rtx)
2182 record = true;
2183 else if (global_regs[regno])
2184 record = false;
2185 else if (fixed_regs[regno])
2186 record = true;
2187 else if (GET_MODE_CLASS (GET_MODE (x)) == MODE_CC)
2188 record = true;
2189 else if (SMALL_REGISTER_CLASSES)
2190 record = false;
2191 else if (CLASS_LIKELY_SPILLED_P (REGNO_REG_CLASS (regno)))
2192 record = false;
2193 else
2194 record = true;
2196 if (!record)
2198 *do_not_record_p = 1;
2199 return 0;
2203 hash += ((unsigned int) REG << 7);
2204 hash += (have_reg_qty ? (unsigned) REG_QTY (regno) : regno);
2205 return hash;
2208 /* We handle SUBREG of a REG specially because the underlying
2209 reg changes its hash value with every value change; we don't
2210 want to have to forget unrelated subregs when one subreg changes. */
2211 case SUBREG:
2213 if (REG_P (SUBREG_REG (x)))
2215 hash += (((unsigned int) SUBREG << 7)
2216 + REGNO (SUBREG_REG (x))
2217 + (SUBREG_BYTE (x) / UNITS_PER_WORD));
2218 return hash;
2220 break;
2223 case CONST_INT:
2224 hash += (((unsigned int) CONST_INT << 7) + (unsigned int) mode
2225 + (unsigned int) INTVAL (x));
2226 return hash;
2228 case CONST_DOUBLE:
2229 /* This is like the general case, except that it only counts
2230 the integers representing the constant. */
2231 hash += (unsigned int) code + (unsigned int) GET_MODE (x);
2232 if (GET_MODE (x) != VOIDmode)
2233 hash += real_hash (CONST_DOUBLE_REAL_VALUE (x));
2234 else
2235 hash += ((unsigned int) CONST_DOUBLE_LOW (x)
2236 + (unsigned int) CONST_DOUBLE_HIGH (x));
2237 return hash;
2239 case CONST_VECTOR:
2241 int units;
2242 rtx elt;
2244 units = CONST_VECTOR_NUNITS (x);
2246 for (i = 0; i < units; ++i)
2248 elt = CONST_VECTOR_ELT (x, i);
2249 hash += hash_rtx (elt, GET_MODE (elt), do_not_record_p,
2250 hash_arg_in_memory_p, have_reg_qty);
2253 return hash;
2256 /* Assume there is only one rtx object for any given label. */
2257 case LABEL_REF:
2258 /* We don't hash on the address of the CODE_LABEL to avoid bootstrap
2259 differences and differences between each stage's debugging dumps. */
2260 hash += (((unsigned int) LABEL_REF << 7)
2261 + CODE_LABEL_NUMBER (XEXP (x, 0)));
2262 return hash;
2264 case SYMBOL_REF:
2266 /* Don't hash on the symbol's address to avoid bootstrap differences.
2267 Different hash values may cause expressions to be recorded in
2268 different orders and thus different registers to be used in the
2269 final assembler. This also avoids differences in the dump files
2270 between various stages. */
2271 unsigned int h = 0;
2272 const unsigned char *p = (const unsigned char *) XSTR (x, 0);
2274 while (*p)
2275 h += (h << 7) + *p++; /* ??? revisit */
2277 hash += ((unsigned int) SYMBOL_REF << 7) + h;
2278 return hash;
2281 case MEM:
2282 /* We don't record if marked volatile or if BLKmode since we don't
2283 know the size of the move. */
2284 if (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode)
2286 *do_not_record_p = 1;
2287 return 0;
2289 if (hash_arg_in_memory_p && !MEM_READONLY_P (x))
2290 *hash_arg_in_memory_p = 1;
2292 /* Now that we have already found this special case,
2293 might as well speed it up as much as possible. */
2294 hash += (unsigned) MEM;
2295 x = XEXP (x, 0);
2296 goto repeat;
2298 case USE:
2299 /* A USE that mentions non-volatile memory needs special
2300 handling since the MEM may be BLKmode which normally
2301 prevents an entry from being made. Pure calls are
2302 marked by a USE which mentions BLKmode memory.
2303 See calls.c:emit_call_1. */
2304 if (MEM_P (XEXP (x, 0))
2305 && ! MEM_VOLATILE_P (XEXP (x, 0)))
2307 hash += (unsigned) USE;
2308 x = XEXP (x, 0);
2310 if (hash_arg_in_memory_p && !MEM_READONLY_P (x))
2311 *hash_arg_in_memory_p = 1;
2313 /* Now that we have already found this special case,
2314 might as well speed it up as much as possible. */
2315 hash += (unsigned) MEM;
2316 x = XEXP (x, 0);
2317 goto repeat;
2319 break;
2321 case PRE_DEC:
2322 case PRE_INC:
2323 case POST_DEC:
2324 case POST_INC:
2325 case PRE_MODIFY:
2326 case POST_MODIFY:
2327 case PC:
2328 case CC0:
2329 case CALL:
2330 case UNSPEC_VOLATILE:
2331 *do_not_record_p = 1;
2332 return 0;
2334 case ASM_OPERANDS:
2335 if (MEM_VOLATILE_P (x))
2337 *do_not_record_p = 1;
2338 return 0;
2340 else
2342 /* We don't want to take the filename and line into account. */
2343 hash += (unsigned) code + (unsigned) GET_MODE (x)
2344 + hash_rtx_string (ASM_OPERANDS_TEMPLATE (x))
2345 + hash_rtx_string (ASM_OPERANDS_OUTPUT_CONSTRAINT (x))
2346 + (unsigned) ASM_OPERANDS_OUTPUT_IDX (x);
2348 if (ASM_OPERANDS_INPUT_LENGTH (x))
2350 for (i = 1; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
2352 hash += (hash_rtx (ASM_OPERANDS_INPUT (x, i),
2353 GET_MODE (ASM_OPERANDS_INPUT (x, i)),
2354 do_not_record_p, hash_arg_in_memory_p,
2355 have_reg_qty)
2356 + hash_rtx_string
2357 (ASM_OPERANDS_INPUT_CONSTRAINT (x, i)));
2360 hash += hash_rtx_string (ASM_OPERANDS_INPUT_CONSTRAINT (x, 0));
2361 x = ASM_OPERANDS_INPUT (x, 0);
2362 mode = GET_MODE (x);
2363 goto repeat;
2366 return hash;
2368 break;
2370 default:
2371 break;
2374 i = GET_RTX_LENGTH (code) - 1;
2375 hash += (unsigned) code + (unsigned) GET_MODE (x);
2376 fmt = GET_RTX_FORMAT (code);
2377 for (; i >= 0; i--)
2379 switch (fmt[i])
2381 case 'e':
2382 /* If we are about to do the last recursive call
2383 needed at this level, change it into iteration.
2384 This function is called enough to be worth it. */
2385 if (i == 0)
2387 x = XEXP (x, i);
2388 goto repeat;
2391 hash += hash_rtx (XEXP (x, i), 0, do_not_record_p,
2392 hash_arg_in_memory_p, have_reg_qty);
2393 break;
2395 case 'E':
2396 for (j = 0; j < XVECLEN (x, i); j++)
2397 hash += hash_rtx (XVECEXP (x, i, j), 0, do_not_record_p,
2398 hash_arg_in_memory_p, have_reg_qty);
2399 break;
2401 case 's':
2402 hash += hash_rtx_string (XSTR (x, i));
2403 break;
2405 case 'i':
2406 hash += (unsigned int) XINT (x, i);
2407 break;
2409 case '0': case 't':
2410 /* Unused. */
2411 break;
2413 default:
2414 gcc_unreachable ();
2418 return hash;
2421 /* Hash an rtx X for cse via hash_rtx.
2422 Stores 1 in do_not_record if any subexpression is volatile.
2423 Stores 1 in hash_arg_in_memory if X contains a mem rtx which
2424 is not marked MEM_READONLY_P. */
2426 static inline unsigned
2427 canon_hash (rtx x, enum machine_mode mode)
2429 return hash_rtx (x, mode, &do_not_record, &hash_arg_in_memory, true);
2432 /* Like canon_hash but with no side effects, i.e. do_not_record
2433 and hash_arg_in_memory are not changed. */
2435 static inline unsigned
2436 safe_hash (rtx x, enum machine_mode mode)
2438 int dummy_do_not_record;
2439 return hash_rtx (x, mode, &dummy_do_not_record, NULL, true);
2442 /* Return 1 iff X and Y would canonicalize into the same thing,
2443 without actually constructing the canonicalization of either one.
2444 If VALIDATE is nonzero,
2445 we assume X is an expression being processed from the rtl
2446 and Y was found in the hash table. We check register refs
2447 in Y for being marked as valid.
2449 If FOR_GCSE is true, we compare X and Y for equivalence for GCSE. */
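/* For instance, (plus:SI (reg 65) (reg 66)) and
   (plus:SI (reg 66) (reg 65)) compare equal here, since commutative
   operations are checked in both operand orders. */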
2451 int
2452 exp_equiv_p (rtx x, rtx y, int validate, bool for_gcse)
2454 int i, j;
2455 enum rtx_code code;
2456 const char *fmt;
2458 /* Note: it is incorrect to assume an expression is equivalent to itself
2459 if VALIDATE is nonzero. */
2460 if (x == y && !validate)
2461 return 1;
2463 if (x == 0 || y == 0)
2464 return x == y;
2466 code = GET_CODE (x);
2467 if (code != GET_CODE (y))
2468 return 0;
2470 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
2471 if (GET_MODE (x) != GET_MODE (y))
2472 return 0;
2474 switch (code)
2476 case PC:
2477 case CC0:
2478 case CONST_INT:
2479 return x == y;
2481 case LABEL_REF:
2482 return XEXP (x, 0) == XEXP (y, 0);
2484 case SYMBOL_REF:
2485 return XSTR (x, 0) == XSTR (y, 0);
2487 case REG:
2488 if (for_gcse)
2489 return REGNO (x) == REGNO (y);
2490 else
2492 unsigned int regno = REGNO (y);
2493 unsigned int i;
2494 unsigned int endregno
2495 = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
2496 : hard_regno_nregs[regno][GET_MODE (y)]);
2498 /* If the quantities are not the same, the expressions are not
2499 equivalent. If they are and we are not to validate, they
2500 are equivalent. Otherwise, ensure all regs are up-to-date. */
2502 if (REG_QTY (REGNO (x)) != REG_QTY (regno))
2503 return 0;
2505 if (! validate)
2506 return 1;
2508 for (i = regno; i < endregno; i++)
2509 if (REG_IN_TABLE (i) != REG_TICK (i))
2510 return 0;
2512 return 1;
2515 case MEM:
2516 if (for_gcse)
2518 /* Can't merge two expressions in different alias sets, since we
2519 can decide that the expression is transparent in a block when
2520 it isn't, due to its being set with a different alias set. */
2521 if (MEM_ALIAS_SET (x) != MEM_ALIAS_SET (y))
2522 return 0;
2524 /* A volatile mem should not be considered equivalent to any
2525 other. */
2526 if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
2527 return 0;
2529 break;
2531 /* For commutative operations, check both orders. */
2532 case PLUS:
2533 case MULT:
2534 case AND:
2535 case IOR:
2536 case XOR:
2537 case NE:
2538 case EQ:
2539 return ((exp_equiv_p (XEXP (x, 0), XEXP (y, 0),
2540 validate, for_gcse)
2541 && exp_equiv_p (XEXP (x, 1), XEXP (y, 1),
2542 validate, for_gcse))
2543 || (exp_equiv_p (XEXP (x, 0), XEXP (y, 1),
2544 validate, for_gcse)
2545 && exp_equiv_p (XEXP (x, 1), XEXP (y, 0),
2546 validate, for_gcse)));
2548 case ASM_OPERANDS:
2549 /* We don't use the generic code below because we want to
2550 disregard filename and line numbers. */
2552 /* A volatile asm isn't equivalent to any other. */
2553 if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
2554 return 0;
2556 if (GET_MODE (x) != GET_MODE (y)
2557 || strcmp (ASM_OPERANDS_TEMPLATE (x), ASM_OPERANDS_TEMPLATE (y))
2558 || strcmp (ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
2559 ASM_OPERANDS_OUTPUT_CONSTRAINT (y))
2560 || ASM_OPERANDS_OUTPUT_IDX (x) != ASM_OPERANDS_OUTPUT_IDX (y)
2561 || ASM_OPERANDS_INPUT_LENGTH (x) != ASM_OPERANDS_INPUT_LENGTH (y))
2562 return 0;
2564 if (ASM_OPERANDS_INPUT_LENGTH (x))
2566 for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
2567 if (! exp_equiv_p (ASM_OPERANDS_INPUT (x, i),
2568 ASM_OPERANDS_INPUT (y, i),
2569 validate, for_gcse)
2570 || strcmp (ASM_OPERANDS_INPUT_CONSTRAINT (x, i),
2571 ASM_OPERANDS_INPUT_CONSTRAINT (y, i)))
2572 return 0;
2575 return 1;
2577 default:
2578 break;
2581 /* Compare the elements. If any pair of corresponding elements
2582 fail to match, return 0 for the whole thing. */
2584 fmt = GET_RTX_FORMAT (code);
2585 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2587 switch (fmt[i])
2589 case 'e':
2590 if (! exp_equiv_p (XEXP (x, i), XEXP (y, i),
2591 validate, for_gcse))
2592 return 0;
2593 break;
2595 case 'E':
2596 if (XVECLEN (x, i) != XVECLEN (y, i))
2597 return 0;
2598 for (j = 0; j < XVECLEN (x, i); j++)
2599 if (! exp_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j),
2600 validate, for_gcse))
2601 return 0;
2602 break;
2604 case 's':
2605 if (strcmp (XSTR (x, i), XSTR (y, i)))
2606 return 0;
2607 break;
2609 case 'i':
2610 if (XINT (x, i) != XINT (y, i))
2611 return 0;
2612 break;
2614 case 'w':
2615 if (XWINT (x, i) != XWINT (y, i))
2616 return 0;
2617 break;
2619 case '0':
2620 case 't':
2621 break;
2623 default:
2624 gcc_unreachable ();
2628 return 1;
2631 /* Return 1 if X has a value that can vary even between two
2632 executions of the program. 0 means X can be compared reliably
2633 against certain constants or near-constants. */
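/* E.g. a register whose quantity is known to hold a constant does not
   vary, nor does (plus (reg 65) (const_int 8)) when the register's
   constant equivalence is known; a plain (mem:SI (reg 65)) generally
   may. */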
2635 static int
2636 cse_rtx_varies_p (rtx x, int from_alias)
2638 /* We need not check for X and the equivalence class being of the same
2639 mode because if X is equivalent to a constant in some mode, it
2640 doesn't vary in any mode. */
2642 if (REG_P (x)
2643 && REGNO_QTY_VALID_P (REGNO (x)))
2645 int x_q = REG_QTY (REGNO (x));
2646 struct qty_table_elem *x_ent = &qty_table[x_q];
2648 if (GET_MODE (x) == x_ent->mode
2649 && x_ent->const_rtx != NULL_RTX)
2650 return 0;
2653 if (GET_CODE (x) == PLUS
2654 && GET_CODE (XEXP (x, 1)) == CONST_INT
2655 && REG_P (XEXP (x, 0))
2656 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
2658 int x0_q = REG_QTY (REGNO (XEXP (x, 0)));
2659 struct qty_table_elem *x0_ent = &qty_table[x0_q];
2661 if ((GET_MODE (XEXP (x, 0)) == x0_ent->mode)
2662 && x0_ent->const_rtx != NULL_RTX)
2663 return 0;
2666 /* This can happen as the result of virtual register instantiation, if
2667 the initial constant is too large to be a valid address. This gives
2668 us a three-instruction sequence: load the large offset into a register,
2669 load fp minus a constant into a register, then a MEM which is the
2670 sum of the two `constant' registers. */
2671 if (GET_CODE (x) == PLUS
2672 && REG_P (XEXP (x, 0))
2673 && REG_P (XEXP (x, 1))
2674 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0)))
2675 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
2677 int x0_q = REG_QTY (REGNO (XEXP (x, 0)));
2678 int x1_q = REG_QTY (REGNO (XEXP (x, 1)));
2679 struct qty_table_elem *x0_ent = &qty_table[x0_q];
2680 struct qty_table_elem *x1_ent = &qty_table[x1_q];
2682 if ((GET_MODE (XEXP (x, 0)) == x0_ent->mode)
2683 && x0_ent->const_rtx != NULL_RTX
2684 && (GET_MODE (XEXP (x, 1)) == x1_ent->mode)
2685 && x1_ent->const_rtx != NULL_RTX)
2686 return 0;
2689 return rtx_varies_p (x, from_alias);
2692 /* Subroutine of canon_reg. Pass *XLOC through canon_reg, and validate
2693 the result if necessary. INSN is as for canon_reg. */
2695 static void
2696 validate_canon_reg (rtx *xloc, rtx insn)
2698 rtx new = canon_reg (*xloc, insn);
2699 int insn_code;
2701 /* If replacing pseudo with hard reg or vice versa, ensure the
2702 insn remains valid. Likewise if the insn has MATCH_DUPs. */
2703 if (insn != 0 && new != 0
2704 && REG_P (new) && REG_P (*xloc)
2705 && (((REGNO (new) < FIRST_PSEUDO_REGISTER)
2706 != (REGNO (*xloc) < FIRST_PSEUDO_REGISTER))
2707 || GET_MODE (new) != GET_MODE (*xloc)
2708 || (insn_code = recog_memoized (insn)) < 0
2709 || insn_data[insn_code].n_dups > 0))
2710 validate_change (insn, xloc, new, 1);
2711 else
2712 *xloc = new;
2715 /* Canonicalize an expression:
2716 replace each register reference inside it
2717 with the "oldest" equivalent register.
2719 If INSN is nonzero and we are replacing a pseudo with a hard register
2720 or vice versa, validate_change is used to ensure that INSN remains valid
2721 after we make our substitution. The calls are made with IN_GROUP nonzero
2722 so apply_change_group must be called upon the outermost return from this
2723 function (unless INSN is zero). The result of apply_change_group can
2724 generally be discarded since the changes we are making are optional. */
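/* E.g. if (reg 70) was copied from (reg 65) and both still share a
   quantity, (plus:SI (reg 70) (const_int 4)) canonicalizes to
   (plus:SI (reg 65) (const_int 4)), reg 65 being the oldest
   equivalent register. */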
2726 static rtx
2727 canon_reg (rtx x, rtx insn)
2729 int i;
2730 enum rtx_code code;
2731 const char *fmt;
2733 if (x == 0)
2734 return x;
2736 code = GET_CODE (x);
2737 switch (code)
2739 case PC:
2740 case CC0:
2741 case CONST:
2742 case CONST_INT:
2743 case CONST_DOUBLE:
2744 case CONST_VECTOR:
2745 case SYMBOL_REF:
2746 case LABEL_REF:
2747 case ADDR_VEC:
2748 case ADDR_DIFF_VEC:
2749 return x;
2751 case REG:
2753 int first;
2754 int q;
2755 struct qty_table_elem *ent;
2757 /* Never replace a hard reg, because hard regs can appear
2758 in more than one machine mode, and we must preserve the mode
2759 of each occurrence. Also, some hard regs appear in
2760 MEMs that are shared and mustn't be altered. Don't try to
2761 replace any reg that maps to a reg of class NO_REGS. */
2762 if (REGNO (x) < FIRST_PSEUDO_REGISTER
2763 || ! REGNO_QTY_VALID_P (REGNO (x)))
2764 return x;
2766 q = REG_QTY (REGNO (x));
2767 ent = &qty_table[q];
2768 first = ent->first_reg;
2769 return (first >= FIRST_PSEUDO_REGISTER ? regno_reg_rtx[first]
2770 : REGNO_REG_CLASS (first) == NO_REGS ? x
2771 : gen_rtx_REG (ent->mode, first));
2774 default:
2775 break;
2778 fmt = GET_RTX_FORMAT (code);
2779 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2781 int j;
2783 if (fmt[i] == 'e')
2784 validate_canon_reg (&XEXP (x, i), insn);
2785 else if (fmt[i] == 'E')
2786 for (j = 0; j < XVECLEN (x, i); j++)
2787 validate_canon_reg (&XVECEXP (x, i, j), insn);
2790 return x;
2793 /* LOC is a location within INSN that is an operand address (the contents of
2794 a MEM). Find the best equivalent address to use that is valid for this
2795 insn.
2797 On most CISC machines, complicated address modes are costly, and rtx_cost
2798 is a good approximation for that cost. However, most RISC machines have
2799 only a few (usually only one) memory reference formats. If an address is
2800 valid at all, it is often just as cheap as any other address. Hence, for
2801 RISC machines, we use `address_cost' to compare the costs of various
2802 addresses. For two addresses of equal cost, choose the one with the
2803 highest `rtx_cost' value as that has the potential of eliminating the
2804 most insns. For equal costs, we choose the first in the equivalence
2805 class. Note that we ignore the fact that pseudo registers are cheaper than
2806 hard registers here because we would also prefer the pseudo registers. */
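/* For example, if the address (plus (reg 65) (const_int 4)) is in the
   same equivalence class as (reg 70), the MEM may be rewritten to use
   (reg 70) when address_cost reports it as cheaper. */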
2808 static void
2809 find_best_addr (rtx insn, rtx *loc, enum machine_mode mode)
2811 struct table_elt *elt;
2812 rtx addr = *loc;
2813 struct table_elt *p;
2814 int found_better = 1;
2815 int save_do_not_record = do_not_record;
2816 int save_hash_arg_in_memory = hash_arg_in_memory;
2817 int addr_volatile;
2818 int regno;
2819 unsigned hash;
2821 /* Do not try to replace constant addresses or addresses of local and
2822 argument slots. These MEM expressions are made only once and inserted
2823 in many instructions, as well as being used to control symbol table
2824 output. It is not safe to clobber them.
2826 There are some uncommon cases where the address is already in a register
2827 for some reason, but we cannot take advantage of that because we have
2828 no easy way to unshare the MEM. In addition, looking up all stack
2829 addresses is costly. */
2830 if ((GET_CODE (addr) == PLUS
2831 && REG_P (XEXP (addr, 0))
2832 && GET_CODE (XEXP (addr, 1)) == CONST_INT
2833 && (regno = REGNO (XEXP (addr, 0)),
2834 regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM
2835 || regno == ARG_POINTER_REGNUM))
2836 || (REG_P (addr)
2837 && (regno = REGNO (addr), regno == FRAME_POINTER_REGNUM
2838 || regno == HARD_FRAME_POINTER_REGNUM
2839 || regno == ARG_POINTER_REGNUM))
2840 || CONSTANT_ADDRESS_P (addr))
2841 return;
2843 /* If this address is not simply a register, try to fold it. This will
2844 sometimes simplify the expression. Many simplifications
2845 will not be valid, but some, usually applying the associative rule, will
2846 be valid and produce better code. */
2847 if (!REG_P (addr))
2849 rtx folded = fold_rtx (addr, NULL_RTX);
2850 if (folded != addr)
2852 int addr_folded_cost = address_cost (folded, mode);
2853 int addr_cost = address_cost (addr, mode);
2855 if ((addr_folded_cost < addr_cost
2856 || (addr_folded_cost == addr_cost
2857 /* ??? The rtx_cost comparison is left over from an older
2858 version of this code. It is probably no longer helpful. */
2859 && (rtx_cost (folded, MEM) > rtx_cost (addr, MEM)
2860 || approx_reg_cost (folded) < approx_reg_cost (addr))))
2861 && validate_change (insn, loc, folded, 0))
2862 addr = folded;
2866 /* If this address is not in the hash table, we can't look for equivalences
2867 of the whole address. Also, ignore if volatile. */
2869 do_not_record = 0;
2870 hash = HASH (addr, Pmode);
2871 addr_volatile = do_not_record;
2872 do_not_record = save_do_not_record;
2873 hash_arg_in_memory = save_hash_arg_in_memory;
2875 if (addr_volatile)
2876 return;
2878 elt = lookup (addr, hash, Pmode);
2880 if (elt)
2882 /* We need to find the best (under the criteria documented above) entry
2883 in the class that is valid. We use the `flag' field to indicate
2884 choices that were invalid and iterate until we can't find a better
2885 one that hasn't already been tried. */
2887 for (p = elt->first_same_value; p; p = p->next_same_value)
2888 p->flag = 0;
2890 while (found_better)
2892 int best_addr_cost = address_cost (*loc, mode);
2893 int best_rtx_cost = (elt->cost + 1) >> 1;
2894 int exp_cost;
2895 struct table_elt *best_elt = elt;
2897 found_better = 0;
2898 for (p = elt->first_same_value; p; p = p->next_same_value)
2899 if (! p->flag)
2901 if ((REG_P (p->exp)
2902 || exp_equiv_p (p->exp, p->exp, 1, false))
2903 && ((exp_cost = address_cost (p->exp, mode)) < best_addr_cost
2904 || (exp_cost == best_addr_cost
2905 && ((p->cost + 1) >> 1) > best_rtx_cost)))
2907 found_better = 1;
2908 best_addr_cost = exp_cost;
2909 best_rtx_cost = (p->cost + 1) >> 1;
2910 best_elt = p;
2914 if (found_better)
2916 if (validate_change (insn, loc,
2917 canon_reg (copy_rtx (best_elt->exp),
2918 NULL_RTX), 0))
2919 return;
2920 else
2921 best_elt->flag = 1;
2926 /* If the address is a binary operation with the first operand a register
2927 and the second a constant, do the same as above, but looking for
2928 equivalences of the register. Then try to simplify before checking for
2929 the best address to use. This catches a few cases: First is when we
2930 have REG+const and the register is another REG+const. We can often merge
2931 the constants and eliminate one insn and one register. It may also be
2932 that a machine has a cheap REG+REG+const. Finally, this improves the
2933 code on the Alpha for unaligned byte stores. */
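/* E.g. with *LOC = (plus (reg 65) (const_int 4)) and (reg 65)
   equivalent to (plus (reg 66) (const_int 8)), simplify_gen_binary
   below yields (plus (reg 66) (const_int 12)), merging the two
   constants. */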
2935 if (flag_expensive_optimizations
2936 && ARITHMETIC_P (*loc)
2937 && REG_P (XEXP (*loc, 0)))
2939 rtx op1 = XEXP (*loc, 1);
2941 do_not_record = 0;
2942 hash = HASH (XEXP (*loc, 0), Pmode);
2943 do_not_record = save_do_not_record;
2944 hash_arg_in_memory = save_hash_arg_in_memory;
2946 elt = lookup (XEXP (*loc, 0), hash, Pmode);
2947 if (elt == 0)
2948 return;
2950 /* We need to find the best (under the criteria documented above) entry
2951 in the class that is valid. We use the `flag' field to indicate
2952 choices that were invalid and iterate until we can't find a better
2953 one that hasn't already been tried. */
2955 for (p = elt->first_same_value; p; p = p->next_same_value)
2956 p->flag = 0;
2958 while (found_better)
2960 int best_addr_cost = address_cost (*loc, mode);
2961 int best_rtx_cost = (COST (*loc) + 1) >> 1;
2962 struct table_elt *best_elt = elt;
2963 rtx best_rtx = *loc;
2964 int count;
2966 /* This is, in the worst case, an O(n^2) algorithm, so limit our search
2967 to the first 32 elements on the list. This avoids trouble
2968 compiling code with very long basic blocks that can easily
2969 call simplify_gen_binary so many times that we run out of
2970 memory. */
2972 found_better = 0;
2973 for (p = elt->first_same_value, count = 0;
2974 p && count < 32;
2975 p = p->next_same_value, count++)
2976 if (! p->flag
2977 && (REG_P (p->exp)
2978 || exp_equiv_p (p->exp, p->exp, 1, false)))
2980 rtx new = simplify_gen_binary (GET_CODE (*loc), Pmode,
2981 p->exp, op1);
2982 int new_cost;
2984 /* Get the canonical version of the address so we can accept
2985 more. */
2986 new = canon_for_address (new);
2988 new_cost = address_cost (new, mode);
2990 if (new_cost < best_addr_cost
2991 || (new_cost == best_addr_cost
2992 && (COST (new) + 1) >> 1 > best_rtx_cost))
2994 found_better = 1;
2995 best_addr_cost = new_cost;
2996 best_rtx_cost = (COST (new) + 1) >> 1;
2997 best_elt = p;
2998 best_rtx = new;
3002 if (found_better)
3004 if (validate_change (insn, loc,
3005 canon_reg (copy_rtx (best_rtx),
3006 NULL_RTX), 0))
3007 return;
3008 else
3009 best_elt->flag = 1;
3015 /* Given an operation (CODE, *PARG1, *PARG2), where code is a comparison
3016 operation (EQ, NE, GT, etc.), follow it back through the hash table and
3017 find what values are being compared.
3019 *PARG1 and *PARG2 are updated to contain the rtx representing the values
3020 actually being compared. For example, if *PARG1 was (cc0) and *PARG2
3021 was (const_int 0), *PARG1 and *PARG2 will be set to the objects that were
3022 compared to produce cc0.
3024 The return value is the comparison operator and is either CODE itself
3025 or the code corresponding to the inverse of the comparison. */
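/* E.g. with CODE == EQ, *PARG1 == (reg 65), *PARG2 == (const_int 0),
   and (reg 65) known to be (lt (reg 66) (reg 67)), we return GE --
   the inverse of LT, assuming the reversal is valid -- and set
   *PARG1 = (reg 66), *PARG2 = (reg 67). */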
3027 static enum rtx_code
3028 find_comparison_args (enum rtx_code code, rtx *parg1, rtx *parg2,
3029 enum machine_mode *pmode1, enum machine_mode *pmode2)
3031 rtx arg1, arg2;
3033 arg1 = *parg1, arg2 = *parg2;
3035 /* If ARG2 is const0_rtx, see what ARG1 is equivalent to. */
3037 while (arg2 == CONST0_RTX (GET_MODE (arg1)))
3039 /* Set nonzero when we find something of interest. */
3040 rtx x = 0;
3041 int reverse_code = 0;
3042 struct table_elt *p = 0;
3044 /* If arg1 is a COMPARE, extract the comparison arguments from it.
3045 On machines with CC0, this is the only case that can occur, since
3046 fold_rtx will return the COMPARE or item being compared with zero
3047 when given CC0. */
3049 if (GET_CODE (arg1) == COMPARE && arg2 == const0_rtx)
3050 x = arg1;
3052 /* If ARG1 is a comparison operator and CODE is testing for
3053 STORE_FLAG_VALUE, get the inner arguments. */
3055 else if (COMPARISON_P (arg1))
3057 #ifdef FLOAT_STORE_FLAG_VALUE
3058 REAL_VALUE_TYPE fsfv;
3059 #endif
3061 if (code == NE
3062 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
3063 && code == LT && STORE_FLAG_VALUE == -1)
3064 #ifdef FLOAT_STORE_FLAG_VALUE
3065 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_FLOAT
3066 && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
3067 REAL_VALUE_NEGATIVE (fsfv)))
3068 #endif
3070 x = arg1;
3071 else if (code == EQ
3072 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
3073 && code == GE && STORE_FLAG_VALUE == -1)
3074 #ifdef FLOAT_STORE_FLAG_VALUE
3075 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_FLOAT
3076 && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
3077 REAL_VALUE_NEGATIVE (fsfv)))
3078 #endif
3080 x = arg1, reverse_code = 1;
3083 /* ??? We could also check for
3085 (ne (and (eq (...) (const_int 1))) (const_int 0))
3087 and related forms, but let's wait until we see them occurring. */
3089 if (x == 0)
3090 /* Look up ARG1 in the hash table and see if it has an equivalence
3091 that lets us see what is being compared. */
3092 p = lookup (arg1, SAFE_HASH (arg1, GET_MODE (arg1)), GET_MODE (arg1));
3093 if (p)
3095 p = p->first_same_value;
3097 /* If what we compare is already known to be constant, that is as
3098 good as it gets.
3099 We need to break the loop in this case, because otherwise we
3100 can have an infinite loop when looking at a reg that is known
3101 to be a constant which is the same as a comparison of a reg
3102 against zero which appears later in the insn stream, which in
3103 turn is constant and the same as the comparison of the first reg
3104 against zero... */
3105 if (p->is_const)
3106 break;
3109 for (; p; p = p->next_same_value)
3111 enum machine_mode inner_mode = GET_MODE (p->exp);
3112 #ifdef FLOAT_STORE_FLAG_VALUE
3113 REAL_VALUE_TYPE fsfv;
3114 #endif
3116 /* If the entry isn't valid, skip it. */
3117 if (! exp_equiv_p (p->exp, p->exp, 1, false))
3118 continue;
3120 if (GET_CODE (p->exp) == COMPARE
3121 /* Another possibility is that this machine has a compare insn
3122 that includes the comparison code. In that case, ARG1 would
3123 be equivalent to a comparison operation that would set ARG1 to
3124 either STORE_FLAG_VALUE or zero. If this is an NE operation,
3125 ORIG_CODE is the actual comparison being done; if it is an EQ,
3126 we must reverse ORIG_CODE. On machine with a negative value
3127 for STORE_FLAG_VALUE, also look at LT and GE operations. */
3128 || ((code == NE
3129 || (code == LT
3130 && GET_MODE_CLASS (inner_mode) == MODE_INT
3131 && (GET_MODE_BITSIZE (inner_mode)
3132 <= HOST_BITS_PER_WIDE_INT)
3133 && (STORE_FLAG_VALUE
3134 & ((HOST_WIDE_INT) 1
3135 << (GET_MODE_BITSIZE (inner_mode) - 1))))
3136 #ifdef FLOAT_STORE_FLAG_VALUE
3137 || (code == LT
3138 && GET_MODE_CLASS (inner_mode) == MODE_FLOAT
3139 && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
3140 REAL_VALUE_NEGATIVE (fsfv)))
3141 #endif
3143 && COMPARISON_P (p->exp)))
3145 x = p->exp;
3146 break;
3148 else if ((code == EQ
3149 || (code == GE
3150 && GET_MODE_CLASS (inner_mode) == MODE_INT
3151 && (GET_MODE_BITSIZE (inner_mode)
3152 <= HOST_BITS_PER_WIDE_INT)
3153 && (STORE_FLAG_VALUE
3154 & ((HOST_WIDE_INT) 1
3155 << (GET_MODE_BITSIZE (inner_mode) - 1))))
3156 #ifdef FLOAT_STORE_FLAG_VALUE
3157 || (code == GE
3158 && GET_MODE_CLASS (inner_mode) == MODE_FLOAT
3159 && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
3160 REAL_VALUE_NEGATIVE (fsfv)))
3161 #endif
3163 && COMPARISON_P (p->exp))
3165 reverse_code = 1;
3166 x = p->exp;
3167 break;
3170 /* If this is a non-trapping address, e.g. fp + constant, the
3171 equivalent is a better operand since it may let us predict
3172 the value of the comparison. */
3173 else if (!rtx_addr_can_trap_p (p->exp))
3175 arg1 = p->exp;
3176 continue;
3180 /* If we didn't find a useful equivalence for ARG1, we are done.
3181 Otherwise, set up for the next iteration. */
3182 if (x == 0)
3183 break;
3185 /* If we need to reverse the comparison, make sure that it is
3186 possible -- we can't necessarily infer the value of GE from LT
3187 with floating-point operands. */
3188 if (reverse_code)
3190 enum rtx_code reversed = reversed_comparison_code (x, NULL_RTX);
3191 if (reversed == UNKNOWN)
3192 break;
3193 else
3194 code = reversed;
3196 else if (COMPARISON_P (x))
3197 code = GET_CODE (x);
3198 arg1 = XEXP (x, 0), arg2 = XEXP (x, 1);
3201 /* Return our results. Return the modes from before fold_rtx
3202 because fold_rtx might produce const_int, and then it's too late. */
3203 *pmode1 = GET_MODE (arg1), *pmode2 = GET_MODE (arg2);
3204 *parg1 = fold_rtx (arg1, 0), *parg2 = fold_rtx (arg2, 0);
3206 return code;
3209 /* If X is a nontrivial arithmetic operation on an argument
3210 for which a constant value can be determined, return
3211 the result of operating on that value, as a constant.
3212 Otherwise, return X, possibly with one or more operands
3213 modified by recursive calls to this function.
3215 If X is a register whose contents are known, we do NOT
3216 return those contents here. equiv_constant is called to
3217 perform that task.
3219 INSN is the insn that we may be modifying. If it is 0, make a copy
3220 of X before modifying it. */
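/* E.g. (plus:SI (reg 65) (const_int 4)) folds to (const_int 14) when
   the quantity of (reg 65) is known to equal (const_int 10). */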
3222 static rtx
3223 fold_rtx (rtx x, rtx insn)
3225 enum rtx_code code;
3226 enum machine_mode mode;
3227 const char *fmt;
3228 int i;
3229 rtx new = 0;
3230 int copied = 0;
3231 int must_swap = 0;
3233 /* Folded equivalents of first two operands of X. */
3234 rtx folded_arg0;
3235 rtx folded_arg1;
3237 /* Constant equivalents of first three operands of X;
3238 0 when no such equivalent is known. */
3239 rtx const_arg0;
3240 rtx const_arg1;
3241 rtx const_arg2;
3243 /* The mode of the first operand of X. We need this for sign and zero
3244 extends. */
3245 enum machine_mode mode_arg0;
3247 if (x == 0)
3248 return x;
3250 mode = GET_MODE (x);
3251 code = GET_CODE (x);
3252 switch (code)
3254 case CONST:
3255 case CONST_INT:
3256 case CONST_DOUBLE:
3257 case CONST_VECTOR:
3258 case SYMBOL_REF:
3259 case LABEL_REF:
3260 case REG:
3261 case PC:
3262 /* No use simplifying an EXPR_LIST
3263 since they are used only for lists of args
3264 in a function call's REG_EQUAL note. */
3265 case EXPR_LIST:
3266 return x;
3268 #ifdef HAVE_cc0
3269 case CC0:
3270 return prev_insn_cc0;
3271 #endif
3273 case SUBREG:
3274 /* See if we previously assigned a constant value to this SUBREG. */
3275 if ((new = lookup_as_function (x, CONST_INT)) != 0
3276 || (new = lookup_as_function (x, CONST_DOUBLE)) != 0)
3277 return new;
3279 /* If this is a paradoxical SUBREG, we have no idea what value the
3280 extra bits would have. However, if the operand is equivalent
3281 to a SUBREG whose operand is the same as our mode, and all the
3282 modes are within a word, we can just use the inner operand
3283 because these SUBREGs just say how to treat the register.
3285 Similarly if we find an integer constant. */
3287 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3289 enum machine_mode imode = GET_MODE (SUBREG_REG (x));
3290 struct table_elt *elt;
3292 if (GET_MODE_SIZE (mode) <= UNITS_PER_WORD
3293 && GET_MODE_SIZE (imode) <= UNITS_PER_WORD
3294 && (elt = lookup (SUBREG_REG (x), HASH (SUBREG_REG (x), imode),
3295 imode)) != 0)
3296 for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
3298 if (CONSTANT_P (elt->exp)
3299 && GET_MODE (elt->exp) == VOIDmode)
3300 return elt->exp;
3302 if (GET_CODE (elt->exp) == SUBREG
3303 && GET_MODE (SUBREG_REG (elt->exp)) == mode
3304 && exp_equiv_p (elt->exp, elt->exp, 1, false))
3305 return copy_rtx (SUBREG_REG (elt->exp));
3308 return x;
3311 /* Fold SUBREG_REG. If it changed, see if we can simplify the SUBREG.
3312 We might be able to if the SUBREG is extracting a single word in an
3313 integral mode or extracting the low part. */
3315 folded_arg0 = fold_rtx (SUBREG_REG (x), insn);
3316 const_arg0 = equiv_constant (folded_arg0);
3317 if (const_arg0)
3318 folded_arg0 = const_arg0;
3320 if (folded_arg0 != SUBREG_REG (x))
3322 new = simplify_subreg (mode, folded_arg0,
3323 GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
3324 if (new)
3325 return new;
3328 if (REG_P (folded_arg0)
3329 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (folded_arg0)))
3331 struct table_elt *elt;
3333 elt = lookup (folded_arg0,
3334 HASH (folded_arg0, GET_MODE (folded_arg0)),
3335 GET_MODE (folded_arg0));
3337 if (elt)
3338 elt = elt->first_same_value;
3340 if (subreg_lowpart_p (x))
3341 /* If this is a narrowing SUBREG and our operand is a REG, see
3342 if we can find an equivalence for REG that is an arithmetic
3343 operation in a wider mode where both operands are paradoxical
3344 SUBREGs from objects of our result mode. In that case, we
3345 couldn't report an equivalent value for that operation, since we
3346 don't know what the extra bits will be. But we can find an
3347 equivalence for this SUBREG by folding that operation in the
3348 narrow mode. This allows us to fold arithmetic in narrow modes
3349 when the machine only supports word-sized arithmetic.
3351 Also look for a case where we have a SUBREG whose operand
3352 is the same as our result. If both modes are smaller
3353 than a word, we are simply interpreting a register in
3354 different modes and we can use the inner value. */
3356 for (; elt; elt = elt->next_same_value)
3358 enum rtx_code eltcode = GET_CODE (elt->exp);
3360 /* Just check for unary and binary operations. */
3361 if (UNARY_P (elt->exp)
3362 && eltcode != SIGN_EXTEND
3363 && eltcode != ZERO_EXTEND
3364 && GET_CODE (XEXP (elt->exp, 0)) == SUBREG
3365 && GET_MODE (SUBREG_REG (XEXP (elt->exp, 0))) == mode
3366 && (GET_MODE_CLASS (mode)
3367 == GET_MODE_CLASS (GET_MODE (XEXP (elt->exp, 0)))))
3369 rtx op0 = SUBREG_REG (XEXP (elt->exp, 0));
3371 if (!REG_P (op0) && ! CONSTANT_P (op0))
3372 op0 = fold_rtx (op0, NULL_RTX);
3374 op0 = equiv_constant (op0);
3375 if (op0)
3376 new = simplify_unary_operation (GET_CODE (elt->exp), mode,
3377 op0, mode);
3379 else if (ARITHMETIC_P (elt->exp)
3380 && eltcode != DIV && eltcode != MOD
3381 && eltcode != UDIV && eltcode != UMOD
3382 && eltcode != ASHIFTRT && eltcode != LSHIFTRT
3383 && eltcode != ROTATE && eltcode != ROTATERT
3384 && ((GET_CODE (XEXP (elt->exp, 0)) == SUBREG
3385 && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 0)))
3386 == mode))
3387 || CONSTANT_P (XEXP (elt->exp, 0)))
3388 && ((GET_CODE (XEXP (elt->exp, 1)) == SUBREG
3389 && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 1)))
3390 == mode))
3391 || CONSTANT_P (XEXP (elt->exp, 1))))
3393 rtx op0 = gen_lowpart_common (mode, XEXP (elt->exp, 0));
3394 rtx op1 = gen_lowpart_common (mode, XEXP (elt->exp, 1));
3396 if (op0 && !REG_P (op0) && ! CONSTANT_P (op0))
3397 op0 = fold_rtx (op0, NULL_RTX);
3399 if (op0)
3400 op0 = equiv_constant (op0);
3402 if (op1 && !REG_P (op1) && ! CONSTANT_P (op1))
3403 op1 = fold_rtx (op1, NULL_RTX);
3405 if (op1)
3406 op1 = equiv_constant (op1);
3408 /* If we are looking for the low SImode part of
3409 (ashift:DI c (const_int 32)), it doesn't work
3410 to compute that in SImode, because a 32-bit shift
3411 in SImode is unpredictable. We know the value is 0. */
3412 if (op0 && op1
3413 && GET_CODE (elt->exp) == ASHIFT
3414 && GET_CODE (op1) == CONST_INT
3415 && INTVAL (op1) >= GET_MODE_BITSIZE (mode))
3417 if (INTVAL (op1)
3418 < GET_MODE_BITSIZE (GET_MODE (elt->exp)))
3419 /* If the count fits in the inner mode's width,
3420 but exceeds the outer mode's width,
3421 the value will get truncated to 0
3422 by the subreg. */
3423 new = CONST0_RTX (mode);
3424 else
3425 /* If the count exceeds even the inner mode's width,
3426 don't fold this expression. */
3427 new = 0;
3429 else if (op0 && op1)
3430 new = simplify_binary_operation (GET_CODE (elt->exp), mode, op0, op1);
3433 else if (GET_CODE (elt->exp) == SUBREG
3434 && GET_MODE (SUBREG_REG (elt->exp)) == mode
3435 && (GET_MODE_SIZE (GET_MODE (folded_arg0))
3436 <= UNITS_PER_WORD)
3437 && exp_equiv_p (elt->exp, elt->exp, 1, false))
3438 new = copy_rtx (SUBREG_REG (elt->exp));
3440 if (new)
3441 return new;
3443 else
3444 /* A SUBREG resulting from a zero extension may fold to zero if
3445 it extracts higher bits than the ZERO_EXTEND's source bits.
3446 FIXME: if combine tried to, er, combine these instructions,
3447 this transformation may be moved to simplify_subreg. */
3448 for (; elt; elt = elt->next_same_value)
3450 if (GET_CODE (elt->exp) == ZERO_EXTEND
3451 && subreg_lsb (x)
3452 >= GET_MODE_BITSIZE (GET_MODE (XEXP (elt->exp, 0))))
3453 return CONST0_RTX (mode);
3457 return x;
3459 case NOT:
3460 case NEG:
3461 /* If we have (NOT Y), see if Y is known to be (NOT Z).
3462 If so, (NOT Y) simplifies to Z. Similarly for NEG. */
3463 new = lookup_as_function (XEXP (x, 0), code);
3464 if (new)
3465 return fold_rtx (copy_rtx (XEXP (new, 0)), insn);
3466 break;
3468 case MEM:
3469 /* If we are not actually processing an insn, don't try to find the
3470 best address. Not only don't we care, but we could modify the
3471 MEM in an invalid way since we have no insn to validate against. */
3472 if (insn != 0)
3473 find_best_addr (insn, &XEXP (x, 0), GET_MODE (x));
3476 /* Even if we don't fold in the insn itself,
3477 we can safely do so here, in hopes of getting a constant. */
3478 rtx addr = fold_rtx (XEXP (x, 0), NULL_RTX);
3479 rtx base = 0;
3480 HOST_WIDE_INT offset = 0;
3482 if (REG_P (addr)
3483 && REGNO_QTY_VALID_P (REGNO (addr)))
3485 int addr_q = REG_QTY (REGNO (addr));
3486 struct qty_table_elem *addr_ent = &qty_table[addr_q];
3488 if (GET_MODE (addr) == addr_ent->mode
3489 && addr_ent->const_rtx != NULL_RTX)
3490 addr = addr_ent->const_rtx;
3493 /* If address is constant, split it into a base and integer offset. */
3494 if (GET_CODE (addr) == SYMBOL_REF || GET_CODE (addr) == LABEL_REF)
3495 base = addr;
3496 else if (GET_CODE (addr) == CONST && GET_CODE (XEXP (addr, 0)) == PLUS
3497 && GET_CODE (XEXP (XEXP (addr, 0), 1)) == CONST_INT)
3499 base = XEXP (XEXP (addr, 0), 0);
3500 offset = INTVAL (XEXP (XEXP (addr, 0), 1));
3502 else if (GET_CODE (addr) == LO_SUM
3503 && GET_CODE (XEXP (addr, 1)) == SYMBOL_REF)
3504 base = XEXP (addr, 1);
3506 /* If this is a constant pool reference, we can fold it into its
3507 constant to allow better value tracking. */
3508 if (base && GET_CODE (base) == SYMBOL_REF
3509 && CONSTANT_POOL_ADDRESS_P (base))
3511 rtx constant = get_pool_constant (base);
3512 enum machine_mode const_mode = get_pool_mode (base);
3513 rtx new;
3515 if (CONSTANT_P (constant) && GET_CODE (constant) != CONST_INT)
3517 constant_pool_entries_cost = COST (constant);
3518 constant_pool_entries_regcost = approx_reg_cost (constant);
3521 /* If we are loading the full constant, we have an equivalence. */
3522 if (offset == 0 && mode == const_mode)
3523 return constant;
3525 /* If this actually isn't a constant (weird!), we can't do
3526 anything. Otherwise, handle the two most common cases:
3527 extracting a word from a multi-word constant, and extracting
3528 the low-order bits. Other cases don't seem common enough to
3529 worry about. */
3530 if (! CONSTANT_P (constant))
3531 return x;
3533 if (GET_MODE_CLASS (mode) == MODE_INT
3534 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
3535 && offset % UNITS_PER_WORD == 0
3536 && (new = operand_subword (constant,
3537 offset / UNITS_PER_WORD,
3538 0, const_mode)) != 0)
3539 return new;
3541 if (((BYTES_BIG_ENDIAN
3542 && offset == GET_MODE_SIZE (GET_MODE (constant)) - 1)
3543 || (! BYTES_BIG_ENDIAN && offset == 0))
3544 && (new = gen_lowpart (mode, constant)) != 0)
3545 return new;
3548 /* If this is a reference to a label at a known position in a jump
3549 table, we also know its value. */
3550 if (base && GET_CODE (base) == LABEL_REF)
3552 rtx label = XEXP (base, 0);
3553 rtx table_insn = NEXT_INSN (label);
3555 if (table_insn && JUMP_P (table_insn)
3556 && GET_CODE (PATTERN (table_insn)) == ADDR_VEC)
3558 rtx table = PATTERN (table_insn);
3560 if (offset >= 0
3561 && (offset / GET_MODE_SIZE (GET_MODE (table))
3562 < XVECLEN (table, 0)))
3563 return XVECEXP (table, 0,
3564 offset / GET_MODE_SIZE (GET_MODE (table)));
3566 if (table_insn && JUMP_P (table_insn)
3567 && GET_CODE (PATTERN (table_insn)) == ADDR_DIFF_VEC)
3569 rtx table = PATTERN (table_insn);
3571 if (offset >= 0
3572 && (offset / GET_MODE_SIZE (GET_MODE (table))
3573 < XVECLEN (table, 1)))
3575 offset /= GET_MODE_SIZE (GET_MODE (table));
3576 new = gen_rtx_MINUS (Pmode, XVECEXP (table, 1, offset),
3577 XEXP (table, 0));
3579 if (GET_MODE (table) != Pmode)
3580 new = gen_rtx_TRUNCATE (GET_MODE (table), new);
3582 /* Indicate this is a constant. This isn't a
3583 valid form of CONST, but it will only be used
3584 to fold the next insns and then discarded, so
3585 it should be safe.
3587 Note this expression must be explicitly discarded,
3588 by cse_insn, else it may end up in a REG_EQUAL note
3589 and "escape" to cause problems elsewhere. */
3590 return gen_rtx_CONST (GET_MODE (new), new);
3595 return x;
3598 #ifdef NO_FUNCTION_CSE
3599 case CALL:
3600 if (CONSTANT_P (XEXP (XEXP (x, 0), 0)))
3601 return x;
3602 break;
3603 #endif
3605 case ASM_OPERANDS:
3606 if (insn)
3608 for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
3609 validate_change (insn, &ASM_OPERANDS_INPUT (x, i),
3610 fold_rtx (ASM_OPERANDS_INPUT (x, i), insn), 0);
3612 break;
3614 default:
3615 break;
3618 const_arg0 = 0;
3619 const_arg1 = 0;
3620 const_arg2 = 0;
3621 mode_arg0 = VOIDmode;
3623 /* Try folding our operands.
3624 Then see which ones have constant values known. */
3626 fmt = GET_RTX_FORMAT (code);
3627 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3628 if (fmt[i] == 'e')
3630 rtx arg = XEXP (x, i);
3631 rtx folded_arg = arg, const_arg = 0;
3632 enum machine_mode mode_arg = GET_MODE (arg);
3633 rtx cheap_arg, expensive_arg;
3634 rtx replacements[2];
3635 int j;
3636 int old_cost = COST_IN (XEXP (x, i), code);
3638 /* Most arguments are cheap, so handle them specially. */
3639 switch (GET_CODE (arg))
3641 case REG:
3642 /* This is the same as calling equiv_constant; it is duplicated
3643 here for speed. */
3644 if (REGNO_QTY_VALID_P (REGNO (arg)))
3646 int arg_q = REG_QTY (REGNO (arg));
3647 struct qty_table_elem *arg_ent = &qty_table[arg_q];
3649 if (arg_ent->const_rtx != NULL_RTX
3650 && !REG_P (arg_ent->const_rtx)
3651 && GET_CODE (arg_ent->const_rtx) != PLUS)
3652 const_arg
3653 = gen_lowpart (GET_MODE (arg),
3654 arg_ent->const_rtx);
3656 break;
3658 case CONST:
3659 case CONST_INT:
3660 case SYMBOL_REF:
3661 case LABEL_REF:
3662 case CONST_DOUBLE:
3663 case CONST_VECTOR:
3664 const_arg = arg;
3665 break;
3667 #ifdef HAVE_cc0
3668 case CC0:
3669 folded_arg = prev_insn_cc0;
3670 mode_arg = prev_insn_cc0_mode;
3671 const_arg = equiv_constant (folded_arg);
3672 break;
3673 #endif
3675 default:
3676 folded_arg = fold_rtx (arg, insn);
3677 const_arg = equiv_constant (folded_arg);
3680 /* For the first three operands, see if the operand
3681 is constant or equivalent to a constant. */
3682 switch (i)
3684 case 0:
3685 folded_arg0 = folded_arg;
3686 const_arg0 = const_arg;
3687 mode_arg0 = mode_arg;
3688 break;
3689 case 1:
3690 folded_arg1 = folded_arg;
3691 const_arg1 = const_arg;
3692 break;
3693 case 2:
3694 const_arg2 = const_arg;
3695 break;
3698 /* Pick the least expensive of the folded argument and an
3699 equivalent constant argument. */
3700 if (const_arg == 0 || const_arg == folded_arg
3701 || COST_IN (const_arg, code) > COST_IN (folded_arg, code))
3702 cheap_arg = folded_arg, expensive_arg = const_arg;
3703 else
3704 cheap_arg = const_arg, expensive_arg = folded_arg;
3706 /* Try to replace the operand with the cheapest of the two
3707 possibilities. If it doesn't work and this is either of the first
3708 two operands of a commutative operation, try swapping them.
3709 If THAT fails, try the more expensive, provided it is cheaper
3710 than what is already there. */
3712 if (cheap_arg == XEXP (x, i))
3713 continue;
3715 if (insn == 0 && ! copied)
3717 x = copy_rtx (x);
3718 copied = 1;
3721 /* Order the replacements from cheapest to most expensive. */
3722 replacements[0] = cheap_arg;
3723 replacements[1] = expensive_arg;
3725 for (j = 0; j < 2 && replacements[j]; j++)
3727 int new_cost = COST_IN (replacements[j], code);
3729 /* Stop if what existed before was cheaper. Prefer constants
3730 in the case of a tie. */
3731 if (new_cost > old_cost
3732 || (new_cost == old_cost && CONSTANT_P (XEXP (x, i))))
3733 break;
3735 /* It's not safe to substitute the operand of a conversion
3736 operator with a constant, as the conversion's identity
3737 depends upon the mode of its operand. This optimization
3738 is handled by the call to simplify_unary_operation. */
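/* An illustrative case (operand and value invented for this note):
   in (zero_extend:SI (reg:QI 77)), replacing the operand with an
   equivalent (const_int -1) would lose information, because a
   CONST_INT carries no mode; whether the extension yields 0xff or
   0xffff depends on the operand mode we would have discarded. */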
3739 if (GET_RTX_CLASS (code) == RTX_UNARY
3740 && GET_MODE (replacements[j]) != mode_arg0
3741 && (code == ZERO_EXTEND
3742 || code == SIGN_EXTEND
3743 || code == TRUNCATE
3744 || code == FLOAT_TRUNCATE
3745 || code == FLOAT_EXTEND
3746 || code == FLOAT
3747 || code == FIX
3748 || code == UNSIGNED_FLOAT
3749 || code == UNSIGNED_FIX))
3750 continue;
3752 if (validate_change (insn, &XEXP (x, i), replacements[j], 0))
3753 break;
3755 if (GET_RTX_CLASS (code) == RTX_COMM_COMPARE
3756 || GET_RTX_CLASS (code) == RTX_COMM_ARITH)
3758 validate_change (insn, &XEXP (x, i), XEXP (x, 1 - i), 1);
3759 validate_change (insn, &XEXP (x, 1 - i), replacements[j], 1);
3761 if (apply_change_group ())
3763 /* Swap them back to be invalid so that this loop can
3764 continue and flag them to be swapped back later. */
3765 rtx tem;
3767 tem = XEXP (x, 0); XEXP (x, 0) = XEXP (x, 1);
3768 XEXP (x, 1) = tem;
3769 must_swap = 1;
3770 break;
3776 else
3778 if (fmt[i] == 'E')
3779 /* Don't try to fold inside of a vector of expressions.
3780 Doing nothing is harmless. */
3784 /* If a commutative operation, place a constant integer as the second
3785 operand unless the first operand is also a constant integer. Otherwise,
3786 place any constant second unless the first operand is also a constant. */
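/* For instance (register number invented for illustration),
   (plus:SI (const_int 4) (reg:SI 60)) is canonicalized here to
   (plus:SI (reg:SI 60) (const_int 4)), so that later hash lookups
   see a single canonical form. */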
3788 if (COMMUTATIVE_P (x))
3790 if (must_swap
3791 || swap_commutative_operands_p (const_arg0 ? const_arg0
3792 : XEXP (x, 0),
3793 const_arg1 ? const_arg1
3794 : XEXP (x, 1)))
3796 rtx tem = XEXP (x, 0);
3798 if (insn == 0 && ! copied)
3800 x = copy_rtx (x);
3801 copied = 1;
3804 validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
3805 validate_change (insn, &XEXP (x, 1), tem, 1);
3806 if (apply_change_group ())
3808 tem = const_arg0, const_arg0 = const_arg1, const_arg1 = tem;
3809 tem = folded_arg0, folded_arg0 = folded_arg1, folded_arg1 = tem;
3814 /* If X is an arithmetic operation, see if we can simplify it. */
3816 switch (GET_RTX_CLASS (code))
3818 case RTX_UNARY:
3820 int is_const = 0;
3822 /* We can't simplify extension ops unless we know the
3823 original mode. */
3824 if ((code == ZERO_EXTEND || code == SIGN_EXTEND)
3825 && mode_arg0 == VOIDmode)
3826 break;
3828 /* If we had a CONST, strip it off and put it back later if we
3829 fold. */
3830 if (const_arg0 != 0 && GET_CODE (const_arg0) == CONST)
3831 is_const = 1, const_arg0 = XEXP (const_arg0, 0);
3833 new = simplify_unary_operation (code, mode,
3834 const_arg0 ? const_arg0 : folded_arg0,
3835 mode_arg0);
3836 /* NEG of PLUS could be converted into MINUS, but that causes
3837 expressions of the form
3838 (CONST (MINUS (CONST_INT) (SYMBOL_REF)))
3839 which many ports mistakenly treat as LEGITIMATE_CONSTANT_P.
3840 FIXME: those ports should be fixed. */
3841 if (new != 0 && is_const
3842 && GET_CODE (new) == PLUS
3843 && (GET_CODE (XEXP (new, 0)) == SYMBOL_REF
3844 || GET_CODE (XEXP (new, 0)) == LABEL_REF)
3845 && GET_CODE (XEXP (new, 1)) == CONST_INT)
3846 new = gen_rtx_CONST (mode, new);
3848 break;
3850 case RTX_COMPARE:
3851 case RTX_COMM_COMPARE:
3852 /* See what items are actually being compared and set FOLDED_ARG[01]
3853 to those values and CODE to the actual comparison code. If any are
3854 constant, set CONST_ARG0 and CONST_ARG1 appropriately. We needn't
3855 do anything if both operands are already known to be constant. */
3857 /* ??? Vector mode comparisons are not supported yet. */
3858 if (VECTOR_MODE_P (mode))
3859 break;
3861 if (const_arg0 == 0 || const_arg1 == 0)
3863 struct table_elt *p0, *p1;
3864 rtx true_rtx = const_true_rtx, false_rtx = const0_rtx;
3865 enum machine_mode mode_arg1;
3867 #ifdef FLOAT_STORE_FLAG_VALUE
3868 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
3870 true_rtx = (CONST_DOUBLE_FROM_REAL_VALUE
3871 (FLOAT_STORE_FLAG_VALUE (mode), mode));
3872 false_rtx = CONST0_RTX (mode);
3874 #endif
3876 code = find_comparison_args (code, &folded_arg0, &folded_arg1,
3877 &mode_arg0, &mode_arg1);
3879 /* If the mode is VOIDmode or a MODE_CC mode, we don't know
3880 what kinds of things are being compared, so we can't do
3881 anything with this comparison. */
3883 if (mode_arg0 == VOIDmode || GET_MODE_CLASS (mode_arg0) == MODE_CC)
3884 break;
3886 const_arg0 = equiv_constant (folded_arg0);
3887 const_arg1 = equiv_constant (folded_arg1);
3889 /* If we do not now have two constants being compared, see
3890 if we can nevertheless deduce some things about the
3891 comparison. */
3892 if (const_arg0 == 0 || const_arg1 == 0)
3894 /* Some addresses are known to be nonzero. We don't know
3895 their sign, but equality comparisons are known. */
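/* For example, assuming "foo" names an ordinary (non-weak) object so
   that its address is known to be nonzero,
   (eq (symbol_ref "foo") (const_int 0)) folds to false here and the
   corresponding NE folds to true. */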
3896 if (const_arg1 == const0_rtx
3897 && nonzero_address_p (folded_arg0))
3899 if (code == EQ)
3900 return false_rtx;
3901 else if (code == NE)
3902 return true_rtx;
3905 /* See if the two operands are the same. */
3907 if (folded_arg0 == folded_arg1
3908 || (REG_P (folded_arg0)
3909 && REG_P (folded_arg1)
3910 && (REG_QTY (REGNO (folded_arg0))
3911 == REG_QTY (REGNO (folded_arg1))))
3912 || ((p0 = lookup (folded_arg0,
3913 SAFE_HASH (folded_arg0, mode_arg0),
3914 mode_arg0))
3915 && (p1 = lookup (folded_arg1,
3916 SAFE_HASH (folded_arg1, mode_arg0),
3917 mode_arg0))
3918 && p0->first_same_value == p1->first_same_value))
3920 /* Sadly two equal NaNs are not equivalent. */
3921 if (!HONOR_NANS (mode_arg0))
3922 return ((code == EQ || code == LE || code == GE
3923 || code == LEU || code == GEU || code == UNEQ
3924 || code == UNLE || code == UNGE
3925 || code == ORDERED)
3926 ? true_rtx : false_rtx);
3927 /* Take care of the FP compares we can resolve. */
3928 if (code == UNEQ || code == UNLE || code == UNGE)
3929 return true_rtx;
3930 if (code == LTGT || code == LT || code == GT)
3931 return false_rtx;
3934 /* If FOLDED_ARG0 is a register, see if the comparison we are
3935 doing now is either the same as we did before or the reverse
3936 (we only check the reverse if not floating-point). */
3937 else if (REG_P (folded_arg0))
3939 int qty = REG_QTY (REGNO (folded_arg0));
3941 if (REGNO_QTY_VALID_P (REGNO (folded_arg0)))
3943 struct qty_table_elem *ent = &qty_table[qty];
3945 if ((comparison_dominates_p (ent->comparison_code, code)
3946 || (! FLOAT_MODE_P (mode_arg0)
3947 && comparison_dominates_p (ent->comparison_code,
3948 reverse_condition (code))))
3949 && (rtx_equal_p (ent->comparison_const, folded_arg1)
3950 || (const_arg1
3951 && rtx_equal_p (ent->comparison_const,
3952 const_arg1))
3953 || (REG_P (folded_arg1)
3954 && (REG_QTY (REGNO (folded_arg1)) == ent->comparison_qty))))
3955 return (comparison_dominates_p (ent->comparison_code, code)
3956 ? true_rtx : false_rtx);
3962 /* If we are comparing against zero, see if the first operand is
3963 equivalent to an IOR with a constant. If so, we may be able to
3964 determine the result of this comparison. */
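/* An illustration (register number invented): if FOLDED_ARG0 is known
   to equal (ior:SI (reg:SI 65) (const_int 1)), its low bit is set, so
   EQ against zero folds to false and NE to true; and when the IOR'd
   constant has the sign bit set, signed LT/LE against zero fold to
   true and GT/GE to false. */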
3966 if (const_arg1 == const0_rtx)
3968 rtx y = lookup_as_function (folded_arg0, IOR);
3969 rtx inner_const;
3971 if (y != 0
3972 && (inner_const = equiv_constant (XEXP (y, 1))) != 0
3973 && GET_CODE (inner_const) == CONST_INT
3974 && INTVAL (inner_const) != 0)
3976 int sign_bitnum = GET_MODE_BITSIZE (mode_arg0) - 1;
3977 int has_sign = (HOST_BITS_PER_WIDE_INT >= sign_bitnum
3978 && (INTVAL (inner_const)
3979 & ((HOST_WIDE_INT) 1 << sign_bitnum)));
3980 rtx true_rtx = const_true_rtx, false_rtx = const0_rtx;
3982 #ifdef FLOAT_STORE_FLAG_VALUE
3983 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
3985 true_rtx = (CONST_DOUBLE_FROM_REAL_VALUE
3986 (FLOAT_STORE_FLAG_VALUE (mode), mode));
3987 false_rtx = CONST0_RTX (mode);
3989 #endif
3991 switch (code)
3993 case EQ:
3994 return false_rtx;
3995 case NE:
3996 return true_rtx;
3997 case LT: case LE:
3998 if (has_sign)
3999 return true_rtx;
4000 break;
4001 case GT: case GE:
4002 if (has_sign)
4003 return false_rtx;
4004 break;
4005 default:
4006 break;
4012 rtx op0 = const_arg0 ? const_arg0 : folded_arg0;
4013 rtx op1 = const_arg1 ? const_arg1 : folded_arg1;
4014 new = simplify_relational_operation (code, mode, mode_arg0, op0, op1);
4016 break;
4018 case RTX_BIN_ARITH:
4019 case RTX_COMM_ARITH:
4020 switch (code)
4022 case PLUS:
4023 /* If the second operand is a LABEL_REF, see if the first is a MINUS
4024 with that LABEL_REF as its second operand. If so, the result is
4025 the first operand of that MINUS. This handles switches with an
4026 ADDR_DIFF_VEC table. */
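/* For instance (label names invented), a dispatch-table entry can give
   (plus (minus (label_ref L7) (label_ref L3)) (label_ref L3)),
   which folds to just (label_ref L7). */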
4027 if (const_arg1 && GET_CODE (const_arg1) == LABEL_REF)
4029 rtx y
4030 = GET_CODE (folded_arg0) == MINUS ? folded_arg0
4031 : lookup_as_function (folded_arg0, MINUS);
4033 if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
4034 && XEXP (XEXP (y, 1), 0) == XEXP (const_arg1, 0))
4035 return XEXP (y, 0);
4037 /* Now try for a CONST of a MINUS like the above. */
4038 if ((y = (GET_CODE (folded_arg0) == CONST ? folded_arg0
4039 : lookup_as_function (folded_arg0, CONST))) != 0
4040 && GET_CODE (XEXP (y, 0)) == MINUS
4041 && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
4042 && XEXP (XEXP (XEXP (y, 0), 1), 0) == XEXP (const_arg1, 0))
4043 return XEXP (XEXP (y, 0), 0);
4046 /* Likewise if the operands are in the other order. */
4047 if (const_arg0 && GET_CODE (const_arg0) == LABEL_REF)
4049 rtx y
4050 = GET_CODE (folded_arg1) == MINUS ? folded_arg1
4051 : lookup_as_function (folded_arg1, MINUS);
4053 if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
4054 && XEXP (XEXP (y, 1), 0) == XEXP (const_arg0, 0))
4055 return XEXP (y, 0);
4057 /* Now try for a CONST of a MINUS like the above. */
4058 if ((y = (GET_CODE (folded_arg1) == CONST ? folded_arg1
4059 : lookup_as_function (folded_arg1, CONST))) != 0
4060 && GET_CODE (XEXP (y, 0)) == MINUS
4061 && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
4062 && XEXP (XEXP (XEXP (y, 0), 1), 0) == XEXP (const_arg0, 0))
4063 return XEXP (XEXP (y, 0), 0);
4066 /* If second operand is a register equivalent to a negative
4067 CONST_INT, see if we can find a register equivalent to the
4068 positive constant. Make a MINUS if so. Don't do this for
4069 a non-negative constant since we might then alternate between
4070 choosing positive and negative constants. Having the positive
4071 constant previously-used is the more common case. Be sure
4072 the resulting constant is non-negative; if const_arg1 were
4073 the smallest negative number this would overflow: depending
4074 on the mode, this would either just be the same value (and
4075 hence not save anything) or be incorrect. */
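/* An illustration (register numbers invented): given
   (plus:SI (reg:SI 66) (reg:SI 67)), where reg 67 is known to hold
   (const_int -4) and some reg 68 is known to hold (const_int 4),
   we rewrite to (minus:SI (reg:SI 66) (reg:SI 68)). */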
4076 if (const_arg1 != 0 && GET_CODE (const_arg1) == CONST_INT
4077 && INTVAL (const_arg1) < 0
4078 /* This used to test
4080 -INTVAL (const_arg1) >= 0
4082 But the Sun V5.0 compilers mis-compiled that test. So
4083 instead we test for the problematic value in a more direct
4084 manner and hope the Sun compilers get it correct. */
4085 && INTVAL (const_arg1) !=
4086 ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1))
4087 && REG_P (folded_arg1))
4089 rtx new_const = GEN_INT (-INTVAL (const_arg1));
4090 struct table_elt *p
4091 = lookup (new_const, SAFE_HASH (new_const, mode), mode);
4093 if (p)
4094 for (p = p->first_same_value; p; p = p->next_same_value)
4095 if (REG_P (p->exp))
4096 return simplify_gen_binary (MINUS, mode, folded_arg0,
4097 canon_reg (p->exp, NULL_RTX));
4099 goto from_plus;
4101 case MINUS:
4102 /* If we have (MINUS Y C), see if Y is known to be (PLUS Z C2).
4103 If so, produce (PLUS Z C2-C). */
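/* E.g. (register numbers invented): for
   (minus:SI (reg:SI 70) (const_int 3)), where reg 70 is known to be
   (plus:SI (reg:SI 69) (const_int 7)), we produce
   (plus:SI (reg:SI 69) (const_int 4)). */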
4104 if (const_arg1 != 0 && GET_CODE (const_arg1) == CONST_INT)
4106 rtx y = lookup_as_function (XEXP (x, 0), PLUS);
4107 if (y && GET_CODE (XEXP (y, 1)) == CONST_INT)
4108 return fold_rtx (plus_constant (copy_rtx (y),
4109 -INTVAL (const_arg1)),
4110 NULL_RTX);
4113 /* Fall through. */
4115 from_plus:
4116 case SMIN: case SMAX: case UMIN: case UMAX:
4117 case IOR: case AND: case XOR:
4118 case MULT:
4119 case ASHIFT: case LSHIFTRT: case ASHIFTRT:
4120 /* If we have (<op> <reg> <const_int>) for an associative OP and REG
4121 is known to be of similar form, we may be able to replace the
4122 operation with a combined operation. This may eliminate the
4123 intermediate operation if every use is simplified in this way.
4124 Note that the similar optimization done by combine.c only works
4125 if the intermediate operation's result has only one reference. */
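/* An illustration (register numbers invented): if reg 71 is known to
   be (ashift:SI (reg:SI 70) (const_int 2)), then
   (ashift:SI (reg:SI 71) (const_int 3)) folds to
   (ashift:SI (reg:SI 70) (const_int 5)), the shift counts combining
   by addition. */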
4127 if (REG_P (folded_arg0)
4128 && const_arg1 && GET_CODE (const_arg1) == CONST_INT)
4130 int is_shift
4131 = (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT);
4132 rtx y = lookup_as_function (folded_arg0, code);
4133 rtx inner_const;
4134 enum rtx_code associate_code;
4135 rtx new_const;
4137 if (y == 0
4138 || 0 == (inner_const
4139 = equiv_constant (fold_rtx (XEXP (y, 1), 0)))
4140 || GET_CODE (inner_const) != CONST_INT
4141 /* If we have compiled a statement like
4142 "if (x == (x & mask1))", and now are looking at
4143 "x & mask2", we will have a case where the first operand
4144 of Y is the same as our first operand. Unless we detect
4145 this case, an infinite loop will result. */
4146 || XEXP (y, 0) == folded_arg0)
4147 break;
4149 /* Don't associate these operations if they are a PLUS with the
4150 same constant and it is a power of two. These might be doable
4151 with a pre- or post-increment. Similarly for two subtracts of
4152 identical powers of two with post decrement. */
4154 if (code == PLUS && const_arg1 == inner_const
4155 && ((HAVE_PRE_INCREMENT
4156 && exact_log2 (INTVAL (const_arg1)) >= 0)
4157 || (HAVE_POST_INCREMENT
4158 && exact_log2 (INTVAL (const_arg1)) >= 0)
4159 || (HAVE_PRE_DECREMENT
4160 && exact_log2 (- INTVAL (const_arg1)) >= 0)
4161 || (HAVE_POST_DECREMENT
4162 && exact_log2 (- INTVAL (const_arg1)) >= 0)))
4163 break;
4165 /* Compute the code used to compose the constants. For example,
4166 A-C1-C2 is A-(C1 + C2), so if CODE == MINUS, we want PLUS. */
4168 associate_code = (is_shift || code == MINUS ? PLUS : code);
4170 new_const = simplify_binary_operation (associate_code, mode,
4171 const_arg1, inner_const);
4173 if (new_const == 0)
4174 break;
4176 /* If we are associating shift operations, don't let this
4177 produce a shift of the size of the object or larger.
4178 This could occur when we follow a sign-extend by a right
4179 shift on a machine that does a sign-extend as a pair
4180 of shifts. */
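/* E.g. a QImode-to-SImode sign extension may have been expanded as
   (ashift:SI x (const_int 24)) followed by
   (ashiftrt:SI ... (const_int 24)); combining a further
   (ashiftrt ... (const_int 8)) would give a count of 32, which is
   clamped to 31 for ASHIFTRT below. (Bit counts assume 32-bit
   SImode.) */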
4182 if (is_shift && GET_CODE (new_const) == CONST_INT
4183 && INTVAL (new_const) >= GET_MODE_BITSIZE (mode))
4185 /* As an exception, we can turn an ASHIFTRT of this
4186 form into a shift of the number of bits - 1. */
4187 if (code == ASHIFTRT)
4188 new_const = GEN_INT (GET_MODE_BITSIZE (mode) - 1);
4189 else
4190 break;
4193 y = copy_rtx (XEXP (y, 0));
4195 /* If Y contains our first operand (the most common way this
4196 can happen is if Y is a MEM), we would go into an infinite
4197 loop if we tried to fold it. So don't in that case. */
4199 if (! reg_mentioned_p (folded_arg0, y))
4200 y = fold_rtx (y, insn);
4202 return simplify_gen_binary (code, mode, y, new_const);
4204 break;
4206 case DIV: case UDIV:
4207 /* ??? The associative optimization performed immediately above is
4208 also possible for DIV and UDIV using associate_code of MULT.
4209 However, we would need extra code to verify that the
4210 multiplication does not overflow, that is, there is no overflow
4211 in the calculation of new_const. */
4212 break;
4214 default:
4215 break;
4218 new = simplify_binary_operation (code, mode,
4219 const_arg0 ? const_arg0 : folded_arg0,
4220 const_arg1 ? const_arg1 : folded_arg1);
4221 break;
4223 case RTX_OBJ:
4224 /* (lo_sum (high X) X) is simply X. */
4225 if (code == LO_SUM && const_arg0 != 0
4226 && GET_CODE (const_arg0) == HIGH
4227 && rtx_equal_p (XEXP (const_arg0, 0), const_arg1))
4228 return const_arg1;
4229 break;
4231 case RTX_TERNARY:
4232 case RTX_BITFIELD_OPS:
4233 new = simplify_ternary_operation (code, mode, mode_arg0,
4234 const_arg0 ? const_arg0 : folded_arg0,
4235 const_arg1 ? const_arg1 : folded_arg1,
4236 const_arg2 ? const_arg2 : XEXP (x, 2));
4237 break;
4239 default:
4240 break;
4243 return new ? new : x;
4246 /* Return a constant value currently equivalent to X.
4247 Return 0 if we don't know one. */
4249 static rtx
4250 equiv_constant (rtx x)
4252 if (REG_P (x)
4253 && REGNO_QTY_VALID_P (REGNO (x)))
4255 int x_q = REG_QTY (REGNO (x));
4256 struct qty_table_elem *x_ent = &qty_table[x_q];
4258 if (x_ent->const_rtx)
4259 x = gen_lowpart (GET_MODE (x), x_ent->const_rtx);
4262 if (x == 0 || CONSTANT_P (x))
4263 return x;
4265 /* If X is a MEM, try to fold it outside the context of any insn to see if
4266 it might be equivalent to a constant. That handles the case where it
4267 is a constant-pool reference. Then try to look it up in the hash table
4268 in case it is something whose value we have seen before. */
4270 if (MEM_P (x))
4272 struct table_elt *elt;
4274 x = fold_rtx (x, NULL_RTX);
4275 if (CONSTANT_P (x))
4276 return x;
4278 elt = lookup (x, SAFE_HASH (x, GET_MODE (x)), GET_MODE (x));
4279 if (elt == 0)
4280 return 0;
4282 for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
4283 if (elt->is_const && CONSTANT_P (elt->exp))
4284 return elt->exp;
4287 return 0;
4290 /* Assuming that X is an rtx (e.g., MEM, REG or SUBREG) for a fixed-point
4291 number, return an rtx (MEM, SUBREG, or CONST_INT) that refers to the
4292 least-significant part of X.
4293 MODE specifies how big a part of X to return.
4295 If the requested operation cannot be done, 0 is returned.
4297 This is similar to gen_lowpart_general in emit-rtl.c. */
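/* For example, on a target with 32-bit words and big-endian byte
   order, asking for the QImode low part of (mem:SI addr) yields
   (mem:QI (plus addr 3)); on a little-endian target the offset is 0.
   (A sketch of the offset arithmetic below, under those assumptions.) */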
4300 rtx gen_lowpart_if_possible (enum machine_mode mode, rtx x)
4302 rtx result = gen_lowpart_common (mode, x);
4304 if (result)
4305 return result;
4306 else if (MEM_P (x))
4308 /* This is the only other case we handle. */
4309 int offset = 0;
4310 rtx new;
4312 if (WORDS_BIG_ENDIAN)
4313 offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
4314 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
4315 if (BYTES_BIG_ENDIAN)
4316 /* Adjust the address so that the address-after-the-data is
4317 unchanged. */
4318 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
4319 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
4321 new = adjust_address_nv (x, mode, offset);
4322 if (! memory_address_p (mode, XEXP (new, 0)))
4323 return 0;
4325 return new;
4327 else
4328 return 0;
4331 /* Given INSN, a jump insn, TAKEN indicates if we are following the "taken"
4332 branch. It will be zero if not.
4334 In certain cases, this can cause us to add an equivalence. For example,
4335 if we are following the taken case of
4336 if (i == 2)
4337 we can add the fact that `i' and `2' are now equivalent.
4339 In any case, we can record that this comparison was passed. If the same
4340 comparison is seen later, we will know its value. */
4342 static void
4343 record_jump_equiv (rtx insn, int taken)
4345 int cond_known_true;
4346 rtx op0, op1;
4347 rtx set;
4348 enum machine_mode mode, mode0, mode1;
4349 int reversed_nonequality = 0;
4350 enum rtx_code code;
4352 /* Ensure this is the right kind of insn. */
4353 if (! any_condjump_p (insn))
4354 return;
4355 set = pc_set (insn);
4357 /* See if this jump condition is known true or false. */
4358 if (taken)
4359 cond_known_true = (XEXP (SET_SRC (set), 2) == pc_rtx);
4360 else
4361 cond_known_true = (XEXP (SET_SRC (set), 1) == pc_rtx);
4363 /* Get the type of comparison being done and the operands being compared.
4364 If we had to reverse a non-equality condition, record that fact so we
4365 know that it isn't valid for floating-point. */
4366 code = GET_CODE (XEXP (SET_SRC (set), 0));
4367 op0 = fold_rtx (XEXP (XEXP (SET_SRC (set), 0), 0), insn);
4368 op1 = fold_rtx (XEXP (XEXP (SET_SRC (set), 0), 1), insn);
4370 code = find_comparison_args (code, &op0, &op1, &mode0, &mode1);
4371 if (! cond_known_true)
4373 code = reversed_comparison_code_parts (code, op0, op1, insn);
4375 /* Don't remember if we can't find the inverse. */
4376 if (code == UNKNOWN)
4377 return;
4380 /* The mode is the mode of the non-constant. */
4381 mode = mode0;
4382 if (mode1 != VOIDmode)
4383 mode = mode1;
4385 record_jump_cond (code, mode, op0, op1, reversed_nonequality);
4388 /* Yet another form of subreg creation. In this case, we want something in
4389 MODE, and we should assume OP has MODE iff it is naturally modeless. */
4391 static rtx
4392 record_jump_cond_subreg (enum machine_mode mode, rtx op)
4394 enum machine_mode op_mode = GET_MODE (op);
4395 if (op_mode == mode || op_mode == VOIDmode)
4396 return op;
4397 return lowpart_subreg (mode, op, op_mode);
4400 /* We know that comparison CODE applied to OP0 and OP1 in MODE is true.
4401 REVERSED_NONEQUALITY is nonzero if CODE had to be swapped.
4402 Make any useful entries we can with that information. Called from
4403 above function and called recursively. */
4405 static void
4406 record_jump_cond (enum rtx_code code, enum machine_mode mode, rtx op0,
4407 rtx op1, int reversed_nonequality)
4409 unsigned op0_hash, op1_hash;
4410 int op0_in_memory, op1_in_memory;
4411 struct table_elt *op0_elt, *op1_elt;
4413 /* If OP0 and OP1 are known equal, and either is a paradoxical SUBREG,
4414 we know that they are also equal in the smaller mode (this is also
4415 true for all smaller modes whether or not there is a SUBREG, but
4416 is not worth testing for with no SUBREG). */
4418 /* Note that GET_MODE (op0) may not equal MODE. */
4419 if (code == EQ && GET_CODE (op0) == SUBREG
4420 && (GET_MODE_SIZE (GET_MODE (op0))
4421 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
4423 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
4424 rtx tem = record_jump_cond_subreg (inner_mode, op1);
4425 if (tem)
4426 record_jump_cond (code, mode, SUBREG_REG (op0), tem,
4427 reversed_nonequality);
4430 if (code == EQ && GET_CODE (op1) == SUBREG
4431 && (GET_MODE_SIZE (GET_MODE (op1))
4432 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
4434 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
4435 rtx tem = record_jump_cond_subreg (inner_mode, op0);
4436 if (tem)
4437 record_jump_cond (code, mode, SUBREG_REG (op1), tem,
4438 reversed_nonequality);
4441 /* Similarly, if this is an NE comparison, and either is a SUBREG
4442 making a smaller mode, we know the whole thing is also NE. */
4444 /* Note that GET_MODE (op0) may not equal MODE;
4445 if we test MODE instead, we can get an infinite recursion
4446 alternating between two modes each wider than MODE. */
4448 if (code == NE && GET_CODE (op0) == SUBREG
4449 && subreg_lowpart_p (op0)
4450 && (GET_MODE_SIZE (GET_MODE (op0))
4451 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
4453 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
4454 rtx tem = record_jump_cond_subreg (inner_mode, op1);
4455 if (tem)
4456 record_jump_cond (code, mode, SUBREG_REG (op0), tem,
4457 reversed_nonequality);
4460 if (code == NE && GET_CODE (op1) == SUBREG
4461 && subreg_lowpart_p (op1)
4462 && (GET_MODE_SIZE (GET_MODE (op1))
4463 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
4465 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
4466 rtx tem = record_jump_cond_subreg (inner_mode, op0);
4467 if (tem)
4468 record_jump_cond (code, mode, SUBREG_REG (op1), tem,
4469 reversed_nonequality);
4472 /* Hash both operands. */
4474 do_not_record = 0;
4475 hash_arg_in_memory = 0;
4476 op0_hash = HASH (op0, mode);
4477 op0_in_memory = hash_arg_in_memory;
4479 if (do_not_record)
4480 return;
4482 do_not_record = 0;
4483 hash_arg_in_memory = 0;
4484 op1_hash = HASH (op1, mode);
4485 op1_in_memory = hash_arg_in_memory;
4487 if (do_not_record)
4488 return;
4490 /* Look up both operands. */
4491 op0_elt = lookup (op0, op0_hash, mode);
4492 op1_elt = lookup (op1, op1_hash, mode);
4494 /* If both operands are already equivalent or if they are not in the
4495 table but are identical, do nothing. */
4496 if ((op0_elt != 0 && op1_elt != 0
4497 && op0_elt->first_same_value == op1_elt->first_same_value)
4498 || op0 == op1 || rtx_equal_p (op0, op1))
4499 return;
4501 /* If we aren't setting two things equal, all we can do is save this
4502 comparison. Similarly if this is floating-point. In the latter
4503 case, OP1 might be zero and both -0.0 and 0.0 are equal to it.
4504 If we record the equality, we might inadvertently delete code
4505 whose intent was to change -0 to +0. */
4507 if (code != EQ || FLOAT_MODE_P (GET_MODE (op0)))
4509 struct qty_table_elem *ent;
4510 int qty;
4512 /* If we reversed a floating-point comparison, if OP0 is not a
4513 register, or if OP1 is neither a register nor a constant, we can't
4514 do anything. */
4516 if (!REG_P (op1))
4517 op1 = equiv_constant (op1);
4519 if ((reversed_nonequality && FLOAT_MODE_P (mode))
4520 || !REG_P (op0) || op1 == 0)
4521 return;
4523 /* Put OP0 in the hash table if it isn't already. This gives it a
4524 new quantity number. */
4525 if (op0_elt == 0)
4527 if (insert_regs (op0, NULL, 0))
4529 rehash_using_reg (op0);
4530 op0_hash = HASH (op0, mode);
4532 /* If OP0 is contained in OP1, this changes its hash code
4533 as well. Faster to rehash than to check, except
4534 for the simple case of a constant. */
4535 if (! CONSTANT_P (op1))
4536 op1_hash = HASH (op1, mode);
4539 op0_elt = insert (op0, NULL, op0_hash, mode);
4540 op0_elt->in_memory = op0_in_memory;
4543 qty = REG_QTY (REGNO (op0));
4544 ent = &qty_table[qty];
4546 ent->comparison_code = code;
4547 if (REG_P (op1))
4549 /* Look it up again--in case op0 and op1 are the same. */
4550 op1_elt = lookup (op1, op1_hash, mode);
4552 /* Put OP1 in the hash table so it gets a new quantity number. */
4553 if (op1_elt == 0)
4555 if (insert_regs (op1, NULL, 0))
4557 rehash_using_reg (op1);
4558 op1_hash = HASH (op1, mode);
4561 op1_elt = insert (op1, NULL, op1_hash, mode);
4562 op1_elt->in_memory = op1_in_memory;
4565 ent->comparison_const = NULL_RTX;
4566 ent->comparison_qty = REG_QTY (REGNO (op1));
4568 else
4570 ent->comparison_const = op1;
4571 ent->comparison_qty = -1;
4574 return;
4577 /* If either side is still missing an equivalence, make it now,
4578 then merge the equivalences. */
4580 if (op0_elt == 0)
4582 if (insert_regs (op0, NULL, 0))
4584 rehash_using_reg (op0);
4585 op0_hash = HASH (op0, mode);
4588 op0_elt = insert (op0, NULL, op0_hash, mode);
4589 op0_elt->in_memory = op0_in_memory;
4592 if (op1_elt == 0)
4594 if (insert_regs (op1, NULL, 0))
4596 rehash_using_reg (op1);
4597 op1_hash = HASH (op1, mode);
4600 op1_elt = insert (op1, NULL, op1_hash, mode);
4601 op1_elt->in_memory = op1_in_memory;
4604 merge_equiv_classes (op0_elt, op1_elt);
4607 /* CSE processing for one instruction.
4608 First simplify sources and addresses of all assignments
4609 in the instruction, using previously-computed equivalent values.
4610 Then install the new sources and destinations in the table
4611 of available values.
4613 If LIBCALL_INSN is nonzero, don't record any equivalence made in
4614 the insn. It means that INSN is inside a libcall block. In this
4615 case LIBCALL_INSN is the corresponding insn with REG_LIBCALL. */
4617 /* Data on one SET contained in the instruction. */
4619 struct set
4621 /* The SET rtx itself. */
4622 rtx rtl;
4623 /* The SET_SRC of the rtx (the original value, if it is changing). */
4624 rtx src;
4625 /* The hash-table element for the SET_SRC of the SET. */
4626 struct table_elt *src_elt;
4627 /* Hash value for the SET_SRC. */
4628 unsigned src_hash;
4629 /* Hash value for the SET_DEST. */
4630 unsigned dest_hash;
4631 /* The SET_DEST, with SUBREG, etc., stripped. */
4632 rtx inner_dest;
4633 /* Nonzero if the SET_SRC is in memory. */
4634 char src_in_memory;
4635 /* Nonzero if the SET_SRC contains something
4636 whose value cannot be predicted and understood. */
4637 char src_volatile;
4638 /* Original machine mode, in case it becomes a CONST_INT.
4639 The size of this field should match the size of the mode
4640 field of struct rtx_def (see rtl.h). */
4641 ENUM_BITFIELD(machine_mode) mode : 8;
4642 /* A constant equivalent for SET_SRC, if any. */
4643 rtx src_const;
4644 /* Original SET_SRC value used for libcall notes. */
4645 rtx orig_src;
4646 /* Hash value of constant equivalent for SET_SRC. */
4647 unsigned src_const_hash;
4648 /* Table entry for constant equivalent for SET_SRC, if any. */
4649 struct table_elt *src_const_elt;
4652 static void
4653 cse_insn (rtx insn, rtx libcall_insn)
4655 rtx x = PATTERN (insn);
4656 int i;
4657 rtx tem;
4658 int n_sets = 0;
4660 #ifdef HAVE_cc0
4661 /* Records what this insn does to set CC0. */
4662 rtx this_insn_cc0 = 0;
4663 enum machine_mode this_insn_cc0_mode = VOIDmode;
4664 #endif
4666 rtx src_eqv = 0;
4667 struct table_elt *src_eqv_elt = 0;
4668 int src_eqv_volatile = 0;
4669 int src_eqv_in_memory = 0;
4670 unsigned src_eqv_hash = 0;
4672 struct set *sets = (struct set *) 0;
4674 this_insn = insn;
4676 /* Find all the SETs and CLOBBERs in this instruction.
4677 Record all the SETs in the array `set' and count them.
4678 Also determine whether there is a CLOBBER that invalidates
4679 all memory references, or all references at varying addresses. */
4681 if (CALL_P (insn))
4683 for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
4685 if (GET_CODE (XEXP (tem, 0)) == CLOBBER)
4686 invalidate (SET_DEST (XEXP (tem, 0)), VOIDmode);
4687 XEXP (tem, 0) = canon_reg (XEXP (tem, 0), insn);
4691 if (GET_CODE (x) == SET)
4693 sets = alloca (sizeof (struct set));
4694 sets[0].rtl = x;
4696 /* Ignore SETs that are unconditional jumps.
4697 They never need cse processing, so this does not hurt.
4698 The reason is not efficiency but rather
4699 so that we can test at the end for instructions
4700 that have been simplified to unconditional jumps
4701 and not be misled by unchanged instructions
4702 that were unconditional jumps to begin with. */
4703 if (SET_DEST (x) == pc_rtx
4704 && GET_CODE (SET_SRC (x)) == LABEL_REF)
4707 /* Don't count call-insns, (set (reg 0) (call ...)), as a set.
4708 The hard function value register is used only once, to copy to
4709 someplace else, so it isn't worth cse'ing (and on 80386 is unsafe)!
4710 Ensure we invalidate the destination register. On the 80386 no
4711 other code would invalidate it since it is a fixed_reg.
4712 We need not check the return of apply_change_group; see canon_reg. */
4714 else if (GET_CODE (SET_SRC (x)) == CALL)
4716 canon_reg (SET_SRC (x), insn);
4717 apply_change_group ();
4718 fold_rtx (SET_SRC (x), insn);
4719 invalidate (SET_DEST (x), VOIDmode);
4721 else
4722 n_sets = 1;
4724 else if (GET_CODE (x) == PARALLEL)
4726 int lim = XVECLEN (x, 0);
4728 sets = alloca (lim * sizeof (struct set));
4730 /* Find all regs explicitly clobbered in this insn,
4731 and ensure they are not replaced with any other regs
4732 elsewhere in this insn.
4733 When a reg that is clobbered is also used for input,
4734 we should presume that that is for a reason,
4735 and we should not substitute some other register
4736 which is not supposed to be clobbered.
4737 Therefore, this loop cannot be merged into the one below
4738 because a CALL may precede a CLOBBER and refer to the
4739 value clobbered. We must not let a canonicalization do
4740 anything in that case. */
4741 for (i = 0; i < lim; i++)
4743 rtx y = XVECEXP (x, 0, i);
4744 if (GET_CODE (y) == CLOBBER)
4746 rtx clobbered = XEXP (y, 0);
4748 if (REG_P (clobbered)
4749 || GET_CODE (clobbered) == SUBREG)
4750 invalidate (clobbered, VOIDmode);
4751 else if (GET_CODE (clobbered) == STRICT_LOW_PART
4752 || GET_CODE (clobbered) == ZERO_EXTRACT)
4753 invalidate (XEXP (clobbered, 0), GET_MODE (clobbered));
4757 for (i = 0; i < lim; i++)
4759 rtx y = XVECEXP (x, 0, i);
4760 if (GET_CODE (y) == SET)
4762 /* As above, we ignore unconditional jumps and call-insns and
4763 ignore the result of apply_change_group. */
4764 if (GET_CODE (SET_SRC (y)) == CALL)
4766 canon_reg (SET_SRC (y), insn);
4767 apply_change_group ();
4768 fold_rtx (SET_SRC (y), insn);
4769 invalidate (SET_DEST (y), VOIDmode);
4771 else if (SET_DEST (y) == pc_rtx
4772 && GET_CODE (SET_SRC (y)) == LABEL_REF)
4774 else
4775 sets[n_sets++].rtl = y;
4777 else if (GET_CODE (y) == CLOBBER)
4779 /* If we clobber memory, canon the address.
4780 This does nothing when a register is clobbered
4781 because we have already invalidated the reg. */
4782 if (MEM_P (XEXP (y, 0)))
4783 canon_reg (XEXP (y, 0), NULL_RTX);
4785 else if (GET_CODE (y) == USE
4786 && ! (REG_P (XEXP (y, 0))
4787 && REGNO (XEXP (y, 0)) < FIRST_PSEUDO_REGISTER))
4788 canon_reg (y, NULL_RTX);
4789 else if (GET_CODE (y) == CALL)
4791 /* The result of apply_change_group can be ignored; see
4792 canon_reg. */
4793 canon_reg (y, insn);
4794 apply_change_group ();
4795 fold_rtx (y, insn);
4799 else if (GET_CODE (x) == CLOBBER)
4801 if (MEM_P (XEXP (x, 0)))
4802 canon_reg (XEXP (x, 0), NULL_RTX);
4805 /* Canonicalize a USE of a pseudo register or memory location. */
4806 else if (GET_CODE (x) == USE
4807 && ! (REG_P (XEXP (x, 0))
4808 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER))
4809 canon_reg (XEXP (x, 0), NULL_RTX);
4810 else if (GET_CODE (x) == CALL)
4812 /* The result of apply_change_group can be ignored; see canon_reg. */
4813 canon_reg (x, insn);
4814 apply_change_group ();
4815 fold_rtx (x, insn);
4818 /* Store the equivalent value in SRC_EQV, if different, or if the DEST
4819 is a STRICT_LOW_PART. The latter condition is necessary because SRC_EQV
4820 is handled specially for this case, and if it isn't set, then there will
4821 be no equivalence for the destination. */
4822 if (n_sets == 1 && REG_NOTES (insn) != 0
4823 && (tem = find_reg_note (insn, REG_EQUAL, NULL_RTX)) != 0
4824 && (! rtx_equal_p (XEXP (tem, 0), SET_SRC (sets[0].rtl))
4825 || GET_CODE (SET_DEST (sets[0].rtl)) == STRICT_LOW_PART))
4827 src_eqv = fold_rtx (canon_reg (XEXP (tem, 0), NULL_RTX), insn);
4828 XEXP (tem, 0) = src_eqv;
4831 /* Canonicalize sources and addresses of destinations.
4832 We do this in a separate pass to avoid problems when a MATCH_DUP is
4833 present in the insn pattern. In that case, we want to ensure that
4834 we don't break the duplicate nature of the pattern. So we will replace
4835 both operands at the same time. Otherwise, we would fail to find an
4836 equivalent substitution in the loop calling validate_change below.
4838 We used to suppress canonicalization of DEST if it appears in SRC,
4839 but we don't do this any more. */
4841 for (i = 0; i < n_sets; i++)
4843 rtx dest = SET_DEST (sets[i].rtl);
4844 rtx src = SET_SRC (sets[i].rtl);
4845 rtx new = canon_reg (src, insn);
4846 int insn_code;
4848 sets[i].orig_src = src;
4849 if ((REG_P (new) && REG_P (src)
4850 && ((REGNO (new) < FIRST_PSEUDO_REGISTER)
4851 != (REGNO (src) < FIRST_PSEUDO_REGISTER)))
4852 || (insn_code = recog_memoized (insn)) < 0
4853 || insn_data[insn_code].n_dups > 0)
4854 validate_change (insn, &SET_SRC (sets[i].rtl), new, 1);
4855 else
4856 SET_SRC (sets[i].rtl) = new;
4858 if (GET_CODE (dest) == ZERO_EXTRACT)
4860 validate_change (insn, &XEXP (dest, 1),
4861 canon_reg (XEXP (dest, 1), insn), 1);
4862 validate_change (insn, &XEXP (dest, 2),
4863 canon_reg (XEXP (dest, 2), insn), 1);
4866 while (GET_CODE (dest) == SUBREG
4867 || GET_CODE (dest) == ZERO_EXTRACT
4868 || GET_CODE (dest) == STRICT_LOW_PART)
4869 dest = XEXP (dest, 0);
4871 if (MEM_P (dest))
4872 canon_reg (dest, insn);
4875 /* Now that we have done all the replacements, we can apply the change
4876 group and see if they all work. Note that this will cause some
4877 canonicalizations that would have worked individually not to be applied
4878 because some other canonicalization didn't work, but this should not
4879 occur often.
4881 The result of apply_change_group can be ignored; see canon_reg. */
4883 apply_change_group ();
4885 /* Set sets[i].src_elt to the class each source belongs to.
4886 Detect assignments from or to volatile things
4887 and set sets[i] to zero so they will be ignored
4888 in the rest of this function.
4890 Nothing in this loop changes the hash table or the register chains. */
4892 for (i = 0; i < n_sets; i++)
4894 rtx src, dest;
4895 rtx src_folded;
4896 struct table_elt *elt = 0, *p;
4897 enum machine_mode mode;
4898 rtx src_eqv_here;
4899 rtx src_const = 0;
4900 rtx src_related = 0;
4901 struct table_elt *src_const_elt = 0;
4902 int src_cost = MAX_COST;
4903 int src_eqv_cost = MAX_COST;
4904 int src_folded_cost = MAX_COST;
4905 int src_related_cost = MAX_COST;
4906 int src_elt_cost = MAX_COST;
4907 int src_regcost = MAX_COST;
4908 int src_eqv_regcost = MAX_COST;
4909 int src_folded_regcost = MAX_COST;
4910 int src_related_regcost = MAX_COST;
4911 int src_elt_regcost = MAX_COST;
4912 /* Set nonzero if we need to call force_const_mem on the
4913 contents of src_folded before using it. */
4914 int src_folded_force_flag = 0;
4916 dest = SET_DEST (sets[i].rtl);
4917 src = SET_SRC (sets[i].rtl);
4919 /* If SRC is a constant that has no machine mode,
4920 hash it with the destination's machine mode.
4921 This way we can keep different modes separate. */
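/* E.g. (const_int 1) being stored into an SImode register is hashed
   under SImode, while the same CONST_INT stored into a DImode register
   is hashed under DImode, keeping the two equivalence classes
   separate. */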
4923 mode = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
4924 sets[i].mode = mode;
4926 if (src_eqv)
4928 enum machine_mode eqvmode = mode;
4929 if (GET_CODE (dest) == STRICT_LOW_PART)
4930 eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
4931 do_not_record = 0;
4932 hash_arg_in_memory = 0;
4933 src_eqv_hash = HASH (src_eqv, eqvmode);
4935 /* Find the equivalence class for the equivalent expression. */
4937 if (!do_not_record)
4938 src_eqv_elt = lookup (src_eqv, src_eqv_hash, eqvmode);
4940 src_eqv_volatile = do_not_record;
4941 src_eqv_in_memory = hash_arg_in_memory;
4944 /* If this is a STRICT_LOW_PART assignment, src_eqv corresponds to the
4945 value of the INNER register, not the destination. So it is not
4946 a valid substitution for the source. But save it for later. */
4947 if (GET_CODE (dest) == STRICT_LOW_PART)
4948 src_eqv_here = 0;
4949 else
4950 src_eqv_here = src_eqv;
4952 /* Simplify any foldable subexpressions in SRC. Then get the fully-
4953 simplified result, which may not necessarily be valid. */
4954 src_folded = fold_rtx (src, insn);
4956 #if 0
4957 /* ??? This caused bad code to be generated for the m68k port with -O2.
4958 Suppose src is (CONST_INT -1), and that after truncation src_folded
4959 is (CONST_INT 3). Suppose src_folded is then used for src_const.
4960 At the end we will add src and src_const to the same equivalence
4961 class. We now have 3 and -1 on the same equivalence class. This
4962 causes later instructions to be mis-optimized. */
4963 /* If storing a constant in a bitfield, pre-truncate the constant
4964 so we will be able to record it later. */
4965 if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT)
4967 rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
4969 if (GET_CODE (src) == CONST_INT
4970 && GET_CODE (width) == CONST_INT
4971 && INTVAL (width) < HOST_BITS_PER_WIDE_INT
4972 && (INTVAL (src) & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
4973 src_folded
4974 = GEN_INT (INTVAL (src) & (((HOST_WIDE_INT) 1
4975 << INTVAL (width)) - 1));
4977 #endif
4979 /* Compute SRC's hash code, and also notice if it
4980 should not be recorded at all. In that case,
4981 prevent any further processing of this assignment. */
4982 do_not_record = 0;
4983 hash_arg_in_memory = 0;
4985 sets[i].src = src;
4986 sets[i].src_hash = HASH (src, mode);
4987 sets[i].src_volatile = do_not_record;
4988 sets[i].src_in_memory = hash_arg_in_memory;
4990 /* If SRC is a MEM, there is a REG_EQUIV note for SRC, and DEST is
4991 a pseudo, do not record SRC. Using SRC as a replacement for
4992 anything else will be incorrect in that situation. Note that
4993 this usually occurs only for stack slots, in which case all the
4994 RTL would be referring to SRC, so we don't lose any optimization
4995 opportunities by not having SRC in the hash table. */
4997 if (MEM_P (src)
4998 && find_reg_note (insn, REG_EQUIV, NULL_RTX) != 0
4999 && REG_P (dest)
5000 && REGNO (dest) >= FIRST_PSEUDO_REGISTER)
5001 sets[i].src_volatile = 1;
5003 #if 0
5004 /* It is no longer clear why we used to do this, but it doesn't
5005 appear to still be needed. So let's try without it since this
5006 code hurts cse'ing widened ops. */
5007 /* If source is a paradoxical subreg (such as QI treated as an SI),
5008 treat it as volatile. It may do the work of an SI in one context
5009 where the extra bits are not being used, but cannot replace an SI
5010 in general. */
5011 if (GET_CODE (src) == SUBREG
5012 && (GET_MODE_SIZE (GET_MODE (src))
5013 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))))
5014 sets[i].src_volatile = 1;
5015 #endif
5017 /* Locate all possible equivalent forms for SRC. Try to replace
5018 SRC in the insn with each cheaper equivalent.
5020 We have the following types of equivalents: SRC itself, a folded
5021 version, a value given in a REG_EQUAL note, or a value related
5022 to a constant.
5024 Each of these equivalents may be part of an additional class
5025 of equivalents (if more than one is in the table, they must be in
5026 the same class; we check for this).
5028 If the source is volatile, we don't do any table lookups.
5030 We note any constant equivalent for possible later use in a
5031 REG_NOTE. */
5033 if (!sets[i].src_volatile)
5034 elt = lookup (src, sets[i].src_hash, mode);
5036 sets[i].src_elt = elt;
5038 if (elt && src_eqv_here && src_eqv_elt)
5040 if (elt->first_same_value != src_eqv_elt->first_same_value)
5042 /* The REG_EQUAL is indicating that two formerly distinct
5043 classes are now equivalent. So merge them. */
5044 merge_equiv_classes (elt, src_eqv_elt);
5045 src_eqv_hash = HASH (src_eqv, elt->mode);
5046 src_eqv_elt = lookup (src_eqv, src_eqv_hash, elt->mode);
5049 src_eqv_here = 0;
5052 else if (src_eqv_elt)
5053 elt = src_eqv_elt;
5055 /* Try to find a constant somewhere and record it in `src_const'.
5056 Record its table element, if any, in `src_const_elt'. Look in
5057 any known equivalences first. (If the constant is not in the
5058 table, also set `sets[i].src_const_hash'). */
5059 if (elt)
5060 for (p = elt->first_same_value; p; p = p->next_same_value)
5061 if (p->is_const)
5063 src_const = p->exp;
5064 src_const_elt = elt;
5065 break;
5068 if (src_const == 0
5069 && (CONSTANT_P (src_folded)
5070 /* Consider (minus (label_ref L1) (label_ref L2)) as
5071 "constant" here so we will record it. This allows us
5072 to fold switch statements when an ADDR_DIFF_VEC is used. */
5073 || (GET_CODE (src_folded) == MINUS
5074 && GET_CODE (XEXP (src_folded, 0)) == LABEL_REF
5075 && GET_CODE (XEXP (src_folded, 1)) == LABEL_REF)))
5076 src_const = src_folded, src_const_elt = elt;
5077 else if (src_const == 0 && src_eqv_here && CONSTANT_P (src_eqv_here))
5078 src_const = src_eqv_here, src_const_elt = src_eqv_elt;
5080 /* If we don't know if the constant is in the table, get its
5081 hash code and look it up. */
5082 if (src_const && src_const_elt == 0)
5084 sets[i].src_const_hash = HASH (src_const, mode);
5085 src_const_elt = lookup (src_const, sets[i].src_const_hash, mode);
5088 sets[i].src_const = src_const;
5089 sets[i].src_const_elt = src_const_elt;
5091 /* If the constant and our source are both in the table, mark them as
5092 equivalent. Otherwise, if a constant is in the table but the source
5093 isn't, set ELT to it. */
5094 if (src_const_elt && elt
5095 && src_const_elt->first_same_value != elt->first_same_value)
5096 merge_equiv_classes (elt, src_const_elt);
5097 else if (src_const_elt && elt == 0)
5098 elt = src_const_elt;
5100 /* See if there is a register linearly related to a constant
5101 equivalent of SRC. */
5102 if (src_const
5103 && (GET_CODE (src_const) == CONST
5104 || (src_const_elt && src_const_elt->related_value != 0)))
5106 src_related = use_related_value (src_const, src_const_elt);
5107 if (src_related)
5109 struct table_elt *src_related_elt
5110 = lookup (src_related, HASH (src_related, mode), mode);
5111 if (src_related_elt && elt)
5113 if (elt->first_same_value
5114 != src_related_elt->first_same_value)
5115 /* This can occur when we previously saw a CONST
5116 involving a SYMBOL_REF and then see the SYMBOL_REF
5117 twice. Merge the involved classes. */
5118 merge_equiv_classes (elt, src_related_elt);
5120 src_related = 0;
5121 src_related_elt = 0;
5123 else if (src_related_elt && elt == 0)
5124 elt = src_related_elt;
5128 /* See if we have a CONST_INT that is already in a register in a
5129 wider mode. */
5131 if (src_const && src_related == 0 && GET_CODE (src_const) == CONST_INT
5132 && GET_MODE_CLASS (mode) == MODE_INT
5133 && GET_MODE_BITSIZE (mode) < BITS_PER_WORD)
5135 enum machine_mode wider_mode;
5137 for (wider_mode = GET_MODE_WIDER_MODE (mode);
5138 GET_MODE_BITSIZE (wider_mode) <= BITS_PER_WORD
5139 && src_related == 0;
5140 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
5142 struct table_elt *const_elt
5143 = lookup (src_const, HASH (src_const, wider_mode), wider_mode);
5145 if (const_elt == 0)
5146 continue;
5148 for (const_elt = const_elt->first_same_value;
5149 const_elt; const_elt = const_elt->next_same_value)
5150 if (REG_P (const_elt->exp))
5152 src_related = gen_lowpart (mode,
5153 const_elt->exp);
5154 break;
5159 /* Another possibility is that we have an AND with a constant in
5160 a mode narrower than a word. If so, it might have been generated
5161 as part of an "if" which would narrow the AND. If we already
5162 have done the AND in a wider mode, we can use a SUBREG of that
5163 value. */
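/* An illustration (register numbers invented): if
   (and:SI (reg:SI 79) (const_int 255)) has already been computed into
   reg 80, an HImode AND of the corresponding narrower value with
   (const_int 255) may be taken as the low part of reg 80 instead of
   being recomputed. */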
5165 if (flag_expensive_optimizations && ! src_related
5166 && GET_CODE (src) == AND && GET_CODE (XEXP (src, 1)) == CONST_INT
5167 && GET_MODE_SIZE (mode) < UNITS_PER_WORD)
5169 enum machine_mode tmode;
5170 rtx new_and = gen_rtx_AND (VOIDmode, NULL_RTX, XEXP (src, 1));
5172 for (tmode = GET_MODE_WIDER_MODE (mode);
5173 GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
5174 tmode = GET_MODE_WIDER_MODE (tmode))
5176 rtx inner = gen_lowpart (tmode, XEXP (src, 0));
5177 struct table_elt *larger_elt;
5179 if (inner)
5181 PUT_MODE (new_and, tmode);
5182 XEXP (new_and, 0) = inner;
5183 larger_elt = lookup (new_and, HASH (new_and, tmode), tmode);
5184 if (larger_elt == 0)
5185 continue;
5187 for (larger_elt = larger_elt->first_same_value;
5188 larger_elt; larger_elt = larger_elt->next_same_value)
5189 if (REG_P (larger_elt->exp))
5191 src_related
5192 = gen_lowpart (mode, larger_elt->exp);
5193 break;
5196 if (src_related)
5197 break;
5202 #ifdef LOAD_EXTEND_OP
5203 /* See if a MEM has already been loaded with a widening operation;
5204 if it has, we can use a subreg of that. Many CISC machines
5205 also have such operations, but this is only likely to be
5206 beneficial on these machines. */
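/* E.g. on a machine where LOAD_EXTEND_OP (QImode) is ZERO_EXTEND, if
   (zero_extend:SI (mem:QI addr)) is already available in reg 84
   (number invented), a fresh (mem:QI addr) can be replaced by the low
   byte of reg 84, avoiding the memory reference. */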
5208 if (flag_expensive_optimizations && src_related == 0
5209 && (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
5210 && GET_MODE_CLASS (mode) == MODE_INT
5211 && MEM_P (src) && ! do_not_record
5212 && LOAD_EXTEND_OP (mode) != UNKNOWN)
5214 struct rtx_def memory_extend_buf;
5215 rtx memory_extend_rtx = &memory_extend_buf;
5216 enum machine_mode tmode;
5218 /* Set what we are trying to extend and the operation it might
5219 have been extended with. */
5220 memset (memory_extend_rtx, 0, sizeof(*memory_extend_rtx));
5221 PUT_CODE (memory_extend_rtx, LOAD_EXTEND_OP (mode));
5222 XEXP (memory_extend_rtx, 0) = src;
5224 for (tmode = GET_MODE_WIDER_MODE (mode);
5225 GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
5226 tmode = GET_MODE_WIDER_MODE (tmode))
5228 struct table_elt *larger_elt;
5230 PUT_MODE (memory_extend_rtx, tmode);
5231 larger_elt = lookup (memory_extend_rtx,
5232 HASH (memory_extend_rtx, tmode), tmode);
5233 if (larger_elt == 0)
5234 continue;
5236 for (larger_elt = larger_elt->first_same_value;
5237 larger_elt; larger_elt = larger_elt->next_same_value)
5238 if (REG_P (larger_elt->exp))
5240 src_related = gen_lowpart (mode,
5241 larger_elt->exp);
5242 break;
5245 if (src_related)
5246 break;
5249 #endif /* LOAD_EXTEND_OP */
5251 if (src == src_folded)
5252 src_folded = 0;
5254 /* At this point, ELT, if nonzero, points to a class of expressions
5255 equivalent to the source of this SET and SRC, SRC_EQV, SRC_FOLDED,
5256 and SRC_RELATED, if nonzero, each contain additional equivalent
5257 expressions. Prune these latter expressions by deleting expressions
5258 already in the equivalence class.
5260 Check for an equivalent identical to the destination. If found,
5261 this is the preferred equivalent since it will likely lead to
5262 elimination of the insn. Indicate this by placing it in
5263 `src_related'. */
5265 if (elt)
5266 elt = elt->first_same_value;
5267 for (p = elt; p; p = p->next_same_value)
5269 enum rtx_code code = GET_CODE (p->exp);
5271 /* If the expression is not valid, ignore it. Then we do not
5272 have to check for validity below. In most cases, we can use
5273 `rtx_equal_p', since canonicalization has already been done. */
5274 if (code != REG && ! exp_equiv_p (p->exp, p->exp, 1, false))
5275 continue;
5277 /* Also skip paradoxical subregs, unless that's what we're
5278 looking for. */
5279 if (code == SUBREG
5280 && (GET_MODE_SIZE (GET_MODE (p->exp))
5281 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))
5282 && ! (src != 0
5283 && GET_CODE (src) == SUBREG
5284 && GET_MODE (src) == GET_MODE (p->exp)
5285 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
5286 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))))
5287 continue;
5289 if (src && GET_CODE (src) == code && rtx_equal_p (src, p->exp))
5290 src = 0;
5291 else if (src_folded && GET_CODE (src_folded) == code
5292 && rtx_equal_p (src_folded, p->exp))
5293 src_folded = 0;
5294 else if (src_eqv_here && GET_CODE (src_eqv_here) == code
5295 && rtx_equal_p (src_eqv_here, p->exp))
5296 src_eqv_here = 0;
5297 else if (src_related && GET_CODE (src_related) == code
5298 && rtx_equal_p (src_related, p->exp))
5299 src_related = 0;
5301 /* If this is the same as the destination of the insn, we want
5302 to prefer it. Copy it to src_related. The code below will
5303 then give it a negative cost. */
5304 if (GET_CODE (dest) == code && rtx_equal_p (p->exp, dest))
5305 src_related = dest;
5308 /* Find the cheapest valid equivalent, trying all the available
5309 possibilities. Prefer items not in the hash table to ones
5310 that are, when they are of equal cost. Note that we can never
5311 worsen an insn as the current contents will also succeed.
5312 If we find an equivalent identical to the destination, use it as best,
5313 since this insn will probably be eliminated in that case. */
5314 if (src)
5316 if (rtx_equal_p (src, dest))
5317 src_cost = src_regcost = -1;
5318 else
5320 src_cost = COST (src);
5321 src_regcost = approx_reg_cost (src);
5325 if (src_eqv_here)
5327 if (rtx_equal_p (src_eqv_here, dest))
5328 src_eqv_cost = src_eqv_regcost = -1;
5329 else
5331 src_eqv_cost = COST (src_eqv_here);
5332 src_eqv_regcost = approx_reg_cost (src_eqv_here);
5336 if (src_folded)
5338 if (rtx_equal_p (src_folded, dest))
5339 src_folded_cost = src_folded_regcost = -1;
5340 else
5342 src_folded_cost = COST (src_folded);
5343 src_folded_regcost = approx_reg_cost (src_folded);
5347 if (src_related)
5349 if (rtx_equal_p (src_related, dest))
5350 src_related_cost = src_related_regcost = -1;
5351 else
5353 src_related_cost = COST (src_related);
5354 src_related_regcost = approx_reg_cost (src_related);
5358 /* If this was an indirect jump insn, a known label will really be
5359 cheaper even though it looks more expensive. */
5360 if (dest == pc_rtx && src_const && GET_CODE (src_const) == LABEL_REF)
5361 src_folded = src_const, src_folded_cost = src_folded_regcost = -1;
5363 /* Terminate loop when replacement made. This must terminate since
5364 the current contents will be tested and will always be valid. */
5365 while (1)
5367 rtx trial;
5369 /* Skip invalid entries. */
5370 while (elt && !REG_P (elt->exp)
5371 && ! exp_equiv_p (elt->exp, elt->exp, 1, false))
5372 elt = elt->next_same_value;
5374 /* A paradoxical subreg would be bad here: it'll be the right
5375 size, but later may be adjusted so that the upper bits aren't
5376 what we want. So reject it. */
5377 if (elt != 0
5378 && GET_CODE (elt->exp) == SUBREG
5379 && (GET_MODE_SIZE (GET_MODE (elt->exp))
5380 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))
5381 /* It is okay, though, if the rtx we're trying to match
5382 will ignore any of the bits we can't predict. */
5383 && ! (src != 0
5384 && GET_CODE (src) == SUBREG
5385 && GET_MODE (src) == GET_MODE (elt->exp)
5386 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
5387 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))))
5389 elt = elt->next_same_value;
5390 continue;
5393 if (elt)
5395 src_elt_cost = elt->cost;
5396 src_elt_regcost = elt->regcost;
5399 /* Find cheapest and skip it for the next time. For items
5400 of equal cost, use this order:
5401 src_folded, src, src_eqv, src_related and hash table entry. */
5402 if (src_folded
5403 && preferable (src_folded_cost, src_folded_regcost,
5404 src_cost, src_regcost) <= 0
5405 && preferable (src_folded_cost, src_folded_regcost,
5406 src_eqv_cost, src_eqv_regcost) <= 0
5407 && preferable (src_folded_cost, src_folded_regcost,
5408 src_related_cost, src_related_regcost) <= 0
5409 && preferable (src_folded_cost, src_folded_regcost,
5410 src_elt_cost, src_elt_regcost) <= 0)
5412 trial = src_folded, src_folded_cost = MAX_COST;
5413 if (src_folded_force_flag)
5415 rtx forced = force_const_mem (mode, trial);
5416 if (forced)
5417 trial = forced;
5420 else if (src
5421 && preferable (src_cost, src_regcost,
5422 src_eqv_cost, src_eqv_regcost) <= 0
5423 && preferable (src_cost, src_regcost,
5424 src_related_cost, src_related_regcost) <= 0
5425 && preferable (src_cost, src_regcost,
5426 src_elt_cost, src_elt_regcost) <= 0)
5427 trial = src, src_cost = MAX_COST;
5428 else if (src_eqv_here
5429 && preferable (src_eqv_cost, src_eqv_regcost,
5430 src_related_cost, src_related_regcost) <= 0
5431 && preferable (src_eqv_cost, src_eqv_regcost,
5432 src_elt_cost, src_elt_regcost) <= 0)
5433 trial = copy_rtx (src_eqv_here), src_eqv_cost = MAX_COST;
5434 else if (src_related
5435 && preferable (src_related_cost, src_related_regcost,
5436 src_elt_cost, src_elt_regcost) <= 0)
5437 trial = copy_rtx (src_related), src_related_cost = MAX_COST;
5438 else
5440 trial = copy_rtx (elt->exp);
5441 elt = elt->next_same_value;
5442 src_elt_cost = MAX_COST;
5445 /* We don't normally have an insn matching (set (pc) (pc)), so
5446 check for this separately here. We will delete such an
5447 insn below.
5449 For other cases such as a table jump or conditional jump
5450 where we know the ultimate target, go ahead and replace the
5451 operand. While that may not make a valid insn, we will
5452 reemit the jump below (and also insert any necessary
5453 barriers). */
5454 if (n_sets == 1 && dest == pc_rtx
5455 && (trial == pc_rtx
5456 || (GET_CODE (trial) == LABEL_REF
5457 && ! condjump_p (insn))))
5459 /* Don't substitute non-local labels, as this confuses the CFG. */
5460 if (GET_CODE (trial) == LABEL_REF
5461 && LABEL_REF_NONLOCAL_P (trial))
5462 continue;
5464 SET_SRC (sets[i].rtl) = trial;
5465 cse_jumps_altered = 1;
5466 break;
5469 /* Look for a substitution that makes a valid insn. */
5470 else if (validate_change (insn, &SET_SRC (sets[i].rtl), trial, 0))
5472 rtx new = canon_reg (SET_SRC (sets[i].rtl), insn);
5474 /* If we just made a substitution inside a libcall, then we
5475 need to make the same substitution in any notes attached
5476 to the RETVAL insn. */
5477 if (libcall_insn
5478 && (REG_P (sets[i].orig_src)
5479 || GET_CODE (sets[i].orig_src) == SUBREG
5480 || MEM_P (sets[i].orig_src)))
5482 rtx note = find_reg_equal_equiv_note (libcall_insn);
5483 if (note != 0)
5484 XEXP (note, 0) = simplify_replace_rtx (XEXP (note, 0),
5485 sets[i].orig_src,
5486 copy_rtx (new));
5489 /* The result of apply_change_group can be ignored; see
5490 canon_reg. */
5492 validate_change (insn, &SET_SRC (sets[i].rtl), new, 1);
5493 apply_change_group ();
5494 break;
5497 /* If we previously found constant pool entries for
5498 constants and this is a constant, try making a
5499 pool entry. Put it in src_folded unless we already have done
5500 this since that is where it likely came from. */
5502 else if (constant_pool_entries_cost
5503 && CONSTANT_P (trial)
5504 /* Reject cases that will abort in decode_rtx_const.
5505 On the alpha when simplifying a switch, we get
5506 (const (truncate (minus (label_ref) (label_ref)))). */
5507 && ! (GET_CODE (trial) == CONST
5508 && GET_CODE (XEXP (trial, 0)) == TRUNCATE)
5509 /* Likewise on IA-64, except without the truncate. */
5510 && ! (GET_CODE (trial) == CONST
5511 && GET_CODE (XEXP (trial, 0)) == MINUS
5512 && GET_CODE (XEXP (XEXP (trial, 0), 0)) == LABEL_REF
5513 && GET_CODE (XEXP (XEXP (trial, 0), 1)) == LABEL_REF)
5514 && (src_folded == 0
5515 || (!MEM_P (src_folded)
5516 && ! src_folded_force_flag))
5517 && GET_MODE_CLASS (mode) != MODE_CC
5518 && mode != VOIDmode)
5520 src_folded_force_flag = 1;
5521 src_folded = trial;
5522 src_folded_cost = constant_pool_entries_cost;
5523 src_folded_regcost = constant_pool_entries_regcost;
5527 src = SET_SRC (sets[i].rtl);
5529 /* In general, it is good to have a SET with SET_SRC == SET_DEST.
5530 However, there is an important exception: If both are registers
5531 that are not the head of their equivalence class, replace SET_SRC
5532 with the head of the class. If we do not do this, we will have
5533 both registers live over a portion of the basic block. This way,
5534 their lifetimes will likely abut instead of overlapping. */
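/* Hypothetical instance (register numbers invented): if (reg 100)
   heads the equivalence class containing (reg 106), a no-op copy
   (set (reg 106) (reg 106)) is rewritten below as
   (set (reg 106) (reg 100)), so the two lifetimes abut rather than
   overlap.  */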
5535 if (REG_P (dest)
5536 && REGNO_QTY_VALID_P (REGNO (dest)))
5538 int dest_q = REG_QTY (REGNO (dest));
5539 struct qty_table_elem *dest_ent = &qty_table[dest_q];
5541 if (dest_ent->mode == GET_MODE (dest)
5542 && dest_ent->first_reg != REGNO (dest)
5543 && REG_P (src) && REGNO (src) == REGNO (dest)
5544 /* Don't do this if the original insn had a hard reg as
5545 SET_SRC or SET_DEST. */
5546 && (!REG_P (sets[i].src)
5547 || REGNO (sets[i].src) >= FIRST_PSEUDO_REGISTER)
5548 && (!REG_P (dest) || REGNO (dest) >= FIRST_PSEUDO_REGISTER))
5549 /* We can't call canon_reg here because it won't do anything if
5550 SRC is a hard register. */
5552 int src_q = REG_QTY (REGNO (src));
5553 struct qty_table_elem *src_ent = &qty_table[src_q];
5554 int first = src_ent->first_reg;
5555 rtx new_src
5556 = (first >= FIRST_PSEUDO_REGISTER
5557 ? regno_reg_rtx[first] : gen_rtx_REG (GET_MODE (src), first));
5559 /* We must use validate-change even for this, because this
5560 might be a special no-op instruction, suitable only to
5561 tag notes onto. */
5562 if (validate_change (insn, &SET_SRC (sets[i].rtl), new_src, 0))
5564 src = new_src;
5565 /* If we had a constant that is cheaper than what we are now
5566 setting SRC to, use that constant. We ignored it when we
5567 thought we could make this into a no-op. */
5568 if (src_const && COST (src_const) < COST (src)
5569 && validate_change (insn, &SET_SRC (sets[i].rtl),
5570 src_const, 0))
5571 src = src_const;
5576 /* If we made a change, recompute SRC values. */
5577 if (src != sets[i].src)
5579 cse_altered = 1;
5580 do_not_record = 0;
5581 hash_arg_in_memory = 0;
5582 sets[i].src = src;
5583 sets[i].src_hash = HASH (src, mode);
5584 sets[i].src_volatile = do_not_record;
5585 sets[i].src_in_memory = hash_arg_in_memory;
5586 sets[i].src_elt = lookup (src, sets[i].src_hash, mode);
5589 /* If this is a single SET, we are setting a register, and we have an
5590 equivalent constant, we want to add a REG_NOTE. We don't want
5591 to write a REG_EQUAL note for a constant pseudo since verifying that
5592 that pseudo hasn't been eliminated is a pain. Such a note also
5593 won't help anything.
5595 Avoid a REG_EQUAL note for (CONST (MINUS (LABEL_REF) (LABEL_REF)))
5596 which can be created for a reference to a compile time computable
5597 entry in a jump table. */
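/* Example of the note added below (values illustrative): for
   (set (reg:SI 100) (reg:SI 101)) with SRC_CONST known to be
   (const_int 42), the insn gains
   (expr_list:REG_EQUAL (const_int 42) ...) without its pattern being
   rewritten.  */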
5599 if (n_sets == 1 && src_const && REG_P (dest)
5600 && !REG_P (src_const)
5601 && ! (GET_CODE (src_const) == CONST
5602 && GET_CODE (XEXP (src_const, 0)) == MINUS
5603 && GET_CODE (XEXP (XEXP (src_const, 0), 0)) == LABEL_REF
5604 && GET_CODE (XEXP (XEXP (src_const, 0), 1)) == LABEL_REF))
5606 /* We only want a REG_EQUAL note if src_const != src. */
5607 if (! rtx_equal_p (src, src_const))
5609 /* Make sure that the rtx is not shared. */
5610 src_const = copy_rtx (src_const);
5612 /* Record the actual constant value in a REG_EQUAL note,
5613 making a new one if one does not already exist. */
5614 set_unique_reg_note (insn, REG_EQUAL, src_const);
5618 /* Now deal with the destination. */
5619 do_not_record = 0;
5621 /* Look within any ZERO_EXTRACT to the MEM or REG within it. */
5622 while (GET_CODE (dest) == SUBREG
5623 || GET_CODE (dest) == ZERO_EXTRACT
5624 || GET_CODE (dest) == STRICT_LOW_PART)
5625 dest = XEXP (dest, 0);
5627 sets[i].inner_dest = dest;
5629 if (MEM_P (dest))
5631 #ifdef PUSH_ROUNDING
5632 /* Stack pushes invalidate the stack pointer. */
5633 rtx addr = XEXP (dest, 0);
5634 if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC
5635 && XEXP (addr, 0) == stack_pointer_rtx)
5636 invalidate (stack_pointer_rtx, Pmode);
5637 #endif
5638 dest = fold_rtx (dest, insn);
5641 /* Compute the hash code of the destination now,
5642 before the effects of this instruction are recorded,
5643 since the register values used in the address computation
5644 are those before this instruction. */
5645 sets[i].dest_hash = HASH (dest, mode);
5647 /* Don't enter a bit-field in the hash table
5648 because the value in it after the store
5649 may not equal what was stored, due to truncation. */
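/* E.g. (illustrative constants): storing (const_int 0x1ff) through an
   8-bit ZERO_EXTRACT leaves 0xff in the field, so recording the
   stored rtx would be wrong; (const_int 0x0f) fits in 8 bits and is
   the exception taken below.  */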
5651 if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT)
5653 rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
5655 if (src_const != 0 && GET_CODE (src_const) == CONST_INT
5656 && GET_CODE (width) == CONST_INT
5657 && INTVAL (width) < HOST_BITS_PER_WIDE_INT
5658 && ! (INTVAL (src_const)
5659 & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
5660 /* Exception: if the value is constant,
5661 and it won't be truncated, record it. */
5663 else
5665 /* This is chosen so that the destination will be invalidated
5666 but no new value will be recorded.
5667 We must invalidate because sometimes constant
5668 values can be recorded for bitfields. */
5669 sets[i].src_elt = 0;
5670 sets[i].src_volatile = 1;
5671 src_eqv = 0;
5672 src_eqv_elt = 0;
5676 /* If only one set in a JUMP_INSN and it is now a no-op, we can delete
5677 the insn. */
5678 else if (n_sets == 1 && dest == pc_rtx && src == pc_rtx)
5680 /* One less use of the label this insn used to jump to. */
5681 delete_insn (insn);
5682 cse_jumps_altered = 1;
5683 /* No more processing for this set. */
5684 sets[i].rtl = 0;
5687 /* If this SET is now setting PC to a label, we know it used to
5688 be a conditional or computed branch. */
5689 else if (dest == pc_rtx && GET_CODE (src) == LABEL_REF
5690 && !LABEL_REF_NONLOCAL_P (src))
5692 /* Now emit a BARRIER after the unconditional jump. */
5693 if (NEXT_INSN (insn) == 0
5694 || !BARRIER_P (NEXT_INSN (insn)))
5695 emit_barrier_after (insn);
5697 /* We reemit the jump in as many cases as possible just in
5698 case the form of an unconditional jump is significantly
5699 different from a computed jump or conditional jump.
5701 If this insn has multiple sets, then reemitting the
5702 jump is nontrivial. So instead we just force rerecognition
5703 and hope for the best. */
5704 if (n_sets == 1)
5706 rtx new, note;
5708 new = emit_jump_insn_after (gen_jump (XEXP (src, 0)), insn);
5709 JUMP_LABEL (new) = XEXP (src, 0);
5710 LABEL_NUSES (XEXP (src, 0))++;
5712 /* Make sure to copy over REG_NON_LOCAL_GOTO. */
5713 note = find_reg_note (insn, REG_NON_LOCAL_GOTO, 0);
5714 if (note)
5716 XEXP (note, 1) = NULL_RTX;
5717 REG_NOTES (new) = note;
5720 delete_insn (insn);
5721 insn = new;
5723 /* Now emit a BARRIER after the unconditional jump. */
5724 if (NEXT_INSN (insn) == 0
5725 || !BARRIER_P (NEXT_INSN (insn)))
5726 emit_barrier_after (insn);
5728 else
5729 INSN_CODE (insn) = -1;
5731 /* Do not bother deleting any unreachable code,
5732 let jump/flow do that. */
5734 cse_jumps_altered = 1;
5735 sets[i].rtl = 0;
5738 /* If destination is volatile, invalidate it and then do no further
5739 processing for this assignment. */
5741 else if (do_not_record)
5743 if (REG_P (dest) || GET_CODE (dest) == SUBREG)
5744 invalidate (dest, VOIDmode);
5745 else if (MEM_P (dest))
5746 invalidate (dest, VOIDmode);
5747 else if (GET_CODE (dest) == STRICT_LOW_PART
5748 || GET_CODE (dest) == ZERO_EXTRACT)
5749 invalidate (XEXP (dest, 0), GET_MODE (dest));
5750 sets[i].rtl = 0;
5753 if (sets[i].rtl != 0 && dest != SET_DEST (sets[i].rtl))
5754 sets[i].dest_hash = HASH (SET_DEST (sets[i].rtl), mode);
5756 #ifdef HAVE_cc0
5757 /* If setting CC0, record what it was set to, or a constant, if it
5758 is equivalent to a constant. If it is being set to a floating-point
5759 value, make a COMPARE with the appropriate constant of 0. If we
5760 don't do this, later code can interpret this as a test against
5761 const0_rtx, which can cause problems if we try to put it into an
5762 insn as a floating-point operand. */
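/* Sketch (mode and operands illustrative): instead of recording a
   bare (reg:SF 100) for cc0, we record
   (compare:VOIDmode (reg:SF 100) (const_double:SF 0.0)), so later
   code cannot misread the value as an integer test against
   const0_rtx.  */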
5763 if (dest == cc0_rtx)
5765 this_insn_cc0 = src_const && mode != VOIDmode ? src_const : src;
5766 this_insn_cc0_mode = mode;
5767 if (FLOAT_MODE_P (mode))
5768 this_insn_cc0 = gen_rtx_COMPARE (VOIDmode, this_insn_cc0,
5769 CONST0_RTX (mode));
5771 #endif
5774 /* Now enter all non-volatile source expressions in the hash table
5775 if they are not already present.
5776 Record their equivalence classes in src_elt.
5777 This way we can insert the corresponding destinations into
5778 the same classes even if the actual sources are no longer in them
5779 (having been invalidated). */
5781 if (src_eqv && src_eqv_elt == 0 && sets[0].rtl != 0 && ! src_eqv_volatile
5782 && ! rtx_equal_p (src_eqv, SET_DEST (sets[0].rtl)))
5784 struct table_elt *elt;
5785 struct table_elt *classp = sets[0].src_elt;
5786 rtx dest = SET_DEST (sets[0].rtl);
5787 enum machine_mode eqvmode = GET_MODE (dest);
5789 if (GET_CODE (dest) == STRICT_LOW_PART)
5791 eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
5792 classp = 0;
5794 if (insert_regs (src_eqv, classp, 0))
5796 rehash_using_reg (src_eqv);
5797 src_eqv_hash = HASH (src_eqv, eqvmode);
5799 elt = insert (src_eqv, classp, src_eqv_hash, eqvmode);
5800 elt->in_memory = src_eqv_in_memory;
5801 src_eqv_elt = elt;
5803 /* Check to see if src_eqv_elt is the same as a set source which
5804 does not yet have an elt, and if so set the elt of the set source
5805 to src_eqv_elt. */
5806 for (i = 0; i < n_sets; i++)
5807 if (sets[i].rtl && sets[i].src_elt == 0
5808 && rtx_equal_p (SET_SRC (sets[i].rtl), src_eqv))
5809 sets[i].src_elt = src_eqv_elt;
5812 for (i = 0; i < n_sets; i++)
5813 if (sets[i].rtl && ! sets[i].src_volatile
5814 && ! rtx_equal_p (SET_SRC (sets[i].rtl), SET_DEST (sets[i].rtl)))
5816 if (GET_CODE (SET_DEST (sets[i].rtl)) == STRICT_LOW_PART)
5818 /* REG_EQUAL in setting a STRICT_LOW_PART
5819 gives an equivalent for the entire destination register,
5820 not just for the subreg being stored in now.
5821 This is a more interesting equivalence, so we arrange later
5822 to treat the entire reg as the destination. */
5823 sets[i].src_elt = src_eqv_elt;
5824 sets[i].src_hash = src_eqv_hash;
5826 else
5828 /* Insert source and constant equivalent into hash table, if not
5829 already present. */
5830 struct table_elt *classp = src_eqv_elt;
5831 rtx src = sets[i].src;
5832 rtx dest = SET_DEST (sets[i].rtl);
5833 enum machine_mode mode
5834 = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
5836 /* It's possible that we have a source value known to be
5837 constant but don't have a REG_EQUAL note on the insn.
5838 Lack of a note will mean src_eqv_elt will be NULL. This
5839 can happen where we've generated a SUBREG to access a
5840 CONST_INT that is already in a register in a wider mode.
5841 Ensure that the source expression is put in the proper
5842 constant class. */
5843 if (!classp)
5844 classp = sets[i].src_const_elt;
5846 if (sets[i].src_elt == 0)
5848 /* Don't put a hard register source into the table if this is
5849 the last insn of a libcall. In this case, we only need
5850 to put src_eqv_elt in src_elt. */
5851 if (! find_reg_note (insn, REG_RETVAL, NULL_RTX))
5853 struct table_elt *elt;
5855 /* Note that these insert_regs calls cannot remove
5856 any of the src_elt's, because they would have failed to
5857 match if not still valid. */
5858 if (insert_regs (src, classp, 0))
5860 rehash_using_reg (src);
5861 sets[i].src_hash = HASH (src, mode);
5863 elt = insert (src, classp, sets[i].src_hash, mode);
5864 elt->in_memory = sets[i].src_in_memory;
5865 sets[i].src_elt = classp = elt;
5867 else
5868 sets[i].src_elt = classp;
5870 if (sets[i].src_const && sets[i].src_const_elt == 0
5871 && src != sets[i].src_const
5872 && ! rtx_equal_p (sets[i].src_const, src))
5873 sets[i].src_elt = insert (sets[i].src_const, classp,
5874 sets[i].src_const_hash, mode);
5877 else if (sets[i].src_elt == 0)
5878 /* If we did not insert the source into the hash table (e.g., it was
5879 volatile), note the equivalence class for the REG_EQUAL value, if any,
5880 so that the destination goes into that class. */
5881 sets[i].src_elt = src_eqv_elt;
5883 invalidate_from_clobbers (x);
5885 /* Some registers are invalidated by subroutine calls. Memory is
5886 invalidated by non-constant calls. */
5888 if (CALL_P (insn))
5890 if (! CONST_OR_PURE_CALL_P (insn))
5891 invalidate_memory ();
5892 invalidate_for_call ();
5895 /* Now invalidate everything set by this instruction.
5896 If a SUBREG or other funny destination is being set,
5897 sets[i].rtl is still nonzero, so here we invalidate the reg
5898 a part of which is being set. */
5900 for (i = 0; i < n_sets; i++)
5901 if (sets[i].rtl)
5903 /* We can't use the inner dest, because the mode associated with
5904 a ZERO_EXTRACT is significant. */
5905 rtx dest = SET_DEST (sets[i].rtl);
5907 /* Needed for registers to remove the register from its
5908 previous quantity's chain.
5909 Needed for memory if this is a nonvarying address, unless
5910 we have just done an invalidate_memory that covers even those. */
5911 if (REG_P (dest) || GET_CODE (dest) == SUBREG)
5912 invalidate (dest, VOIDmode);
5913 else if (MEM_P (dest))
5914 invalidate (dest, VOIDmode);
5915 else if (GET_CODE (dest) == STRICT_LOW_PART
5916 || GET_CODE (dest) == ZERO_EXTRACT)
5917 invalidate (XEXP (dest, 0), GET_MODE (dest));
5920 /* A volatile ASM invalidates everything. */
5921 if (NONJUMP_INSN_P (insn)
5922 && GET_CODE (PATTERN (insn)) == ASM_OPERANDS
5923 && MEM_VOLATILE_P (PATTERN (insn)))
5924 flush_hash_table ();
5926 /* Make sure registers mentioned in destinations
5927 are safe for use in an expression to be inserted.
5928 This removes from the hash table
5929 any invalid entry that refers to one of these registers.
5931 We don't care about the return value from mention_regs because
5932 we are going to hash the SET_DEST values unconditionally. */
5934 for (i = 0; i < n_sets; i++)
5936 if (sets[i].rtl)
5938 rtx x = SET_DEST (sets[i].rtl);
5940 if (!REG_P (x))
5941 mention_regs (x);
5942 else
5944 /* We used to rely on all references to a register becoming
5945 inaccessible when a register changes to a new quantity,
5946 since that changes the hash code. However, that is not
5947 safe, since after HASH_SIZE new quantities we get a
5948 hash 'collision' of a register with its own invalid
5949 entries. And since SUBREGs have been changed not to
5950 change their hash code with the hash code of the register,
5951 it wouldn't work any longer at all. So we have to check
5952 for any invalid references lying around now.
5953 This code is similar to the REG case in mention_regs,
5954 but it knows that reg_tick has been incremented, and
5955 it leaves reg_in_table as -1. */
5956 unsigned int regno = REGNO (x);
5957 unsigned int endregno
5958 = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
5959 : hard_regno_nregs[regno][GET_MODE (x)]);
5960 unsigned int i;
5962 for (i = regno; i < endregno; i++)
5964 if (REG_IN_TABLE (i) >= 0)
5966 remove_invalid_refs (i);
5967 REG_IN_TABLE (i) = -1;
5974 /* We may have just removed some of the src_elt's from the hash table.
5975 So replace each one with the current head of the same class. */
5977 for (i = 0; i < n_sets; i++)
5978 if (sets[i].rtl)
5980 if (sets[i].src_elt && sets[i].src_elt->first_same_value == 0)
5981 /* If elt was removed, find current head of same class,
5982 or 0 if nothing remains of that class. */
5984 struct table_elt *elt = sets[i].src_elt;
5986 while (elt && elt->prev_same_value)
5987 elt = elt->prev_same_value;
5989 while (elt && elt->first_same_value == 0)
5990 elt = elt->next_same_value;
5991 sets[i].src_elt = elt ? elt->first_same_value : 0;
5995 /* Now insert the destinations into their equivalence classes. */
5997 for (i = 0; i < n_sets; i++)
5998 if (sets[i].rtl)
6000 rtx dest = SET_DEST (sets[i].rtl);
6001 struct table_elt *elt;
6003 /* Don't record value if we are not supposed to risk allocating
6004 floating-point values in registers that might be wider than
6005 memory. */
6006 if ((flag_float_store
6007 && MEM_P (dest)
6008 && FLOAT_MODE_P (GET_MODE (dest)))
6009 /* Don't record BLKmode values, because we don't know the
6010 size of it, and can't be sure that other BLKmode values
6011 have the same or smaller size. */
6012 || GET_MODE (dest) == BLKmode
6013 /* Don't record values of destinations set inside a libcall block
6014 since we might delete the libcall. Things should have been set
6015 up so we won't want to reuse such a value, but we play it safe
6016 here. */
6017 || libcall_insn
6018 /* If we didn't put a REG_EQUAL value or a source into the hash
6019 table, there is no point in recording DEST.
6020 || sets[i].src_elt == 0
6021 /* If DEST is a paradoxical SUBREG and SRC is a ZERO_EXTEND
6022 or SIGN_EXTEND, don't record DEST since it can cause
6023 some tracking to be wrong.
6025 ??? Think about this more later. */
6026 || (GET_CODE (dest) == SUBREG
6027 && (GET_MODE_SIZE (GET_MODE (dest))
6028 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
6029 && (GET_CODE (sets[i].src) == SIGN_EXTEND
6030 || GET_CODE (sets[i].src) == ZERO_EXTEND)))
6031 continue;
6033 /* STRICT_LOW_PART isn't part of the value BEING set,
6034 and neither is the SUBREG inside it.
6035 Note that in this case SETS[I].SRC_ELT is really SRC_EQV_ELT. */
6036 if (GET_CODE (dest) == STRICT_LOW_PART)
6037 dest = SUBREG_REG (XEXP (dest, 0));
6039 if (REG_P (dest) || GET_CODE (dest) == SUBREG)
6040 /* Registers must also be inserted into chains for quantities. */
6041 if (insert_regs (dest, sets[i].src_elt, 1))
6043 /* If `insert_regs' changes something, the hash code must be
6044 recalculated. */
6045 rehash_using_reg (dest);
6046 sets[i].dest_hash = HASH (dest, GET_MODE (dest));
6049 elt = insert (dest, sets[i].src_elt,
6050 sets[i].dest_hash, GET_MODE (dest));
6052 elt->in_memory = (MEM_P (sets[i].inner_dest)
6053 && !MEM_READONLY_P (sets[i].inner_dest));
6055 /* If we have (set (subreg:m1 (reg:m2 foo) 0) (bar:m1)), M1 is no
6056 narrower than M2, and both M1 and M2 are the same number of words,
6057 we are also doing (set (reg:m2 foo) (subreg:m2 (bar:m1) 0)) so
6058 make that equivalence as well.
6060 However, BAR may have equivalences for which gen_lowpart
6061 will produce a simpler value than gen_lowpart applied to
6062 BAR (e.g., if BAR was ZERO_EXTENDed from M2), so we will scan all
6063 BAR's equivalences. If we don't get a simplified form, make
6064 the SUBREG. It will not be used in an equivalence, but will
6065 cause two similar assignments to be detected.
6067 Note the loop below will find SUBREG_REG (DEST) since we have
6068 already entered SRC and DEST of the SET in the table. */
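/* Concrete shape of the equivalence (purely illustrative, assuming a
   little-endian 32-bit word): from
   (set (subreg:SI (reg:HI 100) 0) (reg:SI 101))
   the loop below may also record (reg:HI 100) as equivalent to
   (subreg:HI (reg:SI 101) 0), since both modes fit in one word.  */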
6070 if (GET_CODE (dest) == SUBREG
6071 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) - 1)
6072 / UNITS_PER_WORD)
6073 == (GET_MODE_SIZE (GET_MODE (dest)) - 1) / UNITS_PER_WORD)
6074 && (GET_MODE_SIZE (GET_MODE (dest))
6075 >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
6076 && sets[i].src_elt != 0)
6078 enum machine_mode new_mode = GET_MODE (SUBREG_REG (dest));
6079 struct table_elt *elt, *classp = 0;
6081 for (elt = sets[i].src_elt->first_same_value; elt;
6082 elt = elt->next_same_value)
6084 rtx new_src = 0;
6085 unsigned src_hash;
6086 struct table_elt *src_elt;
6087 int byte = 0;
6089 /* Ignore invalid entries. */
6090 if (!REG_P (elt->exp)
6091 && ! exp_equiv_p (elt->exp, elt->exp, 1, false))
6092 continue;
6094 /* We may have already been playing subreg games. If the
6095 mode is already correct for the destination, use it. */
6096 if (GET_MODE (elt->exp) == new_mode)
6097 new_src = elt->exp;
6098 else
6100 /* Calculate big endian correction for the SUBREG_BYTE.
6101 We have already checked that M1 (GET_MODE (dest))
6102 is not narrower than M2 (new_mode). */
6103 if (BYTES_BIG_ENDIAN)
6104 byte = (GET_MODE_SIZE (GET_MODE (dest))
6105 - GET_MODE_SIZE (new_mode));
6107 new_src = simplify_gen_subreg (new_mode, elt->exp,
6108 GET_MODE (dest), byte);
6111 /* The call to simplify_gen_subreg fails if the value
6112 is VOIDmode, yet we can't do any simplification, e.g.
6113 for EXPR_LISTs denoting function call results.
6114 It is invalid to construct a SUBREG with a VOIDmode
6115 SUBREG_REG, hence a zero new_src means we can't do
6116 this substitution. */
6117 if (! new_src)
6118 continue;
6120 src_hash = HASH (new_src, new_mode);
6121 src_elt = lookup (new_src, src_hash, new_mode);
6123 /* Put the new source in the hash table if it isn't
6124 already. */
6125 if (src_elt == 0)
6127 if (insert_regs (new_src, classp, 0))
6129 rehash_using_reg (new_src);
6130 src_hash = HASH (new_src, new_mode);
6132 src_elt = insert (new_src, classp, src_hash, new_mode);
6133 src_elt->in_memory = elt->in_memory;
6135 else if (classp && classp != src_elt->first_same_value)
6136 /* Show that two things that we've seen before are
6137 actually the same. */
6138 merge_equiv_classes (src_elt, classp);
6140 classp = src_elt->first_same_value;
6141 /* Ignore invalid entries. */
6142 while (classp
6143 && !REG_P (classp->exp)
6144 && ! exp_equiv_p (classp->exp, classp->exp, 1, false))
6145 classp = classp->next_same_value;
6150 /* Special handling for (set REG0 REG1) where REG0 is the
6151 "cheapest", cheaper than REG1. After cse, REG1 will probably not
6152 be used in the sequel, so (if easily done) change this insn to
6153 (set REG1 REG0) and replace REG1 with REG0 in the previous insn
6154 that computed their value. Then REG1 will become a dead store
6155 and won't cloud the situation for later optimizations.
6157 Do not make this change if REG1 is a hard register, because it will
6158 then be used in the sequel and we may be changing a two-operand insn
6159 into a three-operand insn.
6161 Also do not do this if we are operating on a copy of INSN.
6163 Also don't do this if INSN ends a libcall; this would cause an unrelated
6164 register to be set in the middle of a libcall, and we then get bad code
6165 if the libcall is deleted. */
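/* Hedged before/after sketch of the swap (register numbers made up):

   (set (reg 101) (plus ...))          (set (reg 100) (plus ...))
   (set (reg 100) (reg 101))    -->    (set (reg 101) (reg 100))

   where (reg 100) is the cheaper class head; the second insn is then
   a likely-dead store of (reg 101).  */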
6167 if (n_sets == 1 && sets[0].rtl && REG_P (SET_DEST (sets[0].rtl))
6168 && NEXT_INSN (PREV_INSN (insn)) == insn
6169 && REG_P (SET_SRC (sets[0].rtl))
6170 && REGNO (SET_SRC (sets[0].rtl)) >= FIRST_PSEUDO_REGISTER
6171 && REGNO_QTY_VALID_P (REGNO (SET_SRC (sets[0].rtl))))
6173 int src_q = REG_QTY (REGNO (SET_SRC (sets[0].rtl)));
6174 struct qty_table_elem *src_ent = &qty_table[src_q];
6176 if ((src_ent->first_reg == REGNO (SET_DEST (sets[0].rtl)))
6177 && ! find_reg_note (insn, REG_RETVAL, NULL_RTX))
6179 rtx prev = insn;
6180 /* Scan for the previous nonnote insn, but stop at a basic
6181 block boundary. */
6184 prev = PREV_INSN (prev);
6186 while (prev && NOTE_P (prev)
6187 && NOTE_LINE_NUMBER (prev) != NOTE_INSN_BASIC_BLOCK);
6189 /* Do not swap the registers around if the previous instruction
6190 attaches a REG_EQUIV note to REG1.
6192 ??? It's not entirely clear whether we can transfer a REG_EQUIV
6193 from the pseudo that originally shadowed an incoming argument
6194 to another register. Some uses of REG_EQUIV might rely on it
6195 being attached to REG1 rather than to some other register.
6197 This section previously turned the REG_EQUIV into a REG_EQUAL
6198 note. We cannot do that because REG_EQUIV may provide an
6199 uninitialized stack slot when REG_PARM_STACK_SPACE is used. */
6201 if (prev != 0 && NONJUMP_INSN_P (prev)
6202 && GET_CODE (PATTERN (prev)) == SET
6203 && SET_DEST (PATTERN (prev)) == SET_SRC (sets[0].rtl)
6204 && ! find_reg_note (prev, REG_EQUIV, NULL_RTX))
6206 rtx dest = SET_DEST (sets[0].rtl);
6207 rtx src = SET_SRC (sets[0].rtl);
6208 rtx note;
6210 validate_change (prev, &SET_DEST (PATTERN (prev)), dest, 1);
6211 validate_change (insn, &SET_DEST (sets[0].rtl), src, 1);
6212 validate_change (insn, &SET_SRC (sets[0].rtl), dest, 1);
6213 apply_change_group ();
6215 /* If INSN has a REG_EQUAL note, and this note mentions
6216 REG0, then we must delete it, because the value in
6217 REG0 has changed. If the note's value is REG1, we must
6218 also delete it because that is now this insn's dest. */
6219 note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
6220 if (note != 0
6221 && (reg_mentioned_p (dest, XEXP (note, 0))
6222 || rtx_equal_p (src, XEXP (note, 0))))
6223 remove_note (insn, note);
6228 /* If this is a conditional jump insn, record any known equivalences due to
6229 the condition being tested. */
6231 if (JUMP_P (insn)
6232 && n_sets == 1 && GET_CODE (x) == SET
6233 && GET_CODE (SET_SRC (x)) == IF_THEN_ELSE)
6234 record_jump_equiv (insn, 0);
6236 #ifdef HAVE_cc0
6237 /* If the previous insn set CC0 and this insn no longer references CC0,
6238 delete the previous insn. Here we use the fact that nothing expects CC0
6239 to be valid over an insn, which is true until the final pass. */
6240 if (prev_insn && NONJUMP_INSN_P (prev_insn)
6241 && (tem = single_set (prev_insn)) != 0
6242 && SET_DEST (tem) == cc0_rtx
6243 && ! reg_mentioned_p (cc0_rtx, x))
6244 delete_insn (prev_insn);
6246 prev_insn_cc0 = this_insn_cc0;
6247 prev_insn_cc0_mode = this_insn_cc0_mode;
6248 prev_insn = insn;
6249 #endif
6252 /* Remove from the hash table all expressions that reference memory. */
6254 static void
6255 invalidate_memory (void)
6257 int i;
6258 struct table_elt *p, *next;
6260 for (i = 0; i < HASH_SIZE; i++)
6261 for (p = table[i]; p; p = next)
6263 next = p->next_same_hash;
6264 if (p->in_memory)
6265 remove_from_table (p, i);
6269 /* If ADDR is an address that implicitly affects the stack pointer, return
6270 1 and update the register tables to show the effect. Else, return 0. */
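/* E.g. a push through (mem:SI (pre_dec:SI (reg:SI sp))) (address
   shown for illustration) implicitly decrements the stack pointer;
   bumping REG_TICK below makes stale any hash entries that mention
   it.  */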
6272 static int
6273 addr_affects_sp_p (rtx addr)
6275 if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC
6276 && REG_P (XEXP (addr, 0))
6277 && REGNO (XEXP (addr, 0)) == STACK_POINTER_REGNUM)
6279 if (REG_TICK (STACK_POINTER_REGNUM) >= 0)
6281 REG_TICK (STACK_POINTER_REGNUM)++;
6282 /* Is it possible to use a subreg of SP? */
6283 SUBREG_TICKED (STACK_POINTER_REGNUM) = -1;
6286 /* This should be *very* rare. */
6287 if (TEST_HARD_REG_BIT (hard_regs_in_table, STACK_POINTER_REGNUM))
6288 invalidate (stack_pointer_rtx, VOIDmode);
6290 return 1;
6293 return 0;
6296 /* Perform invalidation on the basis of everything about an insn
6297 except for invalidating the actual places that are SET in it.
6298 This includes the places CLOBBERed, and anything that might
6299 alias with something that is SET or CLOBBERed.
6301 X is the pattern of the insn. */
6303 static void
6304 invalidate_from_clobbers (rtx x)
6306 if (GET_CODE (x) == CLOBBER)
6308 rtx ref = XEXP (x, 0);
6309 if (ref)
6311 if (REG_P (ref) || GET_CODE (ref) == SUBREG
6312 || MEM_P (ref))
6313 invalidate (ref, VOIDmode);
6314 else if (GET_CODE (ref) == STRICT_LOW_PART
6315 || GET_CODE (ref) == ZERO_EXTRACT)
6316 invalidate (XEXP (ref, 0), GET_MODE (ref));
6319 else if (GET_CODE (x) == PARALLEL)
6321 int i;
6322 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
6324 rtx y = XVECEXP (x, 0, i);
6325 if (GET_CODE (y) == CLOBBER)
6327 rtx ref = XEXP (y, 0);
6328 if (REG_P (ref) || GET_CODE (ref) == SUBREG
6329 || MEM_P (ref))
6330 invalidate (ref, VOIDmode);
6331 else if (GET_CODE (ref) == STRICT_LOW_PART
6332 || GET_CODE (ref) == ZERO_EXTRACT)
6333 invalidate (XEXP (ref, 0), GET_MODE (ref));
6339 /* Process X, part of the REG_NOTES of an insn. Look at any REG_EQUAL notes
6340 and replace any registers in them with either an equivalent constant
6341 or the canonical form of the register. If we are inside an address,
6342 only do this if the address remains valid.
6344 OBJECT is 0 except when within a MEM in which case it is the MEM.
6346 Return the replacement for X. */
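/* Hypothetical example: if (reg:SI 100) currently has the constant
   equivalent (const_int 8), a REG_EQUAL note of
   (plus:SI (reg:SI 100) (const_int 4)) becomes
   (plus:SI (const_int 8) (const_int 4)); within a MEM the
   substitution is kept only while the address remains valid.  */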
6348 static rtx
6349 cse_process_notes (rtx x, rtx object)
6351 enum rtx_code code = GET_CODE (x);
6352 const char *fmt = GET_RTX_FORMAT (code);
6353 int i;
6355 switch (code)
6357 case CONST_INT:
6358 case CONST:
6359 case SYMBOL_REF:
6360 case LABEL_REF:
6361 case CONST_DOUBLE:
6362 case CONST_VECTOR:
6363 case PC:
6364 case CC0:
6365 case LO_SUM:
6366 return x;
6368 case MEM:
6369 validate_change (x, &XEXP (x, 0),
6370 cse_process_notes (XEXP (x, 0), x), 0);
6371 return x;
6373 case EXPR_LIST:
6374 case INSN_LIST:
6375 if (REG_NOTE_KIND (x) == REG_EQUAL)
6376 XEXP (x, 0) = cse_process_notes (XEXP (x, 0), NULL_RTX);
6377 if (XEXP (x, 1))
6378 XEXP (x, 1) = cse_process_notes (XEXP (x, 1), NULL_RTX);
6379 return x;
6381 case SIGN_EXTEND:
6382 case ZERO_EXTEND:
6383 case SUBREG:
6385 rtx new = cse_process_notes (XEXP (x, 0), object);
6386 /* We don't substitute VOIDmode constants into these rtx,
6387 since they would impede folding. */
6388 if (GET_MODE (new) != VOIDmode)
6389 validate_change (object, &XEXP (x, 0), new, 0);
6390 return x;
6393 case REG:
6394 i = REG_QTY (REGNO (x));
6396 /* Return a constant or a constant register. */
6397 if (REGNO_QTY_VALID_P (REGNO (x)))
6399 struct qty_table_elem *ent = &qty_table[i];
6401 if (ent->const_rtx != NULL_RTX
6402 && (CONSTANT_P (ent->const_rtx)
6403 || REG_P (ent->const_rtx)))
6405 rtx new = gen_lowpart (GET_MODE (x), ent->const_rtx);
6406 if (new)
6407 return new;
6411 /* Otherwise, canonicalize this register. */
6412 return canon_reg (x, NULL_RTX);
6414 default:
6415 break;
6418 for (i = 0; i < GET_RTX_LENGTH (code); i++)
6419 if (fmt[i] == 'e')
6420 validate_change (object, &XEXP (x, i),
6421 cse_process_notes (XEXP (x, i), object), 0);
6423 return x;
6426 /* Process one SET of an insn that was skipped. We ignore CLOBBERs
6427 since they are done elsewhere. This function is called via note_stores. */
6429 static void
6430 invalidate_skipped_set (rtx dest, rtx set, void *data ATTRIBUTE_UNUSED)
6432 enum rtx_code code = GET_CODE (dest);
6434 if (code == MEM
6435 && ! addr_affects_sp_p (dest) /* If this is not a stack push ... */
6436 /* There are times when an address can appear varying and be a PLUS
6437 during this scan when it would be a fixed address were we to know
6438 the proper equivalences. So invalidate all memory if there is
6439 a BLKmode or nonscalar memory reference or a reference to a
6440 variable address. */
6441 && (MEM_IN_STRUCT_P (dest) || GET_MODE (dest) == BLKmode
6442 || cse_rtx_varies_p (XEXP (dest, 0), 0)))
6444 invalidate_memory ();
6445 return;
6448 if (GET_CODE (set) == CLOBBER
6449 || CC0_P (dest)
6450 || dest == pc_rtx)
6451 return;
6453 if (code == STRICT_LOW_PART || code == ZERO_EXTRACT)
6454 invalidate (XEXP (dest, 0), GET_MODE (dest));
6455 else if (code == REG || code == SUBREG || code == MEM)
6456 invalidate (dest, VOIDmode);
6459 /* Invalidate all insns from START up to the end of the function or the
6460 next label. This is called when we wish to CSE around a block that is
6461 conditionally executed. */
6463 static void
6464 invalidate_skipped_block (rtx start)
6466 rtx insn;
6468 for (insn = start; insn && !LABEL_P (insn);
6469 insn = NEXT_INSN (insn))
6471 if (! INSN_P (insn))
6472 continue;
6474 if (CALL_P (insn))
6476 if (! CONST_OR_PURE_CALL_P (insn))
6477 invalidate_memory ();
6478 invalidate_for_call ();
6481 invalidate_from_clobbers (PATTERN (insn));
6482 note_stores (PATTERN (insn), invalidate_skipped_set, NULL);
6486 /* Find the end of INSN's basic block and return its range,
6487 the total number of SETs in all the insns of the block, the last insn of the
6488 block, and the branch path.
6490 The branch path indicates which branches should be followed. If a nonzero
6491 path size is specified, the block should be rescanned and a different set
6492 of branches will be taken. The branch path is only used if
6493 FLAG_CSE_FOLLOW_JUMPS or FLAG_CSE_SKIP_BLOCKS is nonzero.
6495 DATA is a pointer to a struct cse_basic_block_data, defined below, that is
6496 used to describe the block. It is filled in with the information about
6497 the current block. The incoming structure's branch path, if any, is used
6498 to construct the output branch path. */
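/* One possible branch path on return (contents illustrative):
   data->path[0] = { jump_insn_A, PATH_TAKEN }
   data->path[1] = { jump_insn_B, PATH_AROUND }
   meaning A was followed to its label and B skipped over the block it
   branches around.  */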
6500 static void
6501 cse_end_of_basic_block (rtx insn, struct cse_basic_block_data *data,
6502 int follow_jumps, int skip_blocks)
6504 rtx p = insn, q;
6505 int nsets = 0;
6506 int low_cuid = INSN_CUID (insn), high_cuid = INSN_CUID (insn);
6507 rtx next = INSN_P (insn) ? insn : next_real_insn (insn);
6508 int path_size = data->path_size;
6509 int path_entry = 0;
6510 int i;
6512 /* Update the previous branch path, if any. If the last branch was
6513 previously PATH_TAKEN, mark it PATH_NOT_TAKEN.
6514 If it was previously PATH_NOT_TAKEN,
6515 shorten the path by one and look at the previous branch. We know that
6516 at least one branch must have been taken if PATH_SIZE is nonzero. */
6517 while (path_size > 0)
6519 if (data->path[path_size - 1].status != PATH_NOT_TAKEN)
6521 data->path[path_size - 1].status = PATH_NOT_TAKEN;
6522 break;
6524 else
6525 path_size--;
6528 /* If the first instruction is marked with QImode, that means we've
6529 already processed this block. Our caller will look at DATA->LAST
6530 to figure out where to go next. We want to return the next block
6531 in the instruction stream, not some branched-to block somewhere
6532 else. We accomplish this by pretending our caller forbade us to
6533 follow jumps or skip blocks. */
6534 if (GET_MODE (insn) == QImode)
6535 follow_jumps = skip_blocks = 0;
6537 /* Scan to end of this basic block. */
6538 while (p && !LABEL_P (p))
6540 /* Don't cse over a call to setjmp; on some machines (eg VAX)
6541 the regs restored by the longjmp come from
6542 a later time than the setjmp. */
6543 if (PREV_INSN (p) && CALL_P (PREV_INSN (p))
6544 && find_reg_note (PREV_INSN (p), REG_SETJMP, NULL))
6545 break;
6547 /* A PARALLEL can have lots of SETs in it,
6548 especially if it is really an ASM_OPERANDS. */
6549 if (INSN_P (p) && GET_CODE (PATTERN (p)) == PARALLEL)
6550 nsets += XVECLEN (PATTERN (p), 0);
6551 else if (!NOTE_P (p))
6552 nsets += 1;
6554 /* Ignore insns made by CSE; they cannot affect the boundaries of
6555 the basic block. */
6557 if (INSN_UID (p) <= max_uid && INSN_CUID (p) > high_cuid)
6558 high_cuid = INSN_CUID (p);
6559 if (INSN_UID (p) <= max_uid && INSN_CUID (p) < low_cuid)
6560 low_cuid = INSN_CUID (p);
6562 /* See if this insn is in our branch path. If it is and we are to
6563 take it, do so. */
6564 if (path_entry < path_size && data->path[path_entry].branch == p)
6566 if (data->path[path_entry].status != PATH_NOT_TAKEN)
6567 p = JUMP_LABEL (p);
6569 /* Point to next entry in path, if any. */
6570 path_entry++;
6573 /* If this is a conditional jump, we can follow it if -fcse-follow-jumps
6574 was specified, we haven't reached our maximum path length, there are
6575 insns following the target of the jump, this is the only use of the
6576 jump label, and the target label is preceded by a BARRIER.
6578 Alternatively, we can follow the jump if it branches around a
6579 block of code and there are no other branches into the block.
6580 In this case invalidate_skipped_block will be called to invalidate any
6581 registers set in the block when following the jump. */
6583 else if ((follow_jumps || skip_blocks) && path_size < PARAM_VALUE (PARAM_MAX_CSE_PATH_LENGTH) - 1
6584 && JUMP_P (p)
6585 && GET_CODE (PATTERN (p)) == SET
6586 && GET_CODE (SET_SRC (PATTERN (p))) == IF_THEN_ELSE
6587 && JUMP_LABEL (p) != 0
6588 && LABEL_NUSES (JUMP_LABEL (p)) == 1
6589 && NEXT_INSN (JUMP_LABEL (p)) != 0)
6591 for (q = PREV_INSN (JUMP_LABEL (p)); q; q = PREV_INSN (q))
6592 if ((!NOTE_P (q)
6593 || NOTE_LINE_NUMBER (q) == NOTE_INSN_LOOP_END
6594 || (PREV_INSN (q) && CALL_P (PREV_INSN (q))
6595 && find_reg_note (PREV_INSN (q), REG_SETJMP, NULL)))
6596 && (!LABEL_P (q) || LABEL_NUSES (q) != 0))
6597 break;
6599 /* If we ran into a BARRIER, this code is an extension of the
6600 basic block when the branch is taken. */
6601 if (follow_jumps && q != 0 && BARRIER_P (q))
6603 /* Don't allow ourselves to keep walking around an
6604 always-executed loop. */
6605 if (next_real_insn (q) == next)
6607 p = NEXT_INSN (p);
6608 continue;
6611 /* Similarly, don't put a branch in our path more than once. */
6612 for (i = 0; i < path_entry; i++)
6613 if (data->path[i].branch == p)
6614 break;
6616 if (i != path_entry)
6617 break;
6619 data->path[path_entry].branch = p;
6620 data->path[path_entry++].status = PATH_TAKEN;
6622 /* This branch now ends our path. It was possible that we
6623 didn't see this branch the last time around (when the
6624 insn in front of the target was a JUMP_INSN that was
6625 turned into a no-op). */
6626 path_size = path_entry;
6628 p = JUMP_LABEL (p);
6629 /* Mark block so we won't scan it again later. */
6630 PUT_MODE (NEXT_INSN (p), QImode);
6632 /* Detect a branch around a block of code. */
6633 else if (skip_blocks && q != 0 && !LABEL_P (q))
6635 rtx tmp;
6637 if (next_real_insn (q) == next)
6639 p = NEXT_INSN (p);
6640 continue;
6643 for (i = 0; i < path_entry; i++)
6644 if (data->path[i].branch == p)
6645 break;
6647 if (i != path_entry)
6648 break;
6650 /* This is no_labels_between_p (p, q) with an added check for
6651 reaching the end of a function (in case Q precedes P). */
6652 for (tmp = NEXT_INSN (p); tmp && tmp != q; tmp = NEXT_INSN (tmp))
6653 if (LABEL_P (tmp))
6654 break;
6656 if (tmp == q)
6658 data->path[path_entry].branch = p;
6659 data->path[path_entry++].status = PATH_AROUND;
6661 path_size = path_entry;
6663 p = JUMP_LABEL (p);
6664 /* Mark block so we won't scan it again later. */
6665 PUT_MODE (NEXT_INSN (p), QImode);
6669 p = NEXT_INSN (p);
6672 data->low_cuid = low_cuid;
6673 data->high_cuid = high_cuid;
6674 data->nsets = nsets;
6675 data->last = p;
6677 /* If none of the jumps in the path were taken, set our path length
6678 to zero so a rescan won't be done. */
6679 for (i = path_size - 1; i >= 0; i--)
6680 if (data->path[i].status != PATH_NOT_TAKEN)
6681 break;
6683 if (i == -1)
6684 data->path_size = 0;
6685 else
6686 data->path_size = path_size;
6688 /* End the current branch path. */
6689 data->path[path_size].branch = 0;
6692 /* Perform cse on the instructions of a function.
6693 F is the first instruction.
6694 NREGS is one plus the highest pseudo-reg number used in the function.
6696 Returns 1 if jump_optimize should be redone due to simplifications
6697 in conditional jump instructions. */
6700 cse_main (rtx f, int nregs, FILE *file)
6702 struct cse_basic_block_data val;
6703 rtx insn = f;
6704 int i;
6706 init_cse_reg_info (nregs);
6708 val.path = xmalloc (sizeof (struct branch_path)
6709 * PARAM_VALUE (PARAM_MAX_CSE_PATH_LENGTH));
6711 cse_jumps_altered = 0;
6712 recorded_label_ref = 0;
6713 constant_pool_entries_cost = 0;
6714 constant_pool_entries_regcost = 0;
6715 val.path_size = 0;
6716 rtl_hooks = cse_rtl_hooks;
6718 init_recog ();
6719 init_alias_analysis ();
6721 reg_eqv_table = xmalloc (nregs * sizeof (struct reg_eqv_elem));
6723 /* Find the largest uid. */
6725 max_uid = get_max_uid ();
6726 uid_cuid = xcalloc (max_uid + 1, sizeof (int));
6728 /* Compute the mapping from uids to cuids.
6729 CUIDs are numbers assigned to insns, like uids,
6730 except that cuids increase monotonically through the code.
6731 Don't assign cuids to line-number NOTEs, so that the distance in cuids
6732 between two insns is not affected by -g. */
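/* Illustration (uids invented): for the chain
   insn[uid 14] -> line-number note[uid 15] -> insn[uid 16]
   the loop below assigns cuids 1, 1 and 2; the note shares the cuid
   of the preceding insn.  */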
6734 for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
6736 if (!NOTE_P (insn)
6737 || NOTE_LINE_NUMBER (insn) < 0)
6738 INSN_CUID (insn) = ++i;
6739 else
6740 /* Give a line number note the same cuid as the preceding insn. */
6741 INSN_CUID (insn) = i;
6744 /* Loop over basic blocks.
6745 Compute the maximum number of qty's needed for each basic block
6746 (which is 2 for each SET). */
6747 insn = f;
6748 while (insn)
6750 cse_altered = 0;
6751 cse_end_of_basic_block (insn, &val, flag_cse_follow_jumps,
6752 flag_cse_skip_blocks);
6754 /* If this basic block was already processed or has no sets, skip it. */
6755 if (val.nsets == 0 || GET_MODE (insn) == QImode)
6757 PUT_MODE (insn, VOIDmode);
6758 insn = (val.last ? NEXT_INSN (val.last) : 0);
6759 val.path_size = 0;
6760 continue;
6763 cse_basic_block_start = val.low_cuid;
6764 cse_basic_block_end = val.high_cuid;
6765 max_qty = val.nsets * 2;
6767 if (file)
6768 fnotice (file, ";; Processing block from %d to %d, %d sets.\n",
6769 INSN_UID (insn), val.last ? INSN_UID (val.last) : 0,
6770 val.nsets);
6772 /* Make MAX_QTY bigger to give us room to optimize
6773 past the end of this basic block, if that should prove useful. */
6774 if (max_qty < 500)
6775 max_qty = 500;
6777 /* If this basic block is being extended by following certain jumps,
6778 (see `cse_end_of_basic_block'), we reprocess the code from the start.
6779 Otherwise, we start after this basic block. */
6780 if (val.path_size > 0)
6781 cse_basic_block (insn, val.last, val.path);
6782 else
6784 int old_cse_jumps_altered = cse_jumps_altered;
6785 rtx temp;
6787 /* When cse changes a conditional jump to an unconditional
6788 jump, we want to reprocess the block, since it will give
6789 us a new branch path to investigate. */
6790 cse_jumps_altered = 0;
6791 temp = cse_basic_block (insn, val.last, val.path);
6792 if (cse_jumps_altered == 0
6793 || (flag_cse_follow_jumps == 0 && flag_cse_skip_blocks == 0))
6794 insn = temp;
6796 cse_jumps_altered |= old_cse_jumps_altered;
6799 if (cse_altered)
6800 ggc_collect ();
6802 #ifdef USE_C_ALLOCA
6803 alloca (0);
6804 #endif
6807 /* Clean up. */
6808 end_alias_analysis ();
6809 free (uid_cuid);
6810 free (reg_eqv_table);
6811 free (val.path);
6812 rtl_hooks = general_rtl_hooks;
6814 return cse_jumps_altered || recorded_label_ref;
6817 /* Process a single basic block. FROM and TO are the limits of the basic
6818 block. NEXT_BRANCH points to the branch path when following jumps or
6819 a null path when not following jumps. */
6821 static rtx
6822 cse_basic_block (rtx from, rtx to, struct branch_path *next_branch)
6824 rtx insn;
6825 int to_usage = 0;
6826 rtx libcall_insn = NULL_RTX;
6827 int num_insns = 0;
6828 int no_conflict = 0;
6830 /* Allocate the space needed by qty_table. */
6831 qty_table = xmalloc (max_qty * sizeof (struct qty_table_elem));
6833 new_basic_block ();
6835 /* TO might be a label. If so, protect it from being deleted. */
6836 if (to != 0 && LABEL_P (to))
6837 ++LABEL_NUSES (to);
6839 for (insn = from; insn != to; insn = NEXT_INSN (insn))
6841 enum rtx_code code = GET_CODE (insn);
6843 /* If we have processed 1,000 insns, flush the hash table to
6844 avoid extreme quadratic behavior. We must not include NOTEs
6845 in the count since there may be more of them when generating
6846 debugging information. If we clear the table at different
6847 times, code generated with -g -O might be different from code
6848 generated with -O but not -g.
6850 ??? This is a real kludge and needs to be done some other way.
6851 Perhaps for 2.9. */
6852 if (code != NOTE && num_insns++ > 1000)
6854 flush_hash_table ();
6855 num_insns = 0;
6858 /* See if this is a branch that is part of the path. If so, and it is
6859 to be taken, do so. */
6860 if (next_branch->branch == insn)
6862 enum taken status = next_branch++->status;
6863 if (status != PATH_NOT_TAKEN)
6865 if (status == PATH_TAKEN)
6866 record_jump_equiv (insn, 1);
6867 else
6868 invalidate_skipped_block (NEXT_INSN (insn));
6870 /* Set the last insn as the jump insn; it doesn't affect cc0.
6871 Then follow this branch. */
6872 #ifdef HAVE_cc0
6873 prev_insn_cc0 = 0;
6874 prev_insn = insn;
6875 #endif
6876 insn = JUMP_LABEL (insn);
6877 continue;
6881 if (GET_MODE (insn) == QImode)
6882 PUT_MODE (insn, VOIDmode);
6884 if (GET_RTX_CLASS (code) == RTX_INSN)
6886 rtx p;
6888 /* Process notes first so we have all notes in canonical forms when
6889 looking for duplicate operations. */
6891 if (REG_NOTES (insn))
6892 REG_NOTES (insn) = cse_process_notes (REG_NOTES (insn), NULL_RTX);
6894 /* Track when we are inside a LIBCALL block. Inside such a block,
6895 we do not want to record destinations. The last insn of a
6896 LIBCALL block is not considered to be part of the block, since
6897 its destination is the result of the block and hence should be
6898 recorded. */
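/* Schematic shape of such a block (notes abridged):
   insn_first  [REG_LIBCALL pointing at insn_last]
   ...
   insn_last   [REG_RETVAL pointing at insn_first]
   Only insn_last's destination is recorded while we are inside.  */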
6900 if (REG_NOTES (insn) != 0)
6902 if ((p = find_reg_note (insn, REG_LIBCALL, NULL_RTX)))
6903 libcall_insn = XEXP (p, 0);
6904 else if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
6906 /* Keep libcall_insn for the last SET insn of a no-conflict
6907 block to prevent changing the destination. */
6908 if (! no_conflict)
6909 libcall_insn = 0;
6910 else
6911 no_conflict = -1;
6913 else if (find_reg_note (insn, REG_NO_CONFLICT, NULL_RTX))
6914 no_conflict = 1;
6917 cse_insn (insn, libcall_insn);
6919 if (no_conflict == -1)
6921 libcall_insn = 0;
6922 no_conflict = 0;
6925 /* If we haven't already found an insn where we added a LABEL_REF,
6926 check this one. */
6927 if (NONJUMP_INSN_P (insn) && ! recorded_label_ref
6928 && for_each_rtx (&PATTERN (insn), check_for_label_ref,
6929 (void *) insn))
6930 recorded_label_ref = 1;
6933 /* If INSN is now an unconditional jump, skip to the end of our
6934 basic block by pretending that we just did the last insn in the
6935 basic block. If we are jumping to the end of our block, show
6936 that we can have one usage of TO. */
6938 if (any_uncondjump_p (insn))
6940 if (to == 0)
6942 free (qty_table);
6943 return 0;
6946 if (JUMP_LABEL (insn) == to)
6947 to_usage = 1;
6949 /* Maybe TO was deleted because the jump is unconditional.
6950 If so, there is nothing left in this basic block. */
6951 /* ??? Perhaps it would be smarter to set TO
6952 to whatever follows this insn,
6953 and pretend the basic block had always ended here. */
6954 if (INSN_DELETED_P (to))
6955 break;
6957 insn = PREV_INSN (to);
6960 /* See if it is ok to keep on going past the label
6961 which used to end our basic block. Remember that we incremented
6962 the count of that label, so we decrement it here. If we made
6963 a jump unconditional, TO_USAGE will be one; in that case, we don't
6964 want to count the use in that jump. */
6966 if (to != 0 && NEXT_INSN (insn) == to
6967 && LABEL_P (to) && --LABEL_NUSES (to) == to_usage)
6969 struct cse_basic_block_data val;
6970 rtx prev;
6972 insn = NEXT_INSN (to);
6974 /* If TO was the last insn in the function, we are done. */
6975 if (insn == 0)
6977 free (qty_table);
6978 return 0;
6981 /* If TO was preceded by a BARRIER we are done with this block
6982 because it has no continuation. */
6983 prev = prev_nonnote_insn (to);
6984 if (prev && BARRIER_P (prev))
6986 free (qty_table);
6987 return insn;
6990 /* Find the end of the following block. Note that we won't be
6991 following branches in this case. */
6992 to_usage = 0;
6993 val.path_size = 0;
6994 val.path = xmalloc (sizeof (struct branch_path)
6995 * PARAM_VALUE (PARAM_MAX_CSE_PATH_LENGTH));
6996 cse_end_of_basic_block (insn, &val, 0, 0);
6997 free (val.path);
6999 /* If the tables we allocated have enough space left
7000 to handle all the SETs in the next basic block,
7001 continue through it. Otherwise, return,
7002 and that block will be scanned individually. */
7003 if (val.nsets * 2 + next_qty > max_qty)
7004 break;
7006 cse_basic_block_start = val.low_cuid;
7007 cse_basic_block_end = val.high_cuid;
7008 to = val.last;
7010 /* Prevent TO from being deleted if it is a label. */
7011 if (to != 0 && LABEL_P (to))
7012 ++LABEL_NUSES (to);
7014 /* Back up so we process the first insn in the extension. */
7015 insn = PREV_INSN (insn);
7019 gcc_assert (next_qty <= max_qty);
7021 free (qty_table);
7023 return to ? NEXT_INSN (to) : 0;
7026 /* Called via for_each_rtx to see if an insn is using a LABEL_REF for which
7027 there isn't a REG_LABEL note. Return 1 if so. DATA is the insn. */
7029 static int
7030 check_for_label_ref (rtx *rtl, void *data)
7032 rtx insn = (rtx) data;
7034 /* If this insn uses a LABEL_REF and there isn't a REG_LABEL note for it,
7035 we must rerun jump since it needs to place the note. If this is a
7036 LABEL_REF for a CODE_LABEL that isn't in the insn chain, don't do this
7037 since no REG_LABEL will be added. */
7038 return (GET_CODE (*rtl) == LABEL_REF
7039 && ! LABEL_REF_NONLOCAL_P (*rtl)
7040 && LABEL_P (XEXP (*rtl, 0))
7041 && INSN_UID (XEXP (*rtl, 0)) != 0
7042 && ! find_reg_note (insn, REG_LABEL, XEXP (*rtl, 0)));
7045 /* Count the number of times registers are used (not set) in X.
7046 COUNTS is an array in which we accumulate the count; INCR is how much
7047 we count each register usage. */
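/* Worked example (registers invented): for
   (set (reg 100) (plus (reg 101) (reg 101)))
   a call with INCR == 1 adds 2 to counts[101] and nothing to
   counts[100], since a plain REG destination is not a use.  */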
7049 static void
7050 count_reg_usage (rtx x, int *counts, int incr)
7052 enum rtx_code code;
7053 rtx note;
7054 const char *fmt;
7055 int i, j;
7057 if (x == 0)
7058 return;
7060 switch (code = GET_CODE (x))
7062 case REG:
7063 counts[REGNO (x)] += incr;
7064 return;
7066 case PC:
7067 case CC0:
7068 case CONST:
7069 case CONST_INT:
7070 case CONST_DOUBLE:
7071 case CONST_VECTOR:
7072 case SYMBOL_REF:
7073 case LABEL_REF:
7074 return;
7076 case CLOBBER:
7077 /* If we are clobbering a MEM, mark any registers inside the address
7078 as being used. */
7079 if (MEM_P (XEXP (x, 0)))
7080 count_reg_usage (XEXP (XEXP (x, 0), 0), counts, incr);
7081 return;
7083 case SET:
7084 /* Unless we are setting a REG, count everything in SET_DEST. */
7085 if (!REG_P (SET_DEST (x)))
7086 count_reg_usage (SET_DEST (x), counts, incr);
7087 count_reg_usage (SET_SRC (x), counts, incr);
7088 return;
7090 case CALL_INSN:
7091 count_reg_usage (CALL_INSN_FUNCTION_USAGE (x), counts, incr);
7092 /* Fall through. */
7094 case INSN:
7095 case JUMP_INSN:
7096 count_reg_usage (PATTERN (x), counts, incr);
7098 /* Things used in a REG_EQUAL note aren't dead since loop may try to
7099 use them. */
7101 note = find_reg_equal_equiv_note (x);
7102 if (note)
7104 rtx eqv = XEXP (note, 0);
7106 if (GET_CODE (eqv) == EXPR_LIST)
7107 /* This REG_EQUAL note describes the result of a function call.
7108 Process all the arguments. */
7111 count_reg_usage (XEXP (eqv, 0), counts, incr);
7112 eqv = XEXP (eqv, 1);
7114 while (eqv && GET_CODE (eqv) == EXPR_LIST);
7115 else
7116 count_reg_usage (eqv, counts, incr);
7118 return;
7120 case EXPR_LIST:
7121 if (REG_NOTE_KIND (x) == REG_EQUAL
7122 || (REG_NOTE_KIND (x) != REG_NONNEG && GET_CODE (XEXP (x, 0)) == USE)
7123 /* FUNCTION_USAGE expression lists may include (CLOBBER (mem /u)),
7124 involving registers in the address. */
7125 || GET_CODE (XEXP (x, 0)) == CLOBBER)
7126 count_reg_usage (XEXP (x, 0), counts, incr);
7128 count_reg_usage (XEXP (x, 1), counts, incr);
7129 return;
7131 case ASM_OPERANDS:
7132 /* Iterate over just the inputs, not the constraints as well. */
7133 for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
7134 count_reg_usage (ASM_OPERANDS_INPUT (x, i), counts, incr);
7135 return;
7137 case INSN_LIST:
7138 gcc_unreachable ();
7140 default:
7141 break;
7144 fmt = GET_RTX_FORMAT (code);
7145 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7147 if (fmt[i] == 'e')
7148 count_reg_usage (XEXP (x, i), counts, incr);
7149 else if (fmt[i] == 'E')
7150 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
7151 count_reg_usage (XVECEXP (x, i, j), counts, incr);
7155 /* Return true if set is live. */
7156 static bool
7157 set_live_p (rtx set, rtx insn ATTRIBUTE_UNUSED, /* Only used with HAVE_cc0. */
7158 int *counts)
7160 #ifdef HAVE_cc0
7161 rtx tem;
7162 #endif
7164 if (set_noop_p (set))
7167 #ifdef HAVE_cc0
7168 else if (GET_CODE (SET_DEST (set)) == CC0
7169 && !side_effects_p (SET_SRC (set))
7170 && ((tem = next_nonnote_insn (insn)) == 0
7171 || !INSN_P (tem)
7172 || !reg_referenced_p (cc0_rtx, PATTERN (tem))))
7173 return false;
7174 #endif
7175 else if (!REG_P (SET_DEST (set))
7176 || REGNO (SET_DEST (set)) < FIRST_PSEUDO_REGISTER
7177 || counts[REGNO (SET_DEST (set))] != 0
7178 || side_effects_p (SET_SRC (set)))
7179 return true;
7180 return false;
7183 /* Return true if insn is live. */
7185 static bool
7186 insn_live_p (rtx insn, int *counts)
7188 int i;
7189 if (flag_non_call_exceptions && may_trap_p (PATTERN (insn)))
7190 return true;
7191 else if (GET_CODE (PATTERN (insn)) == SET)
7192 return set_live_p (PATTERN (insn), insn, counts);
7193 else if (GET_CODE (PATTERN (insn)) == PARALLEL)
7195 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
7197 rtx elt = XVECEXP (PATTERN (insn), 0, i);
7199 if (GET_CODE (elt) == SET)
7201 if (set_live_p (elt, insn, counts))
7202 return true;
7204 else if (GET_CODE (elt) != CLOBBER && GET_CODE (elt) != USE)
7205 return true;
7207 return false;
7209 else
7210 return true;
7213 /* Return true if libcall is dead as a whole. */
7215 static bool
7216 dead_libcall_p (rtx insn, int *counts)
7218 rtx note, set, new;
7220 /* See if there's a REG_EQUAL note on this insn and try to
7221 replace the source with the REG_EQUAL expression.
7223 We assume that insns with REG_RETVALs can only be reg->reg
7224 copies at this point. */
7225 note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
7226 if (!note)
7227 return false;
7229 set = single_set (insn);
7230 if (!set)
7231 return false;
7233 new = simplify_rtx (XEXP (note, 0));
7234 if (!new)
7235 new = XEXP (note, 0);
7237 /* While changing insn, we must update the counts accordingly. */
7238 count_reg_usage (insn, counts, -1);
7240 if (validate_change (insn, &SET_SRC (set), new, 0))
7242 count_reg_usage (insn, counts, 1);
7243 remove_note (insn, find_reg_note (insn, REG_RETVAL, NULL_RTX));
7244 remove_note (insn, note);
7245 return true;
7248 if (CONSTANT_P (new))
7250 new = force_const_mem (GET_MODE (SET_DEST (set)), new);
7251 if (new && validate_change (insn, &SET_SRC (set), new, 0))
7253 count_reg_usage (insn, counts, 1);
7254 remove_note (insn, find_reg_note (insn, REG_RETVAL, NULL_RTX));
7255 remove_note (insn, note);
7256 return true;
7260 count_reg_usage (insn, counts, 1);
7261 return false;
7264 /* Scan all the insns and delete any that are dead; i.e., they store a register
7265 that is never used or they copy a register to itself.
7267 This is used to remove insns made obviously dead by cse, loop or other
7268 optimizations. It improves the heuristics in loop since it won't try to
7269 move dead invariants out of loops or make givs for dead quantities. The
7270 remaining passes of the compilation are also sped up. */
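/* Minimal instance (register numbers invented): if counts[130] is 0
   and (reg 130) is a pseudo, then (set (reg 130) (reg 131)) is
   deleted below and counts[131] is decremented, which may in turn
   make the setter of (reg 131) dead on this backward scan.  */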
7273 delete_trivially_dead_insns (rtx insns, int nreg)
7275 int *counts;
7276 rtx insn, prev;
7277 int in_libcall = 0, dead_libcall = 0;
7278 int ndead = 0;
7280 timevar_push (TV_DELETE_TRIVIALLY_DEAD);
7281 /* First count the number of times each register is used. */
7282 counts = xcalloc (nreg, sizeof (int));
7283 for (insn = next_real_insn (insns); insn; insn = next_real_insn (insn))
7284 count_reg_usage (insn, counts, 1);
7286 /* Go from the last insn to the first and delete insns that only set unused
7287 registers or copy a register to itself. As we delete an insn, remove
7288 usage counts for registers it uses.
7290 The first jump optimization pass may leave a real insn as the last
7291 insn in the function. We must not skip that insn or we may end
7292 up deleting code that is not really dead. */
7293 insn = get_last_insn ();
7294 if (! INSN_P (insn))
7295 insn = prev_real_insn (insn);
7297 for (; insn; insn = prev)
7299 int live_insn = 0;
7301 prev = prev_real_insn (insn);
7303 /* Don't delete any insns that are part of a libcall block unless
7304 we can delete the whole libcall block.
7306 Flow or loop might get confused if we did that. Remember
7307 that we are scanning backwards. */
7308 if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
7310 in_libcall = 1;
7311 live_insn = 1;
7312 dead_libcall = dead_libcall_p (insn, counts);
7314 else if (in_libcall)
7315 live_insn = ! dead_libcall;
7316 else
7317 live_insn = insn_live_p (insn, counts);
7319 /* If this is a dead insn, delete it and show registers in it aren't
7320 being used. */
7322 if (! live_insn)
7324 count_reg_usage (insn, counts, -1);
7325 delete_insn_and_edges (insn);
7326 ndead++;
7329 if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
7331 in_libcall = 0;
7332 dead_libcall = 0;
7336 if (dump_file && ndead)
7337 fprintf (dump_file, "Deleted %i trivially dead insns\n",
7338 ndead);
7339 /* Clean up. */
7340 free (counts);
7341 timevar_pop (TV_DELETE_TRIVIALLY_DEAD);
7342 return ndead;
7345 /* This function is called via for_each_rtx. The argument, NEWREG, is
7346 a condition code register with the desired mode. If we are looking
7347 at the same register in a different mode, replace it with
7348 NEWREG. */
7350 static int
7351 cse_change_cc_mode (rtx *loc, void *data)
7353 struct change_cc_mode_args* args = (struct change_cc_mode_args*)data;
7355 if (*loc
7356 && REG_P (*loc)
7357 && REGNO (*loc) == REGNO (args->newreg)
7358 && GET_MODE (*loc) != GET_MODE (args->newreg))
7360 validate_change (args->insn, loc, args->newreg, 1);
7362 return -1;
7364 return 0;
7367 /* Change the mode of any reference to the register REGNO (NEWREG) to
7368 GET_MODE (NEWREG) in INSN. */
static void
cse_change_cc_mode_insn (rtx insn, rtx newreg)
{
  struct change_cc_mode_args args;
  int success;

  if (!INSN_P (insn))
    return;

  args.insn = insn;
  args.newreg = newreg;

  for_each_rtx (&PATTERN (insn), cse_change_cc_mode, &args);
  for_each_rtx (&REG_NOTES (insn), cse_change_cc_mode, &args);
  /* If the following assertion is triggered, there is most probably
     something wrong with the cc_modes_compatible back end function.
     CC modes can only be considered compatible if the insn - with the
     mode replaced by any of the compatible modes - can still be
     recognized.  */
  success = apply_change_group ();
  gcc_assert (success);
}
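/* For reference, an illustrative sketch of the back end hook named
   above (hedged: the example_* name is invented here; a real target
   such as i386 supplies its own definition).  A machine whose CCZmode
   records only the zero flag while CCmode records all the flags might
   define TARGET_CC_MODES_COMPATIBLE along these lines, returning the
   mode usable by both comparisons, or VOIDmode when there is none:

       static enum machine_mode
       example_cc_modes_compatible (enum machine_mode m1,
                                    enum machine_mode m2)
       {
         if (m1 == m2)
           return m1;
         if ((m1 == CCmode && m2 == CCZmode)
             || (m1 == CCZmode && m2 == CCmode))
           return CCmode;
         return VOIDmode;
       }
*/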
/* Change the mode of any reference to the register REGNO (NEWREG) to
   GET_MODE (NEWREG), starting at START.  Stop before END.  Stop at
   any instruction which modifies NEWREG.  */
static void
cse_change_cc_mode_insns (rtx start, rtx end, rtx newreg)
{
  rtx insn;

  for (insn = start; insn != end; insn = NEXT_INSN (insn))
    {
      if (! INSN_P (insn))
	continue;

      if (reg_set_p (newreg, insn))
	return;

      cse_change_cc_mode_insn (insn, newreg);
    }
}
/* BB is a basic block which finishes with CC_REG as a condition code
   register which is set to CC_SRC.  Look through the successors of BB
   to find blocks which have a single predecessor (i.e., this one),
   and look through those blocks for an assignment to CC_REG which is
   equivalent to CC_SRC.  CAN_CHANGE_MODE indicates whether we are
   permitted to change the mode of CC_SRC to a compatible mode.  This
   returns VOIDmode if no equivalent assignments were found.
   Otherwise it returns the mode which CC_SRC should wind up with.

   The main complexity in this function is handling the mode issues.
   We may have more than one duplicate which we can eliminate, and we
   try to find a mode which will work for multiple duplicates.  */
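/* A sketch of the shape of code this routine cleans up (illustrative
   pseudo-RTL; the register numbers and modes are invented).  Block BB
   ends with

       (set (reg:CCZ flags) (compare:CCZ (reg:SI 100) (reg:SI 101)))
       ... conditional jump testing (reg:CCZ flags) ...

   and a successor block, reached only from BB, begins with

       (set (reg:CC flags) (compare:CC (reg:SI 100) (reg:SI 101)))

   If neither input register is modified in between, the second
   comparison is redundant; we delete it, first switching both uses to
   a mode compatible with CCZmode and CCmode if the target provides
   one.  */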
static enum machine_mode
cse_cc_succs (basic_block bb, rtx cc_reg, rtx cc_src, bool can_change_mode)
{
  bool found_equiv;
  enum machine_mode mode;
  unsigned int insn_count;
  edge e;
  rtx insns[2];
  enum machine_mode modes[2];
  rtx last_insns[2];
  unsigned int i;
  rtx newreg;
  edge_iterator ei;

  /* We expect to have two successors.  Look at both before picking
     the final mode for the comparison.  If we have more successors
     (i.e., some sort of table jump, although that seems unlikely),
     then we require all beyond the first two to use the same
     mode.  */

  found_equiv = false;
  mode = GET_MODE (cc_src);
  insn_count = 0;
  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      rtx insn;
      rtx end;

      if (e->flags & EDGE_COMPLEX)
	continue;

      if (EDGE_COUNT (e->dest->preds) != 1
	  || e->dest == EXIT_BLOCK_PTR)
	continue;

      end = NEXT_INSN (BB_END (e->dest));
      for (insn = BB_HEAD (e->dest); insn != end; insn = NEXT_INSN (insn))
	{
	  rtx set;

	  if (! INSN_P (insn))
	    continue;

	  /* If CC_SRC is modified, we have to stop looking for
	     something which uses it.  */
	  if (modified_in_p (cc_src, insn))
	    break;
	  /* Check whether INSN sets CC_REG to CC_SRC.  */
	  set = single_set (insn);
	  if (set
	      && REG_P (SET_DEST (set))
	      && REGNO (SET_DEST (set)) == REGNO (cc_reg))
	    {
	      bool found;
	      enum machine_mode set_mode;
	      enum machine_mode comp_mode;

	      found = false;
	      set_mode = GET_MODE (SET_SRC (set));
	      comp_mode = set_mode;
	      if (rtx_equal_p (cc_src, SET_SRC (set)))
		found = true;
	      else if (GET_CODE (cc_src) == COMPARE
		       && GET_CODE (SET_SRC (set)) == COMPARE
		       && mode != set_mode
		       && rtx_equal_p (XEXP (cc_src, 0),
				       XEXP (SET_SRC (set), 0))
		       && rtx_equal_p (XEXP (cc_src, 1),
				       XEXP (SET_SRC (set), 1)))
		{
		  comp_mode = targetm.cc_modes_compatible (mode, set_mode);
		  if (comp_mode != VOIDmode
		      && (can_change_mode || comp_mode == mode))
		    found = true;
		}
	      if (found)
		{
		  found_equiv = true;
		  if (insn_count < ARRAY_SIZE (insns))
		    {
		      insns[insn_count] = insn;
		      modes[insn_count] = set_mode;
		      last_insns[insn_count] = end;
		      ++insn_count;

		      if (mode != comp_mode)
			{
			  gcc_assert (can_change_mode);
			  mode = comp_mode;

			  /* The modified insn will be re-recognized
			     later.  */
			  PUT_MODE (cc_src, mode);
			}
		    }
		  else
		    {
		      if (set_mode != mode)
			{
			  /* We found a matching expression in the
			     wrong mode, but we don't have room to
			     store it in the array.  Punt.  This case
			     should be rare.  */
			  break;
			}

		      /* INSN sets CC_REG to a value equal to CC_SRC
			 with the right mode.  We can simply delete
			 it.  */
		      delete_insn (insn);
		    }

		  /* We found an instruction to delete.  Keep looking,
		     in the hopes of finding a three-way jump.  */
		  continue;
		}

	      /* We found an instruction which sets the condition
		 code, so don't look any farther.  */
	      break;
	    }

	  /* If INSN sets CC_REG in some other way, don't look any
	     farther.  */
	  if (reg_set_p (cc_reg, insn))
	    break;
	}
      /* If we fell off the bottom of the block, we can keep looking
	 through successors.  We pass CAN_CHANGE_MODE as false because
	 we aren't prepared to handle compatibility between the
	 further blocks and this block.  */
      if (insn == end)
	{
	  enum machine_mode submode;

	  submode = cse_cc_succs (e->dest, cc_reg, cc_src, false);
	  if (submode != VOIDmode)
	    {
	      gcc_assert (submode == mode);
	      found_equiv = true;
	      can_change_mode = false;
	    }
	}
    }
  if (! found_equiv)
    return VOIDmode;

  /* Now INSN_COUNT is the number of instructions we found which set
     CC_REG to a value equivalent to CC_SRC.  The instructions are in
     INSNS.  The modes used by those instructions are in MODES.  */

  newreg = NULL_RTX;
  for (i = 0; i < insn_count; ++i)
    {
      if (modes[i] != mode)
	{
	  /* We need to change the mode of CC_REG in INSNS[i] and
	     subsequent instructions.  */
	  if (! newreg)
	    {
	      if (GET_MODE (cc_reg) == mode)
		newreg = cc_reg;
	      else
		newreg = gen_rtx_REG (mode, REGNO (cc_reg));
	    }
	  cse_change_cc_mode_insns (NEXT_INSN (insns[i]), last_insns[i],
				    newreg);
	}

      delete_insn (insns[i]);
    }

  return mode;
}
/* If we have a fixed condition code register (or two), walk through
   the instructions and try to eliminate duplicate assignments.  */
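/* For orientation, a hedged sketch of the target hook used below (the
   example_* names are invented; a real back end such as i386 supplies
   its own): a machine with a single fixed flags register might define
   TARGET_FIXED_CONDITION_CODE_REGS as

       static bool
       example_fixed_condition_code_regs (unsigned int *p1,
                                          unsigned int *p2)
       {
         *p1 = EXAMPLE_FLAGS_REGNUM;
         *p2 = INVALID_REGNUM;
         return true;
       }

   while returning false instead means this pass has nothing to do.  */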
void
cse_condition_code_reg (void)
{
  unsigned int cc_regno_1;
  unsigned int cc_regno_2;
  rtx cc_reg_1;
  rtx cc_reg_2;
  basic_block bb;

  if (! targetm.fixed_condition_code_regs (&cc_regno_1, &cc_regno_2))
    return;

  cc_reg_1 = gen_rtx_REG (CCmode, cc_regno_1);
  if (cc_regno_2 != INVALID_REGNUM)
    cc_reg_2 = gen_rtx_REG (CCmode, cc_regno_2);
  else
    cc_reg_2 = NULL_RTX;
  FOR_EACH_BB (bb)
    {
      rtx last_insn;
      rtx cc_reg;
      rtx insn;
      rtx cc_src_insn;
      rtx cc_src;
      enum machine_mode mode;
      enum machine_mode orig_mode;

      /* Look for blocks which end with a conditional jump based on a
	 condition code register.  Then look for the instruction which
	 sets the condition code register.  Then look through the
	 successor blocks for instructions which set the condition
	 code register to the same value.  There are other possible
	 uses of the condition code register, but these are by far the
	 most common and the ones which we are most likely to be able
	 to optimize.  */

      last_insn = BB_END (bb);
      if (!JUMP_P (last_insn))
	continue;

      if (reg_referenced_p (cc_reg_1, PATTERN (last_insn)))
	cc_reg = cc_reg_1;
      else if (cc_reg_2 && reg_referenced_p (cc_reg_2, PATTERN (last_insn)))
	cc_reg = cc_reg_2;
      else
	continue;
      cc_src_insn = NULL_RTX;
      cc_src = NULL_RTX;
      for (insn = PREV_INSN (last_insn);
	   insn && insn != PREV_INSN (BB_HEAD (bb));
	   insn = PREV_INSN (insn))
	{
	  rtx set;

	  if (! INSN_P (insn))
	    continue;
	  set = single_set (insn);
	  if (set
	      && REG_P (SET_DEST (set))
	      && REGNO (SET_DEST (set)) == REGNO (cc_reg))
	    {
	      cc_src_insn = insn;
	      cc_src = SET_SRC (set);
	      break;
	    }
	  else if (reg_set_p (cc_reg, insn))
	    break;
	}

      if (! cc_src_insn)
	continue;

      if (modified_between_p (cc_src, cc_src_insn, NEXT_INSN (last_insn)))
	continue;
      /* Now CC_REG is a condition code register used for a
	 conditional jump at the end of the block, and CC_SRC, in
	 CC_SRC_INSN, is the value to which that condition code
	 register is set, and CC_SRC is still meaningful at the end of
	 the basic block.  */

      orig_mode = GET_MODE (cc_src);
      mode = cse_cc_succs (bb, cc_reg, cc_src, true);
      if (mode != VOIDmode)
	{
	  gcc_assert (mode == GET_MODE (cc_src));
	  if (mode != orig_mode)
	    {
	      rtx newreg = gen_rtx_REG (mode, REGNO (cc_reg));

	      cse_change_cc_mode_insn (cc_src_insn, newreg);

	      /* Do the same in the following insns that use the
		 current value of CC_REG within BB.  */
	      cse_change_cc_mode_insns (NEXT_INSN (cc_src_insn),
					NEXT_INSN (last_insn),
					newreg);