/* Common subexpression elimination for GNU compiler.
   Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */

#include "config.h"
/* stdio.h must precede rtl.h for FFS.  */
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tm_p.h"
#include "hard-reg-set.h"
#include "regs.h"
#include "basic-block.h"
#include "flags.h"
#include "real.h"
#include "insn-config.h"
#include "recog.h"
#include "function.h"
#include "expr.h"
#include "toplev.h"
#include "output.h"
#include "ggc.h"
#include "timevar.h"
#include "except.h"
#include "target.h"
#include "params.h"
#include "rtlhooks-def.h"
#include "tree-pass.h"

/* The basic idea of common subexpression elimination is to go
   through the code, keeping a record of expressions that would
   have the same value at the current scan point, and replacing
   expressions encountered with the cheapest equivalent expression.
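
   For instance (an illustrative sketch; the pseudo-register numbers
   are made up), in a block containing

     (set (reg:SI 101) (plus:SI (reg:SI 100) (const_int 4)))
     (set (reg:SI 102) (plus:SI (reg:SI 100) (const_int 4)))

   the second PLUS is found in the hash table, and the second insn can
   be rewritten as the cheaper (set (reg:SI 102) (reg:SI 101)).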

   It is too complicated to keep track of the different possibilities
   when control paths merge in this code; so, at each label, we forget all
   that is known and start fresh.  This can be described as processing each
   extended basic block separately.  We have a separate pass to perform
   global CSE.

   Note CSE can turn a conditional or computed jump into a nop or
   an unconditional jump.  When this occurs we arrange to run the jump
   optimizer after CSE to delete the unreachable code.

   We use two data structures to record the equivalent expressions:
   a hash table for most expressions, and a vector of "quantity
   numbers" to record equivalent (pseudo) registers.

   The use of the special data structure for registers is desirable
   because it is faster.  It is possible because register references
   contain a fairly small number, the register number, taken from
   a contiguously allocated series, and two register references are
   identical if they have the same number.  General expressions
   do not have any such thing, so the only way to retrieve the
   information recorded on an expression other than a register
   is to keep it in a hash table.

   Registers and "quantity numbers":

   At the start of each basic block, all of the (hardware and pseudo)
   registers used in the function are given distinct quantity
   numbers to indicate their contents.  During scan, when the code
   copies one register into another, we copy the quantity number.
   When a register is loaded in any other way, we allocate a new
   quantity number to describe the value generated by this operation.
   `REG_QTY (N)' records what quantity register N is currently thought
   of as containing.

   All real quantity numbers are greater than or equal to zero.
   If register N has not been assigned a quantity, `REG_QTY (N)' will
   equal -N - 1, which is always negative.

   Quantity numbers below zero do not exist and none of the `qty_table'
   entries should be referenced with a negative index.
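
   For instance (an illustrative sketch; the numbers are made up),
   after scanning

     (set (reg:SI 101) (mem:SI (reg:SI 100)))   ; load: new value
     (set (reg:SI 102) (reg:SI 101))            ; register copy

   the load allocates a fresh quantity, say 7, so REG_QTY (101) == 7,
   and the copy merely propagates it, so REG_QTY (102) == 7 as well,
   while any register N never assigned a quantity still has -N - 1.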

   We also maintain a bidirectional chain of registers for each
   quantity number.  The `qty_table' members `first_reg' and `last_reg',
   and `reg_eqv_table' members `next' and `prev' hold these chains.

   The first register in a chain is the one whose lifespan is least local.
   Among equals, it is the one that was seen first.
   We replace any equivalent register with that one.

   If two registers have the same quantity number, REG expressions with
   that quantity number's mode must be in the hash table for both
   registers, and must be in the same class.

   The converse is not true.  Since hard registers may be referenced in
   any mode, two REG expressions might be equivalent in the hash table
   but not have the same quantity number if the quantity number of one
   of the registers does not have the same mode as those expressions.

   Constants and quantity numbers:

   When a quantity has a known constant value, that value is stored
   in the appropriate qty_table `const_rtx'.  This is in addition to
   putting the constant in the hash table as is usual for non-regs.

   Whether a reg or a constant is preferred is determined by the configuration
   macro CONST_COSTS and will often depend on the constant value.  In any
   event, expressions containing constants can be simplified by fold_rtx.

   When a quantity has a known nearly constant value (such as an address
   of a stack slot), that value is stored in the appropriate qty_table
   `const_rtx'.

   Integer constants don't have a machine mode.  However, cse
   determines the intended machine mode from the destination
   of the instruction that moves the constant.  The machine mode
   is recorded in the hash table along with the actual RTL
   constant expression so that different modes are kept separate.
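
   For instance (an illustrative sketch), (const_int 4) moved into an
   SImode register and (const_int 4) moved into a DImode register give
   rise to two distinct table entries for the same rtx, one recorded
   with SImode and one with DImode.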

   Other expressions:

   To record known equivalences among expressions in general
   we use a hash table called `table'.  It has a fixed number of buckets
   that contain chains of `struct table_elt' elements for expressions.
   These chains connect the elements whose expressions have the same
   hash codes.

   Other chains through the same elements connect the elements which
   currently have equivalent values.

   Register references in an expression are canonicalized before hashing
   the expression.  This is done using `reg_qty' and qty_table `first_reg'.
   The hash code of a register reference is computed using the quantity
   number, not the register number.

   When the value of an expression changes, it is necessary to remove from the
   hash table not just that expression but all expressions whose values
   could be different as a result.

   1. If the value changing is in memory, except in special cases
   ANYTHING referring to memory could be changed.  That is because
   nobody knows where a pointer does not point.
   The function `invalidate_memory' removes what is necessary.

   The special cases are when the address is constant or is
   a constant plus a fixed register such as the frame pointer
   or a static chain pointer.  When such addresses are stored in,
   we can tell exactly which other such addresses must be invalidated
   due to overlap.  `invalidate' does this.
   All expressions that refer to non-constant
   memory addresses are also invalidated.  `invalidate_memory' does this.

   2. If the value changing is a register, all expressions
   containing references to that register, and only those,
   must be removed.

   Because searching the entire hash table for expressions that contain
   a register is very slow, we try to figure out when it isn't necessary.
   Precisely, this is necessary only when expressions have been
   entered in the hash table using this register, and then the value has
   changed, and then another expression wants to be added to refer to
   the register's new value.  This sequence of circumstances is rare
   within any one basic block.

   `REG_TICK' and `REG_IN_TABLE', accessors for members of
   cse_reg_info, are used to detect this case.  REG_TICK (i) is
   incremented whenever a value is stored in register i.
   REG_IN_TABLE (i) holds -1 if no references to register i have been
   entered in the table; otherwise, it contains the value REG_TICK (i)
   had when the references were entered.  If we want to enter a
   reference and REG_IN_TABLE (i) != REG_TICK (i), we must scan and
   remove old references.  Until we want to enter a new entry, the
   mere fact that the two vectors don't match makes the entries be
   ignored if anyone tries to match them.
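
   For instance (an illustrative sketch): if (plus (reg 100) (reg 101))
   is entered while REG_TICK (100) == 3, then REG_IN_TABLE (100) becomes
   3.  A later store into register 100 bumps REG_TICK (100) to 4 without
   scanning the table; the stale entry simply stops matching.  Only when
   a new reference to register 100 is about to be entered, and 4 != 3 is
   noticed, are the old references actually swept out.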

   Registers themselves are entered in the hash table as well as in
   the equivalent-register chains.  However, `REG_TICK' and
   `REG_IN_TABLE' do not apply to expressions which are simple
   register references.  These expressions are removed from the table
   immediately when they become invalid, and this can be done even if
   we do not immediately search for all the expressions that refer to
   the register.

   A CLOBBER rtx in an instruction invalidates its operand for further
   reuse.  A CLOBBER or SET rtx whose operand is a MEM:BLK
   invalidates everything that resides in memory.

   Related expressions:

   Constant expressions that differ only by an additive integer
   are called related.  When a constant expression is put in
   the table, the related expression with no constant term
   is also entered.  These are made to point at each other
   so that it is possible to find out if there exists any
   register equivalent to an expression related to a given expression.  */
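
/* For instance (an illustrative sketch), entering
   (const (plus (symbol_ref "x") (const_int 8))) also enters
   (symbol_ref "x") and chains the two elements together through
   `related_value'.  If some register is later known to hold
   (symbol_ref "x"), an equivalent for the offset form can be
   rebuilt from that register plus 8 (see use_related_value).  */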

/* Length of qty_table vector.  We know in advance we will not need
   a quantity number this big.  */

static int max_qty;

/* Next quantity number to be allocated.
   This is 1 + the largest number needed so far.  */

static int next_qty;

/* Per-qty information tracking.

   `first_reg' and `last_reg' track the head and tail of the
   chain of registers which currently contain this quantity.

   `mode' contains the machine mode of this quantity.

   `const_rtx' holds the rtx of the constant value of this
   quantity, if known.  A sum of the frame/arg pointer
   and a constant can also be entered here.  When this holds
   a known value, `const_insn' is the insn which stored the
   constant value.

   `comparison_{code,const,qty}' are used to track when a
   comparison between a quantity and some constant or register has
   been passed.  In such a case, we know the results of the comparison
   in case we see it again.  These members record a comparison that
   is known to be true.  `comparison_code' holds the rtx code of such
   a comparison, else it is set to UNKNOWN and the other two
   comparison members are undefined.  `comparison_const' holds
   the constant being compared against, or zero if the comparison
   is not against a constant.  `comparison_qty' holds the quantity
   being compared against when the result is known.  If the comparison
   is not with a register, `comparison_qty' is -1.  */
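
/* For instance (an illustrative sketch): once a conditional branch on
   (eq (reg:SI 100) (const_int 0)) is known to have been taken, the
   quantity of register 100 can record comparison_code == EQ,
   comparison_const == const0_rtx and comparison_qty == -1, letting a
   later identical test be treated as known true.  */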

struct qty_table_elem
{
  rtx const_rtx;
  rtx const_insn;
  rtx comparison_const;
  int comparison_qty;
  unsigned int first_reg, last_reg;
  /* The sizes of these fields should match the sizes of the
     code and mode fields of struct rtx_def (see rtl.h).  */
  ENUM_BITFIELD(rtx_code) comparison_code : 16;
  ENUM_BITFIELD(machine_mode) mode : 8;
};

/* The table of all qtys, indexed by qty number.  */
static struct qty_table_elem *qty_table;

/* Structure used to pass arguments via for_each_rtx to function
   cse_change_cc_mode.  */
struct change_cc_mode_args
{
  rtx insn;
  rtx newreg;
};

#ifdef HAVE_cc0
/* For machines that have a CC0, we do not record its value in the hash
   table since its use is guaranteed to be the insn immediately following
   its definition and any other insn is presumed to invalidate it.

   Instead, we store below the value last assigned to CC0.  If it should
   happen to be a constant, it is stored in preference to the actual
   assigned value.  In case it is a constant, we store the mode in which
   the constant should be interpreted.  */

static rtx prev_insn_cc0;
static enum machine_mode prev_insn_cc0_mode;

/* Previous actual insn.  0 if at first insn of basic block.  */

static rtx prev_insn;
#endif

/* Insn being scanned.  */

static rtx this_insn;

/* Index by register number, gives the number of the next (or
   previous) register in the chain of registers sharing the same
   value.

   Or -1 if this register is at the end of the chain.

   If REG_QTY (N) == -N - 1, reg_eqv_table[N].next is undefined.  */

/* Per-register equivalence chain.  */
struct reg_eqv_elem
{
  int next, prev;
};

/* The table of all register equivalence chains.  */
static struct reg_eqv_elem *reg_eqv_table;

struct cse_reg_info
{
  /* The timestamp at which this register is initialized.  */
  unsigned int timestamp;

  /* The quantity number of the register's current contents.  */
  int reg_qty;

  /* The number of times the register has been altered in the current
     basic block.  */
  int reg_tick;

  /* The REG_TICK value at which rtx's containing this register are
     valid in the hash table.  If this does not equal the current
     reg_tick value, such expressions existing in the hash table are
     invalid.  */
  int reg_in_table;

  /* The SUBREG that was set when REG_TICK was last incremented.  Set
     to -1 if the last store was to the whole register, not a subreg.  */
  unsigned int subreg_ticked;
};

/* A table of cse_reg_info indexed by register numbers.  */
static struct cse_reg_info *cse_reg_info_table;

/* The size of the above table.  */
static unsigned int cse_reg_info_table_size;

/* The index of the first entry that has not been initialized.  */
static unsigned int cse_reg_info_table_first_uninitialized;

/* The timestamp at the beginning of the current run of
   cse_basic_block.  We increment this variable at the beginning of
   the current run of cse_basic_block.  The timestamp field of a
   cse_reg_info entry matches the value of this variable if and only
   if the entry has been initialized during the current run of
   cse_basic_block.  */
static unsigned int cse_reg_info_timestamp;

/* A HARD_REG_SET containing all the hard registers for which there is
   currently a REG expression in the hash table.  Note the difference
   from the above variables, which indicate if the REG is mentioned in some
   expression in the table.  */

static HARD_REG_SET hard_regs_in_table;

/* CUID of insn that starts the basic block currently being cse-processed.  */

static int cse_basic_block_start;

/* CUID of insn that ends the basic block currently being cse-processed.  */

static int cse_basic_block_end;

/* Vector mapping INSN_UIDs to cuids.
   The cuids are like uids but always increase monotonically.
   We use them to see whether a reg is used outside a given basic block.  */

static int *uid_cuid;

/* Highest UID in UID_CUID.  */
static int max_uid;

/* Get the cuid of an insn.  */

#define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])

/* Nonzero if this pass has made changes, and therefore it's
   worthwhile to run the garbage collector.  */

static int cse_altered;

/* Nonzero if cse has altered conditional jump insns
   in such a way that jump optimization should be redone.  */

static int cse_jumps_altered;

/* Nonzero if we put a LABEL_REF into the hash table for an INSN without
   a REG_LABEL note; if so, we have to rerun jump after CSE to put in
   the note.  */
static int recorded_label_ref;

/* canon_hash stores 1 in do_not_record
   if it notices a reference to CC0, PC, or some other volatile
   subexpression.  */

static int do_not_record;

/* canon_hash stores 1 in hash_arg_in_memory
   if it notices a reference to memory within the expression being hashed.  */

static int hash_arg_in_memory;

/* The hash table contains buckets which are chains of `struct table_elt's,
   each recording one expression's information.
   That expression is in the `exp' field.

   The `canon_exp' field contains a canonical (from the point of view of
   alias analysis) version of the `exp' field.

   Those elements with the same hash code are chained in both directions
   through the `next_same_hash' and `prev_same_hash' fields.

   Each set of expressions with equivalent values
   are on a two-way chain through the `next_same_value'
   and `prev_same_value' fields, and all point with
   the `first_same_value' field at the first element in
   that chain.  The chain is in order of increasing cost.
   Each element's cost value is in its `cost' field.

   The `in_memory' field is nonzero for elements that
   involve any reference to memory.  These elements are removed
   whenever a write is done to an unidentified location in memory.
   To be safe, we assume that a memory address is unidentified unless
   the address is either a symbol constant or a constant plus
   the frame pointer or argument pointer.

   The `related_value' field is used to connect related expressions
   (that differ by adding an integer).
   The related expressions are chained in a circular fashion.
   `related_value' is zero for expressions for which this
   chain is not useful.

   The `cost' field stores the cost of this element's expression.
   The `regcost' field stores the value returned by approx_reg_cost for
   this element's expression.

   The `is_const' flag is set if the element is a constant (including
   a fixed address).

   The `flag' field is used as a temporary during some search routines.

   The `mode' field is usually the same as GET_MODE (`exp'), but
   if `exp' is a CONST_INT and has no machine mode then the `mode'
   field is the mode it was being used as.  Each constant is
   recorded separately for each mode it is used with.  */

struct table_elt
{
  rtx exp;
  rtx canon_exp;
  struct table_elt *next_same_hash;
  struct table_elt *prev_same_hash;
  struct table_elt *next_same_value;
  struct table_elt *prev_same_value;
  struct table_elt *first_same_value;
  struct table_elt *related_value;
  int cost;
  int regcost;
  /* The size of this field should match the size
     of the mode field of struct rtx_def (see rtl.h).  */
  ENUM_BITFIELD(machine_mode) mode : 8;
  char in_memory;
  char is_const;
  char flag;
};

/* We don't want a lot of buckets, because we rarely have very many
   things stored in the hash table, and a lot of buckets slows
   down a lot of loops that happen frequently.  */
#define HASH_SHIFT 5
#define HASH_SIZE (1 << HASH_SHIFT)
#define HASH_MASK (HASH_SIZE - 1)

/* Compute hash code of X in mode M.  Special-case the case where X is
   a pseudo register (hard registers may require `do_not_record' to be
   set).  */

#define HASH(X, M)						\
 ((REG_P (X) && REGNO (X) >= FIRST_PSEUDO_REGISTER		\
  ? (((unsigned) REG << 7) + (unsigned) REG_QTY (REGNO (X)))	\
  : canon_hash (X, M)) & HASH_MASK)
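
/* Note that for pseudo registers the hash is taken from the quantity
   number, so two pseudos currently known to hold the same value hash
   to the same bucket; e.g. (an illustrative sketch) after
   (set (reg:SI 102) (reg:SI 101)) both registers share REG_QTY and
   therefore one hash code.  */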

/* Like HASH, but without side-effects.  */
#define SAFE_HASH(X, M)						\
 ((REG_P (X) && REGNO (X) >= FIRST_PSEUDO_REGISTER		\
  ? (((unsigned) REG << 7) + (unsigned) REG_QTY (REGNO (X)))	\
  : safe_hash (X, M)) & HASH_MASK)

/* Determine whether register number N is considered a fixed register for the
   purpose of approximating register costs.
   It is desirable to replace other regs with fixed regs, to reduce need for
   non-fixed hard regs.
   A reg wins if it is either the frame pointer or designated as fixed.  */
#define FIXED_REGNO_P(N)  \
  ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
   || fixed_regs[N] || global_regs[N])

/* Compute cost of X, as stored in the `cost' field of a table_elt.  Fixed
   hard registers and pointers into the frame are the cheapest with a cost
   of 0.  Next come pseudos with a cost of one and other hard registers with
   a cost of 2.  Aside from these special cases, call `rtx_cost'.  */

#define CHEAP_REGNO(N)						\
  (REGNO_PTR_FRAME_P(N)						\
   || (HARD_REGISTER_NUM_P (N)					\
       && FIXED_REGNO_P (N) && REGNO_REG_CLASS (N) != NO_REGS))

#define COST(X) (REG_P (X) ? 0 : notreg_cost (X, SET))
#define COST_IN(X,OUTER) (REG_P (X) ? 0 : notreg_cost (X, OUTER))

/* Get the number of times this register has been updated in this
   basic block.  */

#define REG_TICK(N) (get_cse_reg_info (N)->reg_tick)

/* Get the point at which REG was recorded in the table.  */

#define REG_IN_TABLE(N) (get_cse_reg_info (N)->reg_in_table)

/* Get the SUBREG set at the last increment to REG_TICK (-1 if not a
   SUBREG).  */

#define SUBREG_TICKED(N) (get_cse_reg_info (N)->subreg_ticked)

/* Get the quantity number for REG.  */

#define REG_QTY(N) (get_cse_reg_info (N)->reg_qty)

/* Determine if the quantity number for register X represents a valid index
   into the qty_table.  */

#define REGNO_QTY_VALID_P(N) (REG_QTY (N) >= 0)

static struct table_elt *table[HASH_SIZE];

/* Chain of `struct table_elt's made so far for this function
   but currently removed from the table.  */

static struct table_elt *free_element_chain;

/* Set to the cost of a constant pool reference if one was found for a
   symbolic constant.  If this was found, it means we should try to
   convert constants into constant pool entries if they don't fit in
   the insn.  */

static int constant_pool_entries_cost;
static int constant_pool_entries_regcost;

/* This data describes a block that will be processed by cse_basic_block.  */

struct cse_basic_block_data
{
  /* Lowest CUID value of insns in block.  */
  int low_cuid;
  /* Highest CUID value of insns in block.  */
  int high_cuid;
  /* Total number of SETs in block.  */
  int nsets;
  /* Last insn in the block.  */
  rtx last;
  /* Size of current branch path, if any.  */
  int path_size;
  /* Current branch path, indicating which branches will be taken.  */
  struct branch_path
    {
      /* The branch insn.  */
      rtx branch;
      /* Whether it should be taken or not.  AROUND is the same as taken
	 except that it is used when the destination label is not preceded
	 by a BARRIER.  */
      enum taken {PATH_TAKEN, PATH_NOT_TAKEN, PATH_AROUND} status;
    } *path;
};

static bool fixed_base_plus_p (rtx x);
static int notreg_cost (rtx, enum rtx_code);
static int approx_reg_cost_1 (rtx *, void *);
static int approx_reg_cost (rtx);
static int preferable (int, int, int, int);
static void new_basic_block (void);
static void make_new_qty (unsigned int, enum machine_mode);
static void make_regs_eqv (unsigned int, unsigned int);
static void delete_reg_equiv (unsigned int);
static int mention_regs (rtx);
static int insert_regs (rtx, struct table_elt *, int);
static void remove_from_table (struct table_elt *, unsigned);
static struct table_elt *lookup (rtx, unsigned, enum machine_mode);
static struct table_elt *lookup_for_remove (rtx, unsigned, enum machine_mode);
static rtx lookup_as_function (rtx, enum rtx_code);
static struct table_elt *insert (rtx, struct table_elt *, unsigned,
				 enum machine_mode);
static void merge_equiv_classes (struct table_elt *, struct table_elt *);
static void invalidate (rtx, enum machine_mode);
static int cse_rtx_varies_p (rtx, int);
static void remove_invalid_refs (unsigned int);
static void remove_invalid_subreg_refs (unsigned int, unsigned int,
					enum machine_mode);
static void rehash_using_reg (rtx);
static void invalidate_memory (void);
static void invalidate_for_call (void);
static rtx use_related_value (rtx, struct table_elt *);

static inline unsigned canon_hash (rtx, enum machine_mode);
static inline unsigned safe_hash (rtx, enum machine_mode);
static unsigned hash_rtx_string (const char *);

static rtx canon_reg (rtx, rtx);
static void find_best_addr (rtx, rtx *, enum machine_mode);
static enum rtx_code find_comparison_args (enum rtx_code, rtx *, rtx *,
					   enum machine_mode *,
					   enum machine_mode *);
static rtx fold_rtx (rtx, rtx);
static rtx equiv_constant (rtx);
static void record_jump_equiv (rtx, int);
static void record_jump_cond (enum rtx_code, enum machine_mode, rtx, rtx,
			      int);
static void cse_insn (rtx, rtx);
static void cse_end_of_basic_block (rtx, struct cse_basic_block_data *,
				    int, int);
static int addr_affects_sp_p (rtx);
static void invalidate_from_clobbers (rtx);
static rtx cse_process_notes (rtx, rtx);
static void invalidate_skipped_set (rtx, rtx, void *);
static void invalidate_skipped_block (rtx);
static rtx cse_basic_block (rtx, rtx, struct branch_path *);
static void count_reg_usage (rtx, int *, rtx, int);
static int check_for_label_ref (rtx *, void *);
extern void dump_class (struct table_elt*);
static void get_cse_reg_info_1 (unsigned int regno);
static struct cse_reg_info * get_cse_reg_info (unsigned int regno);
static int check_dependence (rtx *, void *);

static void flush_hash_table (void);
static bool insn_live_p (rtx, int *);
static bool set_live_p (rtx, rtx, int *);
static bool dead_libcall_p (rtx, int *);
static int cse_change_cc_mode (rtx *, void *);
static void cse_change_cc_mode_insn (rtx, rtx);
static void cse_change_cc_mode_insns (rtx, rtx, rtx);
static enum machine_mode cse_cc_succs (basic_block, rtx, rtx, bool);

#undef RTL_HOOKS_GEN_LOWPART
#define RTL_HOOKS_GEN_LOWPART gen_lowpart_if_possible

static const struct rtl_hooks cse_rtl_hooks = RTL_HOOKS_INITIALIZER;

/* Nonzero if X has the form (PLUS frame-pointer integer).  We check for
   virtual regs here because the simplify_*_operation routines are called
   by integrate.c, which is called before virtual register instantiation.  */

static bool
fixed_base_plus_p (rtx x)
{
  switch (GET_CODE (x))
    {
    case REG:
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx)
	return true;
      if (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])
	return true;
      if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
	  && REGNO (x) <= LAST_VIRTUAL_REGISTER)
	return true;
      return false;

    case PLUS:
      if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	return false;
      return fixed_base_plus_p (XEXP (x, 0));

    default:
      return false;
    }
}

/* Dump the expressions in the equivalence class indicated by CLASSP.
   This function is used only for debugging.  */
void
dump_class (struct table_elt *classp)
{
  struct table_elt *elt;

  fprintf (stderr, "Equivalence chain for ");
  print_rtl (stderr, classp->exp);
  fprintf (stderr, ": \n");

  for (elt = classp->first_same_value; elt; elt = elt->next_same_value)
    {
      print_rtl (stderr, elt->exp);
      fprintf (stderr, "\n");
    }
}

/* Subroutine of approx_reg_cost; called through for_each_rtx.  */

static int
approx_reg_cost_1 (rtx *xp, void *data)
{
  rtx x = *xp;
  int *cost_p = data;

  if (x && REG_P (x))
    {
      unsigned int regno = REGNO (x);

      if (! CHEAP_REGNO (regno))
	{
	  if (regno < FIRST_PSEUDO_REGISTER)
	    {
	      if (SMALL_REGISTER_CLASSES)
		return 1;
	      *cost_p += 2;
	    }
	  else
	    *cost_p += 1;
	}
    }

  return 0;
}

/* Return an estimate of the cost of the registers used in an rtx.
   This is mostly the number of different REG expressions in the rtx;
   however for some exceptions like fixed registers we use a cost of
   0.  If any other hard register reference occurs, return MAX_COST.  */

static int
approx_reg_cost (rtx x)
{
  int cost = 0;

  if (for_each_rtx (&x, approx_reg_cost_1, (void *) &cost))
    return MAX_COST;

  return cost;
}

/* Return a canonical version of X for use as an address, in the sense
   that all multiplications are represented as MULT instead of a
   multiply by a power of 2 being represented as ASHIFT.  */

static rtx
canon_for_address (rtx x)
{
  enum rtx_code code;
  enum machine_mode mode;
  rtx new = 0;
  int i;
  const char *fmt;

  if (!x)
    return x;

  code = GET_CODE (x);
  mode = GET_MODE (x);

  switch (code)
    {
    case ASHIFT:
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
	  && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (mode)
	  && INTVAL (XEXP (x, 1)) >= 0)
	{
	  new = canon_for_address (XEXP (x, 0));
	  new = gen_rtx_MULT (mode, new,
			      gen_int_mode ((HOST_WIDE_INT) 1
					    << INTVAL (XEXP (x, 1)),
					    mode));
	}
      break;
    default:
      break;
    }
  if (new)
    return new;

  /* Now recursively process each operand of this operation.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    if (fmt[i] == 'e')
      {
	new = canon_for_address (XEXP (x, i));
	XEXP (x, i) = new;
      }
  return x;
}
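
/* For instance (an illustrative sketch),
   (ashift:SI (reg:SI 100) (const_int 2)) is rewritten by
   canon_for_address as (mult:SI (reg:SI 100) (const_int 4)).  */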

/* Return a negative value if an rtx A, whose costs are given by COST_A
   and REGCOST_A, is more desirable than an rtx B.
   Return a positive value if A is less desirable, or 0 if the two are
   equally good.  */
static int
preferable (int cost_a, int regcost_a, int cost_b, int regcost_b)
{
  /* First, get rid of cases involving expressions that are entirely
     unwanted.  */
  if (cost_a != cost_b)
    {
      if (cost_a == MAX_COST)
	return 1;
      if (cost_b == MAX_COST)
	return -1;
    }

  /* Avoid extending lifetimes of hardregs.  */
  if (regcost_a != regcost_b)
    {
      if (regcost_a == MAX_COST)
	return 1;
      if (regcost_b == MAX_COST)
	return -1;
    }

  /* Normal operation costs take precedence.  */
  if (cost_a != cost_b)
    return cost_a - cost_b;
  /* Only if these are identical consider effects on register pressure.  */
  if (regcost_a != regcost_b)
    return regcost_a - regcost_b;
  return 0;
}
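
/* preferable thus acts like a qsort-style comparator on (cost, regcost)
   pairs: a MAX_COST value on either axis dominates, then the raw costs
   decide, and register costs only break ties.  */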

/* Internal function, to compute cost when X is not a register; called
   from COST macro to keep it simple.  */

static int
notreg_cost (rtx x, enum rtx_code outer)
{
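  /* A cheap special case: a lowpart SUBREG of a register, where the
     narrowing is a truly no-op truncation, costs nothing.  Everything
     else is costed by rtx_cost, scaled by 2 (apparently so that plain
     registers, whose COST is 0, always look cheaper).  */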
  return ((GET_CODE (x) == SUBREG
	   && REG_P (SUBREG_REG (x))
	   && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
	   && GET_MODE_CLASS (GET_MODE (SUBREG_REG (x))) == MODE_INT
	   && (GET_MODE_SIZE (GET_MODE (x))
	       < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
	   && subreg_lowpart_p (x)
	   && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (GET_MODE (x)),
				     GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))))
	  ? 0
	  : rtx_cost (x, outer) * 2);
}

/* Initialize CSE_REG_INFO_TABLE.  */

static void
init_cse_reg_info (unsigned int nregs)
{
  /* Do we need to grow the table?  */
  if (nregs > cse_reg_info_table_size)
    {
      unsigned int new_size;

      if (cse_reg_info_table_size < 2048)
	{
	  /* Compute a new size that is a power of 2 and no smaller
	     than the larger of NREGS and 64.  */
	  new_size = (cse_reg_info_table_size
		      ? cse_reg_info_table_size : 64);

	  while (new_size < nregs)
	    new_size *= 2;
	}
      else
	{
	  /* If we need a big table, allocate just enough to hold
	     NREGS registers.  */
	  new_size = nregs;
	}

      /* Reallocate the table with NEW_SIZE entries.  */
      if (cse_reg_info_table)
	free (cse_reg_info_table);
      cse_reg_info_table = xmalloc (sizeof (struct cse_reg_info)
				    * new_size);
      cse_reg_info_table_size = new_size;
      cse_reg_info_table_first_uninitialized = 0;
    }

  /* Do we have all of the first NREGS entries initialized?  */
  if (cse_reg_info_table_first_uninitialized < nregs)
    {
      unsigned int old_timestamp = cse_reg_info_timestamp - 1;
      unsigned int i;

      /* Put the old timestamp on newly allocated entries so that they
	 will all be considered out of date.  We do not touch those
	 entries beyond the first NREGS entries to be nice to the
	 virtual memory.  */
      for (i = cse_reg_info_table_first_uninitialized; i < nregs; i++)
	cse_reg_info_table[i].timestamp = old_timestamp;

      cse_reg_info_table_first_uninitialized = nregs;
    }
}

/* Given REGNO, initialize the cse_reg_info entry for REGNO.  */

static void
get_cse_reg_info_1 (unsigned int regno)
{
  /* Set TIMESTAMP field to CSE_REG_INFO_TIMESTAMP so that this
     entry will be considered to have been initialized.  */
  cse_reg_info_table[regno].timestamp = cse_reg_info_timestamp;

  /* Initialize the rest of the entry.  */
  cse_reg_info_table[regno].reg_tick = 1;
  cse_reg_info_table[regno].reg_in_table = -1;
  cse_reg_info_table[regno].subreg_ticked = -1;
  cse_reg_info_table[regno].reg_qty = -regno - 1;
}

/* Find a cse_reg_info entry for REGNO.  */

static inline struct cse_reg_info *
get_cse_reg_info (unsigned int regno)
{
  struct cse_reg_info *p = &cse_reg_info_table[regno];

  /* If this entry has not been initialized, go ahead and initialize
     it.  */
  if (p->timestamp != cse_reg_info_timestamp)
    get_cse_reg_info_1 (regno);

  return p;
}

/* Clear the hash table and initialize each register with its own quantity,
   for a new basic block.  */

static void
new_basic_block (void)
{
  int i;

  next_qty = 0;

  /* Invalidate cse_reg_info_table.  */
  cse_reg_info_timestamp++;

  /* Clear out hash table state for this pass.  */
  CLEAR_HARD_REG_SET (hard_regs_in_table);

  /* The per-quantity values used to be initialized here, but it is
     much faster to initialize each as it is made in `make_new_qty'.  */

  for (i = 0; i < HASH_SIZE; i++)
    {
      struct table_elt *first;

      first = table[i];
      if (first != NULL)
	{
	  struct table_elt *last = first;

	  table[i] = NULL;

	  while (last->next_same_hash != NULL)
	    last = last->next_same_hash;

	  /* Now relink this hash chain entirely into
	     the free element list.  */

	  last->next_same_hash = free_element_chain;
	  free_element_chain = first;
	}
    }

#ifdef HAVE_cc0
  prev_insn = 0;
  prev_insn_cc0 = 0;
#endif
}

/* Say that register REG contains a quantity in mode MODE not in any
   register before and initialize that quantity.  */

static void
make_new_qty (unsigned int reg, enum machine_mode mode)
{
  int q;
  struct qty_table_elem *ent;
  struct reg_eqv_elem *eqv;

  gcc_assert (next_qty < max_qty);

  q = REG_QTY (reg) = next_qty++;
  ent = &qty_table[q];
  ent->first_reg = reg;
  ent->last_reg = reg;
  ent->mode = mode;
  ent->const_rtx = ent->const_insn = NULL_RTX;
  ent->comparison_code = UNKNOWN;

  eqv = &reg_eqv_table[reg];
  eqv->next = eqv->prev = -1;
}

/* Make reg NEW equivalent to reg OLD.
   OLD is not changing; NEW is.  */

static void
make_regs_eqv (unsigned int new, unsigned int old)
{
  unsigned int lastr, firstr;
  int q = REG_QTY (old);
  struct qty_table_elem *ent;

  ent = &qty_table[q];

  /* Nothing should become eqv until it has a "non-invalid" qty number.  */
  gcc_assert (REGNO_QTY_VALID_P (old));

  REG_QTY (new) = q;
  firstr = ent->first_reg;
  lastr = ent->last_reg;

  /* Prefer fixed hard registers to anything.  Prefer pseudo regs to other
     hard regs.  Among pseudos, if NEW will live longer than any other reg
     of the same qty, and that is beyond the current basic block,
     make it the new canonical replacement for this qty.  */
  if (! (firstr < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (firstr))
      /* Certain fixed registers might be of the class NO_REGS.  This means
	 that not only can they not be allocated by the compiler, but
	 they cannot be used in substitutions or canonicalizations
	 either.  */
      && (new >= FIRST_PSEUDO_REGISTER || REGNO_REG_CLASS (new) != NO_REGS)
      && ((new < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (new))
	  || (new >= FIRST_PSEUDO_REGISTER
	      && (firstr < FIRST_PSEUDO_REGISTER
		  || ((uid_cuid[REGNO_LAST_UID (new)] > cse_basic_block_end
		       || (uid_cuid[REGNO_FIRST_UID (new)]
			   < cse_basic_block_start))
		      && (uid_cuid[REGNO_LAST_UID (new)]
			  > uid_cuid[REGNO_LAST_UID (firstr)]))))))
    {
      reg_eqv_table[firstr].prev = new;
      reg_eqv_table[new].next = firstr;
      reg_eqv_table[new].prev = -1;
      ent->first_reg = new;
    }
  else
    {
      /* If NEW is a hard reg (known to be non-fixed), insert at end.
	 Otherwise, insert before any non-fixed hard regs that are at the
	 end.  Registers of class NO_REGS cannot be used as an
	 equivalent for anything.  */
      while (lastr < FIRST_PSEUDO_REGISTER && reg_eqv_table[lastr].prev >= 0
	     && (REGNO_REG_CLASS (lastr) == NO_REGS || ! FIXED_REGNO_P (lastr))
	     && new >= FIRST_PSEUDO_REGISTER)
	lastr = reg_eqv_table[lastr].prev;
      reg_eqv_table[new].next = reg_eqv_table[lastr].next;
      if (reg_eqv_table[lastr].next >= 0)
	reg_eqv_table[reg_eqv_table[lastr].next].prev = new;
      else
	qty_table[q].last_reg = new;
      reg_eqv_table[lastr].next = new;
      reg_eqv_table[new].prev = lastr;
    }
}

/* Remove REG from its equivalence class.  */

static void
delete_reg_equiv (unsigned int reg)
{
  struct qty_table_elem *ent;
  int q = REG_QTY (reg);
  int p, n;

  /* If invalid, do nothing.  */
  if (! REGNO_QTY_VALID_P (reg))
    return;

  ent = &qty_table[q];

  p = reg_eqv_table[reg].prev;
  n = reg_eqv_table[reg].next;

  if (n != -1)
    reg_eqv_table[n].prev = p;
  else
    ent->last_reg = p;
  if (p != -1)
    reg_eqv_table[p].next = n;
  else
    ent->first_reg = n;

  REG_QTY (reg) = -reg - 1;
}

/* Remove any invalid expressions from the hash table
   that refer to any of the registers contained in expression X.

   Make sure that newly inserted references to those registers
   as subexpressions will be considered valid.

   mention_regs is not called when a register itself
   is being stored in the table.

   Return 1 if we have done something that may have changed the hash code
   of X.  */

static int
mention_regs (rtx x)
{
  enum rtx_code code;
  int i, j;
  const char *fmt;
  int changed = 0;

  if (x == 0)
    return 0;

  code = GET_CODE (x);
  if (code == REG)
    {
      unsigned int regno = REGNO (x);
      unsigned int endregno
	= regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
		   : hard_regno_nregs[regno][GET_MODE (x)]);
      unsigned int i;

      for (i = regno; i < endregno; i++)
	{
	  if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
	    remove_invalid_refs (i);

	  REG_IN_TABLE (i) = REG_TICK (i);
	  SUBREG_TICKED (i) = -1;
	}

      return 0;
    }

  /* If this is a SUBREG, we don't want to discard other SUBREGs of the same
     pseudo if they don't use overlapping words.  We handle only pseudos
     here for simplicity.  */
  if (code == SUBREG && REG_P (SUBREG_REG (x))
      && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER)
    {
      unsigned int i = REGNO (SUBREG_REG (x));

      if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
	{
	  /* If REG_IN_TABLE (i) differs from REG_TICK (i) by one, and
	     the last store to this register really stored into this
	     subreg, then remove the memory of this subreg.
	     Otherwise, remove any memory of the entire register and
	     all its subregs from the table.  */
	  if (REG_TICK (i) - REG_IN_TABLE (i) > 1
	      || SUBREG_TICKED (i) != REGNO (SUBREG_REG (x)))
	    remove_invalid_refs (i);
	  else
	    remove_invalid_subreg_refs (i, SUBREG_BYTE (x), GET_MODE (x));
	}

      REG_IN_TABLE (i) = REG_TICK (i);
      SUBREG_TICKED (i) = REGNO (SUBREG_REG (x));
      return 0;
    }

  /* If X is a comparison or a COMPARE and either operand is a register
     that does not have a quantity, give it one.  This is so that a later
     call to record_jump_equiv won't cause X to be assigned a different
     hash code and not found in the table after that call.

     It is not necessary to do this here, since rehash_using_reg can
     fix up the table later, but doing this here eliminates the need to
     call that expensive function in the most common case where the only
     use of the register is in the comparison.  */

  if (code == COMPARE || COMPARISON_P (x))
    {
      if (REG_P (XEXP (x, 0))
	  && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
	if (insert_regs (XEXP (x, 0), NULL, 0))
	  {
	    rehash_using_reg (XEXP (x, 0));
	    changed = 1;
	  }

      if (REG_P (XEXP (x, 1))
	  && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
	if (insert_regs (XEXP (x, 1), NULL, 0))
	  {
	    rehash_using_reg (XEXP (x, 1));
	    changed = 1;
	  }
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      changed |= mention_regs (XEXP (x, i));
    else if (fmt[i] == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
	changed |= mention_regs (XVECEXP (x, i, j));

  return changed;
}

/* Update the register quantities for inserting X into the hash table
   with a value equivalent to CLASSP.
   (If the class does not contain a REG, it is irrelevant.)
   If MODIFIED is nonzero, X is a destination; it is being modified.
   Note that delete_reg_equiv should be called on a register
   before insert_regs is done on that register with MODIFIED != 0.

   Nonzero value means that elements of reg_qty have changed
   so X's hash code may be different.  */

static int
insert_regs (rtx x, struct table_elt *classp, int modified)
{
  if (REG_P (x))
    {
      unsigned int regno = REGNO (x);
      int qty_valid;

      /* If REGNO is in the equivalence table already but is of the
	 wrong mode for that equivalence, don't do anything here.  */

      qty_valid = REGNO_QTY_VALID_P (regno);
      if (qty_valid)
	{
	  struct qty_table_elem *ent = &qty_table[REG_QTY (regno)];

	  if (ent->mode != GET_MODE (x))
	    return 0;
	}

      if (modified || ! qty_valid)
	{
	  if (classp)
	    for (classp = classp->first_same_value;
		 classp != 0;
		 classp = classp->next_same_value)
	      if (REG_P (classp->exp)
		  && GET_MODE (classp->exp) == GET_MODE (x))
		{
		  unsigned c_regno = REGNO (classp->exp);

		  gcc_assert (REGNO_QTY_VALID_P (c_regno));

		  /* Suppose that 5 is hard reg and 100 and 101 are
		     pseudos.  Consider

		     (set (reg:si 100) (reg:si 5))
		     (set (reg:si 5) (reg:si 100))
		     (set (reg:di 101) (reg:di 5))

		     We would now set REG_QTY (101) = REG_QTY (5), but the
		     entry for 5 is in SImode.  When we use this later in
		     copy propagation, we get the register in wrong mode.  */
		  if (qty_table[REG_QTY (c_regno)].mode != GET_MODE (x))
		    continue;

		  make_regs_eqv (regno, c_regno);
		  return 1;
		}

	  /* Mention_regs for a SUBREG checks if REG_TICK is exactly one larger
	     than REG_IN_TABLE to find out if there was only a single preceding
	     invalidation - for the SUBREG - or another one, which would be
	     for the full register.  However, if we find here that REG_TICK
	     indicates that the register is invalid, it means that it has
	     been invalidated in a separate operation.  The SUBREG might be used
	     now (then this is a recursive call), or we might use the full REG
	     now and a SUBREG of it later.  So bump up REG_TICK so that
	     mention_regs will do the right thing.  */
	  if (! modified
	      && REG_IN_TABLE (regno) >= 0
	      && REG_TICK (regno) == REG_IN_TABLE (regno) + 1)
	    REG_TICK (regno)++;
	  make_new_qty (regno, GET_MODE (x));
	  return 1;
	}

      return 0;
    }

  /* If X is a SUBREG, we will likely be inserting the inner register in the
     table.  If that register doesn't have an assigned quantity number at
     this point but does later, the insertion that we will be doing now will
     not be accessible because its hash code will have changed.  So assign
     a quantity number now.  */

  else if (GET_CODE (x) == SUBREG && REG_P (SUBREG_REG (x))
	   && ! REGNO_QTY_VALID_P (REGNO (SUBREG_REG (x))))
    {
      insert_regs (SUBREG_REG (x), NULL, 0);
      mention_regs (x);
      return 1;
    }
  else
    return mention_regs (x);
}

/* Look in or update the hash table.  */

/* Remove table element ELT from use in the table.
   HASH is its hash code, made using the HASH macro.
   It's an argument because often that is known in advance
   and we save much time not recomputing it.  */

static void
remove_from_table (struct table_elt *elt, unsigned int hash)
{
  if (elt == 0)
    return;

  /* Mark this element as removed.  See cse_insn.  */
  elt->first_same_value = 0;

  /* Remove the table element from its equivalence class.  */

  {
    struct table_elt *prev = elt->prev_same_value;
    struct table_elt *next = elt->next_same_value;

    if (next)
      next->prev_same_value = prev;

    if (prev)
      prev->next_same_value = next;
    else
      {
	struct table_elt *newfirst = next;
	while (next)
	  {
	    next->first_same_value = newfirst;
	    next = next->next_same_value;
	  }
      }
  }

  /* Remove the table element from its hash bucket.  */

  {
    struct table_elt *prev = elt->prev_same_hash;
    struct table_elt *next = elt->next_same_hash;

    if (next)
      next->prev_same_hash = prev;

    if (prev)
      prev->next_same_hash = next;
    else if (table[hash] == elt)
      table[hash] = next;
    else
      {
	/* This entry is not in the proper hash bucket.  This can happen
	   when two classes were merged by `merge_equiv_classes'.  Search
	   for the hash bucket that it heads.  This happens only very
	   rarely, so the cost is acceptable.  */
	for (hash = 0; hash < HASH_SIZE; hash++)
	  if (table[hash] == elt)
	    table[hash] = next;
      }
  }

  /* Remove the table element from its related-value circular chain.  */

  if (elt->related_value != 0 && elt->related_value != elt)
    {
      struct table_elt *p = elt->related_value;

      while (p->related_value != elt)
	p = p->related_value;
      p->related_value = elt->related_value;
      if (p->related_value == p)
	p->related_value = 0;
    }

  /* Now add it to the free element chain.  */
  elt->next_same_hash = free_element_chain;
  free_element_chain = elt;
}

/* Look up X in the hash table and return its table element,
   or 0 if X is not in the table.

   MODE is the machine-mode of X, or if X is an integer constant
   with VOIDmode then MODE is the mode with which X will be used.

   Here we are satisfied to find an expression whose tree structure
   looks like X.  */

static struct table_elt *
lookup (rtx x, unsigned int hash, enum machine_mode mode)
{
  struct table_elt *p;

  for (p = table[hash]; p; p = p->next_same_hash)
    if (mode == p->mode && ((x == p->exp && REG_P (x))
			    || exp_equiv_p (x, p->exp, !REG_P (x), false)))
      return p;

  return 0;
}

/* Like `lookup' but don't care whether the table element uses invalid regs.
   Also ignore discrepancies in the machine mode of a register.  */

static struct table_elt *
lookup_for_remove (rtx x, unsigned int hash, enum machine_mode mode)
{
  struct table_elt *p;

  if (REG_P (x))
    {
      unsigned int regno = REGNO (x);

      /* Don't check the machine mode when comparing registers;
	 invalidating (REG:SI 0) also invalidates (REG:DF 0).  */
      for (p = table[hash]; p; p = p->next_same_hash)
	if (REG_P (p->exp)
	    && REGNO (p->exp) == regno)
	  return p;
    }
  else
    {
      for (p = table[hash]; p; p = p->next_same_hash)
	if (mode == p->mode
	    && (x == p->exp || exp_equiv_p (x, p->exp, 0, false)))
	  return p;
    }

  return 0;
}

/* Look for an expression equivalent to X and with code CODE.
   If one is found, return that expression.  */

static rtx
lookup_as_function (rtx x, enum rtx_code code)
{
  struct table_elt *p
    = lookup (x, SAFE_HASH (x, VOIDmode), GET_MODE (x));

  /* If we are looking for a CONST_INT, the mode doesn't really matter, as
     long as we are narrowing.  So if we looked in vain for a mode narrower
     than word_mode before, look for word_mode now.  */
  if (p == 0 && code == CONST_INT
      && GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (word_mode))
    {
      x = copy_rtx (x);
      PUT_MODE (x, word_mode);
      p = lookup (x, SAFE_HASH (x, VOIDmode), word_mode);
    }

  if (p == 0)
    return 0;

  for (p = p->first_same_value; p; p = p->next_same_value)
    if (GET_CODE (p->exp) == code
	/* Make sure this is a valid entry in the table.  */
	&& exp_equiv_p (p->exp, p->exp, 1, false))
      return p->exp;

  return 0;
}

/* Insert X in the hash table, assuming HASH is its hash code
   and CLASSP is an element of the class it should go in
   (or 0 if a new class should be made).
   It is inserted at the proper position to keep the class in
   the order cheapest first.

   MODE is the machine-mode of X, or if X is an integer constant
   with VOIDmode then MODE is the mode with which X will be used.

   For elements of equal cheapness, the most recent one
   goes in front, except that the first element in the list
   remains first unless a cheaper element is added.  The order of
   pseudo-registers does not matter, as canon_reg will be called to
   find the cheapest when a register is retrieved from the table.

   The in_memory field in the hash table element is set to 0.
   The caller must set it nonzero if appropriate.

   You should call insert_regs (X, CLASSP, MODIFY) before calling here,
   and if insert_regs returns a nonzero value
   you must then recompute its hash code before calling here.

   If necessary, update table showing constant values of quantities.  */

#define CHEAPER(X, Y) \
 (preferable ((X)->cost, (X)->regcost, (Y)->cost, (Y)->regcost) < 0)

static struct table_elt *
insert (rtx x, struct table_elt *classp, unsigned int hash, enum machine_mode mode)
{
  struct table_elt *elt;

  /* If X is a register and we haven't made a quantity for it,
     something is wrong.  */
  gcc_assert (!REG_P (x) || REGNO_QTY_VALID_P (REGNO (x)));

  /* If X is a hard register, show it is being put in the table.  */
  if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
    {
      unsigned int regno = REGNO (x);
      unsigned int endregno = regno + hard_regno_nregs[regno][GET_MODE (x)];
      unsigned int i;

      for (i = regno; i < endregno; i++)
	SET_HARD_REG_BIT (hard_regs_in_table, i);
    }

  /* Put an element for X into the right hash bucket.  */

  elt = free_element_chain;
  if (elt)
    free_element_chain = elt->next_same_hash;
  else
    elt = xmalloc (sizeof (struct table_elt));

  elt->exp = x;
  elt->canon_exp = NULL_RTX;
  elt->cost = COST (x);
  elt->regcost = approx_reg_cost (x);
  elt->next_same_value = 0;
  elt->prev_same_value = 0;
  elt->next_same_hash = table[hash];
  elt->prev_same_hash = 0;
  elt->related_value = 0;
  elt->in_memory = 0;
  elt->mode = mode;
  elt->is_const = (CONSTANT_P (x) || fixed_base_plus_p (x));

  if (table[hash])
    table[hash]->prev_same_hash = elt;
  table[hash] = elt;

  /* Put it into the proper value-class.  */
  if (classp)
    {
      classp = classp->first_same_value;
      if (CHEAPER (elt, classp))
	/* Insert at the head of the class.  */
	{
	  struct table_elt *p;
	  elt->next_same_value = classp;
	  classp->prev_same_value = elt;
	  elt->first_same_value = elt;

	  for (p = classp; p; p = p->next_same_value)
	    p->first_same_value = elt;
	}
      else
	{
	  /* Insert not at head of the class.  */
	  /* Put it after the last element cheaper than X.  */
	  struct table_elt *p, *next;

	  for (p = classp; (next = p->next_same_value) && CHEAPER (next, elt);
	       p = next);

	  /* Put it after P and before NEXT.  */
	  elt->next_same_value = next;
	  if (next)
	    next->prev_same_value = elt;

	  elt->prev_same_value = p;
	  p->next_same_value = elt;
	  elt->first_same_value = classp;
	}
    }
  else
    elt->first_same_value = elt;

  /* If this is a constant being set equivalent to a register or a register
     being set equivalent to a constant, note the constant equivalence.

     If this is a constant, it cannot be equivalent to a different constant,
     and a constant is the only thing that can be cheaper than a register.  So
     we know the register is the head of the class (before the constant was
     inserted).

     If this is a register that is not already known equivalent to a
     constant, we must check the entire class.

     If this is a register that is already known equivalent to an insn,
     update the qtys `const_insn' to show that `this_insn' is the latest
     insn making that quantity equivalent to the constant.  */

  if (elt->is_const && classp && REG_P (classp->exp)
      && !REG_P (x))
    {
      int exp_q = REG_QTY (REGNO (classp->exp));
      struct qty_table_elem *exp_ent = &qty_table[exp_q];

      exp_ent->const_rtx = gen_lowpart (exp_ent->mode, x);
      exp_ent->const_insn = this_insn;
    }

  else if (REG_P (x)
	   && classp
	   && ! qty_table[REG_QTY (REGNO (x))].const_rtx
	   && ! elt->is_const)
    {
      struct table_elt *p;

      for (p = classp; p != 0; p = p->next_same_value)
	{
	  if (p->is_const && !REG_P (p->exp))
	    {
	      int x_q = REG_QTY (REGNO (x));
	      struct qty_table_elem *x_ent = &qty_table[x_q];

	      x_ent->const_rtx
		= gen_lowpart (GET_MODE (x), p->exp);
	      x_ent->const_insn = this_insn;
	      break;
	    }
	}
    }

  else if (REG_P (x)
	   && qty_table[REG_QTY (REGNO (x))].const_rtx
	   && GET_MODE (x) == qty_table[REG_QTY (REGNO (x))].mode)
    qty_table[REG_QTY (REGNO (x))].const_insn = this_insn;

  /* If this is a constant with symbolic value,
     and it has a term with an explicit integer value,
     link it up with related expressions.  */
  if (GET_CODE (x) == CONST)
    {
      rtx subexp = get_related_value (x);
      unsigned subhash;
      struct table_elt *subelt, *subelt_prev;

      if (subexp != 0)
	{
	  /* Get the integer-free subexpression in the hash table.  */
	  subhash = SAFE_HASH (subexp, mode);
	  subelt = lookup (subexp, subhash, mode);
	  if (subelt == 0)
	    subelt = insert (subexp, NULL, subhash, mode);
	  /* Initialize SUBELT's circular chain if it has none.  */
	  if (subelt->related_value == 0)
	    subelt->related_value = subelt;
	  /* Find the element in the circular chain that precedes SUBELT.  */
	  subelt_prev = subelt;
	  while (subelt_prev->related_value != subelt)
	    subelt_prev = subelt_prev->related_value;
	  /* Put new ELT into SUBELT's circular chain just before SUBELT.
	     This way the element that follows SUBELT is the oldest one.  */
	  elt->related_value = subelt_prev->related_value;
	  subelt_prev->related_value = elt;
	}
    }

  return elt;
}

/* Given two equivalence classes, CLASS1 and CLASS2, put all the entries from
   CLASS2 into CLASS1.  This is done when we have reached an insn which makes
   the two classes equivalent.

   CLASS1 will be the surviving class; CLASS2 should not be used after this
   call.

   Any invalid entries in CLASS2 will not be copied.  */

static void
merge_equiv_classes (struct table_elt *class1, struct table_elt *class2)
{
  struct table_elt *elt, *next, *new;

  /* Ensure we start with the head of the classes.  */
  class1 = class1->first_same_value;
  class2 = class2->first_same_value;

  /* If they were already equal, forget it.  */
  if (class1 == class2)
    return;

  for (elt = class2; elt; elt = next)
    {
      unsigned int hash;
      rtx exp = elt->exp;
      enum machine_mode mode = elt->mode;

      next = elt->next_same_value;

      /* Remove old entry, make a new one in CLASS1's class.
	 Don't do this for invalid entries as we cannot find their
	 hash code (it also isn't necessary).  */
      if (REG_P (exp) || exp_equiv_p (exp, exp, 1, false))
	{
	  bool need_rehash = false;

	  hash_arg_in_memory = 0;
	  hash = HASH (exp, mode);

	  if (REG_P (exp))
	    {
	      need_rehash = REGNO_QTY_VALID_P (REGNO (exp));
	      delete_reg_equiv (REGNO (exp));
	    }

	  remove_from_table (elt, hash);

	  if (insert_regs (exp, class1, 0) || need_rehash)
	    {
	      rehash_using_reg (exp);
	      hash = HASH (exp, mode);
	    }
	  new = insert (exp, class1, hash, mode);
	  new->in_memory = hash_arg_in_memory;
	}
    }
}

/* Flush the entire hash table.  */

static void
flush_hash_table (void)
{
  int i;
  struct table_elt *p;

  for (i = 0; i < HASH_SIZE; i++)
    for (p = table[i]; p; p = table[i])
      {
	/* Note that invalidate can remove elements
	   after P in the current hash chain.  */
	if (REG_P (p->exp))
	  invalidate (p->exp, p->mode);
	else
	  remove_from_table (p, i);
      }
}

/* Function called for each rtx to check whether a true dependence exists.  */
struct check_dependence_data
{
  enum machine_mode mode;
  rtx exp;
  rtx addr;
};

static int
check_dependence (rtx *x, void *data)
{
  struct check_dependence_data *d = (struct check_dependence_data *) data;
  if (*x && MEM_P (*x))
    return canon_true_dependence (d->exp, d->mode, d->addr, *x,
				  cse_rtx_varies_p);
  else
    return 0;
}
1753 /* Remove from the hash table, or mark as invalid, all expressions whose
1754 values could be altered by storing in X. X is a register, a subreg, or
1755 a memory reference with nonvarying address (because, when a memory
1756 reference with a varying address is stored in, all memory references are
1757 removed by invalidate_memory so specific invalidation is superfluous).
1758 FULL_MODE, if not VOIDmode, indicates that this much should be
1759 invalidated instead of just the amount indicated by the mode of X. This
1760 is only used for bitfield stores into memory.
1762 A nonvarying address may be just a register or just a symbol reference,
1763 or it may be either of those plus a numeric offset. */
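/* For example (illustrative only): a store to
   (mem:SI (plus (reg fp) (const_int -8))) has a nonvarying address, so
   only table entries for overlapping memory must go; a store through an
   arbitrary pointer instead flushes every memory entry via
   invalidate_memory.  */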
1765 static void
1766 invalidate (rtx x, enum machine_mode full_mode)
1768 int i;
1769 struct table_elt *p;
1770 rtx addr;
1772 switch (GET_CODE (x))
1774 case REG:
1776 /* If X is a register, dependencies on its contents are recorded
1777 through the qty number mechanism. Just change the qty number of
1778 the register, mark it as invalid for expressions that refer to it,
1779 and remove it itself. */
1780 unsigned int regno = REGNO (x);
1781 unsigned int hash = HASH (x, GET_MODE (x));
1783 /* Remove REGNO from any quantity list it might be on and indicate
1784 that its value might have changed. If it is a pseudo, remove its
1785 entry from the hash table.
1787 For a hard register, we do the first two actions above for any
1788 additional hard registers corresponding to X. Then, if any of these
1789 registers are in the table, we must remove any REG entries that
1790 overlap these registers. */
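/* Assumed illustration: on a target with 32-bit words, invalidating
   (reg:DI 0) must also tick hard register 1 and remove a table entry
   for (reg:SI 1), since that register overlaps the stored value.  */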
1792 delete_reg_equiv (regno);
1793 REG_TICK (regno)++;
1794 SUBREG_TICKED (regno) = -1;
1796 if (regno >= FIRST_PSEUDO_REGISTER)
1798 /* Because a register can be referenced in more than one mode,
1799 we might have to remove more than one table entry. */
1800 struct table_elt *elt;
1802 while ((elt = lookup_for_remove (x, hash, GET_MODE (x))))
1803 remove_from_table (elt, hash);
1805 else
1807 HOST_WIDE_INT in_table
1808 = TEST_HARD_REG_BIT (hard_regs_in_table, regno);
1809 unsigned int endregno
1810 = regno + hard_regno_nregs[regno][GET_MODE (x)];
1811 unsigned int tregno, tendregno, rn;
1812 struct table_elt *p, *next;
1814 CLEAR_HARD_REG_BIT (hard_regs_in_table, regno);
1816 for (rn = regno + 1; rn < endregno; rn++)
1818 in_table |= TEST_HARD_REG_BIT (hard_regs_in_table, rn);
1819 CLEAR_HARD_REG_BIT (hard_regs_in_table, rn);
1820 delete_reg_equiv (rn);
1821 REG_TICK (rn)++;
1822 SUBREG_TICKED (rn) = -1;
1825 if (in_table)
1826 for (hash = 0; hash < HASH_SIZE; hash++)
1827 for (p = table[hash]; p; p = next)
1829 next = p->next_same_hash;
1831 if (!REG_P (p->exp)
1832 || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
1833 continue;
1835 tregno = REGNO (p->exp);
1836 tendregno
1837 = tregno + hard_regno_nregs[tregno][GET_MODE (p->exp)];
1838 if (tendregno > regno && tregno < endregno)
1839 remove_from_table (p, hash);
1843 return;
1845 case SUBREG:
1846 invalidate (SUBREG_REG (x), VOIDmode);
1847 return;
1849 case PARALLEL:
1850 for (i = XVECLEN (x, 0) - 1; i >= 0; --i)
1851 invalidate (XVECEXP (x, 0, i), VOIDmode);
1852 return;
1854 case EXPR_LIST:
1855 /* This is part of a disjoint return value; extract the location in
1856 question ignoring the offset. */
1857 invalidate (XEXP (x, 0), VOIDmode);
1858 return;
1860 case MEM:
1861 addr = canon_rtx (get_addr (XEXP (x, 0)));
1862 /* Calculate the canonical version of X here so that
1863 true_dependence doesn't generate new RTL for X on each call. */
1864 x = canon_rtx (x);
1866 /* Remove all hash table elements that refer to overlapping pieces of
1867 memory. */
1868 if (full_mode == VOIDmode)
1869 full_mode = GET_MODE (x);
1871 for (i = 0; i < HASH_SIZE; i++)
1873 struct table_elt *next;
1875 for (p = table[i]; p; p = next)
1877 next = p->next_same_hash;
1878 if (p->in_memory)
1880 struct check_dependence_data d;
1882 /* Just canonicalize the expression once;
1883 otherwise each time we call invalidate
1884 true_dependence will canonicalize the
1885 expression again. */
1886 if (!p->canon_exp)
1887 p->canon_exp = canon_rtx (p->exp);
1888 d.exp = x;
1889 d.addr = addr;
1890 d.mode = full_mode;
1891 if (for_each_rtx (&p->canon_exp, check_dependence, &d))
1892 remove_from_table (p, i);
1896 return;
1898 default:
1899 gcc_unreachable ();
1903 /* Remove all expressions that refer to register REGNO,
1904 since they are already invalid, and we are about to
1905 mark that register valid again and don't want the old
1906 expressions to reappear as valid. */
1908 static void
1909 remove_invalid_refs (unsigned int regno)
1911 unsigned int i;
1912 struct table_elt *p, *next;
1914 for (i = 0; i < HASH_SIZE; i++)
1915 for (p = table[i]; p; p = next)
1917 next = p->next_same_hash;
1918 if (!REG_P (p->exp)
1919 && refers_to_regno_p (regno, regno + 1, p->exp, (rtx *) 0))
1920 remove_from_table (p, i);
1924 /* Likewise for a subreg with subreg_reg REGNO, subreg_byte OFFSET,
1925 and mode MODE. */
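/* Illustration (assuming 4-byte SImode and 8-byte DImode): invalidating
   (subreg:SI (reg:DI 100) 4) covers bytes 4..7 of register 100, so an
   entry mentioning only (subreg:SI (reg:DI 100) 0) -- bytes 0..3 -- may
   safely stay in the table.  */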
1926 static void
1927 remove_invalid_subreg_refs (unsigned int regno, unsigned int offset,
1928 enum machine_mode mode)
1930 unsigned int i;
1931 struct table_elt *p, *next;
1932 unsigned int end = offset + (GET_MODE_SIZE (mode) - 1);
1934 for (i = 0; i < HASH_SIZE; i++)
1935 for (p = table[i]; p; p = next)
1937 rtx exp = p->exp;
1938 next = p->next_same_hash;
1940 if (!REG_P (exp)
1941 && (GET_CODE (exp) != SUBREG
1942 || !REG_P (SUBREG_REG (exp))
1943 || REGNO (SUBREG_REG (exp)) != regno
1944 || (((SUBREG_BYTE (exp)
1945 + (GET_MODE_SIZE (GET_MODE (exp)) - 1)) >= offset)
1946 && SUBREG_BYTE (exp) <= end))
1947 && refers_to_regno_p (regno, regno + 1, p->exp, (rtx *) 0))
1948 remove_from_table (p, i);
1952 /* Recompute the hash codes of any valid entries in the hash table that
1953 reference X, if X is a register, or SUBREG_REG (X) if X is a SUBREG.
1955 This is called when we make a jump equivalence. */
1957 static void
1958 rehash_using_reg (rtx x)
1960 unsigned int i;
1961 struct table_elt *p, *next;
1962 unsigned hash;
1964 if (GET_CODE (x) == SUBREG)
1965 x = SUBREG_REG (x);
1967 /* If X is not a register or if the register is known not to be in any
1968 valid entries in the table, we have no work to do. */
1970 if (!REG_P (x)
1971 || REG_IN_TABLE (REGNO (x)) < 0
1972 || REG_IN_TABLE (REGNO (x)) != REG_TICK (REGNO (x)))
1973 return;
1975 /* Scan all hash chains looking for valid entries that mention X.
1976 If we find one and it is in the wrong hash chain, move it. */
1978 for (i = 0; i < HASH_SIZE; i++)
1979 for (p = table[i]; p; p = next)
1981 next = p->next_same_hash;
1982 if (reg_mentioned_p (x, p->exp)
1983 && exp_equiv_p (p->exp, p->exp, 1, false)
1984 && i != (hash = SAFE_HASH (p->exp, p->mode)))
1986 if (p->next_same_hash)
1987 p->next_same_hash->prev_same_hash = p->prev_same_hash;
1989 if (p->prev_same_hash)
1990 p->prev_same_hash->next_same_hash = p->next_same_hash;
1991 else
1992 table[i] = p->next_same_hash;
1994 p->next_same_hash = table[hash];
1995 p->prev_same_hash = 0;
1996 if (table[hash])
1997 table[hash]->prev_same_hash = p;
1998 table[hash] = p;
2003 /* Remove from the hash table any expression that is a call-clobbered
2004 register. Also update their TICK values. */
2006 static void
2007 invalidate_for_call (void)
2009 unsigned int regno, endregno;
2010 unsigned int i;
2011 unsigned hash;
2012 struct table_elt *p, *next;
2013 int in_table = 0;
2015 /* Go through all the hard registers. For each that is clobbered in
2016 a CALL_INSN, remove the register from quantity chains and update
2017 reg_tick if defined. Also see if any of these registers is currently
2018 in the table. */
2020 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2021 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
2023 delete_reg_equiv (regno);
2024 if (REG_TICK (regno) >= 0)
2026 REG_TICK (regno)++;
2027 SUBREG_TICKED (regno) = -1;
2030 in_table |= (TEST_HARD_REG_BIT (hard_regs_in_table, regno) != 0);
2033 /* In the case where we have no call-clobbered hard registers in the
2034 table, we are done. Otherwise, scan the table and remove any
2035 entry that overlaps a call-clobbered register. */
2037 if (in_table)
2038 for (hash = 0; hash < HASH_SIZE; hash++)
2039 for (p = table[hash]; p; p = next)
2041 next = p->next_same_hash;
2043 if (!REG_P (p->exp)
2044 || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
2045 continue;
2047 regno = REGNO (p->exp);
2048 endregno = regno + hard_regno_nregs[regno][GET_MODE (p->exp)];
2050 for (i = regno; i < endregno; i++)
2051 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
2053 remove_from_table (p, hash);
2054 break;
2059 /* Given an expression X of type CONST,
2060 and ELT which is its table entry (or 0 if it
2061 is not in the hash table),
2062 return an alternate expression for X as a register plus integer.
2063 If none can be found, return 0. */
2065 static rtx
2066 use_related_value (rtx x, struct table_elt *elt)
2068 struct table_elt *relt = 0;
2069 struct table_elt *p, *q;
2070 HOST_WIDE_INT offset;
2072 /* First, is there anything related known?
2073 If we have a table element, we can tell from that.
2074 Otherwise, must look it up. */
2076 if (elt != 0 && elt->related_value != 0)
2077 relt = elt;
2078 else if (elt == 0 && GET_CODE (x) == CONST)
2080 rtx subexp = get_related_value (x);
2081 if (subexp != 0)
2082 relt = lookup (subexp,
2083 SAFE_HASH (subexp, GET_MODE (subexp)),
2084 GET_MODE (subexp));
2087 if (relt == 0)
2088 return 0;
2090 /* Search all related table entries for one that has an
2091 equivalent register. */
2093 p = relt;
2094 while (1)
2096 /* This loop is strange in that it is executed in two different cases.
2097 The first is when X is already in the table. Then it is searching
2098 the RELATED_VALUE list of X's class (RELT). The second case is when
2099 X is not in the table. Then RELT points to a class for the related
2100 value.
2102 Ensure that, whatever case we are in, we ignore classes that have
2103 the same value as X.  */
2105 if (rtx_equal_p (x, p->exp))
2106 q = 0;
2107 else
2108 for (q = p->first_same_value; q; q = q->next_same_value)
2109 if (REG_P (q->exp))
2110 break;
2112 if (q)
2113 break;
2115 p = p->related_value;
2117 /* We went all the way around, so there is nothing to be found.
2118 Alternatively, perhaps RELT was in the table for some other reason
2119 and it has no related values recorded. */
2120 if (p == relt || p == 0)
2121 break;
2124 if (q == 0)
2125 return 0;
2127 offset = (get_integer_term (x) - get_integer_term (p->exp));
2128 /* Note: OFFSET may be 0 if P->exp and X are related by commutativity.  */
2129 return plus_constant (q->exp, offset);
2132 /* Hash a string. Just add its bytes up. */
2133 static inline unsigned
2134 hash_rtx_string (const char *ps)
2136 unsigned hash = 0;
2137 const unsigned char *p = (const unsigned char *) ps;
2139 if (p)
2140 while (*p)
2141 hash += *p++;
2143 return hash;
2146 /* Hash an rtx. We are careful to make sure the value is never negative.
2147 Equivalent registers hash identically.
2148 MODE is used in hashing for CONST_INTs only;
2149 otherwise the mode of X is used.
2151 Store 1 in DO_NOT_RECORD_P if any subexpression is volatile.
2153 If HASH_ARG_IN_MEMORY_P is not NULL, store 1 in it if X contains
2154 a MEM rtx which is not marked MEM_READONLY_P.
2156 Note that cse_insn knows that the hash code of a MEM expression
2157 is just (int) MEM plus the hash code of the address. */
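/* Illustrative note: with HAVE_REG_QTY true, registers hash through
   their quantity number, so after e.g. (set (reg 5) (reg 4)) the
   expressions (plus (reg 4) x) and (plus (reg 5) x) fall into the same
   hash chain and can be recognized as equivalent.  */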
2159 unsigned
2160 hash_rtx (rtx x, enum machine_mode mode, int *do_not_record_p,
2161 int *hash_arg_in_memory_p, bool have_reg_qty)
2163 int i, j;
2164 unsigned hash = 0;
2165 enum rtx_code code;
2166 const char *fmt;
2168 /* Used to turn recursion into iteration. We can't rely on GCC's
2169 tail-recursion elimination since we need to keep accumulating values
2170 in HASH. */
2171 repeat:
2172 if (x == 0)
2173 return hash;
2175 code = GET_CODE (x);
2176 switch (code)
2178 case REG:
2180 unsigned int regno = REGNO (x);
2182 if (!reload_completed)
2184 /* On some machines, we can't record any non-fixed hard register,
2185 because extending its life will cause reload problems. We
2186 consider ap, fp, sp, gp to be fixed for this purpose.
2188 We also consider CCmode registers to be fixed for this purpose;
2189 failure to do so leads to failure to simplify 0<100 type of
2190 conditionals.
2192 On all machines, we can't record any global registers.
2193 Nor should we record any register that is in a small
2194 class, as defined by CLASS_LIKELY_SPILLED_P. */
2195 bool record;
2197 if (regno >= FIRST_PSEUDO_REGISTER)
2198 record = true;
2199 else if (x == frame_pointer_rtx
2200 || x == hard_frame_pointer_rtx
2201 || x == arg_pointer_rtx
2202 || x == stack_pointer_rtx
2203 || x == pic_offset_table_rtx)
2204 record = true;
2205 else if (global_regs[regno])
2206 record = false;
2207 else if (fixed_regs[regno])
2208 record = true;
2209 else if (GET_MODE_CLASS (GET_MODE (x)) == MODE_CC)
2210 record = true;
2211 else if (SMALL_REGISTER_CLASSES)
2212 record = false;
2213 else if (CLASS_LIKELY_SPILLED_P (REGNO_REG_CLASS (regno)))
2214 record = false;
2215 else
2216 record = true;
2218 if (!record)
2220 *do_not_record_p = 1;
2221 return 0;
2225 hash += ((unsigned int) REG << 7);
2226 hash += (have_reg_qty ? (unsigned) REG_QTY (regno) : regno);
2227 return hash;
2230 /* We handle SUBREG of a REG specially because the underlying
2231 reg changes its hash value with every value change; we don't
2232 want to have to forget unrelated subregs when one subreg changes. */
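/* E.g. (assumed illustration): (subreg:SI (reg:DI 100) 4) hashes from
   the register number and word offset, not from REG_QTY, so giving
   (reg:DI 100) a new value does not move unrelated subreg entries to
   different buckets.  */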
2233 case SUBREG:
2235 if (REG_P (SUBREG_REG (x)))
2237 hash += (((unsigned int) SUBREG << 7)
2238 + REGNO (SUBREG_REG (x))
2239 + (SUBREG_BYTE (x) / UNITS_PER_WORD));
2240 return hash;
2242 break;
2245 case CONST_INT:
2246 hash += (((unsigned int) CONST_INT << 7) + (unsigned int) mode
2247 + (unsigned int) INTVAL (x));
2248 return hash;
2250 case CONST_DOUBLE:
2251 /* This is like the general case, except that it only counts
2252 the integers representing the constant. */
2253 hash += (unsigned int) code + (unsigned int) GET_MODE (x);
2254 if (GET_MODE (x) != VOIDmode)
2255 hash += real_hash (CONST_DOUBLE_REAL_VALUE (x));
2256 else
2257 hash += ((unsigned int) CONST_DOUBLE_LOW (x)
2258 + (unsigned int) CONST_DOUBLE_HIGH (x));
2259 return hash;
2261 case CONST_VECTOR:
2263 int units;
2264 rtx elt;
2266 units = CONST_VECTOR_NUNITS (x);
2268 for (i = 0; i < units; ++i)
2270 elt = CONST_VECTOR_ELT (x, i);
2271 hash += hash_rtx (elt, GET_MODE (elt), do_not_record_p,
2272 hash_arg_in_memory_p, have_reg_qty);
2275 return hash;
2278 /* Assume there is only one rtx object for any given label. */
2279 case LABEL_REF:
2280 /* We don't hash on the address of the CODE_LABEL to avoid bootstrap
2281 differences and differences between each stage's debugging dumps. */
2282 hash += (((unsigned int) LABEL_REF << 7)
2283 + CODE_LABEL_NUMBER (XEXP (x, 0)));
2284 return hash;
2286 case SYMBOL_REF:
2288 /* Don't hash on the symbol's address to avoid bootstrap differences.
2289 Different hash values may cause expressions to be recorded in
2290 different orders and thus different registers to be used in the
2291 final assembler. This also avoids differences in the dump files
2292 between various stages. */
2293 unsigned int h = 0;
2294 const unsigned char *p = (const unsigned char *) XSTR (x, 0);
2296 while (*p)
2297 h += (h << 7) + *p++; /* ??? revisit */
2299 hash += ((unsigned int) SYMBOL_REF << 7) + h;
2300 return hash;
2303 case MEM:
2304 /* We don't record if marked volatile or if BLKmode since we don't
2305 know the size of the move. */
2306 if (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode)
2308 *do_not_record_p = 1;
2309 return 0;
2311 if (hash_arg_in_memory_p && !MEM_READONLY_P (x))
2312 *hash_arg_in_memory_p = 1;
2314 /* Now that we have already found this special case,
2315 might as well speed it up as much as possible. */
2316 hash += (unsigned) MEM;
2317 x = XEXP (x, 0);
2318 goto repeat;
2320 case USE:
2321 /* A USE that mentions non-volatile memory needs special
2322 handling since the MEM may be BLKmode which normally
2323 prevents an entry from being made. Pure calls are
2324 marked by a USE which mentions BLKmode memory.
2325 See calls.c:emit_call_1. */
2326 if (MEM_P (XEXP (x, 0))
2327 && ! MEM_VOLATILE_P (XEXP (x, 0)))
2329 hash += (unsigned) USE;
2330 x = XEXP (x, 0);
2332 if (hash_arg_in_memory_p && !MEM_READONLY_P (x))
2333 *hash_arg_in_memory_p = 1;
2335 /* Now that we have already found this special case,
2336 might as well speed it up as much as possible. */
2337 hash += (unsigned) MEM;
2338 x = XEXP (x, 0);
2339 goto repeat;
2341 break;
2343 case PRE_DEC:
2344 case PRE_INC:
2345 case POST_DEC:
2346 case POST_INC:
2347 case PRE_MODIFY:
2348 case POST_MODIFY:
2349 case PC:
2350 case CC0:
2351 case CALL:
2352 case UNSPEC_VOLATILE:
2353 *do_not_record_p = 1;
2354 return 0;
2356 case ASM_OPERANDS:
2357 if (MEM_VOLATILE_P (x))
2359 *do_not_record_p = 1;
2360 return 0;
2362 else
2364 /* We don't want to take the filename and line into account. */
2365 hash += (unsigned) code + (unsigned) GET_MODE (x)
2366 + hash_rtx_string (ASM_OPERANDS_TEMPLATE (x))
2367 + hash_rtx_string (ASM_OPERANDS_OUTPUT_CONSTRAINT (x))
2368 + (unsigned) ASM_OPERANDS_OUTPUT_IDX (x);
2370 if (ASM_OPERANDS_INPUT_LENGTH (x))
2372 for (i = 1; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
2374 hash += (hash_rtx (ASM_OPERANDS_INPUT (x, i),
2375 GET_MODE (ASM_OPERANDS_INPUT (x, i)),
2376 do_not_record_p, hash_arg_in_memory_p,
2377 have_reg_qty)
2378 + hash_rtx_string
2379 (ASM_OPERANDS_INPUT_CONSTRAINT (x, i)));
2382 hash += hash_rtx_string (ASM_OPERANDS_INPUT_CONSTRAINT (x, 0));
2383 x = ASM_OPERANDS_INPUT (x, 0);
2384 mode = GET_MODE (x);
2385 goto repeat;
2388 return hash;
2390 break;
2392 default:
2393 break;
2396 i = GET_RTX_LENGTH (code) - 1;
2397 hash += (unsigned) code + (unsigned) GET_MODE (x);
2398 fmt = GET_RTX_FORMAT (code);
2399 for (; i >= 0; i--)
2401 switch (fmt[i])
2403 case 'e':
2404 /* If we are about to do the last recursive call
2405 needed at this level, change it into iteration.
2406 This function is called enough to be worth it. */
2407 if (i == 0)
2409 x = XEXP (x, i);
2410 goto repeat;
2413 hash += hash_rtx (XEXP (x, i), 0, do_not_record_p,
2414 hash_arg_in_memory_p, have_reg_qty);
2415 break;
2417 case 'E':
2418 for (j = 0; j < XVECLEN (x, i); j++)
2419 hash += hash_rtx (XVECEXP (x, i, j), 0, do_not_record_p,
2420 hash_arg_in_memory_p, have_reg_qty);
2421 break;
2423 case 's':
2424 hash += hash_rtx_string (XSTR (x, i));
2425 break;
2427 case 'i':
2428 hash += (unsigned int) XINT (x, i);
2429 break;
2431 case '0': case 't':
2432 /* Unused. */
2433 break;
2435 default:
2436 gcc_unreachable ();
2440 return hash;
2443 /* Hash an rtx X for cse via hash_rtx.
2444 Stores 1 in do_not_record if any subexpression is volatile.
2445 Stores 1 in hash_arg_in_memory if X contains a mem rtx which
2446 is not marked MEM_READONLY_P.  */
2448 static inline unsigned
2449 canon_hash (rtx x, enum machine_mode mode)
2451 return hash_rtx (x, mode, &do_not_record, &hash_arg_in_memory, true);
2454 /* Like canon_hash but with no side effects, i.e. do_not_record
2455 and hash_arg_in_memory are not changed. */
2457 static inline unsigned
2458 safe_hash (rtx x, enum machine_mode mode)
2460 int dummy_do_not_record;
2461 return hash_rtx (x, mode, &dummy_do_not_record, NULL, true);
2464 /* Return 1 iff X and Y would canonicalize into the same thing,
2465 without actually constructing the canonicalization of either one.
2466 If VALIDATE is nonzero,
2467 we assume X is an expression being processed from the rtl
2468 and Y was found in the hash table. We check register refs
2469 in Y for being marked as valid.
2471 If FOR_GCSE is true, we compare X and Y for equivalence for GCSE. */
2473 int
2474 exp_equiv_p (rtx x, rtx y, int validate, bool for_gcse)
2476 int i, j;
2477 enum rtx_code code;
2478 const char *fmt;
2480 /* Note: it is incorrect to assume an expression is equivalent to itself
2481 if VALIDATE is nonzero. */
2482 if (x == y && !validate)
2483 return 1;
2485 if (x == 0 || y == 0)
2486 return x == y;
2488 code = GET_CODE (x);
2489 if (code != GET_CODE (y))
2490 return 0;
2492 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
2493 if (GET_MODE (x) != GET_MODE (y))
2494 return 0;
2496 switch (code)
2498 case PC:
2499 case CC0:
2500 case CONST_INT:
2501 case CONST_DOUBLE:
2502 return x == y;
2504 case LABEL_REF:
2505 return XEXP (x, 0) == XEXP (y, 0);
2507 case SYMBOL_REF:
2508 return XSTR (x, 0) == XSTR (y, 0);
2510 case REG:
2511 if (for_gcse)
2512 return REGNO (x) == REGNO (y);
2513 else
2515 unsigned int regno = REGNO (y);
2516 unsigned int i;
2517 unsigned int endregno
2518 = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
2519 : hard_regno_nregs[regno][GET_MODE (y)]);
2521 /* If the quantities are not the same, the expressions are not
2522 equivalent.  If they are the same and we are not to validate, they
2523 are equivalent.  Otherwise, ensure all regs are up-to-date.  */
2525 if (REG_QTY (REGNO (x)) != REG_QTY (regno))
2526 return 0;
2528 if (! validate)
2529 return 1;
2531 for (i = regno; i < endregno; i++)
2532 if (REG_IN_TABLE (i) != REG_TICK (i))
2533 return 0;
2535 return 1;
2538 case MEM:
2539 if (for_gcse)
2541 /* A volatile mem should not be considered equivalent to any
2542 other. */
2543 if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
2544 return 0;
2546 /* Can't merge two expressions in different alias sets, since we
2547 can decide that the expression is transparent in a block when
2548 it isn't, due to it being set with the different alias set.
2550 Also, can't merge two expressions with different MEM_ATTRS.
2551 They could e.g. be two different entities allocated into the
2552 same space on the stack (see e.g. PR25130). In that case, the
2553 MEM addresses can be the same, even though the two MEMs are
2554 absolutely not equivalent.
2556 But because really all MEM attributes should be the same for
2557 equivalent MEMs, we just use the invariant that MEMs that have
2558 the same attributes share the same mem_attrs data structure. */
2559 if (MEM_ATTRS (x) != MEM_ATTRS (y))
2560 return 0;
2562 break;
2564 /* For commutative operations, check both orders. */
2565 case PLUS:
2566 case MULT:
2567 case AND:
2568 case IOR:
2569 case XOR:
2570 case NE:
2571 case EQ:
2572 return ((exp_equiv_p (XEXP (x, 0), XEXP (y, 0),
2573 validate, for_gcse)
2574 && exp_equiv_p (XEXP (x, 1), XEXP (y, 1),
2575 validate, for_gcse))
2576 || (exp_equiv_p (XEXP (x, 0), XEXP (y, 1),
2577 validate, for_gcse)
2578 && exp_equiv_p (XEXP (x, 1), XEXP (y, 0),
2579 validate, for_gcse)));
2581 case ASM_OPERANDS:
2582 /* We don't use the generic code below because we want to
2583 disregard filename and line numbers. */
2585 /* A volatile asm isn't equivalent to any other. */
2586 if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
2587 return 0;
2589 if (GET_MODE (x) != GET_MODE (y)
2590 || strcmp (ASM_OPERANDS_TEMPLATE (x), ASM_OPERANDS_TEMPLATE (y))
2591 || strcmp (ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
2592 ASM_OPERANDS_OUTPUT_CONSTRAINT (y))
2593 || ASM_OPERANDS_OUTPUT_IDX (x) != ASM_OPERANDS_OUTPUT_IDX (y)
2594 || ASM_OPERANDS_INPUT_LENGTH (x) != ASM_OPERANDS_INPUT_LENGTH (y))
2595 return 0;
2597 if (ASM_OPERANDS_INPUT_LENGTH (x))
2599 for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
2600 if (! exp_equiv_p (ASM_OPERANDS_INPUT (x, i),
2601 ASM_OPERANDS_INPUT (y, i),
2602 validate, for_gcse)
2603 || strcmp (ASM_OPERANDS_INPUT_CONSTRAINT (x, i),
2604 ASM_OPERANDS_INPUT_CONSTRAINT (y, i)))
2605 return 0;
2608 return 1;
2610 default:
2611 break;
2614 /* Compare the elements. If any pair of corresponding elements
2615 fail to match, return 0 for the whole thing. */
2617 fmt = GET_RTX_FORMAT (code);
2618 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2620 switch (fmt[i])
2622 case 'e':
2623 if (! exp_equiv_p (XEXP (x, i), XEXP (y, i),
2624 validate, for_gcse))
2625 return 0;
2626 break;
2628 case 'E':
2629 if (XVECLEN (x, i) != XVECLEN (y, i))
2630 return 0;
2631 for (j = 0; j < XVECLEN (x, i); j++)
2632 if (! exp_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j),
2633 validate, for_gcse))
2634 return 0;
2635 break;
2637 case 's':
2638 if (strcmp (XSTR (x, i), XSTR (y, i)))
2639 return 0;
2640 break;
2642 case 'i':
2643 if (XINT (x, i) != XINT (y, i))
2644 return 0;
2645 break;
2647 case 'w':
2648 if (XWINT (x, i) != XWINT (y, i))
2649 return 0;
2650 break;
2652 case '0':
2653 case 't':
2654 break;
2656 default:
2657 gcc_unreachable ();
2661 return 1;
2664 /* Return 1 if X has a value that can vary even between two
2665 executions of the program. 0 means X can be compared reliably
2666 against certain constants or near-constants. */
2668 static int
2669 cse_rtx_varies_p (rtx x, int from_alias)
2671 /* We need not check for X and the equivalence class being of the same
2672 mode because if X is equivalent to a constant in some mode, it
2673 doesn't vary in any mode. */
2675 if (REG_P (x)
2676 && REGNO_QTY_VALID_P (REGNO (x)))
2678 int x_q = REG_QTY (REGNO (x));
2679 struct qty_table_elem *x_ent = &qty_table[x_q];
2681 if (GET_MODE (x) == x_ent->mode
2682 && x_ent->const_rtx != NULL_RTX)
2683 return 0;
2686 if (GET_CODE (x) == PLUS
2687 && GET_CODE (XEXP (x, 1)) == CONST_INT
2688 && REG_P (XEXP (x, 0))
2689 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
2691 int x0_q = REG_QTY (REGNO (XEXP (x, 0)));
2692 struct qty_table_elem *x0_ent = &qty_table[x0_q];
2694 if ((GET_MODE (XEXP (x, 0)) == x0_ent->mode)
2695 && x0_ent->const_rtx != NULL_RTX)
2696 return 0;
2699 /* This can happen as the result of virtual register instantiation, if
2700 the initial constant is too large to be a valid address. This gives
2701 us a three-instruction sequence: load the large offset into a register,
2702 load fp minus a constant into a register, then a MEM which is the
2703 sum of the two `constant' registers. */
2704 if (GET_CODE (x) == PLUS
2705 && REG_P (XEXP (x, 0))
2706 && REG_P (XEXP (x, 1))
2707 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0)))
2708 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
2710 int x0_q = REG_QTY (REGNO (XEXP (x, 0)));
2711 int x1_q = REG_QTY (REGNO (XEXP (x, 1)));
2712 struct qty_table_elem *x0_ent = &qty_table[x0_q];
2713 struct qty_table_elem *x1_ent = &qty_table[x1_q];
2715 if ((GET_MODE (XEXP (x, 0)) == x0_ent->mode)
2716 && x0_ent->const_rtx != NULL_RTX
2717 && (GET_MODE (XEXP (x, 1)) == x1_ent->mode)
2718 && x1_ent->const_rtx != NULL_RTX)
2719 return 0;
2722 return rtx_varies_p (x, from_alias);
2725 /* Subroutine of canon_reg. Pass *XLOC through canon_reg, and validate
2726 the result if necessary. INSN is as for canon_reg. */
2728 static void
2729 validate_canon_reg (rtx *xloc, rtx insn)
2731 rtx new = canon_reg (*xloc, insn);
2732 int insn_code;
2734 /* If replacing pseudo with hard reg or vice versa, ensure the
2735 insn remains valid. Likewise if the insn has MATCH_DUPs. */
2736 if (insn != 0 && new != 0
2737 && REG_P (new) && REG_P (*xloc)
2738 && (((REGNO (new) < FIRST_PSEUDO_REGISTER)
2739 != (REGNO (*xloc) < FIRST_PSEUDO_REGISTER))
2740 || GET_MODE (new) != GET_MODE (*xloc)
2741 || (insn_code = recog_memoized (insn)) < 0
2742 || insn_data[insn_code].n_dups > 0))
2743 validate_change (insn, xloc, new, 1);
2744 else
2745 *xloc = new;
2748 /* Canonicalize an expression:
2749 replace each register reference inside it
2750 with the "oldest" equivalent register.
2752 If INSN is nonzero and we are replacing a pseudo with a hard register
2753 or vice versa, validate_change is used to ensure that INSN remains valid
2754 after we make our substitution. The calls are made with IN_GROUP nonzero
2755 so apply_change_group must be called upon the outermost return from this
2756 function (unless INSN is zero). The result of apply_change_group can
2757 generally be discarded since the changes we are making are optional. */
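/* Illustration with assumed pseudo numbers: if (reg 7) was copied from
   (reg 4) earlier in the extended basic block, canon_reg rewrites
   (plus (reg 7) (const_int 4)) as (plus (reg 4) (const_int 4)), so that
   equivalent expressions hash and compare identically.  */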
2759 static rtx
2760 canon_reg (rtx x, rtx insn)
2762 int i;
2763 enum rtx_code code;
2764 const char *fmt;
2766 if (x == 0)
2767 return x;
2769 code = GET_CODE (x);
2770 switch (code)
2772 case PC:
2773 case CC0:
2774 case CONST:
2775 case CONST_INT:
2776 case CONST_DOUBLE:
2777 case CONST_VECTOR:
2778 case SYMBOL_REF:
2779 case LABEL_REF:
2780 case ADDR_VEC:
2781 case ADDR_DIFF_VEC:
2782 return x;
2784 case REG:
2786 int first;
2787 int q;
2788 struct qty_table_elem *ent;
2790 /* Never replace a hard reg, because hard regs can appear
2791 in more than one machine mode, and we must preserve the mode
2792 of each occurrence. Also, some hard regs appear in
2793 MEMs that are shared and mustn't be altered. Don't try to
2794 replace any reg that maps to a reg of class NO_REGS. */
2795 if (REGNO (x) < FIRST_PSEUDO_REGISTER
2796 || ! REGNO_QTY_VALID_P (REGNO (x)))
2797 return x;
2799 q = REG_QTY (REGNO (x));
2800 ent = &qty_table[q];
2801 first = ent->first_reg;
2802 return (first >= FIRST_PSEUDO_REGISTER ? regno_reg_rtx[first]
2803 : REGNO_REG_CLASS (first) == NO_REGS ? x
2804 : gen_rtx_REG (ent->mode, first));
2807 default:
2808 break;
2811 fmt = GET_RTX_FORMAT (code);
2812 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2814 int j;
2816 if (fmt[i] == 'e')
2817 validate_canon_reg (&XEXP (x, i), insn);
2818 else if (fmt[i] == 'E')
2819 for (j = 0; j < XVECLEN (x, i); j++)
2820 validate_canon_reg (&XVECEXP (x, i, j), insn);
2823 return x;
2826 /* LOC is a location within INSN that is an operand address (the contents of
2827 a MEM). Find the best equivalent address to use that is valid for this
2828 insn.
2830 On most CISC machines, complicated address modes are costly, and rtx_cost
2831 is a good approximation for that cost. However, most RISC machines have
2832 only a few (usually only one) memory reference formats. If an address is
2833 valid at all, it is often just as cheap as any other address. Hence, for
2834 RISC machines, we use `address_cost' to compare the costs of various
2835 addresses. For two addresses of equal cost, choose the one with the
2836 highest `rtx_cost' value as that has the potential of eliminating the
2837 most insns. For equal costs, we choose the first in the equivalence
2838 class. Note that we ignore the fact that pseudo registers are cheaper than
2839 hard registers here because we would also prefer the pseudo registers. */
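/* Hypothetical illustration: if (reg 5) and (plus (reg 6) (const_int 4))
   have equal address_cost, the PLUS form is preferred because its higher
   rtx_cost means replacing it can also let the insn computing the sum
   be eliminated.  */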
2841 static void
2842 find_best_addr (rtx insn, rtx *loc, enum machine_mode mode)
2844 struct table_elt *elt;
2845 rtx addr = *loc;
2846 struct table_elt *p;
2847 int found_better = 1;
2848 int save_do_not_record = do_not_record;
2849 int save_hash_arg_in_memory = hash_arg_in_memory;
2850 int addr_volatile;
2851 int regno;
2852 unsigned hash;
2854 /* Do not try to replace constant addresses or addresses of local and
2855 argument slots. These MEM expressions are made only once and inserted
2856 in many instructions, as well as being used to control symbol table
2857 output. It is not safe to clobber them.
2859 There are some uncommon cases where the address is already in a register
2860 for some reason, but we cannot take advantage of that because we have
2861 no easy way to unshare the MEM. In addition, looking up all stack
2862 addresses is costly. */
2863 if ((GET_CODE (addr) == PLUS
2864 && REG_P (XEXP (addr, 0))
2865 && GET_CODE (XEXP (addr, 1)) == CONST_INT
2866 && (regno = REGNO (XEXP (addr, 0)),
2867 regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM
2868 || regno == ARG_POINTER_REGNUM))
2869 || (REG_P (addr)
2870 && (regno = REGNO (addr), regno == FRAME_POINTER_REGNUM
2871 || regno == HARD_FRAME_POINTER_REGNUM
2872 || regno == ARG_POINTER_REGNUM))
2873 || CONSTANT_ADDRESS_P (addr))
2874 return;
2876 /* If this address is not simply a register, try to fold it. This will
2877 sometimes simplify the expression. Many simplifications
2878 will not be valid, but some, usually applying the associative rule, will
2879 be valid and produce better code. */
2880 if (!REG_P (addr))
2882 rtx folded = canon_for_address (fold_rtx (addr, NULL_RTX));
2884 if (folded != addr)
2886 int addr_folded_cost = address_cost (folded, mode);
2887 int addr_cost = address_cost (addr, mode);
2889 if ((addr_folded_cost < addr_cost
2890 || (addr_folded_cost == addr_cost
2891 /* ??? The rtx_cost comparison is left over from an older
2892 version of this code.  It is probably no longer helpful.  */
2893 && (rtx_cost (folded, MEM) > rtx_cost (addr, MEM)
2894 || approx_reg_cost (folded) < approx_reg_cost (addr))))
2895 && validate_change (insn, loc, folded, 0))
2896 addr = folded;
2900 /* If this address is not in the hash table, we can't look for equivalences
2901 of the whole address. Also, ignore if volatile. */
2903 do_not_record = 0;
2904 hash = HASH (addr, Pmode);
2905 addr_volatile = do_not_record;
2906 do_not_record = save_do_not_record;
2907 hash_arg_in_memory = save_hash_arg_in_memory;
2909 if (addr_volatile)
2910 return;
2912 elt = lookup (addr, hash, Pmode);
2914 if (elt)
2916 /* We need to find the best (under the criteria documented above) entry
2917 in the class that is valid. We use the `flag' field to indicate
2918 choices that were invalid and iterate until we can't find a better
2919 one that hasn't already been tried. */
2921 for (p = elt->first_same_value; p; p = p->next_same_value)
2922 p->flag = 0;
2924 while (found_better)
2926 int best_addr_cost = address_cost (*loc, mode);
2927 int best_rtx_cost = (elt->cost + 1) >> 1;
2928 int exp_cost;
2929 struct table_elt *best_elt = elt;
2931 found_better = 0;
2932 for (p = elt->first_same_value; p; p = p->next_same_value)
2933 if (! p->flag)
2935 if ((REG_P (p->exp)
2936 || exp_equiv_p (p->exp, p->exp, 1, false))
2937 && ((exp_cost = address_cost (p->exp, mode)) < best_addr_cost
2938 || (exp_cost == best_addr_cost
2939 && ((p->cost + 1) >> 1) > best_rtx_cost)))
2941 found_better = 1;
2942 best_addr_cost = exp_cost;
2943 best_rtx_cost = (p->cost + 1) >> 1;
2944 best_elt = p;
2948 if (found_better)
2950 if (validate_change (insn, loc,
2951 canon_reg (copy_rtx (best_elt->exp),
2952 NULL_RTX), 0))
2953 return;
2954 else
2955 best_elt->flag = 1;
2960 /* If the address is a binary operation with the first operand a register
2961 and the second a constant, do the same as above, but looking for
2962 equivalences of the register. Then try to simplify before checking for
2963 the best address to use. This catches a few cases: First is when we
2964 have REG+const and the register is another REG+const. We can often merge
2965 the constants and eliminate one insn and one register. It may also be
2966 that a machine has a cheap REG+REG+const. Finally, this improves the
2967 code on the Alpha for unaligned byte stores. */
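/* Assumed example: if the table records (reg 5) as equivalent to
   (plus (reg 4) (const_int 4)) and *LOC is (plus (reg 5) (const_int 8)),
   simplify_gen_binary can produce (plus (reg 4) (const_int 12)), often
   saving one insn and one register.  */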
2969 if (flag_expensive_optimizations
2970 && ARITHMETIC_P (*loc)
2971 && REG_P (XEXP (*loc, 0)))
2973 rtx op1 = XEXP (*loc, 1);
2975 do_not_record = 0;
2976 hash = HASH (XEXP (*loc, 0), Pmode);
2977 do_not_record = save_do_not_record;
2978 hash_arg_in_memory = save_hash_arg_in_memory;
2980 elt = lookup (XEXP (*loc, 0), hash, Pmode);
2981 if (elt == 0)
2982 return;
2984 /* We need to find the best (under the criteria documented above) entry
2985 in the class that is valid. We use the `flag' field to indicate
2986 choices that were invalid and iterate until we can't find a better
2987 one that hasn't already been tried. */
2989 for (p = elt->first_same_value; p; p = p->next_same_value)
2990 p->flag = 0;
2992 while (found_better)
2994 int best_addr_cost = address_cost (*loc, mode);
2995 int best_rtx_cost = (COST (*loc) + 1) >> 1;
2996 struct table_elt *best_elt = elt;
2997 rtx best_rtx = *loc;
2998 int count;
3000 /* This is in the worst case an O(n^2) algorithm, so limit our search
3001 to the first 32 elements on the list. This avoids trouble
3002 compiling code with very long basic blocks that can easily
3003 call simplify_gen_binary so many times that we run out of
3004 memory. */
3006 found_better = 0;
3007 for (p = elt->first_same_value, count = 0;
3008 p && count < 32;
3009 p = p->next_same_value, count++)
3010 if (! p->flag
3011 && (REG_P (p->exp)
3012 || (GET_CODE (p->exp) != EXPR_LIST
3013 && exp_equiv_p (p->exp, p->exp, 1, false))))
3016 rtx new = simplify_gen_binary (GET_CODE (*loc), Pmode,
3017 p->exp, op1);
3018 int new_cost;
3020 /* Get the canonical version of the address so we can accept
3021 more. */
3022 new = canon_for_address (new);
3024 new_cost = address_cost (new, mode);
3026 if (new_cost < best_addr_cost
3027 || (new_cost == best_addr_cost
3028 && (COST (new) + 1) >> 1 > best_rtx_cost))
3030 found_better = 1;
3031 best_addr_cost = new_cost;
3032 best_rtx_cost = (COST (new) + 1) >> 1;
3033 best_elt = p;
3034 best_rtx = new;
3038 if (found_better)
3040 if (validate_change (insn, loc,
3041 canon_reg (copy_rtx (best_rtx),
3042 NULL_RTX), 0))
3043 return;
3044 else
3045 best_elt->flag = 1;
3051 /* Given an operation (CODE, *PARG1, *PARG2), where code is a comparison
3052 operation (EQ, NE, GT, etc.), follow it back through the hash table to
3053 find what values are being compared.
3055 *PARG1 and *PARG2 are updated to contain the rtx representing the values
3056 actually being compared. For example, if *PARG1 was (cc0) and *PARG2
3057 was (const_int 0), *PARG1 and *PARG2 will be set to the objects that were
3058 compared to produce cc0.
3060 The return value is the comparison operator: either CODE itself or the
3061 code corresponding to the inverse of the comparison.  */
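/* Illustrative example: for CODE == EQ with *PARG1 == (reg 5) and
   *PARG2 == (const_int 0), if the table knows (reg 5) is equivalent to
   (lt (reg 6) (reg 7)), the function returns GE -- the inverse of LT --
   and sets *PARG1 = (reg 6), *PARG2 = (reg 7).  */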
3063 static enum rtx_code
3064 find_comparison_args (enum rtx_code code, rtx *parg1, rtx *parg2,
3065 enum machine_mode *pmode1, enum machine_mode *pmode2)
3067 rtx arg1, arg2;
3069 arg1 = *parg1, arg2 = *parg2;
3071 /* If ARG2 is const0_rtx, see what ARG1 is equivalent to. */
3073 while (arg2 == CONST0_RTX (GET_MODE (arg1)))
3075 /* Set nonzero when we find something of interest. */
3076 rtx x = 0;
3077 int reverse_code = 0;
3078 struct table_elt *p = 0;
3080 /* If arg1 is a COMPARE, extract the comparison arguments from it.
3081 On machines with CC0, this is the only case that can occur, since
3082 fold_rtx will return the COMPARE or item being compared with zero
3083 when given CC0. */
3085 if (GET_CODE (arg1) == COMPARE && arg2 == const0_rtx)
3086 x = arg1;
3088 /* If ARG1 is a comparison operator and CODE is testing for
3089 STORE_FLAG_VALUE, get the inner arguments. */
3091 else if (COMPARISON_P (arg1))
3093 #ifdef FLOAT_STORE_FLAG_VALUE
3094 REAL_VALUE_TYPE fsfv;
3095 #endif
3097 if (code == NE
3098 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
3099 && code == LT && STORE_FLAG_VALUE == -1)
3100 #ifdef FLOAT_STORE_FLAG_VALUE
3101 || (SCALAR_FLOAT_MODE_P (GET_MODE (arg1))
3102 && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
3103 REAL_VALUE_NEGATIVE (fsfv)))
3104 #endif
3106 x = arg1;
3107 else if (code == EQ
3108 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
3109 && code == GE && STORE_FLAG_VALUE == -1)
3110 #ifdef FLOAT_STORE_FLAG_VALUE
3111 || (SCALAR_FLOAT_MODE_P (GET_MODE (arg1))
3112 && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
3113 REAL_VALUE_NEGATIVE (fsfv)))
3114 #endif
3116 x = arg1, reverse_code = 1;
3119 /* ??? We could also check for
3121 (ne (and (eq (...) (const_int 1))) (const_int 0))
3123 and related forms, but let's wait until we see them occurring. */
3125 if (x == 0)
3126 /* Look up ARG1 in the hash table and see if it has an equivalence
3127 that lets us see what is being compared. */
3128 p = lookup (arg1, SAFE_HASH (arg1, GET_MODE (arg1)), GET_MODE (arg1));
3129 if (p)
3131 p = p->first_same_value;
3133 /* If what we compare is already known to be constant, that is as
3134 good as it gets.
3135 We need to break the loop in this case, because otherwise we
3136 can have an infinite loop when looking at a reg that is known
3137 to be a constant which is the same as a comparison of a reg
3138 against zero which appears later in the insn stream, which in
3139 turn is constant and the same as the comparison of the first reg
3140 against zero... */
3141 if (p->is_const)
3142 break;
3145 for (; p; p = p->next_same_value)
3147 enum machine_mode inner_mode = GET_MODE (p->exp);
3148 #ifdef FLOAT_STORE_FLAG_VALUE
3149 REAL_VALUE_TYPE fsfv;
3150 #endif
3152 /* If the entry isn't valid, skip it. */
3153 if (! exp_equiv_p (p->exp, p->exp, 1, false))
3154 continue;
3156 if (GET_CODE (p->exp) == COMPARE
3157 /* Another possibility is that this machine has a compare insn
3158 that includes the comparison code. In that case, ARG1 would
3159 be equivalent to a comparison operation that would set ARG1 to
3160 either STORE_FLAG_VALUE or zero. If this is an NE operation,
3161 ORIG_CODE is the actual comparison being done; if it is an EQ,
3162 we must reverse ORIG_CODE. On machine with a negative value
3163 for STORE_FLAG_VALUE, also look at LT and GE operations. */
3164 || ((code == NE
3165 || (code == LT
3166 && GET_MODE_CLASS (inner_mode) == MODE_INT
3167 && (GET_MODE_BITSIZE (inner_mode)
3168 <= HOST_BITS_PER_WIDE_INT)
3169 && (STORE_FLAG_VALUE
3170 & ((HOST_WIDE_INT) 1
3171 << (GET_MODE_BITSIZE (inner_mode) - 1))))
3172 #ifdef FLOAT_STORE_FLAG_VALUE
3173 || (code == LT
3174 && SCALAR_FLOAT_MODE_P (inner_mode)
3175 && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
3176 REAL_VALUE_NEGATIVE (fsfv)))
3177 #endif
3179 && COMPARISON_P (p->exp)))
3181 x = p->exp;
3182 break;
3184 else if ((code == EQ
3185 || (code == GE
3186 && GET_MODE_CLASS (inner_mode) == MODE_INT
3187 && (GET_MODE_BITSIZE (inner_mode)
3188 <= HOST_BITS_PER_WIDE_INT)
3189 && (STORE_FLAG_VALUE
3190 & ((HOST_WIDE_INT) 1
3191 << (GET_MODE_BITSIZE (inner_mode) - 1))))
3192 #ifdef FLOAT_STORE_FLAG_VALUE
3193 || (code == GE
3194 && SCALAR_FLOAT_MODE_P (inner_mode)
3195 && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
3196 REAL_VALUE_NEGATIVE (fsfv)))
3197 #endif
3199 && COMPARISON_P (p->exp))
3201 reverse_code = 1;
3202 x = p->exp;
3203 break;
3206 /* If this is a non-trapping address, e.g. fp + constant, the
3207 equivalent is a better operand since it may let us predict
3208 the value of the comparison. */
3209 else if (!rtx_addr_can_trap_p (p->exp))
3211 arg1 = p->exp;
3212 continue;
3216 /* If we didn't find a useful equivalence for ARG1, we are done.
3217 Otherwise, set up for the next iteration. */
3218 if (x == 0)
3219 break;
3221 /* If we need to reverse the comparison, make sure that that is
3222 possible -- we can't necessarily infer the value of GE from LT
3223 with floating-point operands. */
3224 if (reverse_code)
3226 enum rtx_code reversed = reversed_comparison_code (x, NULL_RTX);
3227 if (reversed == UNKNOWN)
3228 break;
3229 else
3230 code = reversed;
3232 else if (COMPARISON_P (x))
3233 code = GET_CODE (x);
3234 arg1 = XEXP (x, 0), arg2 = XEXP (x, 1);
3237 /* Return our results. Return the modes from before fold_rtx
3238 because fold_rtx might produce const_int, and then it's too late. */
3239 *pmode1 = GET_MODE (arg1), *pmode2 = GET_MODE (arg2);
3240 *parg1 = fold_rtx (arg1, 0), *parg2 = fold_rtx (arg2, 0);
3242 return code;
3245 /* Fold SUBREG. */
3247 static rtx
3248 fold_rtx_subreg (rtx x, rtx insn)
3250 enum machine_mode mode = GET_MODE (x);
3251 rtx folded_arg0;
3252 rtx const_arg0;
3253 rtx new;
3255 /* See if we previously assigned a constant value to this SUBREG. */
3256 if ((new = lookup_as_function (x, CONST_INT)) != 0
3257 || (new = lookup_as_function (x, CONST_DOUBLE)) != 0)
3258 return new;
3260 /* If this is a paradoxical SUBREG, we have no idea what value the
3261 extra bits would have. However, if the operand is equivalent to
3262 a SUBREG whose operand is the same as our mode, and all the modes
3263 are within a word, we can just use the inner operand because
3264 these SUBREGs just say how to treat the register.
3266 Similarly if we find an integer constant. */
3268 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3270 enum machine_mode imode = GET_MODE (SUBREG_REG (x));
3271 struct table_elt *elt;
3273 if (GET_MODE_SIZE (mode) <= UNITS_PER_WORD
3274 && GET_MODE_SIZE (imode) <= UNITS_PER_WORD
3275 && (elt = lookup (SUBREG_REG (x), HASH (SUBREG_REG (x), imode),
3276 imode)) != 0)
3277 for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
3279 if (CONSTANT_P (elt->exp)
3280 && GET_MODE (elt->exp) == VOIDmode)
3281 return elt->exp;
3283 if (GET_CODE (elt->exp) == SUBREG
3284 && GET_MODE (SUBREG_REG (elt->exp)) == mode
3285 && exp_equiv_p (elt->exp, elt->exp, 1, false))
3286 return copy_rtx (SUBREG_REG (elt->exp));
3289 return x;
3292 /* Fold SUBREG_REG. If it changed, see if we can simplify the
3293 SUBREG. We might be able to if the SUBREG is extracting a single
3294 word in an integral mode or extracting the low part. */
3296 folded_arg0 = fold_rtx (SUBREG_REG (x), insn);
3297 const_arg0 = equiv_constant (folded_arg0);
3298 if (const_arg0)
3299 folded_arg0 = const_arg0;
3301 if (folded_arg0 != SUBREG_REG (x))
3303 new = simplify_subreg (mode, folded_arg0,
3304 GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
3305 if (new)
3306 return new;
3309 if (REG_P (folded_arg0)
3310 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (folded_arg0)))
3312 struct table_elt *elt;
3314 elt = lookup (folded_arg0,
3315 HASH (folded_arg0, GET_MODE (folded_arg0)),
3316 GET_MODE (folded_arg0));
3318 if (elt)
3319 elt = elt->first_same_value;
3321 if (subreg_lowpart_p (x))
3322 /* If this is a narrowing SUBREG and our operand is a REG, see
3323 if we can find an equivalence for REG that is an arithmetic
3324 operation in a wider mode where both operands are
3325 paradoxical SUBREGs from objects of our result mode. In
3326 that case, we couldn't report an equivalent value for that
3327 operation, since we don't know what the extra bits will be.
3328 But we can find an equivalence for this SUBREG by folding
3329 that operation in the narrow mode. This allows us to fold
3330 arithmetic in narrow modes when the machine only supports
3331 word-sized arithmetic.
3333 Also look for a case where we have a SUBREG whose operand
3334 is the same as our result. If both modes are smaller than
3335 a word, we are simply interpreting a register in different
3336 modes and we can use the inner value. */
3338 for (; elt; elt = elt->next_same_value)
3340 enum rtx_code eltcode = GET_CODE (elt->exp);
3342 /* Just check for unary and binary operations. */
3343 if (UNARY_P (elt->exp)
3344 && eltcode != SIGN_EXTEND
3345 && eltcode != ZERO_EXTEND
3346 && GET_CODE (XEXP (elt->exp, 0)) == SUBREG
3347 && GET_MODE (SUBREG_REG (XEXP (elt->exp, 0))) == mode
3348 && (GET_MODE_CLASS (mode)
3349 == GET_MODE_CLASS (GET_MODE (XEXP (elt->exp, 0)))))
3351 rtx op0 = SUBREG_REG (XEXP (elt->exp, 0));
3353 if (!REG_P (op0) && ! CONSTANT_P (op0))
3354 op0 = fold_rtx (op0, NULL_RTX);
3356 op0 = equiv_constant (op0);
3357 if (op0)
3358 new = simplify_unary_operation (GET_CODE (elt->exp), mode,
3359 op0, mode);
3361 else if (ARITHMETIC_P (elt->exp)
3362 && eltcode != DIV && eltcode != MOD
3363 && eltcode != UDIV && eltcode != UMOD
3364 && eltcode != ASHIFTRT && eltcode != LSHIFTRT
3365 && eltcode != ROTATE && eltcode != ROTATERT
3366 && ((GET_CODE (XEXP (elt->exp, 0)) == SUBREG
3367 && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 0)))
3368 == mode))
3369 || CONSTANT_P (XEXP (elt->exp, 0)))
3370 && ((GET_CODE (XEXP (elt->exp, 1)) == SUBREG
3371 && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 1)))
3372 == mode))
3373 || CONSTANT_P (XEXP (elt->exp, 1))))
3375 rtx op0 = gen_lowpart_common (mode, XEXP (elt->exp, 0));
3376 rtx op1 = gen_lowpart_common (mode, XEXP (elt->exp, 1));
3378 if (op0 && !REG_P (op0) && ! CONSTANT_P (op0))
3379 op0 = fold_rtx (op0, NULL_RTX);
3381 if (op0)
3382 op0 = equiv_constant (op0);
3384 if (op1 && !REG_P (op1) && ! CONSTANT_P (op1))
3385 op1 = fold_rtx (op1, NULL_RTX);
3387 if (op1)
3388 op1 = equiv_constant (op1);
3390 /* If we are looking for the low SImode part of
3391 (ashift:DI c (const_int 32)), it doesn't work to
3392 compute that in SImode, because a 32-bit shift in
3393 SImode is unpredictable. We know the value is
3394 0. */
3395 if (op0 && op1
3396 && GET_CODE (elt->exp) == ASHIFT
3397 && GET_CODE (op1) == CONST_INT
3398 && INTVAL (op1) >= GET_MODE_BITSIZE (mode))
3400 if (INTVAL (op1)
3401 < GET_MODE_BITSIZE (GET_MODE (elt->exp)))
3402 /* If the count fits in the inner mode's width,
3403 but exceeds the outer mode's width, the value
3404 will get truncated to 0 by the subreg. */
3405 new = CONST0_RTX (mode);
3406 else
3407 /* If the count exceeds even the inner mode's width,
3408 don't fold this expression. */
3409 new = 0;
3411 else if (op0 && op1)
3412 new = simplify_binary_operation (GET_CODE (elt->exp),
3413 mode, op0, op1);
3416 else if (GET_CODE (elt->exp) == SUBREG
3417 && GET_MODE (SUBREG_REG (elt->exp)) == mode
3418 && (GET_MODE_SIZE (GET_MODE (folded_arg0))
3419 <= UNITS_PER_WORD)
3420 && exp_equiv_p (elt->exp, elt->exp, 1, false))
3421 new = copy_rtx (SUBREG_REG (elt->exp));
3423 if (new)
3424 return new;
3426 else
3427 /* A SUBREG resulting from a zero extension may fold to zero
3428 if it extracts higher bits than the ZERO_EXTEND's source
3429 bits. FIXME: if combine tried to, er, combine these
3430 instructions, this transformation may be moved to
3431 simplify_subreg. */
3432 for (; elt; elt = elt->next_same_value)
3434 if (GET_CODE (elt->exp) == ZERO_EXTEND
3435 && subreg_lsb (x)
3436 >= GET_MODE_BITSIZE (GET_MODE (XEXP (elt->exp, 0))))
3437 return CONST0_RTX (mode);
3441 return x;
3444 /* Fold MEM. */
3446 static rtx
3447 fold_rtx_mem (rtx x, rtx insn)
3449 enum machine_mode mode = GET_MODE (x);
3450 rtx new;
3452 /* If we are not actually processing an insn, don't try to find the
3453 best address. Not only don't we care, but we could modify the
3454 MEM in an invalid way since we have no insn to validate
3455 against. */
3456 if (insn != 0)
3457 find_best_addr (insn, &XEXP (x, 0), mode);
3460 /* Even if we don't fold in the insn itself, we can safely do so
3461 here, in hopes of getting a constant. */
3462 rtx addr = fold_rtx (XEXP (x, 0), NULL_RTX);
3463 rtx base = 0;
3464 HOST_WIDE_INT offset = 0;
3466 if (REG_P (addr)
3467 && REGNO_QTY_VALID_P (REGNO (addr)))
3469 int addr_q = REG_QTY (REGNO (addr));
3470 struct qty_table_elem *addr_ent = &qty_table[addr_q];
3472 if (GET_MODE (addr) == addr_ent->mode
3473 && addr_ent->const_rtx != NULL_RTX)
3474 addr = addr_ent->const_rtx;
3477 /* Call target hook to avoid the effects of -fpic etc.... */
3478 addr = targetm.delegitimize_address (addr);
3480 /* If address is constant, split it into a base and integer
3481 offset. */
3482 if (GET_CODE (addr) == SYMBOL_REF || GET_CODE (addr) == LABEL_REF)
3483 base = addr;
3484 else if (GET_CODE (addr) == CONST && GET_CODE (XEXP (addr, 0)) == PLUS
3485 && GET_CODE (XEXP (XEXP (addr, 0), 1)) == CONST_INT)
3487 base = XEXP (XEXP (addr, 0), 0);
3488 offset = INTVAL (XEXP (XEXP (addr, 0), 1));
3490 else if (GET_CODE (addr) == LO_SUM
3491 && GET_CODE (XEXP (addr, 1)) == SYMBOL_REF)
3492 base = XEXP (addr, 1);
3494 /* If this is a constant pool reference, we can fold it into its
3495 constant to allow better value tracking. */
3496 if (base && GET_CODE (base) == SYMBOL_REF
3497 && CONSTANT_POOL_ADDRESS_P (base))
3499 rtx constant = get_pool_constant (base);
3500 enum machine_mode const_mode = get_pool_mode (base);
3501 rtx new;
3503 if (CONSTANT_P (constant) && GET_CODE (constant) != CONST_INT)
3505 constant_pool_entries_cost = COST (constant);
3506 constant_pool_entries_regcost = approx_reg_cost (constant);
3509 /* If we are loading the full constant, we have an
3510 equivalence. */
3511 if (offset == 0 && mode == const_mode)
3512 return constant;
3514 /* If this actually isn't a constant (weird!), we can't do
3515 anything. Otherwise, handle the two most common cases:
3516 extracting a word from a multi-word constant, and
3517 extracting the low-order bits. Other cases don't seem
3518 common enough to worry about. */
3519 if (! CONSTANT_P (constant))
3520 return x;
3522 if (GET_MODE_CLASS (mode) == MODE_INT
3523 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
3524 && offset % UNITS_PER_WORD == 0
3525 && (new = operand_subword (constant,
3526 offset / UNITS_PER_WORD,
3527 0, const_mode)) != 0)
3528 return new;
3530 if (((BYTES_BIG_ENDIAN
3531 && offset == GET_MODE_SIZE (GET_MODE (constant)) - 1)
3532 || (! BYTES_BIG_ENDIAN && offset == 0))
3533 && (new = gen_lowpart (mode, constant)) != 0)
3534 return new;
3537 /* If this is a reference to a label at a known position in a jump
3538 table, we also know its value. */
3539 if (base && GET_CODE (base) == LABEL_REF)
3541 rtx label = XEXP (base, 0);
3542 rtx table_insn = NEXT_INSN (label);
3544 if (table_insn && JUMP_P (table_insn)
3545 && GET_CODE (PATTERN (table_insn)) == ADDR_VEC)
3547 rtx table = PATTERN (table_insn);
3549 if (offset >= 0
3550 && (offset / GET_MODE_SIZE (GET_MODE (table))
3551 < XVECLEN (table, 0)))
3553 rtx label = XVECEXP
3554 (table, 0, offset / GET_MODE_SIZE (GET_MODE (table)));
3555 rtx set;
3557 /* If we have an insn that loads the label from the
3558 jumptable into a reg, we don't want to set the reg
3559 to the label, because this may cause a reference to
3560 the label to remain after the label is removed in
3561 some very obscure cases (PR middle-end/18628). */
3562 if (!insn)
3563 return label;
3565 set = single_set (insn);
3567 if (! set || SET_SRC (set) != x)
3568 return x;
3570 /* If it's a jump, it's safe to reference the label. */
3571 if (SET_DEST (set) == pc_rtx)
3572 return label;
3574 return x;
3577 if (table_insn && JUMP_P (table_insn)
3578 && GET_CODE (PATTERN (table_insn)) == ADDR_DIFF_VEC)
3580 rtx table = PATTERN (table_insn);
3582 if (offset >= 0
3583 && (offset / GET_MODE_SIZE (GET_MODE (table))
3584 < XVECLEN (table, 1)))
3586 offset /= GET_MODE_SIZE (GET_MODE (table));
3587 new = gen_rtx_MINUS (Pmode, XVECEXP (table, 1, offset),
3588 XEXP (table, 0));
3590 if (GET_MODE (table) != Pmode)
3591 new = gen_rtx_TRUNCATE (GET_MODE (table), new);
3593 /* Indicate this is a constant. This isn't a valid
3594 form of CONST, but it will only be used to fold the
3595 next insns and then discarded, so it should be
3596 safe.
3598 Note this expression must be explicitly discarded,
3599 by cse_insn, else it may end up in a REG_EQUAL note
3600 and "escape" to cause problems elsewhere. */
3601 return gen_rtx_CONST (GET_MODE (new), new);
3606 return x;
3610 /* If X is a nontrivial arithmetic operation on an argument
3611 for which a constant value can be determined, return
3612 the result of operating on that value, as a constant.
3613 Otherwise, return X, possibly with one or more operands
3614 modified by recursive calls to this function.
3616 If X is a register whose contents are known, we do NOT
3617 return those contents here. equiv_constant is called to
3618 perform that task.
3620 INSN is the insn that we may be modifying. If it is 0, make a copy
3621 of X before modifying it. */
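/* Assumed example: if the quantity table records (reg 6) as holding
   (const_int 2) and (reg 7) as holding (const_int 3), fold_rtx can turn
   (mult:SI (reg:SI 6) (reg:SI 7)) into (const_int 6).  */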
3623 static rtx
3624 fold_rtx (rtx x, rtx insn)
3626 enum rtx_code code;
3627 enum machine_mode mode;
3628 const char *fmt;
3629 int i;
3630 rtx new = 0;
3631 int copied = 0;
3632 int must_swap = 0;
3634 /* Folded equivalents of first two operands of X. */
3635 rtx folded_arg0;
3636 rtx folded_arg1;
3638 /* Constant equivalents of first three operands of X;
3639 0 when no such equivalent is known. */
3640 rtx const_arg0;
3641 rtx const_arg1;
3642 rtx const_arg2;
3644 /* The mode of the first operand of X. We need this for sign and zero
3645 extends. */
3646 enum machine_mode mode_arg0;
3648 if (x == 0)
3649 return x;
3651 mode = GET_MODE (x);
3652 code = GET_CODE (x);
3653 switch (code)
3655 case CONST:
3656 case CONST_INT:
3657 case CONST_DOUBLE:
3658 case CONST_VECTOR:
3659 case SYMBOL_REF:
3660 case LABEL_REF:
3661 case REG:
3662 case PC:
3663 /* No use simplifying an EXPR_LIST
3664 since they are used only for lists of args
3665 in a function call's REG_EQUAL note. */
3666 case EXPR_LIST:
3667 return x;
3669 #ifdef HAVE_cc0
3670 case CC0:
3671 return prev_insn_cc0;
3672 #endif
3674 case SUBREG:
3675 return fold_rtx_subreg (x, insn);
3677 case NOT:
3678 case NEG:
3679 /* If we have (NOT Y), see if Y is known to be (NOT Z).
3680 If so, (NOT Y) simplifies to Z. Similarly for NEG. */
3681 new = lookup_as_function (XEXP (x, 0), code);
3682 if (new)
3683 return fold_rtx (copy_rtx (XEXP (new, 0)), insn);
3684 break;
3686 case MEM:
3687 return fold_rtx_mem (x, insn);
3689 #ifdef NO_FUNCTION_CSE
3690 case CALL:
3691 if (CONSTANT_P (XEXP (XEXP (x, 0), 0)))
3692 return x;
3693 break;
3694 #endif
3696 case ASM_OPERANDS:
3697 if (insn)
3699 for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
3700 validate_change (insn, &ASM_OPERANDS_INPUT (x, i),
3701 fold_rtx (ASM_OPERANDS_INPUT (x, i), insn), 0);
3703 break;
3705 default:
3706 break;
3709 const_arg0 = 0;
3710 const_arg1 = 0;
3711 const_arg2 = 0;
3712 mode_arg0 = VOIDmode;
3714 /* Try folding our operands.
3715 Then see which ones have constant values known. */
3717 fmt = GET_RTX_FORMAT (code);
3718 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3719 if (fmt[i] == 'e')
3721 rtx arg = XEXP (x, i);
3722 rtx folded_arg = arg, const_arg = 0;
3723 enum machine_mode mode_arg = GET_MODE (arg);
3724 rtx cheap_arg, expensive_arg;
3725 rtx replacements[2];
3726 int j;
3727 int old_cost = COST_IN (XEXP (x, i), code);
3729 /* Most arguments are cheap, so handle them specially. */
3730 switch (GET_CODE (arg))
3732 case REG:
3733 /* This is the same as calling equiv_constant; it is duplicated
3734 here for speed. */
3735 if (REGNO_QTY_VALID_P (REGNO (arg)))
3737 int arg_q = REG_QTY (REGNO (arg));
3738 struct qty_table_elem *arg_ent = &qty_table[arg_q];
3740 if (arg_ent->const_rtx != NULL_RTX
3741 && !REG_P (arg_ent->const_rtx)
3742 && GET_CODE (arg_ent->const_rtx) != PLUS)
3743 const_arg
3744 = gen_lowpart (GET_MODE (arg),
3745 arg_ent->const_rtx);
3747 break;
3749 case CONST:
3750 case CONST_INT:
3751 case SYMBOL_REF:
3752 case LABEL_REF:
3753 case CONST_DOUBLE:
3754 case CONST_VECTOR:
3755 const_arg = arg;
3756 break;
3758 #ifdef HAVE_cc0
3759 case CC0:
3760 folded_arg = prev_insn_cc0;
3761 mode_arg = prev_insn_cc0_mode;
3762 const_arg = equiv_constant (folded_arg);
3763 break;
3764 #endif
3766 default:
3767 folded_arg = fold_rtx (arg, insn);
3768 const_arg = equiv_constant (folded_arg);
3771 /* For the first three operands, see if the operand
3772 is constant or equivalent to a constant. */
3773 switch (i)
3775 case 0:
3776 folded_arg0 = folded_arg;
3777 const_arg0 = const_arg;
3778 mode_arg0 = mode_arg;
3779 break;
3780 case 1:
3781 folded_arg1 = folded_arg;
3782 const_arg1 = const_arg;
3783 break;
3784 case 2:
3785 const_arg2 = const_arg;
3786 break;
3789 /* Pick the least expensive of the folded argument and an
3790 equivalent constant argument. */
3791 if (const_arg == 0 || const_arg == folded_arg
3792 || COST_IN (const_arg, code) > COST_IN (folded_arg, code))
3793 cheap_arg = folded_arg, expensive_arg = const_arg;
3794 else
3795 cheap_arg = const_arg, expensive_arg = folded_arg;
3797 /* Try to replace the operand with the cheapest of the two
3798 possibilities. If it doesn't work and this is either of the first
3799 two operands of a commutative operation, try swapping them.
3800 If THAT fails, try the more expensive, provided it is cheaper
3801 than what is already there. */
3803 if (cheap_arg == XEXP (x, i))
3804 continue;
3806 if (insn == 0 && ! copied)
3808 x = copy_rtx (x);
3809 copied = 1;
3812 /* Order the replacements from cheapest to most expensive. */
3813 replacements[0] = cheap_arg;
3814 replacements[1] = expensive_arg;
3816 for (j = 0; j < 2 && replacements[j]; j++)
3818 int new_cost = COST_IN (replacements[j], code);
3820 /* Stop if what existed before was cheaper. Prefer constants
3821 in the case of a tie. */
3822 if (new_cost > old_cost
3823 || (new_cost == old_cost && CONSTANT_P (XEXP (x, i))))
3824 break;
3826 /* It's not safe to substitute the operand of a conversion
3827 operator with a constant, as the conversion's identity
3828 depends upon the mode of its operand. This optimization
3829 is handled by the call to simplify_unary_operation. */
3830 if (GET_RTX_CLASS (code) == RTX_UNARY
3831 && GET_MODE (replacements[j]) != mode_arg0
3832 && (code == ZERO_EXTEND
3833 || code == SIGN_EXTEND
3834 || code == TRUNCATE
3835 || code == FLOAT_TRUNCATE
3836 || code == FLOAT_EXTEND
3837 || code == FLOAT
3838 || code == FIX
3839 || code == UNSIGNED_FLOAT
3840 || code == UNSIGNED_FIX))
3841 continue;
3843 if (validate_change (insn, &XEXP (x, i), replacements[j], 0))
3844 break;
3846 if (GET_RTX_CLASS (code) == RTX_COMM_COMPARE
3847 || GET_RTX_CLASS (code) == RTX_COMM_ARITH)
3849 validate_change (insn, &XEXP (x, i), XEXP (x, 1 - i), 1);
3850 validate_change (insn, &XEXP (x, 1 - i), replacements[j], 1);
3852 if (apply_change_group ())
3854 /* Swap the operands back to their original (invalid) order so
3855 that this loop can continue, and flag them to be swapped again later. */
3856 rtx tem;
3858 tem = XEXP (x, 0); XEXP (x, 0) = XEXP (x, 1);
3859 XEXP (x, 1) = tem;
3860 must_swap = 1;
3861 break;
3867 else
3869 if (fmt[i] == 'E')
3870 /* Don't try to fold inside of a vector of expressions.
3871 Doing nothing is harmless. */
3875 /* If a commutative operation, place a constant integer as the second
3876 operand unless the first operand is also a constant integer. Otherwise,
3877 place any constant second unless the first operand is also a constant. */
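/* E.g. (plus (const_int 4) (reg 117)) becomes
   (plus (reg 117) (const_int 4)); the register number is
   hypothetical.  */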
3879 if (COMMUTATIVE_P (x))
3881 if (must_swap
3882 || swap_commutative_operands_p (const_arg0 ? const_arg0
3883 : XEXP (x, 0),
3884 const_arg1 ? const_arg1
3885 : XEXP (x, 1)))
3887 rtx tem = XEXP (x, 0);
3889 if (insn == 0 && ! copied)
3891 x = copy_rtx (x);
3892 copied = 1;
3895 validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
3896 validate_change (insn, &XEXP (x, 1), tem, 1);
3897 if (apply_change_group ())
3899 tem = const_arg0, const_arg0 = const_arg1, const_arg1 = tem;
3900 tem = folded_arg0, folded_arg0 = folded_arg1, folded_arg1 = tem;
3905 /* If X is an arithmetic operation, see if we can simplify it. */
3907 switch (GET_RTX_CLASS (code))
3909 case RTX_UNARY:
3911 int is_const = 0;
3913 /* We can't simplify extension ops unless we know the
3914 original mode. */
3915 if ((code == ZERO_EXTEND || code == SIGN_EXTEND)
3916 && mode_arg0 == VOIDmode)
3917 break;
3919 /* If we had a CONST, strip it off and put it back later if we
3920 fold. */
3921 if (const_arg0 != 0 && GET_CODE (const_arg0) == CONST)
3922 is_const = 1, const_arg0 = XEXP (const_arg0, 0);
3924 new = simplify_unary_operation (code, mode,
3925 const_arg0 ? const_arg0 : folded_arg0,
3926 mode_arg0);
3927 /* NEG of PLUS could be converted into MINUS, but that causes
3928 expressions of the form
3929 (CONST (MINUS (CONST_INT) (SYMBOL_REF)))
3930 which many ports mistakenly treat as LEGITIMATE_CONSTANT_P.
3931 FIXME: those ports should be fixed. */
3932 if (new != 0 && is_const
3933 && GET_CODE (new) == PLUS
3934 && (GET_CODE (XEXP (new, 0)) == SYMBOL_REF
3935 || GET_CODE (XEXP (new, 0)) == LABEL_REF)
3936 && GET_CODE (XEXP (new, 1)) == CONST_INT)
3937 new = gen_rtx_CONST (mode, new);
3939 break;
3941 case RTX_COMPARE:
3942 case RTX_COMM_COMPARE:
3943 /* See what items are actually being compared and set FOLDED_ARG[01]
3944 to those values and CODE to the actual comparison code. If any are
3945 constant, set CONST_ARG0 and CONST_ARG1 appropriately. We needn't
3946 do anything if both operands are already known to be constant. */
3948 /* ??? Vector mode comparisons are not supported yet. */
3949 if (VECTOR_MODE_P (mode))
3950 break;
3952 if (const_arg0 == 0 || const_arg1 == 0)
3954 struct table_elt *p0, *p1;
3955 rtx true_rtx = const_true_rtx, false_rtx = const0_rtx;
3956 enum machine_mode mode_arg1;
3958 #ifdef FLOAT_STORE_FLAG_VALUE
3959 if (SCALAR_FLOAT_MODE_P (mode))
3961 true_rtx = (CONST_DOUBLE_FROM_REAL_VALUE
3962 (FLOAT_STORE_FLAG_VALUE (mode), mode));
3963 false_rtx = CONST0_RTX (mode);
3965 #endif
3967 code = find_comparison_args (code, &folded_arg0, &folded_arg1,
3968 &mode_arg0, &mode_arg1);
3970 /* If the mode is VOIDmode or a MODE_CC mode, we don't know
3971 what kinds of things are being compared, so we can't do
3972 anything with this comparison. */
3974 if (mode_arg0 == VOIDmode || GET_MODE_CLASS (mode_arg0) == MODE_CC)
3975 break;
3977 const_arg0 = equiv_constant (folded_arg0);
3978 const_arg1 = equiv_constant (folded_arg1);
3980 /* If we do not now have two constants being compared, see
3981 if we can nevertheless deduce some things about the
3982 comparison. */
3983 if (const_arg0 == 0 || const_arg1 == 0)
3985 /* Some addresses are known to be nonzero. We don't know
3986 their sign, but equality comparisons are known. */
3987 if (const_arg1 == const0_rtx
3988 && nonzero_address_p (folded_arg0))
3990 if (code == EQ)
3991 return false_rtx;
3992 else if (code == NE)
3993 return true_rtx;
3996 /* See if the two operands are the same. */
3998 if (folded_arg0 == folded_arg1
3999 || (REG_P (folded_arg0)
4000 && REG_P (folded_arg1)
4001 && (REG_QTY (REGNO (folded_arg0))
4002 == REG_QTY (REGNO (folded_arg1))))
4003 || ((p0 = lookup (folded_arg0,
4004 SAFE_HASH (folded_arg0, mode_arg0),
4005 mode_arg0))
4006 && (p1 = lookup (folded_arg1,
4007 SAFE_HASH (folded_arg1, mode_arg0),
4008 mode_arg0))
4009 && p0->first_same_value == p1->first_same_value))
4011 /* Sadly two equal NaNs are not equivalent. */
4012 if (!HONOR_NANS (mode_arg0))
4013 return ((code == EQ || code == LE || code == GE
4014 || code == LEU || code == GEU || code == UNEQ
4015 || code == UNLE || code == UNGE
4016 || code == ORDERED)
4017 ? true_rtx : false_rtx);
4018 /* Take care for the FP compares we can resolve. */
4019 if (code == UNEQ || code == UNLE || code == UNGE)
4020 return true_rtx;
4021 if (code == LTGT || code == LT || code == GT)
4022 return false_rtx;
4025 /* If FOLDED_ARG0 is a register, see if the comparison we are
4026 doing now is either the same as we did before or the reverse
4027 (we only check the reverse if not floating-point). */
4028 else if (REG_P (folded_arg0))
4030 int qty = REG_QTY (REGNO (folded_arg0));
4032 if (REGNO_QTY_VALID_P (REGNO (folded_arg0)))
4034 struct qty_table_elem *ent = &qty_table[qty];
4036 if ((comparison_dominates_p (ent->comparison_code, code)
4037 || (! FLOAT_MODE_P (mode_arg0)
4038 && comparison_dominates_p (ent->comparison_code,
4039 reverse_condition (code))))
4040 && (rtx_equal_p (ent->comparison_const, folded_arg1)
4041 || (const_arg1
4042 && rtx_equal_p (ent->comparison_const,
4043 const_arg1))
4044 || (REG_P (folded_arg1)
4045 && (REG_QTY (REGNO (folded_arg1)) == ent->comparison_qty))))
4046 return (comparison_dominates_p (ent->comparison_code, code)
4047 ? true_rtx : false_rtx);
4053 /* If we are comparing against zero, see if the first operand is
4054 equivalent to an IOR with a constant. If so, we may be able to
4055 determine the result of this comparison. */
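/* Illustrative example (hypothetical registers): if (reg 117) is
   known to be (ior (reg 118) (const_int 4)), it cannot be zero, so
   (eq (reg 117) (const_int 0)) folds to false and NE to true; if the
   constant also sets the sign bit, LT/LE fold to true and GT/GE to
   false below.  */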
4057 if (const_arg1 == const0_rtx)
4059 rtx y = lookup_as_function (folded_arg0, IOR);
4060 rtx inner_const;
4062 if (y != 0
4063 && (inner_const = equiv_constant (XEXP (y, 1))) != 0
4064 && GET_CODE (inner_const) == CONST_INT
4065 && INTVAL (inner_const) != 0)
4067 int sign_bitnum = GET_MODE_BITSIZE (mode_arg0) - 1;
4068 int has_sign = (HOST_BITS_PER_WIDE_INT > sign_bitnum
4069 && (INTVAL (inner_const)
4070 & ((HOST_WIDE_INT) 1 << sign_bitnum)));
4071 rtx true_rtx = const_true_rtx, false_rtx = const0_rtx;
4073 #ifdef FLOAT_STORE_FLAG_VALUE
4074 if (SCALAR_FLOAT_MODE_P (mode))
4076 true_rtx = (CONST_DOUBLE_FROM_REAL_VALUE
4077 (FLOAT_STORE_FLAG_VALUE (mode), mode));
4078 false_rtx = CONST0_RTX (mode);
4080 #endif
4082 switch (code)
4084 case EQ:
4085 return false_rtx;
4086 case NE:
4087 return true_rtx;
4088 case LT: case LE:
4089 if (has_sign)
4090 return true_rtx;
4091 break;
4092 case GT: case GE:
4093 if (has_sign)
4094 return false_rtx;
4095 break;
4096 default:
4097 break;
4103 rtx op0 = const_arg0 ? const_arg0 : folded_arg0;
4104 rtx op1 = const_arg1 ? const_arg1 : folded_arg1;
4105 new = simplify_relational_operation (code, mode, mode_arg0, op0, op1);
4107 break;
4109 case RTX_BIN_ARITH:
4110 case RTX_COMM_ARITH:
4111 switch (code)
4113 case PLUS:
4114 /* If the second operand is a LABEL_REF, see if the first is a MINUS
4115 with that LABEL_REF as its second operand. If so, the result is
4116 the first operand of that MINUS. This handles switches with an
4117 ADDR_DIFF_VEC table. */
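/* E.g. (plus (minus (label_ref L2) (label_ref L1)) (label_ref L1))
   simplifies to (label_ref L2); the labels are hypothetical.  */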
4118 if (const_arg1 && GET_CODE (const_arg1) == LABEL_REF)
4120 rtx y
4121 = GET_CODE (folded_arg0) == MINUS ? folded_arg0
4122 : lookup_as_function (folded_arg0, MINUS);
4124 if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
4125 && XEXP (XEXP (y, 1), 0) == XEXP (const_arg1, 0))
4126 return XEXP (y, 0);
4128 /* Now try for a CONST of a MINUS like the above. */
4129 if ((y = (GET_CODE (folded_arg0) == CONST ? folded_arg0
4130 : lookup_as_function (folded_arg0, CONST))) != 0
4131 && GET_CODE (XEXP (y, 0)) == MINUS
4132 && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
4133 && XEXP (XEXP (XEXP (y, 0), 1), 0) == XEXP (const_arg1, 0))
4134 return XEXP (XEXP (y, 0), 0);
4137 /* Likewise if the operands are in the other order. */
4138 if (const_arg0 && GET_CODE (const_arg0) == LABEL_REF)
4140 rtx y
4141 = GET_CODE (folded_arg1) == MINUS ? folded_arg1
4142 : lookup_as_function (folded_arg1, MINUS);
4144 if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
4145 && XEXP (XEXP (y, 1), 0) == XEXP (const_arg0, 0))
4146 return XEXP (y, 0);
4148 /* Now try for a CONST of a MINUS like the above. */
4149 if ((y = (GET_CODE (folded_arg1) == CONST ? folded_arg1
4150 : lookup_as_function (folded_arg1, CONST))) != 0
4151 && GET_CODE (XEXP (y, 0)) == MINUS
4152 && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
4153 && XEXP (XEXP (XEXP (y, 0), 1), 0) == XEXP (const_arg0, 0))
4154 return XEXP (XEXP (y, 0), 0);
4157 /* If second operand is a register equivalent to a negative
4158 CONST_INT, see if we can find a register equivalent to the
4159 positive constant. Make a MINUS if so. Don't do this for
4160 a non-negative constant since we might then alternate between
4161 choosing positive and negative constants. Having the positive
4162 constant previously-used is the more common case. Be sure
4163 the resulting constant is non-negative; if const_arg1 were
4164 the smallest negative number this would overflow: depending
4165 on the mode, this would either just be the same value (and
4166 hence not save anything) or be incorrect. */
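/* Illustrative example (hypothetical registers): given
   (plus (reg 117) (reg 118)) where (reg 118) is equivalent to
   (const_int -4) and some (reg 119) is known to hold (const_int 4),
   we produce (minus (reg 117) (reg 119)).  */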
4167 if (const_arg1 != 0 && GET_CODE (const_arg1) == CONST_INT
4168 && INTVAL (const_arg1) < 0
4169 /* This used to test
4171 -INTVAL (const_arg1) >= 0
4173 But the Sun V5.0 compilers mis-compiled that test. So
4174 instead we test for the problematic value in a more direct
4175 manner and hope the Sun compilers get it correct. */
4176 && INTVAL (const_arg1) !=
4177 ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1))
4178 && REG_P (folded_arg1))
4180 rtx new_const = GEN_INT (-INTVAL (const_arg1));
4181 struct table_elt *p
4182 = lookup (new_const, SAFE_HASH (new_const, mode), mode);
4184 if (p)
4185 for (p = p->first_same_value; p; p = p->next_same_value)
4186 if (REG_P (p->exp))
4187 return simplify_gen_binary (MINUS, mode, folded_arg0,
4188 canon_reg (p->exp, NULL_RTX));
4190 goto from_plus;
4192 case MINUS:
4193 /* If we have (MINUS Y C), see if Y is known to be (PLUS Z C2).
4194 If so, produce (PLUS Z C2-C). */
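/* E.g. (minus (reg 117) (const_int 4)) where (reg 117) is known to
   be (plus (reg 118) (const_int 10)) folds to
   (plus (reg 118) (const_int 6)); registers hypothetical.  */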
4195 if (const_arg1 != 0 && GET_CODE (const_arg1) == CONST_INT)
4197 rtx y = lookup_as_function (XEXP (x, 0), PLUS);
4198 if (y && GET_CODE (XEXP (y, 1)) == CONST_INT)
4199 return fold_rtx (plus_constant (copy_rtx (y),
4200 -INTVAL (const_arg1)),
4201 NULL_RTX);
4204 /* Fall through. */
4206 from_plus:
4207 case SMIN: case SMAX: case UMIN: case UMAX:
4208 case IOR: case AND: case XOR:
4209 case MULT:
4210 case ASHIFT: case LSHIFTRT: case ASHIFTRT:
4211 /* If we have (<op> <reg> <const_int>) for an associative OP and REG
4212 is known to be of similar form, we may be able to replace the
4213 operation with a combined operation. This may eliminate the
4214 intermediate operation if every use is simplified in this way.
4215 Note that the similar optimization done by combine.c only works
4216 if the intermediate operation's result has only one reference. */
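/* Illustrative example (hypothetical registers): if (reg 117) was
   computed as (ashift (reg 118) (const_int 2)), then
   (ashift (reg 117) (const_int 3)) can be rewritten as
   (ashift (reg 118) (const_int 5)), making the intermediate shift
   dead if this was its only use.  */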
4218 if (REG_P (folded_arg0)
4219 && const_arg1 && GET_CODE (const_arg1) == CONST_INT)
4221 int is_shift
4222 = (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT);
4223 rtx y = lookup_as_function (folded_arg0, code);
4224 rtx inner_const;
4225 enum rtx_code associate_code;
4226 rtx new_const;
4228 if (y == 0
4229 || 0 == (inner_const
4230 = equiv_constant (fold_rtx (XEXP (y, 1), 0)))
4231 || GET_CODE (inner_const) != CONST_INT
4232 /* If we have compiled a statement like
4233 "if (x == (x & mask1))", and now are looking at
4234 "x & mask2", we will have a case where the first operand
4235 of Y is the same as our first operand. Unless we detect
4236 this case, an infinite loop will result. */
4237 || XEXP (y, 0) == folded_arg0)
4238 break;
4240 /* Don't associate these operations if they are a PLUS with the
4241 same constant and it is a power of two. These might be doable
4242 with a pre- or post-increment. Similarly for two subtracts of
4243 identical powers of two with post decrement. */
4245 if (code == PLUS && const_arg1 == inner_const
4246 && ((HAVE_PRE_INCREMENT
4247 && exact_log2 (INTVAL (const_arg1)) >= 0)
4248 || (HAVE_POST_INCREMENT
4249 && exact_log2 (INTVAL (const_arg1)) >= 0)
4250 || (HAVE_PRE_DECREMENT
4251 && exact_log2 (- INTVAL (const_arg1)) >= 0)
4252 || (HAVE_POST_DECREMENT
4253 && exact_log2 (- INTVAL (const_arg1)) >= 0)))
4254 break;
4256 /* Compute the code used to compose the constants. For example,
4257 A-C1-C2 is A-(C1 + C2), so if CODE == MINUS, we want PLUS. */
4259 associate_code = (is_shift || code == MINUS ? PLUS : code);
4261 new_const = simplify_binary_operation (associate_code, mode,
4262 const_arg1, inner_const);
4264 if (new_const == 0)
4265 break;
4267 /* If we are associating shift operations, don't let this
4268 produce a shift of the size of the object or larger.
4269 This could occur when we follow a sign-extend by a right
4270 shift on a machine that does a sign-extend as a pair
4271 of shifts. */
4273 if (is_shift && GET_CODE (new_const) == CONST_INT
4274 && INTVAL (new_const) >= GET_MODE_BITSIZE (mode))
4276 /* As an exception, we can turn an ASHIFTRT of this
4277 form into a shift of the number of bits - 1. */
4278 if (code == ASHIFTRT)
4279 new_const = GEN_INT (GET_MODE_BITSIZE (mode) - 1);
4280 else
4281 break;
4284 y = copy_rtx (XEXP (y, 0));
4286 /* If Y contains our first operand (the most common way this
4287 can happen is if Y is a MEM), we would go into an infinite
4288 loop if we tried to fold it. So don't in that case. */
4290 if (! reg_mentioned_p (folded_arg0, y))
4291 y = fold_rtx (y, insn);
4293 return simplify_gen_binary (code, mode, y, new_const);
4295 break;
4297 case DIV: case UDIV:
4298 /* ??? The associative optimization performed immediately above is
4299 also possible for DIV and UDIV using associate_code of MULT.
4300 However, we would need extra code to verify that the
4301 multiplication does not overflow, that is, there is no overflow
4302 in the calculation of new_const. */
4303 break;
4305 default:
4306 break;
4309 new = simplify_binary_operation (code, mode,
4310 const_arg0 ? const_arg0 : folded_arg0,
4311 const_arg1 ? const_arg1 : folded_arg1);
4312 break;
4314 case RTX_OBJ:
4315 /* (lo_sum (high X) X) is simply X. */
4316 if (code == LO_SUM && const_arg0 != 0
4317 && GET_CODE (const_arg0) == HIGH
4318 && rtx_equal_p (XEXP (const_arg0, 0), const_arg1))
4319 return const_arg1;
4320 break;
4322 case RTX_TERNARY:
4323 case RTX_BITFIELD_OPS:
4324 new = simplify_ternary_operation (code, mode, mode_arg0,
4325 const_arg0 ? const_arg0 : folded_arg0,
4326 const_arg1 ? const_arg1 : folded_arg1,
4327 const_arg2 ? const_arg2 : XEXP (x, 2));
4328 break;
4330 default:
4331 break;
4334 return new ? new : x;
4337 /* Return a constant value currently equivalent to X.
4338 Return 0 if we don't know one. */
4340 static rtx
4341 equiv_constant (rtx x)
4343 if (REG_P (x)
4344 && REGNO_QTY_VALID_P (REGNO (x)))
4346 int x_q = REG_QTY (REGNO (x));
4347 struct qty_table_elem *x_ent = &qty_table[x_q];
4349 if (x_ent->const_rtx)
4350 x = gen_lowpart (GET_MODE (x), x_ent->const_rtx);
4353 if (x == 0 || CONSTANT_P (x))
4354 return x;
4356 /* If X is a MEM, try to fold it outside the context of any insn to see if
4357 it might be equivalent to a constant. That handles the case where it
4358 is a constant-pool reference. Then try to look it up in the hash table
4359 in case it is something whose value we have seen before. */
4361 if (MEM_P (x))
4363 struct table_elt *elt;
4365 x = fold_rtx (x, NULL_RTX);
4366 if (CONSTANT_P (x))
4367 return x;
4369 elt = lookup (x, SAFE_HASH (x, GET_MODE (x)), GET_MODE (x));
4370 if (elt == 0)
4371 return 0;
4373 for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
4374 if (elt->is_const && CONSTANT_P (elt->exp))
4375 return elt->exp;
4378 return 0;
4381 /* Given INSN, a jump insn, TAKEN indicates if we are following the "taken"
4382 branch. It will be zero if not.
4384 In certain cases, this can cause us to add an equivalence. For example,
4385 if we are following the taken case of
4386 if (i == 2)
4387 we can add the fact that `i' and '2' are now equivalent.
4389 In any case, we can record that this comparison was passed. If the same
4390 comparison is seen later, we will know its value. */
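/* Illustrative example (hypothetical register): after following the
   taken arm of a branch on (eq (reg 117) (const_int 2)), we record
   that (reg 117) and (const_int 2) are equivalent, so later uses of
   (reg 117) along this path can be folded to the constant.  */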
4392 static void
4393 record_jump_equiv (rtx insn, int taken)
4395 int cond_known_true;
4396 rtx op0, op1;
4397 rtx set;
4398 enum machine_mode mode, mode0, mode1;
4399 int reversed_nonequality = 0;
4400 enum rtx_code code;
4402 /* Ensure this is the right kind of insn. */
4403 if (! any_condjump_p (insn))
4404 return;
4405 set = pc_set (insn);
4407 /* See if this jump condition is known true or false. */
4408 if (taken)
4409 cond_known_true = (XEXP (SET_SRC (set), 2) == pc_rtx);
4410 else
4411 cond_known_true = (XEXP (SET_SRC (set), 1) == pc_rtx);
4413 /* Get the type of comparison being done and the operands being compared.
4414 If we had to reverse a non-equality condition, record that fact so we
4415 know that it isn't valid for floating-point. */
4416 code = GET_CODE (XEXP (SET_SRC (set), 0));
4417 op0 = fold_rtx (XEXP (XEXP (SET_SRC (set), 0), 0), insn);
4418 op1 = fold_rtx (XEXP (XEXP (SET_SRC (set), 0), 1), insn);
4420 code = find_comparison_args (code, &op0, &op1, &mode0, &mode1);
4421 if (! cond_known_true)
4423 code = reversed_comparison_code_parts (code, op0, op1, insn);
4425 /* Don't remember if we can't find the inverse. */
4426 if (code == UNKNOWN)
4427 return;
4430 /* The mode is the mode of the non-constant. */
4431 mode = mode0;
4432 if (mode1 != VOIDmode)
4433 mode = mode1;
4435 record_jump_cond (code, mode, op0, op1, reversed_nonequality);
4438 /* Yet another form of subreg creation. In this case, we want something in
4439 MODE, and we should assume OP has MODE iff it is naturally modeless. */
4441 static rtx
4442 record_jump_cond_subreg (enum machine_mode mode, rtx op)
4444 enum machine_mode op_mode = GET_MODE (op);
4445 if (op_mode == mode || op_mode == VOIDmode)
4446 return op;
4447 return lowpart_subreg (mode, op, op_mode);
4450 /* We know that comparison CODE applied to OP0 and OP1 in MODE is true.
4451 REVERSED_NONEQUALITY is nonzero if CODE had to be reversed.
4452 Make any useful entries we can with that information. Called from
4453 above function and called recursively. */
4455 static void
4456 record_jump_cond (enum rtx_code code, enum machine_mode mode, rtx op0,
4457 rtx op1, int reversed_nonequality)
4459 unsigned op0_hash, op1_hash;
4460 int op0_in_memory, op1_in_memory;
4461 struct table_elt *op0_elt, *op1_elt;
4463 /* If OP0 and OP1 are known equal, and either is a paradoxical SUBREG,
4464 we know that they are also equal in the smaller mode (this is also
4465 true for all smaller modes whether or not there is a SUBREG, but
4466 is not worth testing for with no SUBREG). */
4468 /* Note that GET_MODE (op0) may not equal MODE. */
4469 if (code == EQ && GET_CODE (op0) == SUBREG
4470 && (GET_MODE_SIZE (GET_MODE (op0))
4471 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
4473 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
4474 rtx tem = record_jump_cond_subreg (inner_mode, op1);
4475 if (tem)
4476 record_jump_cond (code, mode, SUBREG_REG (op0), tem,
4477 reversed_nonequality);
4480 if (code == EQ && GET_CODE (op1) == SUBREG
4481 && (GET_MODE_SIZE (GET_MODE (op1))
4482 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
4484 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
4485 rtx tem = record_jump_cond_subreg (inner_mode, op0);
4486 if (tem)
4487 record_jump_cond (code, mode, SUBREG_REG (op1), tem,
4488 reversed_nonequality);
4491 /* Similarly, if this is an NE comparison, and either is a SUBREG
4492 making a smaller mode, we know the whole thing is also NE. */
4494 /* Note that GET_MODE (op0) may not equal MODE;
4495 if we test MODE instead, we can get an infinite recursion
4496 alternating between two modes each wider than MODE. */
4498 if (code == NE && GET_CODE (op0) == SUBREG
4499 && subreg_lowpart_p (op0)
4500 && (GET_MODE_SIZE (GET_MODE (op0))
4501 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
4503 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
4504 rtx tem = record_jump_cond_subreg (inner_mode, op1);
4505 if (tem)
4506 record_jump_cond (code, mode, SUBREG_REG (op0), tem,
4507 reversed_nonequality);
4510 if (code == NE && GET_CODE (op1) == SUBREG
4511 && subreg_lowpart_p (op1)
4512 && (GET_MODE_SIZE (GET_MODE (op1))
4513 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
4515 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
4516 rtx tem = record_jump_cond_subreg (inner_mode, op0);
4517 if (tem)
4518 record_jump_cond (code, mode, SUBREG_REG (op1), tem,
4519 reversed_nonequality);
4522 /* Hash both operands. */
4524 do_not_record = 0;
4525 hash_arg_in_memory = 0;
4526 op0_hash = HASH (op0, mode);
4527 op0_in_memory = hash_arg_in_memory;
4529 if (do_not_record)
4530 return;
4532 do_not_record = 0;
4533 hash_arg_in_memory = 0;
4534 op1_hash = HASH (op1, mode);
4535 op1_in_memory = hash_arg_in_memory;
4537 if (do_not_record)
4538 return;
4540 /* Look up both operands. */
4541 op0_elt = lookup (op0, op0_hash, mode);
4542 op1_elt = lookup (op1, op1_hash, mode);
4544 /* If both operands are already equivalent or if they are not in the
4545 table but are identical, do nothing. */
4546 if ((op0_elt != 0 && op1_elt != 0
4547 && op0_elt->first_same_value == op1_elt->first_same_value)
4548 || op0 == op1 || rtx_equal_p (op0, op1))
4549 return;
4551 /* If we aren't setting two things equal, all we can do is save this
4552 comparison. Similarly if this is floating-point. In the latter
4553 case, OP1 might be zero and both -0.0 and 0.0 are equal to it.
4554 If we record the equality, we might inadvertently delete code
4555 whose intent was to change -0 to +0. */
4557 if (code != EQ || FLOAT_MODE_P (GET_MODE (op0)))
4559 struct qty_table_elem *ent;
4560 int qty;
4562 /* If we reversed a floating-point comparison, if OP0 is not a
4563 register, or if OP1 is neither a register nor a constant, we can't
4564 do anything. */
4566 if (!REG_P (op1))
4567 op1 = equiv_constant (op1);
4569 if ((reversed_nonequality && FLOAT_MODE_P (mode))
4570 || !REG_P (op0) || op1 == 0)
4571 return;
4573 /* Put OP0 in the hash table if it isn't already. This gives it a
4574 new quantity number. */
4575 if (op0_elt == 0)
4577 if (insert_regs (op0, NULL, 0))
4579 rehash_using_reg (op0);
4580 op0_hash = HASH (op0, mode);
4582 /* If OP0 is contained in OP1, this changes its hash code
4583 as well. Faster to rehash than to check, except
4584 for the simple case of a constant. */
4585 if (! CONSTANT_P (op1))
4586 op1_hash = HASH (op1, mode);
4589 op0_elt = insert (op0, NULL, op0_hash, mode);
4590 op0_elt->in_memory = op0_in_memory;
4593 qty = REG_QTY (REGNO (op0));
4594 ent = &qty_table[qty];
4596 ent->comparison_code = code;
4597 if (REG_P (op1))
4599 /* Look it up again--in case op0 and op1 are the same. */
4600 op1_elt = lookup (op1, op1_hash, mode);
4602 /* Put OP1 in the hash table so it gets a new quantity number. */
4603 if (op1_elt == 0)
4605 if (insert_regs (op1, NULL, 0))
4607 rehash_using_reg (op1);
4608 op1_hash = HASH (op1, mode);
4611 op1_elt = insert (op1, NULL, op1_hash, mode);
4612 op1_elt->in_memory = op1_in_memory;
4615 ent->comparison_const = NULL_RTX;
4616 ent->comparison_qty = REG_QTY (REGNO (op1));
4618 else
4620 ent->comparison_const = op1;
4621 ent->comparison_qty = -1;
4624 return;
4627 /* If either side is still missing an equivalence, make it now,
4628 then merge the equivalences. */
4630 if (op0_elt == 0)
4632 if (insert_regs (op0, NULL, 0))
4634 rehash_using_reg (op0);
4635 op0_hash = HASH (op0, mode);
4638 op0_elt = insert (op0, NULL, op0_hash, mode);
4639 op0_elt->in_memory = op0_in_memory;
4642 if (op1_elt == 0)
4644 if (insert_regs (op1, NULL, 0))
4646 rehash_using_reg (op1);
4647 op1_hash = HASH (op1, mode);
4650 op1_elt = insert (op1, NULL, op1_hash, mode);
4651 op1_elt->in_memory = op1_in_memory;
4654 merge_equiv_classes (op0_elt, op1_elt);
4657 /* CSE processing for one instruction.
4658 First simplify sources and addresses of all assignments
4659 in the instruction, using previously-computed equivalent values.
4660 Then install the new sources and destinations in the table
4661 of available values.
4663 If LIBCALL_INSN is nonzero, don't record any equivalence made in
4664 the insn; this means that INSN is inside a libcall block. In this
4665 case LIBCALL_INSN is the corresponding insn with REG_LIBCALL. */
4667 /* Data on one SET contained in the instruction. */
4669 struct set
4671 /* The SET rtx itself. */
4672 rtx rtl;
4673 /* The SET_SRC of the rtx (the original value, if it is changing). */
4674 rtx src;
4675 /* The hash-table element for the SET_SRC of the SET. */
4676 struct table_elt *src_elt;
4677 /* Hash value for the SET_SRC. */
4678 unsigned src_hash;
4679 /* Hash value for the SET_DEST. */
4680 unsigned dest_hash;
4681 /* The SET_DEST, with SUBREG, etc., stripped. */
4682 rtx inner_dest;
4683 /* Nonzero if the SET_SRC is in memory. */
4684 char src_in_memory;
4685 /* Nonzero if the SET_SRC contains something
4686 whose value cannot be predicted and understood. */
4687 char src_volatile;
4688 /* Original machine mode, in case it becomes a CONST_INT.
4689 The size of this field should match the size of the mode
4690 field of struct rtx_def (see rtl.h). */
4691 ENUM_BITFIELD(machine_mode) mode : 8;
4692 /* A constant equivalent for SET_SRC, if any. */
4693 rtx src_const;
4694 /* Original SET_SRC value used for libcall notes. */
4695 rtx orig_src;
4696 /* Hash value of constant equivalent for SET_SRC. */
4697 unsigned src_const_hash;
4698 /* Table entry for constant equivalent for SET_SRC, if any. */
4699 struct table_elt *src_const_elt;
4702 static void
4703 cse_insn (rtx insn, rtx libcall_insn)
4705 rtx x = PATTERN (insn);
4706 int i;
4707 rtx tem;
4708 int n_sets = 0;
4710 #ifdef HAVE_cc0
4711 /* Records what this insn does to set CC0. */
4712 rtx this_insn_cc0 = 0;
4713 enum machine_mode this_insn_cc0_mode = VOIDmode;
4714 #endif
4716 rtx src_eqv = 0;
4717 struct table_elt *src_eqv_elt = 0;
4718 int src_eqv_volatile = 0;
4719 int src_eqv_in_memory = 0;
4720 unsigned src_eqv_hash = 0;
4722 struct set *sets = (struct set *) 0;
4724 this_insn = insn;
4726 /* Find all the SETs and CLOBBERs in this instruction.
4727 Record all the SETs in the array `sets' and count them.
4728 Also determine whether there is a CLOBBER that invalidates
4729 all memory references, or all references at varying addresses. */
4731 if (CALL_P (insn))
4733 for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
4735 if (GET_CODE (XEXP (tem, 0)) == CLOBBER)
4736 invalidate (SET_DEST (XEXP (tem, 0)), VOIDmode);
4737 XEXP (tem, 0) = canon_reg (XEXP (tem, 0), insn);
4741 if (GET_CODE (x) == SET)
4743 sets = alloca (sizeof (struct set));
4744 sets[0].rtl = x;
4746 /* Ignore SETs that are unconditional jumps.
4747 They never need cse processing, so this does not hurt.
4748 The reason is not efficiency but rather
4749 so that we can test at the end for instructions
4750 that have been simplified to unconditional jumps
4751 and not be misled by unchanged instructions
4752 that were unconditional jumps to begin with. */
4753 if (SET_DEST (x) == pc_rtx
4754 && GET_CODE (SET_SRC (x)) == LABEL_REF)
4757 /* Don't count call-insns, (set (reg 0) (call ...)), as a set.
4758 The hard function value register is used only once, to copy to
4759 someplace else, so it isn't worth cse'ing (and on 80386 is unsafe)!
4760 Ensure we invalidate the destination register. On the 80386 no
4761 other code would invalidate it since it is a fixed_reg.
4762 We need not check the return of apply_change_group; see canon_reg. */
4764 else if (GET_CODE (SET_SRC (x)) == CALL)
4766 canon_reg (SET_SRC (x), insn);
4767 apply_change_group ();
4768 fold_rtx (SET_SRC (x), insn);
4769 invalidate (SET_DEST (x), VOIDmode);
4771 else
4772 n_sets = 1;
4774 else if (GET_CODE (x) == PARALLEL)
4776 int lim = XVECLEN (x, 0);
4778 sets = alloca (lim * sizeof (struct set));
4780 /* Find all regs explicitly clobbered in this insn,
4781 and ensure they are not replaced with any other regs
4782 elsewhere in this insn.
4783 When a reg that is clobbered is also used for input,
4784 we should presume that that is for a reason,
4785 and we should not substitute some other register
4786 which is not supposed to be clobbered.
4787 Therefore, this loop cannot be merged into the one below
4788 because a CALL may precede a CLOBBER and refer to the
4789 value clobbered. We must not let a canonicalization do
4790 anything in that case. */
4791 for (i = 0; i < lim; i++)
4793 rtx y = XVECEXP (x, 0, i);
4794 if (GET_CODE (y) == CLOBBER)
4796 rtx clobbered = XEXP (y, 0);
4798 if (REG_P (clobbered)
4799 || GET_CODE (clobbered) == SUBREG)
4800 invalidate (clobbered, VOIDmode);
4801 else if (GET_CODE (clobbered) == STRICT_LOW_PART
4802 || GET_CODE (clobbered) == ZERO_EXTRACT)
4803 invalidate (XEXP (clobbered, 0), GET_MODE (clobbered));
4807 for (i = 0; i < lim; i++)
4809 rtx y = XVECEXP (x, 0, i);
4810 if (GET_CODE (y) == SET)
4812 /* As above, we ignore unconditional jumps and call-insns and
4813 ignore the result of apply_change_group. */
4814 if (GET_CODE (SET_SRC (y)) == CALL)
4816 canon_reg (SET_SRC (y), insn);
4817 apply_change_group ();
4818 fold_rtx (SET_SRC (y), insn);
4819 invalidate (SET_DEST (y), VOIDmode);
4821 else if (SET_DEST (y) == pc_rtx
4822 && GET_CODE (SET_SRC (y)) == LABEL_REF)
4824 else
4825 sets[n_sets++].rtl = y;
4827 else if (GET_CODE (y) == CLOBBER)
4829 /* If we clobber memory, canon the address.
4830 This does nothing when a register is clobbered
4831 because we have already invalidated the reg. */
4832 if (MEM_P (XEXP (y, 0)))
4833 canon_reg (XEXP (y, 0), NULL_RTX);
4835 else if (GET_CODE (y) == USE
4836 && ! (REG_P (XEXP (y, 0))
4837 && REGNO (XEXP (y, 0)) < FIRST_PSEUDO_REGISTER))
4838 canon_reg (y, NULL_RTX);
4839 else if (GET_CODE (y) == CALL)
4841 /* The result of apply_change_group can be ignored; see
4842 canon_reg. */
4843 canon_reg (y, insn);
4844 apply_change_group ();
4845 fold_rtx (y, insn);
4849 else if (GET_CODE (x) == CLOBBER)
4851 if (MEM_P (XEXP (x, 0)))
4852 canon_reg (XEXP (x, 0), NULL_RTX);
4855 /* Canonicalize a USE of a pseudo register or memory location. */
4856 else if (GET_CODE (x) == USE
4857 && ! (REG_P (XEXP (x, 0))
4858 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER))
4859 canon_reg (XEXP (x, 0), NULL_RTX);
4860 else if (GET_CODE (x) == CALL)
4862 /* The result of apply_change_group can be ignored; see canon_reg. */
4863 canon_reg (x, insn);
4864 apply_change_group ();
4865 fold_rtx (x, insn);
4868 /* Store the equivalent value in SRC_EQV, if different, or if the DEST
4869 is a STRICT_LOW_PART. The latter condition is necessary because SRC_EQV
4870 is handled specially for this case, and if it isn't set, then there will
4871 be no equivalence for the destination. */
4872 if (n_sets == 1 && REG_NOTES (insn) != 0
4873 && (tem = find_reg_note (insn, REG_EQUAL, NULL_RTX)) != 0
4874 && (! rtx_equal_p (XEXP (tem, 0), SET_SRC (sets[0].rtl))
4875 || GET_CODE (SET_DEST (sets[0].rtl)) == STRICT_LOW_PART))
4877 src_eqv = fold_rtx (canon_reg (XEXP (tem, 0), NULL_RTX), insn);
4878 XEXP (tem, 0) = src_eqv;
4881 /* Canonicalize sources and addresses of destinations.
4882 We do this in a separate pass to avoid problems when a MATCH_DUP is
4883 present in the insn pattern. In that case, we want to ensure that
4884 we don't break the duplicate nature of the pattern. So we will replace
4885 both operands at the same time. Otherwise, we would fail to find an
4886 equivalent substitution in the loop calling validate_change below.
4888 We used to suppress canonicalization of DEST if it appears in SRC,
4889 but we don't do this any more. */
4891 for (i = 0; i < n_sets; i++)
4893 rtx dest = SET_DEST (sets[i].rtl);
4894 rtx src = SET_SRC (sets[i].rtl);
4895 rtx new = canon_reg (src, insn);
4896 int insn_code;
4898 sets[i].orig_src = src;
4899 if ((REG_P (new) && REG_P (src)
4900 && ((REGNO (new) < FIRST_PSEUDO_REGISTER)
4901 != (REGNO (src) < FIRST_PSEUDO_REGISTER)))
4902 || (insn_code = recog_memoized (insn)) < 0
4903 || insn_data[insn_code].n_dups > 0)
4904 validate_change (insn, &SET_SRC (sets[i].rtl), new, 1);
4905 else
4906 SET_SRC (sets[i].rtl) = new;
4908 if (GET_CODE (dest) == ZERO_EXTRACT)
4910 validate_change (insn, &XEXP (dest, 1),
4911 canon_reg (XEXP (dest, 1), insn), 1);
4912 validate_change (insn, &XEXP (dest, 2),
4913 canon_reg (XEXP (dest, 2), insn), 1);
4916 while (GET_CODE (dest) == SUBREG
4917 || GET_CODE (dest) == ZERO_EXTRACT
4918 || GET_CODE (dest) == STRICT_LOW_PART)
4919 dest = XEXP (dest, 0);
4921 if (MEM_P (dest))
4922 canon_reg (dest, insn);
4925 /* Now that we have done all the replacements, we can apply the change
4926 group and see if they all work. Note that this will cause some
4927 canonicalizations that would have worked individually not to be applied
4928 because some other canonicalization didn't work, but this should not
4929 occur often.
4931 The result of apply_change_group can be ignored; see canon_reg. */
4933 apply_change_group ();
4935 /* Set sets[i].src_elt to the class each source belongs to.
4936 Detect assignments from or to volatile things
4937 and set sets[i] to zero so they will be ignored
4938 in the rest of this function.
4940 Nothing in this loop changes the hash table or the register chains. */
4942 for (i = 0; i < n_sets; i++)
4944 rtx src, dest;
4945 rtx src_folded;
4946 struct table_elt *elt = 0, *p;
4947 enum machine_mode mode;
4948 rtx src_eqv_here;
4949 rtx src_const = 0;
4950 rtx src_related = 0;
4951 struct table_elt *src_const_elt = 0;
4952 int src_cost = MAX_COST;
4953 int src_eqv_cost = MAX_COST;
4954 int src_folded_cost = MAX_COST;
4955 int src_related_cost = MAX_COST;
4956 int src_elt_cost = MAX_COST;
4957 int src_regcost = MAX_COST;
4958 int src_eqv_regcost = MAX_COST;
4959 int src_folded_regcost = MAX_COST;
4960 int src_related_regcost = MAX_COST;
4961 int src_elt_regcost = MAX_COST;
4962 /* Set nonzero if we need to call force_const_mem on the
4963 contents of src_folded before using it. */
4964 int src_folded_force_flag = 0;
4966 dest = SET_DEST (sets[i].rtl);
4967 src = SET_SRC (sets[i].rtl);
4969 /* If SRC is a constant that has no machine mode,
4970 hash it with the destination's machine mode.
4971 This way we can keep different modes separate. */
4973 mode = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
4974 sets[i].mode = mode;
4976 if (src_eqv)
4978 enum machine_mode eqvmode = mode;
4979 if (GET_CODE (dest) == STRICT_LOW_PART)
4980 eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
4981 do_not_record = 0;
4982 hash_arg_in_memory = 0;
4983 src_eqv_hash = HASH (src_eqv, eqvmode);
4985 /* Find the equivalence class for the equivalent expression. */
4987 if (!do_not_record)
4988 src_eqv_elt = lookup (src_eqv, src_eqv_hash, eqvmode);
4990 src_eqv_volatile = do_not_record;
4991 src_eqv_in_memory = hash_arg_in_memory;
4994 /* If this is a STRICT_LOW_PART assignment, src_eqv corresponds to the
4995 value of the INNER register, not the destination. So it is not
4996 a valid substitution for the source. But save it for later. */
4997 if (GET_CODE (dest) == STRICT_LOW_PART)
4998 src_eqv_here = 0;
4999 else
5000 src_eqv_here = src_eqv;
5002 /* Simplify any foldable subexpressions in SRC. Then get the fully-
5003 simplified result, which may not necessarily be valid. */
5004 src_folded = fold_rtx (src, insn);
5006 #if 0
5007 /* ??? This caused bad code to be generated for the m68k port with -O2.
5008 Suppose src is (CONST_INT -1), and that after truncation src_folded
5009 is (CONST_INT 3). Suppose src_folded is then used for src_const.
5010 At the end we will add src and src_const to the same equivalence
5011 class. We now have 3 and -1 on the same equivalence class. This
5012 causes later instructions to be mis-optimized. */
5013 /* If storing a constant in a bitfield, pre-truncate the constant
5014 so we will be able to record it later. */
5015 if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT)
5017 rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
5019 if (GET_CODE (src) == CONST_INT
5020 && GET_CODE (width) == CONST_INT
5021 && INTVAL (width) < HOST_BITS_PER_WIDE_INT
5022 && (INTVAL (src) & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
5023 src_folded
5024 = GEN_INT (INTVAL (src) & (((HOST_WIDE_INT) 1
5025 << INTVAL (width)) - 1));
5027 #endif
5029 /* Compute SRC's hash code, and also notice if it
5030 should not be recorded at all. In that case,
5031 prevent any further processing of this assignment. */
5032 do_not_record = 0;
5033 hash_arg_in_memory = 0;
5035 sets[i].src = src;
5036 sets[i].src_hash = HASH (src, mode);
5037 sets[i].src_volatile = do_not_record;
5038 sets[i].src_in_memory = hash_arg_in_memory;
5040 /* If SRC is a MEM, there is a REG_EQUIV note for SRC, and DEST is
5041 a pseudo, do not record SRC. Using SRC as a replacement for
5042 anything else will be incorrect in that situation. Note that
5043 this usually occurs only for stack slots, in which case all the
5044 RTL would be referring to SRC, so we don't lose any optimization
5045 opportunities by not having SRC in the hash table. */
5047 if (MEM_P (src)
5048 && find_reg_note (insn, REG_EQUIV, NULL_RTX) != 0
5049 && REG_P (dest)
5050 && REGNO (dest) >= FIRST_PSEUDO_REGISTER)
5051 sets[i].src_volatile = 1;
5053 #if 0
5054 /* It is no longer clear why we used to do this, but it doesn't
5055 appear to still be needed. So let's try without it since this
5056 code hurts cse'ing widened ops. */
5057 /* If source is a paradoxical subreg (such as QI treated as an SI),
5058 treat it as volatile. It may do the work of an SI in one context
5059 where the extra bits are not being used, but cannot replace an SI
5060 in general. */
5061 if (GET_CODE (src) == SUBREG
5062 && (GET_MODE_SIZE (GET_MODE (src))
5063 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))))
5064 sets[i].src_volatile = 1;
5065 #endif
5067 /* Locate all possible equivalent forms for SRC. Try to replace
5068 SRC in the insn with each cheaper equivalent.
5070 We have the following types of equivalents: SRC itself, a folded
5071 version, a value given in a REG_EQUAL note, or a value related
5072 to a constant.
5074 Each of these equivalents may be part of an additional class
5075 of equivalents (if more than one is in the table, they must be in
5076 the same class; we check for this).
5078 If the source is volatile, we don't do any table lookups.
5080 We note any constant equivalent for possible later use in a
5081 REG_NOTE. */
5083 if (!sets[i].src_volatile)
5084 elt = lookup (src, sets[i].src_hash, mode);
5086 sets[i].src_elt = elt;
5088 if (elt && src_eqv_here && src_eqv_elt)
5090 if (elt->first_same_value != src_eqv_elt->first_same_value)
5092 /* The REG_EQUAL is indicating that two formerly distinct
5093 classes are now equivalent. So merge them. */
5094 merge_equiv_classes (elt, src_eqv_elt);
5095 src_eqv_hash = HASH (src_eqv, elt->mode);
5096 src_eqv_elt = lookup (src_eqv, src_eqv_hash, elt->mode);
5099 src_eqv_here = 0;
5102 else if (src_eqv_elt)
5103 elt = src_eqv_elt;
5105 /* Try to find a constant somewhere and record it in `src_const'.
5106 Record its table element, if any, in `src_const_elt'. Look in
5107 any known equivalences first. (If the constant is not in the
5108 table, also set `sets[i].src_const_hash'). */
5109 if (elt)
5110 for (p = elt->first_same_value; p; p = p->next_same_value)
5111 if (p->is_const)
5113 src_const = p->exp;
5114 src_const_elt = elt;
5115 break;
5118 if (src_const == 0
5119 && (CONSTANT_P (src_folded)
5120 /* Consider (minus (label_ref L1) (label_ref L2)) as
5121 "constant" here so we will record it. This allows us
5122 to fold switch statements when an ADDR_DIFF_VEC is used. */
5123 || (GET_CODE (src_folded) == MINUS
5124 && GET_CODE (XEXP (src_folded, 0)) == LABEL_REF
5125 && GET_CODE (XEXP (src_folded, 1)) == LABEL_REF)))
5126 src_const = src_folded, src_const_elt = elt;
5127 else if (src_const == 0 && src_eqv_here && CONSTANT_P (src_eqv_here))
5128 src_const = src_eqv_here, src_const_elt = src_eqv_elt;
5130 /* If we don't know if the constant is in the table, get its
5131 hash code and look it up. */
5132 if (src_const && src_const_elt == 0)
5134 sets[i].src_const_hash = HASH (src_const, mode);
5135 src_const_elt = lookup (src_const, sets[i].src_const_hash, mode);
5138 sets[i].src_const = src_const;
5139 sets[i].src_const_elt = src_const_elt;
5141 /* If the constant and our source are both in the table, mark them as
5142 equivalent. Otherwise, if a constant is in the table but the source
5143 isn't, set ELT to it. */
5144 if (src_const_elt && elt
5145 && src_const_elt->first_same_value != elt->first_same_value)
5146 merge_equiv_classes (elt, src_const_elt);
5147 else if (src_const_elt && elt == 0)
5148 elt = src_const_elt;
5150 /* See if there is a register linearly related to a constant
5151 equivalent of SRC. */
5152 if (src_const
5153 && (GET_CODE (src_const) == CONST
5154 || (src_const_elt && src_const_elt->related_value != 0)))
5156 src_related = use_related_value (src_const, src_const_elt);
5157 if (src_related)
5159 struct table_elt *src_related_elt
5160 = lookup (src_related, HASH (src_related, mode), mode);
5161 if (src_related_elt && elt)
5163 if (elt->first_same_value
5164 != src_related_elt->first_same_value)
5165 /* This can occur when we previously saw a CONST
5166 involving a SYMBOL_REF and then see the SYMBOL_REF
5167 twice. Merge the involved classes. */
5168 merge_equiv_classes (elt, src_related_elt);
5170 src_related = 0;
5171 src_related_elt = 0;
5173 else if (src_related_elt && elt == 0)
5174 elt = src_related_elt;
5178 /* See if we have a CONST_INT that is already in a register in a
5179 wider mode. */
5181 if (src_const && src_related == 0 && GET_CODE (src_const) == CONST_INT
5182 && GET_MODE_CLASS (mode) == MODE_INT
5183 && GET_MODE_BITSIZE (mode) < BITS_PER_WORD)
5185 enum machine_mode wider_mode;
5187 for (wider_mode = GET_MODE_WIDER_MODE (mode);
5188 GET_MODE_BITSIZE (wider_mode) <= BITS_PER_WORD
5189 && src_related == 0;
5190 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
5192 struct table_elt *const_elt
5193 = lookup (src_const, HASH (src_const, wider_mode), wider_mode);
5195 if (const_elt == 0)
5196 continue;
5198 for (const_elt = const_elt->first_same_value;
5199 const_elt; const_elt = const_elt->next_same_value)
5200 if (REG_P (const_elt->exp))
5202 src_related = gen_lowpart (mode,
5203 const_elt->exp);
5204 break;
5209 /* Another possibility is that we have an AND with a constant in
5210 a mode narrower than a word. If so, it might have been generated
5211 as part of an "if" which would narrow the AND. If we already
5212 have done the AND in a wider mode, we can use a SUBREG of that
5213 value. */
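/* Illustrative example (hypothetical registers): for
   (and:QI (subreg:QI (reg:SI 117) 0) (const_int 0x7f)), if
   (and:SI (reg:SI 117) (const_int 0x7f)) is already available in
   (reg:SI 119), the low part (subreg:QI (reg:SI 119) 0) can be used
   instead.  */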
5215 if (flag_expensive_optimizations && ! src_related
5216 && GET_CODE (src) == AND && GET_CODE (XEXP (src, 1)) == CONST_INT
5217 && GET_MODE_SIZE (mode) < UNITS_PER_WORD)
5219 enum machine_mode tmode;
5220 rtx new_and = gen_rtx_AND (VOIDmode, NULL_RTX, XEXP (src, 1));
5222 for (tmode = GET_MODE_WIDER_MODE (mode);
5223 GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
5224 tmode = GET_MODE_WIDER_MODE (tmode))
5226 rtx inner = gen_lowpart (tmode, XEXP (src, 0));
5227 struct table_elt *larger_elt;
5229 if (inner)
5231 PUT_MODE (new_and, tmode);
5232 XEXP (new_and, 0) = inner;
5233 larger_elt = lookup (new_and, HASH (new_and, tmode), tmode);
5234 if (larger_elt == 0)
5235 continue;
5237 for (larger_elt = larger_elt->first_same_value;
5238 larger_elt; larger_elt = larger_elt->next_same_value)
5239 if (REG_P (larger_elt->exp))
5241 src_related
5242 = gen_lowpart (mode, larger_elt->exp);
5243 break;
5246 if (src_related)
5247 break;
5252 #ifdef LOAD_EXTEND_OP
5253 /* See if a MEM has already been loaded with a widening operation;
5254 if it has, we can use a subreg of that. Many CISC machines
5255 also have such operations, but this is only likely to be
5256 beneficial on machines where loads are always extended. */
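/* Illustrative example: on a target whose LOAD_EXTEND_OP (QImode)
   is ZERO_EXTEND, if (zero_extend:SI (mem:QI addr)) was already
   loaded into (reg:SI 117), a later QImode read of the same memory
   can use (subreg:QI (reg:SI 117) 0); the register is
   hypothetical.  */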
5258 if (flag_expensive_optimizations && src_related == 0
5259 && (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
5260 && GET_MODE_CLASS (mode) == MODE_INT
5261 && MEM_P (src) && ! do_not_record
5262 && LOAD_EXTEND_OP (mode) != UNKNOWN)
5264 struct rtx_def memory_extend_buf;
5265 rtx memory_extend_rtx = &memory_extend_buf;
5266 enum machine_mode tmode;
5268 /* Set what we are trying to extend and the operation it might
5269 have been extended with. */
5270 memset (memory_extend_rtx, 0, sizeof(*memory_extend_rtx));
5271 PUT_CODE (memory_extend_rtx, LOAD_EXTEND_OP (mode));
5272 XEXP (memory_extend_rtx, 0) = src;
5274 for (tmode = GET_MODE_WIDER_MODE (mode);
5275 GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
5276 tmode = GET_MODE_WIDER_MODE (tmode))
5278 struct table_elt *larger_elt;
5280 PUT_MODE (memory_extend_rtx, tmode);
5281 larger_elt = lookup (memory_extend_rtx,
5282 HASH (memory_extend_rtx, tmode), tmode);
5283 if (larger_elt == 0)
5284 continue;
5286 for (larger_elt = larger_elt->first_same_value;
5287 larger_elt; larger_elt = larger_elt->next_same_value)
5288 if (REG_P (larger_elt->exp))
5290 src_related = gen_lowpart (mode,
5291 larger_elt->exp);
5292 break;
5295 if (src_related)
5296 break;
5299 #endif /* LOAD_EXTEND_OP */
5301 if (src == src_folded)
5302 src_folded = 0;
5304 /* At this point, ELT, if nonzero, points to a class of expressions
5305 equivalent to the source of this SET and SRC, SRC_EQV, SRC_FOLDED,
5306 and SRC_RELATED, if nonzero, each contain additional equivalent
5307 expressions. Prune these latter expressions by deleting expressions
5308 already in the equivalence class.
5310 Check for an equivalent identical to the destination. If found,
5311 this is the preferred equivalent since it will likely lead to
5312 elimination of the insn. Indicate this by placing it in
5313 `src_related'. */
5315 if (elt)
5316 elt = elt->first_same_value;
5317 for (p = elt; p; p = p->next_same_value)
5319 enum rtx_code code = GET_CODE (p->exp);
5321 /* If the expression is not valid, ignore it. Then we do not
5322 have to check for validity below. In most cases, we can use
5323 `rtx_equal_p', since canonicalization has already been done. */
5324 if (code != REG && ! exp_equiv_p (p->exp, p->exp, 1, false))
5325 continue;
5327 /* Also skip paradoxical subregs, unless that's what we're
5328 looking for. */
5329 if (code == SUBREG
5330 && (GET_MODE_SIZE (GET_MODE (p->exp))
5331 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))
5332 && ! (src != 0
5333 && GET_CODE (src) == SUBREG
5334 && GET_MODE (src) == GET_MODE (p->exp)
5335 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
5336 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))))
5337 continue;
5339 if (src && GET_CODE (src) == code && rtx_equal_p (src, p->exp))
5340 src = 0;
5341 else if (src_folded && GET_CODE (src_folded) == code
5342 && rtx_equal_p (src_folded, p->exp))
5343 src_folded = 0;
5344 else if (src_eqv_here && GET_CODE (src_eqv_here) == code
5345 && rtx_equal_p (src_eqv_here, p->exp))
5346 src_eqv_here = 0;
5347 else if (src_related && GET_CODE (src_related) == code
5348 && rtx_equal_p (src_related, p->exp))
5349 src_related = 0;
5351 /* If this is the same as the destination of the insn, we want
5352 to prefer it. Copy it to src_related. The code below will
5353 then give it a negative cost. */
5354 if (GET_CODE (dest) == code && rtx_equal_p (p->exp, dest))
5355 src_related = dest;
5358 /* Find the cheapest valid equivalent, trying all the available
5359 possibilities. Prefer items not in the hash table to ones
5360 that are when they are equal cost. Note that we can never
5361 worsen an insn as the current contents will also succeed.
5362 If we find an equivalent identical to the destination, use it as best,
5363 since this insn will probably be eliminated in that case. */
5364 if (src)
5366 if (rtx_equal_p (src, dest))
5367 src_cost = src_regcost = -1;
5368 else
5370 src_cost = COST (src);
5371 src_regcost = approx_reg_cost (src);
5375 if (src_eqv_here)
5377 if (rtx_equal_p (src_eqv_here, dest))
5378 src_eqv_cost = src_eqv_regcost = -1;
5379 else
5381 src_eqv_cost = COST (src_eqv_here);
5382 src_eqv_regcost = approx_reg_cost (src_eqv_here);
5386 if (src_folded)
5388 if (rtx_equal_p (src_folded, dest))
5389 src_folded_cost = src_folded_regcost = -1;
5390 else
5392 src_folded_cost = COST (src_folded);
5393 src_folded_regcost = approx_reg_cost (src_folded);
5397 if (src_related)
5399 if (rtx_equal_p (src_related, dest))
5400 src_related_cost = src_related_regcost = -1;
5401 else
5403 src_related_cost = COST (src_related);
5404 src_related_regcost = approx_reg_cost (src_related);
5408 /* If this was an indirect jump insn, a known label will really be
5409 cheaper even though it looks more expensive. */
5410 if (dest == pc_rtx && src_const && GET_CODE (src_const) == LABEL_REF)
5411 src_folded = src_const, src_folded_cost = src_folded_regcost = -1;
5413 /* Terminate loop when replacement made. This must terminate since
5414 the current contents will be tested and will always be valid. */
5415 while (1)
5417 rtx trial;
5419 /* Skip invalid entries. */
5420 while (elt && !REG_P (elt->exp)
5421 && ! exp_equiv_p (elt->exp, elt->exp, 1, false))
5422 elt = elt->next_same_value;
5424 /* A paradoxical subreg would be bad here: it'll be the right
5425 size, but later may be adjusted so that the upper bits aren't
5426 what we want. So reject it. */
5427 if (elt != 0
5428 && GET_CODE (elt->exp) == SUBREG
5429 && (GET_MODE_SIZE (GET_MODE (elt->exp))
5430 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))
5431 /* It is okay, though, if the rtx we're trying to match
5432 will ignore any of the bits we can't predict. */
5433 && ! (src != 0
5434 && GET_CODE (src) == SUBREG
5435 && GET_MODE (src) == GET_MODE (elt->exp)
5436 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
5437 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))))
5439 elt = elt->next_same_value;
5440 continue;
5443 if (elt)
5445 src_elt_cost = elt->cost;
5446 src_elt_regcost = elt->regcost;
5449 /* Find cheapest and skip it for the next time. For items
5450 of equal cost, use this order:
5451 src_folded, src, src_eqv, src_related and hash table entry. */
5452 if (src_folded
5453 && preferable (src_folded_cost, src_folded_regcost,
5454 src_cost, src_regcost) <= 0
5455 && preferable (src_folded_cost, src_folded_regcost,
5456 src_eqv_cost, src_eqv_regcost) <= 0
5457 && preferable (src_folded_cost, src_folded_regcost,
5458 src_related_cost, src_related_regcost) <= 0
5459 && preferable (src_folded_cost, src_folded_regcost,
5460 src_elt_cost, src_elt_regcost) <= 0)
5462 trial = src_folded, src_folded_cost = MAX_COST;
5463 if (src_folded_force_flag)
5465 rtx forced = force_const_mem (mode, trial);
5466 if (forced)
5467 trial = forced;
5470 else if (src
5471 && preferable (src_cost, src_regcost,
5472 src_eqv_cost, src_eqv_regcost) <= 0
5473 && preferable (src_cost, src_regcost,
5474 src_related_cost, src_related_regcost) <= 0
5475 && preferable (src_cost, src_regcost,
5476 src_elt_cost, src_elt_regcost) <= 0)
5477 trial = src, src_cost = MAX_COST;
5478 else if (src_eqv_here
5479 && preferable (src_eqv_cost, src_eqv_regcost,
5480 src_related_cost, src_related_regcost) <= 0
5481 && preferable (src_eqv_cost, src_eqv_regcost,
5482 src_elt_cost, src_elt_regcost) <= 0)
5483 trial = copy_rtx (src_eqv_here), src_eqv_cost = MAX_COST;
5484 else if (src_related
5485 && preferable (src_related_cost, src_related_regcost,
5486 src_elt_cost, src_elt_regcost) <= 0)
5487 trial = copy_rtx (src_related), src_related_cost = MAX_COST;
5488 else
5490 trial = copy_rtx (elt->exp);
5491 elt = elt->next_same_value;
5492 src_elt_cost = MAX_COST;
5495 /* We don't normally have an insn matching (set (pc) (pc)), so
5496 check for this separately here. We will delete such an
5497 insn below.
5499 For other cases such as a table jump or conditional jump
5500 where we know the ultimate target, go ahead and replace the
5501 operand. While that may not make a valid insn, we will
5502 reemit the jump below (and also insert any necessary
5503 barriers). */
5504 if (n_sets == 1 && dest == pc_rtx
5505 && (trial == pc_rtx
5506 || (GET_CODE (trial) == LABEL_REF
5507 && ! condjump_p (insn))))
5509 /* Don't substitute non-local labels; this confuses the CFG. */
5510 if (GET_CODE (trial) == LABEL_REF
5511 && LABEL_REF_NONLOCAL_P (trial))
5512 continue;
5514 SET_SRC (sets[i].rtl) = trial;
5515 cse_jumps_altered = 1;
5516 break;
5519 /* Reject certain invalid forms of CONST that we create. */
5520 else if (CONSTANT_P (trial)
5521 && GET_CODE (trial) == CONST
5522 /* Reject cases that will cause decode_rtx_const to
5523 die. On the alpha when simplifying a switch, we
5524 get (const (truncate (minus (label_ref)
5525 (label_ref)))). */
5526 && (GET_CODE (XEXP (trial, 0)) == TRUNCATE
5527 /* Likewise on IA-64, except without the
5528 truncate. */
5529 || (GET_CODE (XEXP (trial, 0)) == MINUS
5530 && GET_CODE (XEXP (XEXP (trial, 0), 0)) == LABEL_REF
5531 && GET_CODE (XEXP (XEXP (trial, 0), 1)) == LABEL_REF)))
5532 /* Do nothing for this case. */
5535 /* Look for a substitution that makes a valid insn. */
5536 else if (validate_change (insn, &SET_SRC (sets[i].rtl), trial, 0))
5538 rtx new = canon_reg (SET_SRC (sets[i].rtl), insn);
5540 /* If we just made a substitution inside a libcall, then we
5541 need to make the same substitution in any notes attached
5542 to the RETVAL insn. */
5543 if (libcall_insn
5544 && (REG_P (sets[i].orig_src)
5545 || GET_CODE (sets[i].orig_src) == SUBREG
5546 || MEM_P (sets[i].orig_src)))
5548 rtx note = find_reg_equal_equiv_note (libcall_insn);
5549 if (note != 0)
5550 XEXP (note, 0) = simplify_replace_rtx (XEXP (note, 0),
5551 sets[i].orig_src,
5552 copy_rtx (new));
5555 /* The result of apply_change_group can be ignored; see
5556 canon_reg. */
5558 validate_change (insn, &SET_SRC (sets[i].rtl), new, 1);
5559 apply_change_group ();
5560 break;
5563 /* If we previously found constant pool entries for
5564 constants and this is a constant, try making a
5565 pool entry. Put it in src_folded unless we have already done
5566 this, since that is where it likely came from. */
5568 else if (constant_pool_entries_cost
5569 && CONSTANT_P (trial)
5570 && (src_folded == 0
5571 || (!MEM_P (src_folded)
5572 && ! src_folded_force_flag))
5573 && GET_MODE_CLASS (mode) != MODE_CC
5574 && mode != VOIDmode)
5576 src_folded_force_flag = 1;
5577 src_folded = trial;
5578 src_folded_cost = constant_pool_entries_cost;
5579 src_folded_regcost = constant_pool_entries_regcost;
5583 src = SET_SRC (sets[i].rtl);
5585 /* In general, it is good to have a SET with SET_SRC == SET_DEST.
5586 However, there is an important exception: If both are registers
5587 that are not the head of their equivalence class, replace SET_SRC
5588 with the head of the class. If we do not do this, we will have
5589 both registers live over a portion of the basic block. This way,
5590 their lifetimes will likely abut instead of overlapping. */
5591 if (REG_P (dest)
5592 && REGNO_QTY_VALID_P (REGNO (dest)))
5594 int dest_q = REG_QTY (REGNO (dest));
5595 struct qty_table_elem *dest_ent = &qty_table[dest_q];
5597 if (dest_ent->mode == GET_MODE (dest)
5598 && dest_ent->first_reg != REGNO (dest)
5599 && REG_P (src) && REGNO (src) == REGNO (dest)
5600 /* Don't do this if the original insn had a hard reg as
5601 SET_SRC or SET_DEST. */
5602 && (!REG_P (sets[i].src)
5603 || REGNO (sets[i].src) >= FIRST_PSEUDO_REGISTER)
5604 && (!REG_P (dest) || REGNO (dest) >= FIRST_PSEUDO_REGISTER))
5605 /* We can't call canon_reg here because it won't do anything if
5606 SRC is a hard register. */
5608 int src_q = REG_QTY (REGNO (src));
5609 struct qty_table_elem *src_ent = &qty_table[src_q];
5610 int first = src_ent->first_reg;
5611 rtx new_src
5612 = (first >= FIRST_PSEUDO_REGISTER
5613 ? regno_reg_rtx[first] : gen_rtx_REG (GET_MODE (src), first));
5615 /* We must use validate-change even for this, because this
5616 might be a special no-op instruction, suitable only to
5617 tag notes onto. */
5618 if (validate_change (insn, &SET_SRC (sets[i].rtl), new_src, 0))
5620 src = new_src;
5621 /* If we had a constant that is cheaper than what we are now
5622 setting SRC to, use that constant. We ignored it when we
5623 thought we could make this into a no-op. */
5624 if (src_const && COST (src_const) < COST (src)
5625 && validate_change (insn, &SET_SRC (sets[i].rtl),
5626 src_const, 0))
5627 src = src_const;
5632 /* If we made a change, recompute SRC values. */
5633 if (src != sets[i].src)
5635 cse_altered = 1;
5636 do_not_record = 0;
5637 hash_arg_in_memory = 0;
5638 sets[i].src = src;
5639 sets[i].src_hash = HASH (src, mode);
5640 sets[i].src_volatile = do_not_record;
5641 sets[i].src_in_memory = hash_arg_in_memory;
5642 sets[i].src_elt = lookup (src, sets[i].src_hash, mode);
5645 /* If this is a single SET, we are setting a register, and we have an
5646 equivalent constant, we want to add a REG_NOTE. We don't want
5647 to write a REG_EQUAL note for a constant pseudo since verifying that
5648 that pseudo hasn't been eliminated is a pain. Such a note also
5649 won't help anything.
5651 Avoid a REG_EQUAL note for (CONST (MINUS (LABEL_REF) (LABEL_REF)))
5652 which can be created for a reference to a compile time computable
5653 entry in a jump table. */
5655 if (n_sets == 1 && src_const && REG_P (dest)
5656 && !REG_P (src_const)
5657 && ! (GET_CODE (src_const) == CONST
5658 && GET_CODE (XEXP (src_const, 0)) == MINUS
5659 && GET_CODE (XEXP (XEXP (src_const, 0), 0)) == LABEL_REF
5660 && GET_CODE (XEXP (XEXP (src_const, 0), 1)) == LABEL_REF))
5662 /* We only want a REG_EQUAL note if src_const != src. */
5663 if (! rtx_equal_p (src, src_const))
5665 /* Make sure that the rtx is not shared. */
5666 src_const = copy_rtx (src_const);
5668 /* Record the actual constant value in a REG_EQUAL note,
5669 making a new one if one does not already exist. */
5670 set_unique_reg_note (insn, REG_EQUAL, src_const);
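/* For illustration, with hypothetical register numbers: after this,
   an insn such as

     (set (reg:SI 70) (reg:SI 71))

   whose source is known to hold 42 carries

     (expr_list:REG_EQUAL (const_int 42) ...)

   in its REG_NOTES, so later passes can use the constant even if the
   register copy itself disappears.  */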
5674 /* Now deal with the destination. */
5675 do_not_record = 0;
5677 /* Look within any ZERO_EXTRACT to the MEM or REG within it. */
5678 while (GET_CODE (dest) == SUBREG
5679 || GET_CODE (dest) == ZERO_EXTRACT
5680 || GET_CODE (dest) == STRICT_LOW_PART)
5681 dest = XEXP (dest, 0);
5683 sets[i].inner_dest = dest;
5685 if (MEM_P (dest))
5687 #ifdef PUSH_ROUNDING
5688 /* Stack pushes invalidate the stack pointer. */
5689 rtx addr = XEXP (dest, 0);
5690 if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC
5691 && XEXP (addr, 0) == stack_pointer_rtx)
5692 invalidate (stack_pointer_rtx, Pmode);
5693 #endif
5694 dest = fold_rtx (dest, insn);
5697 /* Compute the hash code of the destination now,
5698 before the effects of this instruction are recorded,
5699 since the register values used in the address computation
5700 are those before this instruction. */
5701 sets[i].dest_hash = HASH (dest, mode);
5703 /* Don't enter a bit-field in the hash table
5704 because the value in it after the store
5705 may not equal what was stored, due to truncation. */
5707 if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT)
5709 rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
5711 if (src_const != 0 && GET_CODE (src_const) == CONST_INT
5712 && GET_CODE (width) == CONST_INT
5713 && INTVAL (width) < HOST_BITS_PER_WIDE_INT
5714 && ! (INTVAL (src_const)
5715 & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
5716 /* Exception: if the value is constant,
5717 and it won't be truncated, record it. */
5719 else
5721 /* This is chosen so that the destination will be invalidated
5722 but no new value will be recorded.
5723 We must invalidate because sometimes constant
5724 values can be recorded for bitfields. */
5725 sets[i].src_elt = 0;
5726 sets[i].src_volatile = 1;
5727 src_eqv = 0;
5728 src_eqv_elt = 0;
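/* A worked example of the width test above, for an 8-bit field:
   (HOST_WIDE_INT) (-1) << 8 masks every bit above the field, so
   (const_int 42) is recorded (42 & the mask is zero), while
   (const_int 300) would be truncated (bit 8 is set) and the
   destination is merely invalidated.  */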
5732 /* If only one set in a JUMP_INSN and it is now a no-op, we can delete
5733 the insn. */
5734 else if (n_sets == 1 && dest == pc_rtx && src == pc_rtx)
5736 /* One less use of the label this insn used to jump to. */
5737 delete_insn (insn);
5738 cse_jumps_altered = 1;
5739 /* No more processing for this set. */
5740 sets[i].rtl = 0;
5743 /* If this SET is now setting PC to a label, we know it used to
5744 be a conditional or computed branch. */
5745 else if (dest == pc_rtx && GET_CODE (src) == LABEL_REF
5746 && !LABEL_REF_NONLOCAL_P (src))
5748 /* Now emit a BARRIER after the unconditional jump. */
5749 if (NEXT_INSN (insn) == 0
5750 || !BARRIER_P (NEXT_INSN (insn)))
5751 emit_barrier_after (insn);
5753 /* We reemit the jump in as many cases as possible just in
5754 case the form of an unconditional jump is significantly
5755 different from a computed jump or conditional jump.
5757 If this insn has multiple sets, then reemitting the
5758 jump is nontrivial. So instead we just force rerecognition
5759 and hope for the best. */
5760 if (n_sets == 1)
5762 rtx new, note;
5764 new = emit_jump_insn_after (gen_jump (XEXP (src, 0)), insn);
5765 JUMP_LABEL (new) = XEXP (src, 0);
5766 LABEL_NUSES (XEXP (src, 0))++;
5768 /* Make sure to copy over REG_NON_LOCAL_GOTO. */
5769 note = find_reg_note (insn, REG_NON_LOCAL_GOTO, 0);
5770 if (note)
5772 XEXP (note, 1) = NULL_RTX;
5773 REG_NOTES (new) = note;
5776 delete_insn (insn);
5777 insn = new;
5779 /* Now emit a BARRIER after the unconditional jump. */
5780 if (NEXT_INSN (insn) == 0
5781 || !BARRIER_P (NEXT_INSN (insn)))
5782 emit_barrier_after (insn);
5784 else
5785 INSN_CODE (insn) = -1;
5787 /* Do not bother deleting any unreachable code,
5788 let jump/flow do that. */
5790 cse_jumps_altered = 1;
5791 sets[i].rtl = 0;
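/* For illustration, with a hypothetical register and label: if CSE
   proves the condition of

     (set (pc) (if_then_else (eq (reg:SI 70) (const_int 0))
		  (label_ref L1) (pc)))

   always true, SRC folds to (label_ref L1) and the code above
   replaces the insn with the plain (set (pc) (label_ref L1)) built
   by gen_jump, then adds a barrier since the jump is now
   unconditional.  */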
5794 /* If destination is volatile, invalidate it and then do no further
5795 processing for this assignment. */
5797 else if (do_not_record)
5799 if (REG_P (dest) || GET_CODE (dest) == SUBREG)
5800 invalidate (dest, VOIDmode);
5801 else if (MEM_P (dest))
5802 invalidate (dest, VOIDmode);
5803 else if (GET_CODE (dest) == STRICT_LOW_PART
5804 || GET_CODE (dest) == ZERO_EXTRACT)
5805 invalidate (XEXP (dest, 0), GET_MODE (dest));
5806 sets[i].rtl = 0;
5809 if (sets[i].rtl != 0 && dest != SET_DEST (sets[i].rtl))
5810 sets[i].dest_hash = HASH (SET_DEST (sets[i].rtl), mode);
5812 #ifdef HAVE_cc0
5813 /* If setting CC0, record what it was set to, or a constant, if it
5814 is equivalent to a constant. If it is being set to a floating-point
5815 value, make a COMPARE with the appropriate constant of 0. If we
5816 don't do this, later code can interpret this as a test against
5817 const0_rtx, which can cause problems if we try to put it into an
5818 insn as a floating-point operand. */
5819 if (dest == cc0_rtx)
5821 this_insn_cc0 = src_const && mode != VOIDmode ? src_const : src;
5822 this_insn_cc0_mode = mode;
5823 if (FLOAT_MODE_P (mode))
5824 this_insn_cc0 = gen_rtx_COMPARE (VOIDmode, this_insn_cc0,
5825 CONST0_RTX (mode));
5827 #endif
5830 /* Now enter all non-volatile source expressions in the hash table
5831 if they are not already present.
5832 Record their equivalence classes in src_elt.
5833 This way we can insert the corresponding destinations into
5834 the same classes even if the actual sources are no longer in them
5835 (having been invalidated). */
5837 if (src_eqv && src_eqv_elt == 0 && sets[0].rtl != 0 && ! src_eqv_volatile
5838 && ! rtx_equal_p (src_eqv, SET_DEST (sets[0].rtl)))
5840 struct table_elt *elt;
5841 struct table_elt *classp = sets[0].src_elt;
5842 rtx dest = SET_DEST (sets[0].rtl);
5843 enum machine_mode eqvmode = GET_MODE (dest);
5845 if (GET_CODE (dest) == STRICT_LOW_PART)
5847 eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
5848 classp = 0;
5850 if (insert_regs (src_eqv, classp, 0))
5852 rehash_using_reg (src_eqv);
5853 src_eqv_hash = HASH (src_eqv, eqvmode);
5855 elt = insert (src_eqv, classp, src_eqv_hash, eqvmode);
5856 elt->in_memory = src_eqv_in_memory;
5857 src_eqv_elt = elt;
5859 /* Check to see if src_eqv_elt is the same as a set source which
5860 does not yet have an elt, and if so set the elt of the set source
5861 to src_eqv_elt. */
5862 for (i = 0; i < n_sets; i++)
5863 if (sets[i].rtl && sets[i].src_elt == 0
5864 && rtx_equal_p (SET_SRC (sets[i].rtl), src_eqv))
5865 sets[i].src_elt = src_eqv_elt;
5868 for (i = 0; i < n_sets; i++)
5869 if (sets[i].rtl && ! sets[i].src_volatile
5870 && ! rtx_equal_p (SET_SRC (sets[i].rtl), SET_DEST (sets[i].rtl)))
5872 if (GET_CODE (SET_DEST (sets[i].rtl)) == STRICT_LOW_PART)
5874 /* REG_EQUAL in setting a STRICT_LOW_PART
5875 gives an equivalent for the entire destination register,
5876 not just for the subreg being stored in now.
5877 This is a more interesting equivalence, so we arrange later
5878 to treat the entire reg as the destination. */
5879 sets[i].src_elt = src_eqv_elt;
5880 sets[i].src_hash = src_eqv_hash;
5882 else
5884 /* Insert source and constant equivalent into hash table, if not
5885 already present. */
5886 struct table_elt *classp = src_eqv_elt;
5887 rtx src = sets[i].src;
5888 rtx dest = SET_DEST (sets[i].rtl);
5889 enum machine_mode mode
5890 = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
5892 /* It's possible that we have a source value known to be
5893 constant but don't have a REG_EQUAL note on the insn.
5894 Lack of a note will mean src_eqv_elt will be NULL. This
5895 can happen where we've generated a SUBREG to access a
5896 CONST_INT that is already in a register in a wider mode.
5897 Ensure that the source expression is put in the proper
5898 constant class. */
5899 if (!classp)
5900 classp = sets[i].src_const_elt;
5902 if (sets[i].src_elt == 0)
5904 /* Don't put a hard register source into the table if this is
5905 the last insn of a libcall. In this case, we only need
5906 to put src_eqv_elt in src_elt. */
5907 if (! find_reg_note (insn, REG_RETVAL, NULL_RTX))
5909 struct table_elt *elt;
5911 /* Note that these insert_regs calls cannot remove
5912 any of the src_elt's, because they would have failed to
5913 match if not still valid. */
5914 if (insert_regs (src, classp, 0))
5916 rehash_using_reg (src);
5917 sets[i].src_hash = HASH (src, mode);
5919 elt = insert (src, classp, sets[i].src_hash, mode);
5920 elt->in_memory = sets[i].src_in_memory;
5921 sets[i].src_elt = classp = elt;
5923 else
5924 sets[i].src_elt = classp;
5926 if (sets[i].src_const && sets[i].src_const_elt == 0
5927 && src != sets[i].src_const
5928 && ! rtx_equal_p (sets[i].src_const, src))
5929 sets[i].src_elt = insert (sets[i].src_const, classp,
5930 sets[i].src_const_hash, mode);
5933 else if (sets[i].src_elt == 0)
5934 /* If we did not insert the source into the hash table (e.g., it was
5935 volatile), note the equivalence class for the REG_EQUAL value, if any,
5936 so that the destination goes into that class. */
5937 sets[i].src_elt = src_eqv_elt;
5939 invalidate_from_clobbers (x);
5941 /* Some registers are invalidated by subroutine calls. Memory is
5942 invalidated by non-constant calls. */
5944 if (CALL_P (insn))
5946 if (! CONST_OR_PURE_CALL_P (insn))
5947 invalidate_memory ();
5948 invalidate_for_call ();
5951 /* Now invalidate everything set by this instruction.
5952 If a SUBREG or other funny destination is being set,
5953 sets[i].rtl is still nonzero, so here we invalidate the reg
5954 a part of which is being set. */
5956 for (i = 0; i < n_sets; i++)
5957 if (sets[i].rtl)
5959 /* We can't use the inner dest, because the mode associated with
5960 a ZERO_EXTRACT is significant. */
5961 rtx dest = SET_DEST (sets[i].rtl);
5963 /* Needed for registers to remove the register from its
5964 previous quantity's chain.
5965 Needed for memory if this is a nonvarying address, unless
5966 we have just done an invalidate_memory that covers even those. */
5967 if (REG_P (dest) || GET_CODE (dest) == SUBREG)
5968 invalidate (dest, VOIDmode);
5969 else if (MEM_P (dest))
5970 invalidate (dest, VOIDmode);
5971 else if (GET_CODE (dest) == STRICT_LOW_PART
5972 || GET_CODE (dest) == ZERO_EXTRACT)
5973 invalidate (XEXP (dest, 0), GET_MODE (dest));
5976 /* A volatile ASM invalidates everything. */
5977 if (NONJUMP_INSN_P (insn)
5978 && GET_CODE (PATTERN (insn)) == ASM_OPERANDS
5979 && MEM_VOLATILE_P (PATTERN (insn)))
5980 flush_hash_table ();
5982 /* Make sure registers mentioned in destinations
5983 are safe for use in an expression to be inserted.
5984 This removes from the hash table
5985 any invalid entry that refers to one of these registers.
5987 We don't care about the return value from mention_regs because
5988 we are going to hash the SET_DEST values unconditionally. */
5990 for (i = 0; i < n_sets; i++)
5992 if (sets[i].rtl)
5994 rtx x = SET_DEST (sets[i].rtl);
5996 if (!REG_P (x))
5997 mention_regs (x);
5998 else
6000 /* We used to rely on all references to a register becoming
6001 inaccessible when a register changes to a new quantity,
6002 since that changes the hash code. However, that is not
6003 safe, since after HASH_SIZE new quantities we get a
6004 hash 'collision' of a register with its own invalid
6005 entries. And since SUBREGs have been changed not to
6006 change their hash code with the hash code of the register,
6007 it wouldn't work any longer at all. So we have to check
6008 for any invalid references lying around now.
6009 This code is similar to the REG case in mention_regs,
6010 but it knows that reg_tick has been incremented, and
6011 it leaves reg_in_table as -1. */
6012 unsigned int regno = REGNO (x);
6013 unsigned int endregno
6014 = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
6015 : hard_regno_nregs[regno][GET_MODE (x)]);
6016 unsigned int i;
6018 for (i = regno; i < endregno; i++)
6020 if (REG_IN_TABLE (i) >= 0)
6022 remove_invalid_refs (i);
6023 REG_IN_TABLE (i) = -1;
6030 /* We may have just removed some of the src_elt's from the hash table.
6031 So replace each one with the current head of the same class. */
6033 for (i = 0; i < n_sets; i++)
6034 if (sets[i].rtl)
6036 if (sets[i].src_elt && sets[i].src_elt->first_same_value == 0)
6037 /* If elt was removed, find current head of same class,
6038 or 0 if nothing remains of that class. */
6040 struct table_elt *elt = sets[i].src_elt;
6042 while (elt && elt->prev_same_value)
6043 elt = elt->prev_same_value;
6045 while (elt && elt->first_same_value == 0)
6046 elt = elt->next_same_value;
6047 sets[i].src_elt = elt ? elt->first_same_value : 0;
6051 /* Now insert the destinations into their equivalence classes. */
6053 for (i = 0; i < n_sets; i++)
6054 if (sets[i].rtl)
6056 rtx dest = SET_DEST (sets[i].rtl);
6057 struct table_elt *elt;
6059 /* Don't record value if we are not supposed to risk allocating
6060 floating-point values in registers that might be wider than
6061 memory. */
6062 if ((flag_float_store
6063 && MEM_P (dest)
6064 && FLOAT_MODE_P (GET_MODE (dest)))
6065 /* Don't record BLKmode values, because we don't know the
6066 size of it, and can't be sure that other BLKmode values
6067 have the same or smaller size. */
6068 || GET_MODE (dest) == BLKmode
6069 /* Don't record values of destinations set inside a libcall block
6070 since we might delete the libcall. Things should have been set
6071 up so we won't want to reuse such a value, but we play it safe
6072 here. */
6073 || libcall_insn
6074 /* If we didn't put a REG_EQUAL value or a source into the hash
6075 table, there is no point in recording DEST. */
6076 || sets[i].src_elt == 0
6077 /* If DEST is a paradoxical SUBREG and SRC is a ZERO_EXTEND
6078 or SIGN_EXTEND, don't record DEST since it can cause
6079 some tracking to be wrong.
6081 ??? Think about this more later. */
6082 || (GET_CODE (dest) == SUBREG
6083 && (GET_MODE_SIZE (GET_MODE (dest))
6084 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
6085 && (GET_CODE (sets[i].src) == SIGN_EXTEND
6086 || GET_CODE (sets[i].src) == ZERO_EXTEND)))
6087 continue;
6089 /* STRICT_LOW_PART isn't part of the value BEING set,
6090 and neither is the SUBREG inside it.
6091 Note that in this case SETS[I].SRC_ELT is really SRC_EQV_ELT. */
6092 if (GET_CODE (dest) == STRICT_LOW_PART)
6093 dest = SUBREG_REG (XEXP (dest, 0));
6095 if (REG_P (dest) || GET_CODE (dest) == SUBREG)
6096 /* Registers must also be inserted into chains for quantities. */
6097 if (insert_regs (dest, sets[i].src_elt, 1))
6099 /* If `insert_regs' changes something, the hash code must be
6100 recalculated. */
6101 rehash_using_reg (dest);
6102 sets[i].dest_hash = HASH (dest, GET_MODE (dest));
6105 elt = insert (dest, sets[i].src_elt,
6106 sets[i].dest_hash, GET_MODE (dest));
6108 elt->in_memory = (MEM_P (sets[i].inner_dest)
6109 && !MEM_READONLY_P (sets[i].inner_dest));
6111 /* If we have (set (subreg:m1 (reg:m2 foo) 0) (bar:m1)), M1 is no
6112 narrower than M2, and both M1 and M2 are the same number of words,
6113 we are also doing (set (reg:m2 foo) (subreg:m2 (bar:m1) 0)) so
6114 make that equivalence as well.
6116 However, BAR may have equivalences for which gen_lowpart
6117 will produce a simpler value than gen_lowpart applied to
6118 BAR (e.g., if BAR was ZERO_EXTENDed from M2), so we will scan all
6119 BAR's equivalences. If we don't get a simplified form, make
6120 the SUBREG. It will not be used in an equivalence, but will
6121 cause two similar assignments to be detected.
6123 Note the loop below will find SUBREG_REG (DEST) since we have
6124 already entered SRC and DEST of the SET in the table. */
6126 if (GET_CODE (dest) == SUBREG
6127 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) - 1)
6128 / UNITS_PER_WORD)
6129 == (GET_MODE_SIZE (GET_MODE (dest)) - 1) / UNITS_PER_WORD)
6130 && (GET_MODE_SIZE (GET_MODE (dest))
6131 >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
6132 && sets[i].src_elt != 0)
6134 enum machine_mode new_mode = GET_MODE (SUBREG_REG (dest));
6135 struct table_elt *elt, *classp = 0;
6137 for (elt = sets[i].src_elt->first_same_value; elt;
6138 elt = elt->next_same_value)
6140 rtx new_src = 0;
6141 unsigned src_hash;
6142 struct table_elt *src_elt;
6143 int byte = 0;
6145 /* Ignore invalid entries. */
6146 if (!REG_P (elt->exp)
6147 && ! exp_equiv_p (elt->exp, elt->exp, 1, false))
6148 continue;
6150 /* We may have already been playing subreg games. If the
6151 mode is already correct for the destination, use it. */
6152 if (GET_MODE (elt->exp) == new_mode)
6153 new_src = elt->exp;
6154 else
6156 /* Calculate big endian correction for the SUBREG_BYTE.
6157 We have already checked that M1 (GET_MODE (dest))
6158 is not narrower than M2 (new_mode). */
6159 if (BYTES_BIG_ENDIAN)
6160 byte = (GET_MODE_SIZE (GET_MODE (dest))
6161 - GET_MODE_SIZE (new_mode));
6163 new_src = simplify_gen_subreg (new_mode, elt->exp,
6164 GET_MODE (dest), byte);
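/* E.g., with GET_MODE (dest) == DImode and new_mode == SImode on a
   big-endian target, byte is 8 - 4 == 4, so we ask for
   (subreg:SI ... 4), the low part of the wider value.  */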
6167 /* The call to simplify_gen_subreg fails if the value
6168 is VOIDmode, yet we can't do any simplification, e.g.
6169 for EXPR_LISTs denoting function call results.
6170 It is invalid to construct a SUBREG with a VOIDmode
6171 SUBREG_REG, hence a zero new_src means we can't do
6172 this substitution. */
6173 if (! new_src)
6174 continue;
6176 src_hash = HASH (new_src, new_mode);
6177 src_elt = lookup (new_src, src_hash, new_mode);
6179 /* Put the new source in the hash table if it isn't
6180 there already. */
6181 if (src_elt == 0)
6183 if (insert_regs (new_src, classp, 0))
6185 rehash_using_reg (new_src);
6186 src_hash = HASH (new_src, new_mode);
6188 src_elt = insert (new_src, classp, src_hash, new_mode);
6189 src_elt->in_memory = elt->in_memory;
6191 else if (classp && classp != src_elt->first_same_value)
6192 /* Show that two things that we've seen before are
6193 actually the same. */
6194 merge_equiv_classes (src_elt, classp);
6196 classp = src_elt->first_same_value;
6197 /* Ignore invalid entries. */
6198 while (classp
6199 && !REG_P (classp->exp)
6200 && ! exp_equiv_p (classp->exp, classp->exp, 1, false))
6201 classp = classp->next_same_value;
6206 /* Special handling for (set REG0 REG1) where REG0 is the
6207 "cheapest", cheaper than REG1. After cse, REG1 will probably not
6208 be used in the sequel, so (if easily done) change this insn to
6209 (set REG1 REG0) and replace REG1 with REG0 in the previous insn
6210 that computed their value. Then REG1 will become a dead store
6211 and won't cloud the situation for later optimizations.
6213 Do not make this change if REG1 is a hard register, because it will
6214 then be used in the sequel and we may be changing a two-operand insn
6215 into a three-operand insn.
6217 Also do not do this if we are operating on a copy of INSN.
6219 Also don't do this if INSN ends a libcall; this would cause an unrelated
6220 register to be set in the middle of a libcall, and we then get bad code
6221 if the libcall is deleted. */
6223 if (n_sets == 1 && sets[0].rtl && REG_P (SET_DEST (sets[0].rtl))
6224 && NEXT_INSN (PREV_INSN (insn)) == insn
6225 && REG_P (SET_SRC (sets[0].rtl))
6226 && REGNO (SET_SRC (sets[0].rtl)) >= FIRST_PSEUDO_REGISTER
6227 && REGNO_QTY_VALID_P (REGNO (SET_SRC (sets[0].rtl))))
6229 int src_q = REG_QTY (REGNO (SET_SRC (sets[0].rtl)));
6230 struct qty_table_elem *src_ent = &qty_table[src_q];
6232 if ((src_ent->first_reg == REGNO (SET_DEST (sets[0].rtl)))
6233 && ! find_reg_note (insn, REG_RETVAL, NULL_RTX))
6235 rtx prev = insn;
6236 /* Scan for the previous nonnote insn, but stop at a basic
6237 block boundary. */
6240 prev = PREV_INSN (prev);
6242 while (prev && NOTE_P (prev)
6243 && NOTE_LINE_NUMBER (prev) != NOTE_INSN_BASIC_BLOCK);
6245 /* Do not swap the registers around if the previous instruction
6246 attaches a REG_EQUIV note to REG1.
6248 ??? It's not entirely clear whether we can transfer a REG_EQUIV
6249 from the pseudo that originally shadowed an incoming argument
6250 to another register. Some uses of REG_EQUIV might rely on it
6251 being attached to REG1 rather than REG2.
6253 This section previously turned the REG_EQUIV into a REG_EQUAL
6254 note. We cannot do that because REG_EQUIV may provide an
6255 uninitialized stack slot when REG_PARM_STACK_SPACE is used. */
6257 if (prev != 0 && NONJUMP_INSN_P (prev)
6258 && GET_CODE (PATTERN (prev)) == SET
6259 && SET_DEST (PATTERN (prev)) == SET_SRC (sets[0].rtl)
6260 && ! find_reg_note (prev, REG_EQUIV, NULL_RTX))
6262 rtx dest = SET_DEST (sets[0].rtl);
6263 rtx src = SET_SRC (sets[0].rtl);
6264 rtx note;
6266 validate_change (prev, &SET_DEST (PATTERN (prev)), dest, 1);
6267 validate_change (insn, &SET_DEST (sets[0].rtl), src, 1);
6268 validate_change (insn, &SET_SRC (sets[0].rtl), dest, 1);
6269 apply_change_group ();
6271 /* If INSN has a REG_EQUAL note, and this note mentions
6272 REG0, then we must delete it, because the value in
6273 REG0 has changed. If the note's value is REG1, we must
6274 also delete it because that is now this insn's dest. */
6275 note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
6276 if (note != 0
6277 && (reg_mentioned_p (dest, XEXP (note, 0))
6278 || rtx_equal_p (src, XEXP (note, 0))))
6279 remove_note (insn, note);
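/* For illustration, with hypothetical pseudos, where reg 70 is the
   cheapest member of the class:

     (set (reg:SI 71) (plus:SI ...))	; PREV
     (set (reg:SI 70) (reg:SI 71))	; INSN

   becomes

     (set (reg:SI 70) (plus:SI ...))
     (set (reg:SI 71) (reg:SI 70))

   so the copy into reg 71 turns into a dead store if reg 71 is not
   used later.  */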
6284 /* If this is a conditional jump insn, record any known equivalences due to
6285 the condition being tested. */
6287 if (JUMP_P (insn)
6288 && n_sets == 1 && GET_CODE (x) == SET
6289 && GET_CODE (SET_SRC (x)) == IF_THEN_ELSE)
6290 record_jump_equiv (insn, 0);
6292 #ifdef HAVE_cc0
6293 /* If the previous insn set CC0 and this insn no longer references CC0,
6294 delete the previous insn. Here we use the fact that nothing expects CC0
6295 to be valid over an insn, which is true until the final pass. */
6296 if (prev_insn && NONJUMP_INSN_P (prev_insn)
6297 && (tem = single_set (prev_insn)) != 0
6298 && SET_DEST (tem) == cc0_rtx
6299 && ! reg_mentioned_p (cc0_rtx, x))
6300 delete_insn (prev_insn);
6302 prev_insn_cc0 = this_insn_cc0;
6303 prev_insn_cc0_mode = this_insn_cc0_mode;
6304 prev_insn = insn;
6305 #endif
6308 /* Remove from the hash table all expressions that reference memory. */
6310 static void
6311 invalidate_memory (void)
6313 int i;
6314 struct table_elt *p, *next;
6316 for (i = 0; i < HASH_SIZE; i++)
6317 for (p = table[i]; p; p = next)
6319 next = p->next_same_hash;
6320 if (p->in_memory)
6321 remove_from_table (p, i);
6325 /* If ADDR is an address that implicitly affects the stack pointer, return
6326 1 and update the register tables to show the effect. Else, return 0. */
6328 static int
6329 addr_affects_sp_p (rtx addr)
6331 if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC
6332 && REG_P (XEXP (addr, 0))
6333 && REGNO (XEXP (addr, 0)) == STACK_POINTER_REGNUM)
6335 if (REG_TICK (STACK_POINTER_REGNUM) >= 0)
6337 REG_TICK (STACK_POINTER_REGNUM)++;
6338 /* Is it possible to use a subreg of SP? */
6339 SUBREG_TICKED (STACK_POINTER_REGNUM) = -1;
6342 /* This should be *very* rare. */
6343 if (TEST_HARD_REG_BIT (hard_regs_in_table, STACK_POINTER_REGNUM))
6344 invalidate (stack_pointer_rtx, VOIDmode);
6346 return 1;
6349 return 0;
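/* For illustration: a stack push such as

     (set (mem:SI (pre_dec:SI (reg:SI sp))) (reg:SI 70))

   has an address for which this function returns 1 and bumps
   REG_TICK for the stack pointer, so stale hash-table entries that
   mention it stop matching.  (Hypothetical source operand.)  */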
6352 /* Perform invalidation on the basis of everything about an insn
6353 except for invalidating the actual places that are SET in it.
6354 This includes the places CLOBBERed, and anything that might
6355 alias with something that is SET or CLOBBERed.
6357 X is the pattern of the insn. */
6359 static void
6360 invalidate_from_clobbers (rtx x)
6362 if (GET_CODE (x) == CLOBBER)
6364 rtx ref = XEXP (x, 0);
6365 if (ref)
6367 if (REG_P (ref) || GET_CODE (ref) == SUBREG
6368 || MEM_P (ref))
6369 invalidate (ref, VOIDmode);
6370 else if (GET_CODE (ref) == STRICT_LOW_PART
6371 || GET_CODE (ref) == ZERO_EXTRACT)
6372 invalidate (XEXP (ref, 0), GET_MODE (ref));
6375 else if (GET_CODE (x) == PARALLEL)
6377 int i;
6378 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
6380 rtx y = XVECEXP (x, 0, i);
6381 if (GET_CODE (y) == CLOBBER)
6383 rtx ref = XEXP (y, 0);
6384 if (REG_P (ref) || GET_CODE (ref) == SUBREG
6385 || MEM_P (ref))
6386 invalidate (ref, VOIDmode);
6387 else if (GET_CODE (ref) == STRICT_LOW_PART
6388 || GET_CODE (ref) == ZERO_EXTRACT)
6389 invalidate (XEXP (ref, 0), GET_MODE (ref));
6395 /* Process X, part of the REG_NOTES of an insn. Look at any REG_EQUAL notes
6396 and replace any registers in them with either an equivalent constant
6397 or the canonical form of the register. If we are inside an address,
6398 only do this if the address remains valid.
6400 OBJECT is 0 except when within a MEM in which case it is the MEM.
6402 Return the replacement for X. */
6404 static rtx
6405 cse_process_notes (rtx x, rtx object)
6407 enum rtx_code code = GET_CODE (x);
6408 const char *fmt = GET_RTX_FORMAT (code);
6409 int i;
6411 switch (code)
6413 case CONST_INT:
6414 case CONST:
6415 case SYMBOL_REF:
6416 case LABEL_REF:
6417 case CONST_DOUBLE:
6418 case CONST_VECTOR:
6419 case PC:
6420 case CC0:
6421 case LO_SUM:
6422 return x;
6424 case MEM:
6425 validate_change (x, &XEXP (x, 0),
6426 cse_process_notes (XEXP (x, 0), x), 0);
6427 return x;
6429 case EXPR_LIST:
6430 case INSN_LIST:
6431 if (REG_NOTE_KIND (x) == REG_EQUAL)
6432 XEXP (x, 0) = cse_process_notes (XEXP (x, 0), NULL_RTX);
6433 if (XEXP (x, 1))
6434 XEXP (x, 1) = cse_process_notes (XEXP (x, 1), NULL_RTX);
6435 return x;
6437 case SIGN_EXTEND:
6438 case ZERO_EXTEND:
6439 case SUBREG:
6441 rtx new = cse_process_notes (XEXP (x, 0), object);
6442 /* We don't substitute VOIDmode constants into these rtx,
6443 since they would impede folding. */
6444 if (GET_MODE (new) != VOIDmode)
6445 validate_change (object, &XEXP (x, 0), new, 0);
6446 return x;
6449 case REG:
6450 i = REG_QTY (REGNO (x));
6452 /* Return a constant or a constant register. */
6453 if (REGNO_QTY_VALID_P (REGNO (x)))
6455 struct qty_table_elem *ent = &qty_table[i];
6457 if (ent->const_rtx != NULL_RTX
6458 && (CONSTANT_P (ent->const_rtx)
6459 || REG_P (ent->const_rtx)))
6461 rtx new = gen_lowpart (GET_MODE (x), ent->const_rtx);
6462 if (new)
6463 return new;
6467 /* Otherwise, canonicalize this register. */
6468 return canon_reg (x, NULL_RTX);
6470 default:
6471 break;
6474 for (i = 0; i < GET_RTX_LENGTH (code); i++)
6475 if (fmt[i] == 'e')
6476 validate_change (object, &XEXP (x, i),
6477 cse_process_notes (XEXP (x, i), object), 0);
6479 return x;
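/* For illustration, with hypothetical registers: if (reg:SI 71) is
   currently equivalent to the canonical (reg:SI 65), a note

     (expr_list:REG_EQUAL (plus:SI (reg:SI 71) (const_int 4)) ...)

   comes back using (reg:SI 65), keeping notes canonical so that
   duplicate expressions hash identically.  */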
6482 /* Process one SET of an insn that was skipped. We ignore CLOBBERs
6483 since they are done elsewhere. This function is called via note_stores. */
6485 static void
6486 invalidate_skipped_set (rtx dest, rtx set, void *data ATTRIBUTE_UNUSED)
6488 enum rtx_code code = GET_CODE (dest);
6490 if (code == MEM
6491 && ! addr_affects_sp_p (dest) /* If this is not a stack push ... */
6492 /* There are times when an address can appear varying and be a PLUS
6493 during this scan when it would be a fixed address were we to know
6494 the proper equivalences. So invalidate all memory if there is
6495 a BLKmode or nonscalar memory reference or a reference to a
6496 variable address. */
6497 && (MEM_IN_STRUCT_P (dest) || GET_MODE (dest) == BLKmode
6498 || cse_rtx_varies_p (XEXP (dest, 0), 0)))
6500 invalidate_memory ();
6501 return;
6504 if (GET_CODE (set) == CLOBBER
6505 || CC0_P (dest)
6506 || dest == pc_rtx)
6507 return;
6509 if (code == STRICT_LOW_PART || code == ZERO_EXTRACT)
6510 invalidate (XEXP (dest, 0), GET_MODE (dest));
6511 else if (code == REG || code == SUBREG || code == MEM)
6512 invalidate (dest, VOIDmode);
6515 /* Invalidate all insns from START up to the end of the function or the
6516 next label. This is called when we wish to CSE around a block that is
6517 conditionally executed. */
6519 static void
6520 invalidate_skipped_block (rtx start)
6522 rtx insn;
6524 for (insn = start; insn && !LABEL_P (insn);
6525 insn = NEXT_INSN (insn))
6527 if (! INSN_P (insn))
6528 continue;
6530 if (CALL_P (insn))
6532 if (! CONST_OR_PURE_CALL_P (insn))
6533 invalidate_memory ();
6534 invalidate_for_call ();
6537 invalidate_from_clobbers (PATTERN (insn));
6538 note_stores (PATTERN (insn), invalidate_skipped_set, NULL);
6542 /* Find the end of INSN's basic block and return its range,
6543 the total number of SETs in all the insns of the block, the last insn of the
6544 block, and the branch path.
6546 The branch path indicates which branches should be followed. If a nonzero
6547 path size is specified, the block should be rescanned and a different set
6548 of branches will be taken. The branch path is only used if
6549 FLAG_CSE_FOLLOW_JUMPS or FLAG_CSE_SKIP_BLOCKS is nonzero.
6551 DATA is a pointer to a struct cse_basic_block_data, defined below, that is
6552 used to describe the block. It is filled in with the information about
6553 the current block. The incoming structure's branch path, if any, is used
6554 to construct the output branch path. */
6556 static void
6557 cse_end_of_basic_block (rtx insn, struct cse_basic_block_data *data,
6558 int follow_jumps, int skip_blocks)
6560 rtx p = insn, q;
6561 int nsets = 0;
6562 int low_cuid = INSN_CUID (insn), high_cuid = INSN_CUID (insn);
6563 rtx next = INSN_P (insn) ? insn : next_real_insn (insn);
6564 int path_size = data->path_size;
6565 int path_entry = 0;
6566 int i;
6568 /* Update the previous branch path, if any. If the last branch was
6569 previously PATH_TAKEN, mark it PATH_NOT_TAKEN.
6570 If it was previously PATH_NOT_TAKEN,
6571 shorten the path by one and look at the previous branch. We know that
6572 at least one branch must have been taken if PATH_SIZE is nonzero. */
6573 while (path_size > 0)
6575 if (data->path[path_size - 1].status != PATH_NOT_TAKEN)
6577 data->path[path_size - 1].status = PATH_NOT_TAKEN;
6578 break;
6580 else
6581 path_size--;
6584 /* If the first instruction is marked with QImode, that means we've
6585 already processed this block. Our caller will look at DATA->LAST
6586 to figure out where to go next. We want to return the next block
6587 in the instruction stream, not some branched-to block somewhere
6588 else. We accomplish this by pretending our caller forbade us to
6589 follow jumps or skip blocks. */
6590 if (GET_MODE (insn) == QImode)
6591 follow_jumps = skip_blocks = 0;
6593 /* Scan to end of this basic block. */
6594 while (p && !LABEL_P (p))
6596 /* Don't cse over a call to setjmp; on some machines (eg VAX)
6597 the regs restored by the longjmp come from
6598 a later time than the setjmp. */
6599 if (PREV_INSN (p) && CALL_P (PREV_INSN (p))
6600 && find_reg_note (PREV_INSN (p), REG_SETJMP, NULL))
6601 break;
6603 /* A PARALLEL can have lots of SETs in it,
6604 especially if it is really an ASM_OPERANDS. */
6605 if (INSN_P (p) && GET_CODE (PATTERN (p)) == PARALLEL)
6606 nsets += XVECLEN (PATTERN (p), 0);
6607 else if (!NOTE_P (p))
6608 nsets += 1;
6610 /* Ignore insns made by CSE; they cannot affect the boundaries of
6611 the basic block. */
6613 if (INSN_UID (p) <= max_uid && INSN_CUID (p) > high_cuid)
6614 high_cuid = INSN_CUID (p);
6615 if (INSN_UID (p) <= max_uid && INSN_CUID (p) < low_cuid)
6616 low_cuid = INSN_CUID (p);
6618 /* See if this insn is in our branch path. If it is and we are to
6619 take it, do so. */
6620 if (path_entry < path_size && data->path[path_entry].branch == p)
6622 if (data->path[path_entry].status != PATH_NOT_TAKEN)
6623 p = JUMP_LABEL (p);
6625 /* Point to next entry in path, if any. */
6626 path_entry++;
6629 /* If this is a conditional jump, we can follow it if -fcse-follow-jumps
6630 was specified, we haven't reached our maximum path length, there are
6631 insns following the target of the jump, this is the only use of the
6632 jump label, and the target label is preceded by a BARRIER.
6634 Alternatively, we can follow the jump if it branches around a
6635 block of code and there are no other branches into the block.
6636 In this case invalidate_skipped_block will be called to invalidate any
6637 registers set in the block when following the jump. */
6639 else if ((follow_jumps || skip_blocks) && path_size < PARAM_VALUE (PARAM_MAX_CSE_PATH_LENGTH) - 1
6640 && JUMP_P (p)
6641 && GET_CODE (PATTERN (p)) == SET
6642 && GET_CODE (SET_SRC (PATTERN (p))) == IF_THEN_ELSE
6643 && JUMP_LABEL (p) != 0
6644 && LABEL_NUSES (JUMP_LABEL (p)) == 1
6645 && NEXT_INSN (JUMP_LABEL (p)) != 0)
6647 for (q = PREV_INSN (JUMP_LABEL (p)); q; q = PREV_INSN (q))
6648 if ((!NOTE_P (q)
6649 || NOTE_LINE_NUMBER (q) == NOTE_INSN_LOOP_END
6650 || (PREV_INSN (q) && CALL_P (PREV_INSN (q))
6651 && find_reg_note (PREV_INSN (q), REG_SETJMP, NULL)))
6652 && (!LABEL_P (q) || LABEL_NUSES (q) != 0))
6653 break;
6655 /* If we ran into a BARRIER, this code is an extension of the
6656 basic block when the branch is taken. */
6657 if (follow_jumps && q != 0 && BARRIER_P (q))
6659 /* Don't allow ourselves to keep walking around an
6660 always-executed loop. */
6661 if (next_real_insn (q) == next)
6663 p = NEXT_INSN (p);
6664 continue;
6667 /* Similarly, don't put a branch in our path more than once. */
6668 for (i = 0; i < path_entry; i++)
6669 if (data->path[i].branch == p)
6670 break;
6672 if (i != path_entry)
6673 break;
6675 data->path[path_entry].branch = p;
6676 data->path[path_entry++].status = PATH_TAKEN;
6678 /* This branch now ends our path. It was possible that we
6679 didn't see this branch the last time around (when the
6680 insn in front of the target was a JUMP_INSN that was
6681 turned into a no-op). */
6682 path_size = path_entry;
6684 p = JUMP_LABEL (p);
6685 /* Mark block so we won't scan it again later. */
6686 PUT_MODE (NEXT_INSN (p), QImode);
6688 /* Detect a branch around a block of code. */
6689 else if (skip_blocks && q != 0 && !LABEL_P (q))
6691 rtx tmp;
6693 if (next_real_insn (q) == next)
6695 p = NEXT_INSN (p);
6696 continue;
6699 for (i = 0; i < path_entry; i++)
6700 if (data->path[i].branch == p)
6701 break;
6703 if (i != path_entry)
6704 break;
6706 /* This is no_labels_between_p (p, q) with an added check for
6707 reaching the end of a function (in case Q precedes P). */
6708 for (tmp = NEXT_INSN (p); tmp && tmp != q; tmp = NEXT_INSN (tmp))
6709 if (LABEL_P (tmp))
6710 break;
6712 if (tmp == q)
6714 data->path[path_entry].branch = p;
6715 data->path[path_entry++].status = PATH_AROUND;
6717 path_size = path_entry;
6719 p = JUMP_LABEL (p);
6720 /* Mark block so we won't scan it again later. */
6721 PUT_MODE (NEXT_INSN (p), QImode);
6725 p = NEXT_INSN (p);
6728 data->low_cuid = low_cuid;
6729 data->high_cuid = high_cuid;
6730 data->nsets = nsets;
6731 data->last = p;
6733 /* If all jumps in the path are not taken, set our path length to zero
6734 so a rescan won't be done. */
6735 for (i = path_size - 1; i >= 0; i--)
6736 if (data->path[i].status != PATH_NOT_TAKEN)
6737 break;
6739 if (i == -1)
6740 data->path_size = 0;
6741 else
6742 data->path_size = path_size;
6744 /* End the current branch path. */
6745 data->path[path_size].branch = 0;
6748 /* Perform cse on the instructions of a function.
6749 F is the first instruction.
6750 NREGS is one plus the highest pseudo-reg number used in the instruction.
6752 Returns 1 if jump_optimize should be redone due to simplifications
6753 in conditional jump instructions. */
6755 int
6756 cse_main (rtx f, int nregs, FILE *file)
6758 struct cse_basic_block_data val;
6759 rtx insn = f;
6760 int i;
6762 init_cse_reg_info (nregs);
6764 val.path = xmalloc (sizeof (struct branch_path)
6765 * PARAM_VALUE (PARAM_MAX_CSE_PATH_LENGTH));
6767 cse_jumps_altered = 0;
6768 recorded_label_ref = 0;
6769 constant_pool_entries_cost = 0;
6770 constant_pool_entries_regcost = 0;
6771 val.path_size = 0;
6772 rtl_hooks = cse_rtl_hooks;
6774 init_recog ();
6775 init_alias_analysis ();
6777 reg_eqv_table = xmalloc (nregs * sizeof (struct reg_eqv_elem));
6779 /* Find the largest uid. */
6781 max_uid = get_max_uid ();
6782 uid_cuid = xcalloc (max_uid + 1, sizeof (int));
6784 /* Compute the mapping from uids to cuids.
6785 CUIDs are numbers assigned to insns, like uids,
6786 except that cuids increase monotonically through the code.
6787 Don't assign cuids to line-number NOTEs, so that the distance in cuids
6788 between two insns is not affected by -g. */
6790 for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
6792 if (!NOTE_P (insn)
6793 || NOTE_LINE_NUMBER (insn) < 0)
6794 INSN_CUID (insn) = ++i;
6795 else
6796 /* Give a line number note the same cuid as preceding insn. */
6797 INSN_CUID (insn) = i;
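/* For example, with hypothetical uids: for the stream
   insn (uid 12), line-number note (uid 13), insn (uid 20), the cuids
   assigned are 1, 1 and 2; the note shares the cuid of the preceding
   insn, so cuid distances are identical with and without -g.  */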
6800 /* Loop over basic blocks.
6801 Compute the maximum number of qty's needed for each basic block
6802 (which is 2 for each SET). */
6803 insn = f;
6804 while (insn)
6806 cse_altered = 0;
6807 cse_end_of_basic_block (insn, &val, flag_cse_follow_jumps,
6808 flag_cse_skip_blocks);
6810 /* If this basic block was already processed or has no sets, skip it. */
6811 if (val.nsets == 0 || GET_MODE (insn) == QImode)
6813 PUT_MODE (insn, VOIDmode);
6814 insn = (val.last ? NEXT_INSN (val.last) : 0);
6815 val.path_size = 0;
6816 continue;
6819 cse_basic_block_start = val.low_cuid;
6820 cse_basic_block_end = val.high_cuid;
6821 max_qty = val.nsets * 2;
6823 if (file)
6824 fprintf (file, ";; Processing block from %d to %d, %d sets.\n",
6825 INSN_UID (insn), val.last ? INSN_UID (val.last) : 0,
6826 val.nsets);
6828 /* Make MAX_QTY bigger to give us room to optimize
6829 past the end of this basic block, if that should prove useful. */
6830 if (max_qty < 500)
6831 max_qty = 500;
6833 /* If this basic block is being extended by following certain jumps,
6834 (see `cse_end_of_basic_block'), we reprocess the code from the start.
6835 Otherwise, we start after this basic block. */
6836 if (val.path_size > 0)
6837 cse_basic_block (insn, val.last, val.path);
6838 else
6840 int old_cse_jumps_altered = cse_jumps_altered;
6841 rtx temp;
6843 /* When cse changes a conditional jump to an unconditional
6844 jump, we want to reprocess the block, since it will give
6845 us a new branch path to investigate. */
6846 cse_jumps_altered = 0;
6847 temp = cse_basic_block (insn, val.last, val.path);
6848 if (cse_jumps_altered == 0
6849 || (flag_cse_follow_jumps == 0 && flag_cse_skip_blocks == 0))
6850 insn = temp;
6852 cse_jumps_altered |= old_cse_jumps_altered;
6855 if (cse_altered)
6856 ggc_collect ();
6858 #ifdef USE_C_ALLOCA
6859 alloca (0);
6860 #endif
6863 /* Clean up. */
6864 end_alias_analysis ();
6865 free (uid_cuid);
6866 free (reg_eqv_table);
6867 free (val.path);
6868 rtl_hooks = general_rtl_hooks;
6870 return cse_jumps_altered || recorded_label_ref;
6873 /* Process a single basic block. FROM and TO are the limits of the basic
6874 block. NEXT_BRANCH points to the branch path when following jumps or
6875 a null path when not following jumps. */
6877 static rtx
6878 cse_basic_block (rtx from, rtx to, struct branch_path *next_branch)
6880 rtx insn;
6881 int to_usage = 0;
6882 rtx libcall_insn = NULL_RTX;
6883 int num_insns = 0;
6884 int no_conflict = 0;
6886 /* Allocate the space needed by qty_table. */
6887 qty_table = xmalloc (max_qty * sizeof (struct qty_table_elem));
6889 new_basic_block ();
6891 /* TO might be a label. If so, protect it from being deleted. */
6892 if (to != 0 && LABEL_P (to))
6893 ++LABEL_NUSES (to);
6895 for (insn = from; insn != to; insn = NEXT_INSN (insn))
6897 enum rtx_code code = GET_CODE (insn);
6899 /* If we have processed 1,000 insns, flush the hash table to
6900 avoid extreme quadratic behavior. We must not include NOTEs
6901 in the count since there may be more of them when generating
6902 debugging information. If we clear the table at different
6903 times, code generated with -g -O might be different from code
6904 generated with -O but not -g.
6906 ??? This is a real kludge and needs to be done some other way.
6907 Perhaps for 2.9. */
6908 if (code != NOTE && num_insns++ > PARAM_VALUE (PARAM_MAX_CSE_INSNS))
6910 flush_hash_table ();
6911 num_insns = 0;
6914 /* See if this is a branch that is part of the path. If so, and it is
6915 to be taken, do so. */
6916 if (next_branch->branch == insn)
6918 enum taken status = next_branch++->status;
6919 if (status != PATH_NOT_TAKEN)
6921 if (status == PATH_TAKEN)
6922 record_jump_equiv (insn, 1);
6923 else
6924 invalidate_skipped_block (NEXT_INSN (insn));
6926 /* Set the last insn as the jump insn; it doesn't affect cc0.
6927 Then follow this branch. */
6928 #ifdef HAVE_cc0
6929 prev_insn_cc0 = 0;
6930 prev_insn = insn;
6931 #endif
6932 insn = JUMP_LABEL (insn);
6933 continue;
6937 if (GET_MODE (insn) == QImode)
6938 PUT_MODE (insn, VOIDmode);
6940 if (GET_RTX_CLASS (code) == RTX_INSN)
6942 rtx p;
6944 /* Process notes first so we have all notes in canonical forms when
6945 looking for duplicate operations. */
6947 if (REG_NOTES (insn))
6948 REG_NOTES (insn) = cse_process_notes (REG_NOTES (insn), NULL_RTX);
6950 /* Track when we are inside a LIBCALL block. Inside such a block,
6951 we do not want to record destinations. The last insn of a
6952 LIBCALL block is not considered to be part of the block, since
6953 its destination is the result of the block and hence should be
6954 recorded. */
6956 if (REG_NOTES (insn) != 0)
6958 if ((p = find_reg_note (insn, REG_LIBCALL, NULL_RTX)))
6959 libcall_insn = XEXP (p, 0);
6960 else if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
6962 /* Keep libcall_insn for the last SET insn of a no-conflict
6963 block to prevent changing the destination. */
6964 if (! no_conflict)
6965 libcall_insn = 0;
6966 else
6967 no_conflict = -1;
6969 else if (find_reg_note (insn, REG_NO_CONFLICT, NULL_RTX))
6970 no_conflict = 1;
6973 cse_insn (insn, libcall_insn);
6975 if (no_conflict == -1)
6977 libcall_insn = 0;
6978 no_conflict = 0;
6981 /* If we haven't already found an insn where we added a LABEL_REF,
6982 check this one. */
6983 if (NONJUMP_INSN_P (insn) && ! recorded_label_ref
6984 && for_each_rtx (&PATTERN (insn), check_for_label_ref,
6985 (void *) insn))
6986 recorded_label_ref = 1;
6989 /* If INSN is now an unconditional jump, skip to the end of our
6990 basic block by pretending that we just did the last insn in the
6991 basic block. If we are jumping to the end of our block, show
6992 that we can have one usage of TO. */
6994 if (any_uncondjump_p (insn))
6996 if (to == 0)
6998 free (qty_table);
6999 return 0;
7002 if (JUMP_LABEL (insn) == to)
7003 to_usage = 1;
7005 /* Maybe TO was deleted because the jump is unconditional.
7006 If so, there is nothing left in this basic block. */
7007 /* ??? Perhaps it would be smarter to set TO
7008 to whatever follows this insn,
7009 and pretend the basic block had always ended here. */
7010 if (INSN_DELETED_P (to))
7011 break;
7013 insn = PREV_INSN (to);
7016 /* See if it is ok to keep on going past the label
7017 which used to end our basic block. Remember that we incremented
7018 the count of that label, so we decrement it here. If we made
7019 a jump unconditional, TO_USAGE will be one; in that case, we don't
7020 want to count the use in that jump. */
7022 if (to != 0 && NEXT_INSN (insn) == to
7023 && LABEL_P (to) && --LABEL_NUSES (to) == to_usage)
7025 struct cse_basic_block_data val;
7026 rtx prev;
7028 insn = NEXT_INSN (to);
7030 /* If TO was the last insn in the function, we are done. */
7031 if (insn == 0)
7033 free (qty_table);
7034 return 0;
7037 /* If TO was preceded by a BARRIER we are done with this block
7038 because it has no continuation. */
7039 prev = prev_nonnote_insn (to);
7040 if (prev && BARRIER_P (prev))
7042 free (qty_table);
7043 return insn;
7046 /* Find the end of the following block. Note that we won't be
7047 following branches in this case. */
7048 to_usage = 0;
7049 val.path_size = 0;
7050 val.path = xmalloc (sizeof (struct branch_path)
7051 * PARAM_VALUE (PARAM_MAX_CSE_PATH_LENGTH));
7052 cse_end_of_basic_block (insn, &val, 0, 0);
7053 free (val.path);
7055 /* If the tables we allocated have enough space left
7056 to handle all the SETs in the next basic block,
7057 continue through it. Otherwise, return,
7058 and that block will be scanned individually. */
7059 if (val.nsets * 2 + next_qty > max_qty)
7060 break;
7062 cse_basic_block_start = val.low_cuid;
7063 cse_basic_block_end = val.high_cuid;
7064 to = val.last;
7066 /* Prevent TO from being deleted if it is a label. */
7067 if (to != 0 && LABEL_P (to))
7068 ++LABEL_NUSES (to);
7070 /* Back up so we process the first insn in the extension. */
7071 insn = PREV_INSN (insn);
7075 gcc_assert (next_qty <= max_qty);
7077 free (qty_table);
7079 return to ? NEXT_INSN (to) : 0;
7082 /* Called via for_each_rtx to see if an insn is using a LABEL_REF for which
7083 there isn't a REG_LABEL note. Return one if so. DATA is the insn. */
7085 static int
7086 check_for_label_ref (rtx *rtl, void *data)
7088 rtx insn = (rtx) data;
7090 /* If this insn uses a LABEL_REF and there isn't a REG_LABEL note for it,
7091 we must rerun jump since it needs to place the note. If this is a
7092 LABEL_REF for a CODE_LABEL that isn't in the insn chain, don't do this
7093 since no REG_LABEL will be added. */
7094 return (GET_CODE (*rtl) == LABEL_REF
7095 && ! LABEL_REF_NONLOCAL_P (*rtl)
7096 && LABEL_P (XEXP (*rtl, 0))
7097 && INSN_UID (XEXP (*rtl, 0)) != 0
7098 && ! find_reg_note (insn, REG_LABEL, XEXP (*rtl, 0)));
7101 /* Count the number of times registers are used (not set) in X.
7102 COUNTS is an array in which we accumulate the count, INCR is how much
7103 we count each register usage.
7105 Don't count a usage of DEST, which is the SET_DEST of a SET which
7106 contains X in its SET_SRC. This is because such a SET does not
7107 modify the liveness of DEST.
7108 DEST is set to pc_rtx for a trapping insn, which means that we must count
7109 uses of a SET_DEST regardless because the insn can't be deleted here. */
7111 static void
7112 count_reg_usage (rtx x, int *counts, rtx dest, int incr)
7114 enum rtx_code code;
7115 rtx note;
7116 const char *fmt;
7117 int i, j;
7119 if (x == 0)
7120 return;
7122 switch (code = GET_CODE (x))
7124 case REG:
7125 if (x != dest)
7126 counts[REGNO (x)] += incr;
7127 return;
7129 case PC:
7130 case CC0:
7131 case CONST:
7132 case CONST_INT:
7133 case CONST_DOUBLE:
7134 case CONST_VECTOR:
7135 case SYMBOL_REF:
7136 case LABEL_REF:
7137 return;
7139 case CLOBBER:
7140 /* If we are clobbering a MEM, mark any registers inside the address
7141 as being used. */
7142 if (MEM_P (XEXP (x, 0)))
7143 count_reg_usage (XEXP (XEXP (x, 0), 0), counts, NULL_RTX, incr);
7144 return;
7146 case SET:
7147 /* Unless we are setting a REG, count everything in SET_DEST. */
7148 if (!REG_P (SET_DEST (x)))
7149 count_reg_usage (SET_DEST (x), counts, NULL_RTX, incr);
7150 count_reg_usage (SET_SRC (x), counts,
7151 dest ? dest : SET_DEST (x),
7152 incr);
7153 return;
7155 case CALL_INSN:
7156 case INSN:
7157 case JUMP_INSN:
7158 /* We expect dest to be NULL_RTX here. If the insn may trap, mark
7159 this fact by setting DEST to pc_rtx. */
7160 if (flag_non_call_exceptions && may_trap_p (PATTERN (x)))
7161 dest = pc_rtx;
7162 if (code == CALL_INSN)
7163 count_reg_usage (CALL_INSN_FUNCTION_USAGE (x), counts, dest, incr);
7164 count_reg_usage (PATTERN (x), counts, dest, incr);
7166 /* Things used in a REG_EQUAL note aren't dead since loop may try to
7167 use them. */
7169 note = find_reg_equal_equiv_note (x);
7170 if (note)
7172 rtx eqv = XEXP (note, 0);
7174 if (GET_CODE (eqv) == EXPR_LIST)
7175 /* This REG_EQUAL note describes the result of a function call.
7176 Process all the arguments. */
7179 count_reg_usage (XEXP (eqv, 0), counts, dest, incr);
7180 eqv = XEXP (eqv, 1);
7182 while (eqv && GET_CODE (eqv) == EXPR_LIST);
7183 else
7184 count_reg_usage (eqv, counts, dest, incr);
7186 return;
7188 case EXPR_LIST:
7189 if (REG_NOTE_KIND (x) == REG_EQUAL
7190 || (REG_NOTE_KIND (x) != REG_NONNEG && GET_CODE (XEXP (x,0)) == USE)
7191 /* FUNCTION_USAGE expression lists may include (CLOBBER (mem /u)),
7192 involving registers in the address. */
7193 || GET_CODE (XEXP (x, 0)) == CLOBBER)
7194 count_reg_usage (XEXP (x, 0), counts, NULL_RTX, incr);
7196 count_reg_usage (XEXP (x, 1), counts, NULL_RTX, incr);
7197 return;
7199 case ASM_OPERANDS:
7200 /* If the asm is volatile, then this insn cannot be deleted,
7201 and so the inputs *must* be live. */
7202 if (MEM_VOLATILE_P (x))
7203 dest = NULL_RTX;
7204 /* Iterate over just the inputs, not the constraints as well. */
7205 for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
7206 count_reg_usage (ASM_OPERANDS_INPUT (x, i), counts, dest, incr);
7207 return;
7209 case INSN_LIST:
7210 gcc_unreachable ();
7212 default:
7213 break;
7216 fmt = GET_RTX_FORMAT (code);
7217 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7219 if (fmt[i] == 'e')
7220 count_reg_usage (XEXP (x, i), counts, dest, incr);
7221 else if (fmt[i] == 'E')
7222 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
7223 count_reg_usage (XVECEXP (x, i, j), counts, dest, incr);
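/* A worked example of the DEST exclusion, with a hypothetical
   pseudo: in

     (set (reg:SI 70) (plus:SI (reg:SI 70) (const_int 1)))

   the source (reg:SI 70) is the same shared rtx as the SET_DEST, so
   the x != dest test skips it and the insn adds no uses of reg 70;
   an increment whose result is never read can thus be deleted by
   delete_trivially_dead_insns below.  */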
7227 /* Return true if set is live. */
7228 static bool
7229 set_live_p (rtx set, rtx insn ATTRIBUTE_UNUSED, /* Only used with HAVE_cc0. */
7230 int *counts)
7232 #ifdef HAVE_cc0
7233 rtx tem;
7234 #endif
7236 if (set_noop_p (set))
7239 #ifdef HAVE_cc0
7240 else if (GET_CODE (SET_DEST (set)) == CC0
7241 && !side_effects_p (SET_SRC (set))
7242 && ((tem = next_nonnote_insn (insn)) == 0
7243 || !INSN_P (tem)
7244 || !reg_referenced_p (cc0_rtx, PATTERN (tem))))
7245 return false;
7246 #endif
7247 else if (!REG_P (SET_DEST (set))
7248 || REGNO (SET_DEST (set)) < FIRST_PSEUDO_REGISTER
7249 || counts[REGNO (SET_DEST (set))] != 0
7250 || side_effects_p (SET_SRC (set)))
7251 return true;
7252 return false;
7255 /* Return true if insn is live. */
7257 static bool
7258 insn_live_p (rtx insn, int *counts)
7260 int i;
7261 if (flag_non_call_exceptions && may_trap_p (PATTERN (insn)))
7262 return true;
7263 else if (GET_CODE (PATTERN (insn)) == SET)
7264 return set_live_p (PATTERN (insn), insn, counts);
7265 else if (GET_CODE (PATTERN (insn)) == PARALLEL)
7267 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
7269 rtx elt = XVECEXP (PATTERN (insn), 0, i);
7271 if (GET_CODE (elt) == SET)
7273 if (set_live_p (elt, insn, counts))
7274 return true;
7276 else if (GET_CODE (elt) != CLOBBER && GET_CODE (elt) != USE)
7277 return true;
7279 return false;
7281 else
7282 return true;
7285 /* Return true if libcall is dead as a whole. */
7287 static bool
7288 dead_libcall_p (rtx insn, int *counts)
7290 rtx note, set, new;
7292 /* See if there's a REG_EQUAL note on this insn and try to
7293 replace the source with the REG_EQUAL expression.
7295 We assume that insns with REG_RETVALs can only be reg->reg
7296 copies at this point. */
7297 note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
7298 if (!note)
7299 return false;
7301 set = single_set (insn);
7302 if (!set)
7303 return false;
7305 new = simplify_rtx (XEXP (note, 0));
7306 if (!new)
7307 new = XEXP (note, 0);
7309 /* While changing insn, we must update the counts accordingly. */
7310 count_reg_usage (insn, counts, NULL_RTX, -1);
7312 if (validate_change (insn, &SET_SRC (set), new, 0))
7314 count_reg_usage (insn, counts, NULL_RTX, 1);
7315 remove_note (insn, find_reg_note (insn, REG_RETVAL, NULL_RTX));
7316 remove_note (insn, note);
7317 return true;
7320 if (CONSTANT_P (new))
7322 new = force_const_mem (GET_MODE (SET_DEST (set)), new);
7323 if (new && validate_change (insn, &SET_SRC (set), new, 0))
7325 count_reg_usage (insn, counts, NULL_RTX, 1);
7326 remove_note (insn, find_reg_note (insn, REG_RETVAL, NULL_RTX));
7327 remove_note (insn, note);
7328 return true;
7332 count_reg_usage (insn, counts, NULL_RTX, 1);
7333 return false;
7336 /* Scan all the insns and delete any that are dead; i.e., they store a register
7337 that is never used or they copy a register to itself.
7339 This is used to remove insns made obviously dead by cse, loop or other
7340 optimizations. It improves the heuristics in loop since it won't try to
7341 move dead invariants out of loops or make givs for dead quantities. The
7342 remaining passes of the compilation are also sped up. */
7344 int
7345 delete_trivially_dead_insns (rtx insns, int nreg)
7347 int *counts;
7348 rtx insn, prev;
7349 int in_libcall = 0, dead_libcall = 0;
7350 int ndead = 0;
7352 timevar_push (TV_DELETE_TRIVIALLY_DEAD);
7353 /* First count the number of times each register is used. */
7354 counts = xcalloc (nreg, sizeof (int));
7355 for (insn = insns; insn; insn = NEXT_INSN (insn))
7356 if (INSN_P (insn))
7357 count_reg_usage (insn, counts, NULL_RTX, 1);
7359 /* Go from the last insn to the first and delete insns that only set unused
7360 registers or copy a register to itself. As we delete an insn, remove
7361 usage counts for registers it uses.
7363 The first jump optimization pass may leave a real insn as the last
7364 insn in the function. We must not skip that insn or we may end
7365 up deleting code that is not really dead. */
7366 for (insn = get_last_insn (); insn; insn = prev)
7368 int live_insn = 0;
7370 prev = PREV_INSN (insn);
7371 if (!INSN_P (insn))
7372 continue;
7374 /* Don't delete any insns that are part of a libcall block unless
7375 we can delete the whole libcall block.
7377 Flow or loop might get confused if we did that. Remember
7378 that we are scanning backwards. */
7379 if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
7381 in_libcall = 1;
7382 live_insn = 1;
7383 dead_libcall = dead_libcall_p (insn, counts);
7385 else if (in_libcall)
7386 live_insn = ! dead_libcall;
7387 else
7388 live_insn = insn_live_p (insn, counts);
7390 /* If this is a dead insn, delete it and show registers in it aren't
7391 being used. */
7393 if (! live_insn)
7395 count_reg_usage (insn, counts, NULL_RTX, -1);
7396 delete_insn_and_edges (insn);
7397 ndead++;
7400 if (in_libcall && find_reg_note (insn, REG_LIBCALL, NULL_RTX))
7402 in_libcall = 0;
7403 dead_libcall = 0;
7407 if (dump_file && ndead)
7408 fprintf (dump_file, "Deleted %i trivially dead insns\n",
7409 ndead);
7410 /* Clean up. */
7411 free (counts);
7412 timevar_pop (TV_DELETE_TRIVIALLY_DEAD);
7413 return ndead;
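/* Typical use, as in the pass wrappers near the end of this file:

     delete_trivially_dead_insns (get_insns (), max_reg_num ());

   NREG sizes the usage-count array, so it must be larger than every
   register number appearing in INSNS.  */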
/* This function is called via for_each_rtx.  The argument, NEWREG, is
   a condition code register with the desired mode.  If we are looking
   at the same register in a different mode, replace it with
   NEWREG.  */

static int
cse_change_cc_mode (rtx *loc, void *data)
{
  struct change_cc_mode_args* args = (struct change_cc_mode_args*)data;

  if (*loc
      && REG_P (*loc)
      && REGNO (*loc) == REGNO (args->newreg)
      && GET_MODE (*loc) != GET_MODE (args->newreg))
    {
      validate_change (args->insn, loc, args->newreg, 1);

      return -1;
    }
  return 0;
}
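/* For example (modes and register number hypothetical): walking a
   pattern with NEWREG = (reg:CCZ 17) rewrites any (reg:CC 17) found
   into (reg:CCZ 17), while (reg:CC 18), or a (reg:CCZ 17) already in
   the right mode, is left alone.  Passing 1 as the final argument of
   validate_change only queues the change; apply_change_group in the
   caller commits or cancels the whole group.  */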
/* Change the mode of any reference to the register REGNO (NEWREG) to
   GET_MODE (NEWREG) in INSN.  */

static void
cse_change_cc_mode_insn (rtx insn, rtx newreg)
{
  struct change_cc_mode_args args;
  int success;

  if (!INSN_P (insn))
    return;

  args.insn = insn;
  args.newreg = newreg;

  for_each_rtx (&PATTERN (insn), cse_change_cc_mode, &args);
  for_each_rtx (&REG_NOTES (insn), cse_change_cc_mode, &args);

  /* If the following assertion triggers, there is most probably
     something wrong with the cc_modes_compatible back-end hook.
     CC modes can only be considered compatible if the insn, with its
     mode replaced by any of the compatible modes, can still be
     recognized.  */
  success = apply_change_group ();
  gcc_assert (success);
}
/* Change the mode of any reference to the register REGNO (NEWREG) to
   GET_MODE (NEWREG), starting at START.  Stop before END.  Stop at
   any instruction which modifies NEWREG.  */

static void
cse_change_cc_mode_insns (rtx start, rtx end, rtx newreg)
{
  rtx insn;

  for (insn = start; insn != end; insn = NEXT_INSN (insn))
    {
      if (! INSN_P (insn))
        continue;

      if (reg_set_p (newreg, insn))
        return;

      cse_change_cc_mode_insn (insn, newreg);
    }
}
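/* Note that the range is half-open (START is processed, END is not),
   and that the walk also stops at the first insn that sets NEWREG,
   since references beyond that point see a new value whose mode must
   not be changed.  */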
/* BB is a basic block which finishes with CC_REG as a condition code
   register which is set to CC_SRC.  Look through the successors of BB
   to find blocks which have a single predecessor (i.e., this one),
   and look through those blocks for an assignment to CC_REG which is
   equivalent to CC_SRC.  CAN_CHANGE_MODE indicates whether we are
   permitted to change the mode of CC_SRC to a compatible mode.  This
   returns VOIDmode if no equivalent assignments were found.
   Otherwise it returns the mode which CC_SRC should wind up with.

   The main complexity in this function is handling the mode issues.
   We may have more than one duplicate which we can eliminate, and we
   try to find a mode which will work for multiple duplicates.  */

static enum machine_mode
cse_cc_succs (basic_block bb, rtx cc_reg, rtx cc_src, bool can_change_mode)
{
  bool found_equiv;
  enum machine_mode mode;
  unsigned int insn_count;
  edge e;
  rtx insns[2];
  enum machine_mode modes[2];
  rtx last_insns[2];
  unsigned int i;
  rtx newreg;
  edge_iterator ei;

  /* We expect to have two successors.  Look at both before picking
     the final mode for the comparison.  If we have more successors
     (i.e., some sort of table jump, although that seems unlikely),
     then we require all beyond the first two to use the same
     mode.  */

  found_equiv = false;
  mode = GET_MODE (cc_src);
  insn_count = 0;
  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      rtx insn;
      rtx end;

      if (e->flags & EDGE_COMPLEX)
        continue;

      if (EDGE_COUNT (e->dest->preds) != 1
          || e->dest == EXIT_BLOCK_PTR)
        continue;

      end = NEXT_INSN (BB_END (e->dest));
      for (insn = BB_HEAD (e->dest); insn != end; insn = NEXT_INSN (insn))
        {
          rtx set;

          if (! INSN_P (insn))
            continue;

          /* If CC_SRC is modified, we have to stop looking for
             something which uses it.  */
          if (modified_in_p (cc_src, insn))
            break;

          /* Check whether INSN sets CC_REG to CC_SRC.  */
          set = single_set (insn);
          if (set
              && REG_P (SET_DEST (set))
              && REGNO (SET_DEST (set)) == REGNO (cc_reg))
            {
              bool found;
              enum machine_mode set_mode;
              enum machine_mode comp_mode;

              found = false;
              set_mode = GET_MODE (SET_SRC (set));
              comp_mode = set_mode;
              if (rtx_equal_p (cc_src, SET_SRC (set)))
                found = true;
              else if (GET_CODE (cc_src) == COMPARE
                       && GET_CODE (SET_SRC (set)) == COMPARE
                       && mode != set_mode
                       && rtx_equal_p (XEXP (cc_src, 0),
                                       XEXP (SET_SRC (set), 0))
                       && rtx_equal_p (XEXP (cc_src, 1),
                                       XEXP (SET_SRC (set), 1)))
                {
                  comp_mode = targetm.cc_modes_compatible (mode, set_mode);
                  if (comp_mode != VOIDmode
                      && (can_change_mode || comp_mode == mode))
                    found = true;
                }

              if (found)
                {
                  found_equiv = true;
                  if (insn_count < ARRAY_SIZE (insns))
                    {
                      insns[insn_count] = insn;
                      modes[insn_count] = set_mode;
                      last_insns[insn_count] = end;
                      ++insn_count;

                      if (mode != comp_mode)
                        {
                          gcc_assert (can_change_mode);
                          mode = comp_mode;

                          /* The modified insn will be re-recognized later.  */
                          PUT_MODE (cc_src, mode);
                        }
                    }
                  else
                    {
                      if (set_mode != mode)
                        {
                          /* We found a matching expression in the
                             wrong mode, but we don't have room to
                             store it in the array.  Punt.  This case
                             should be rare.  */
                          break;
                        }
                      /* INSN sets CC_REG to a value equal to CC_SRC
                         with the right mode.  We can simply delete
                         it.  */
                      delete_insn (insn);
                    }

                  /* We found an instruction to delete.  Keep looking,
                     in the hopes of finding a three-way jump.  */
                  continue;
                }

              /* We found an instruction which sets the condition
                 code, so don't look any farther.  */
              break;
            }

          /* If INSN sets CC_REG in some other way, don't look any
             farther.  */
          if (reg_set_p (cc_reg, insn))
            break;
        }

      /* If we fell off the bottom of the block, we can keep looking
         through successors.  We pass CAN_CHANGE_MODE as false because
         we aren't prepared to handle compatibility between the
         further blocks and this block.  */
      if (insn == end)
        {
          enum machine_mode submode;

          submode = cse_cc_succs (e->dest, cc_reg, cc_src, false);
          if (submode != VOIDmode)
            {
              gcc_assert (submode == mode);
              found_equiv = true;
              can_change_mode = false;
            }
        }
    }

  if (! found_equiv)
    return VOIDmode;

  /* Now INSN_COUNT is the number of instructions we found which set
     CC_REG to a value equivalent to CC_SRC.  The instructions are in
     INSNS.  The modes used by those instructions are in MODES.  */

  newreg = NULL_RTX;
  for (i = 0; i < insn_count; ++i)
    {
      if (modes[i] != mode)
        {
          /* We need to change the mode of CC_REG in INSNS[i] and
             subsequent instructions.  */
          if (! newreg)
            {
              if (GET_MODE (cc_reg) == mode)
                newreg = cc_reg;
              else
                newreg = gen_rtx_REG (mode, REGNO (cc_reg));
            }
          cse_change_cc_mode_insns (NEXT_INSN (insns[i]), last_insns[i],
                                    newreg);
        }

      delete_insn (insns[i]);
    }

  return mode;
}
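/* An illustrative case (all modes and register numbers hypothetical):
   suppose BB ends in a conditional jump on (reg:CCGC 17), set by

     (set (reg:CCGC 17) (compare (reg 100) (reg 101)))

   and each of its two single-predecessor successors recomputes

     (set (reg:CCZ 17) (compare (reg 100) (reg 101)))

   If the target's cc_modes_compatible hook folds CCGC and CCZ to a
   common mode, both duplicate compares are deleted, CC_SRC is given
   the common mode, and the loop above rewrites the uses that followed
   each deleted compare via cse_change_cc_mode_insns.  */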
/* If we have a fixed condition code register (or two), walk through
   the instructions and try to eliminate duplicate assignments.  */

void
cse_condition_code_reg (void)
{
  unsigned int cc_regno_1;
  unsigned int cc_regno_2;
  rtx cc_reg_1;
  rtx cc_reg_2;
  basic_block bb;

  if (! targetm.fixed_condition_code_regs (&cc_regno_1, &cc_regno_2))
    return;

  cc_reg_1 = gen_rtx_REG (CCmode, cc_regno_1);
  if (cc_regno_2 != INVALID_REGNUM)
    cc_reg_2 = gen_rtx_REG (CCmode, cc_regno_2);
  else
    cc_reg_2 = NULL_RTX;

  FOR_EACH_BB (bb)
    {
      rtx last_insn;
      rtx cc_reg;
      rtx insn;
      rtx cc_src_insn;
      rtx cc_src;
      enum machine_mode mode;
      enum machine_mode orig_mode;

      /* Look for blocks which end with a conditional jump based on a
         condition code register.  Then look for the instruction which
         sets the condition code register.  Then look through the
         successor blocks for instructions which set the condition
         code register to the same value.  There are other possible
         uses of the condition code register, but these are by far the
         most common and the ones which we are most likely to be able
         to optimize.  */

      last_insn = BB_END (bb);
      if (!JUMP_P (last_insn))
        continue;

      if (reg_referenced_p (cc_reg_1, PATTERN (last_insn)))
        cc_reg = cc_reg_1;
      else if (cc_reg_2 && reg_referenced_p (cc_reg_2, PATTERN (last_insn)))
        cc_reg = cc_reg_2;
      else
        continue;

      cc_src_insn = NULL_RTX;
      cc_src = NULL_RTX;
      for (insn = PREV_INSN (last_insn);
           insn && insn != PREV_INSN (BB_HEAD (bb));
           insn = PREV_INSN (insn))
        {
          rtx set;

          if (! INSN_P (insn))
            continue;
          set = single_set (insn);
          if (set
              && REG_P (SET_DEST (set))
              && REGNO (SET_DEST (set)) == REGNO (cc_reg))
            {
              cc_src_insn = insn;
              cc_src = SET_SRC (set);
              break;
            }
          else if (reg_set_p (cc_reg, insn))
            break;
        }

      if (! cc_src_insn)
        continue;

      if (modified_between_p (cc_src, cc_src_insn, NEXT_INSN (last_insn)))
        continue;

      /* Now CC_REG is a condition code register used for a
         conditional jump at the end of the block, and CC_SRC, in
         CC_SRC_INSN, is the value to which that condition code
         register is set, and CC_SRC is still meaningful at the end of
         the basic block.  */

      orig_mode = GET_MODE (cc_src);
      mode = cse_cc_succs (bb, cc_reg, cc_src, true);
      if (mode != VOIDmode)
        {
          gcc_assert (mode == GET_MODE (cc_src));
          if (mode != orig_mode)
            {
              rtx newreg = gen_rtx_REG (mode, REGNO (cc_reg));

              cse_change_cc_mode_insn (cc_src_insn, newreg);

              /* Do the same in the following insns that use the
                 current value of CC_REG within BB.  */
              cse_change_cc_mode_insns (NEXT_INSN (cc_src_insn),
                                        NEXT_INSN (last_insn),
                                        newreg);
            }
        }
    }
}
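/* A target opts in to this optimization by implementing the
   TARGET_FIXED_CONDITION_CODE_REGS hook; for instance, a port with a
   single flags hard register would store its number through the first
   pointer and INVALID_REGNUM through the second.  When the hook
   returns false (the default), the walk above is skipped entirely.  */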
/* Perform common subexpression elimination.  Nonzero value from
   `cse_main' means that jumps were simplified and some code may now
   be unreachable, so do jump optimization again.  */
static bool
gate_handle_cse (void)
{
  return optimize > 0;
}

static void
rest_of_handle_cse (void)
{
  int tem;

  if (dump_file)
    dump_flow_info (dump_file);

  reg_scan (get_insns (), max_reg_num ());

  tem = cse_main (get_insns (), max_reg_num (), dump_file);
  if (tem)
    rebuild_jump_labels (get_insns ());
  if (purge_all_dead_edges ())
    delete_unreachable_blocks ();

  delete_trivially_dead_insns (get_insns (), max_reg_num ());

  /* If we are not running more CSE passes, then we are no longer
     expecting CSE to be run.  But always rerun it in a cheap mode.  */
  cse_not_expected = !flag_rerun_cse_after_loop && !flag_gcse;

  if (tem)
    delete_dead_jumptables ();

  if (tem || optimize > 1)
    cleanup_cfg (CLEANUP_EXPENSIVE | CLEANUP_PRE_LOOP);
}
struct tree_opt_pass pass_cse =
{
  "cse1",                               /* name */
  gate_handle_cse,                      /* gate */
  rest_of_handle_cse,                   /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_CSE,                               /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func |
  TODO_ggc_collect,                     /* todo_flags_finish */
  's'                                   /* letter */
};
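/* The "cse1" name above labels this pass's dump file, and the 's'
   letter selects it for the RTL -d dump flags; the pass itself is
   linked into the compilation pass list in passes.c.  */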
static bool
gate_handle_cse2 (void)
{
  return optimize > 0 && flag_rerun_cse_after_loop;
}

/* Run second CSE pass after loop optimizations.  */
static void
rest_of_handle_cse2 (void)
{
  int tem;

  if (dump_file)
    dump_flow_info (dump_file);

  tem = cse_main (get_insns (), max_reg_num (), dump_file);

  /* Run a pass to eliminate duplicated assignments to condition code
     registers.  We have to run this after bypass_jumps, because
     eliminating these assignments first would make it harder for that
     pass to determine whether a jump can be bypassed safely.  */
  cse_condition_code_reg ();

  purge_all_dead_edges ();
  delete_trivially_dead_insns (get_insns (), max_reg_num ());

  if (tem)
    {
      timevar_push (TV_JUMP);
      rebuild_jump_labels (get_insns ());
      delete_dead_jumptables ();
      cleanup_cfg (CLEANUP_EXPENSIVE);
      timevar_pop (TV_JUMP);
    }
  reg_scan (get_insns (), max_reg_num ());
  cse_not_expected = 1;
}
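/* Setting cse_not_expected here tells later RTL generation (for
   instance, the address legitimization code in explow.c consults this
   flag) that no further CSE pass will clean up after it, so it should
   avoid creating redundancies of its own.  */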
struct tree_opt_pass pass_cse2 =
{
  "cse2",                               /* name */
  gate_handle_cse2,                     /* gate */
  rest_of_handle_cse2,                  /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_CSE2,                              /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func |
  TODO_ggc_collect,                     /* todo_flags_finish */
  't'                                   /* letter */
};