PR 43726 * config/lm32/lm32.h: Remove definition of GO_IF_MODE_DEPENDENT_ADDRESS.
[official-gcc.git] / gcc / cse.c
blobe45c9b25b1677a83aba6346fc3a696ac3310c793
1 /* Common subexpression elimination for GNU compiler.
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998
3 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "rtl.h"
27 #include "tm_p.h"
28 #include "hard-reg-set.h"
29 #include "regs.h"
30 #include "basic-block.h"
31 #include "flags.h"
32 #include "insn-config.h"
33 #include "recog.h"
34 #include "function.h"
35 #include "expr.h"
36 #include "toplev.h"
37 #include "output.h"
38 #include "ggc.h"
39 #include "timevar.h"
40 #include "except.h"
41 #include "target.h"
42 #include "params.h"
43 #include "rtlhooks-def.h"
44 #include "tree-pass.h"
45 #include "df.h"
46 #include "dbgcnt.h"
48 /* The basic idea of common subexpression elimination is to go
49 through the code, keeping a record of expressions that would
50 have the same value at the current scan point, and replacing
51 expressions encountered with the cheapest equivalent expression.
53 It is too complicated to keep track of the different possibilities
54 when control paths merge in this code; so, at each label, we forget all
55 that is known and start fresh. This can be described as processing each
56 extended basic block separately. We have a separate pass to perform
57 global CSE.
59 Note CSE can turn a conditional or computed jump into a nop or
60 an unconditional jump. When this occurs we arrange to run the jump
61 optimizer after CSE to delete the unreachable code.
63 We use two data structures to record the equivalent expressions:
64 a hash table for most expressions, and a vector of "quantity
65 numbers" to record equivalent (pseudo) registers.
67 The use of the special data structure for registers is desirable
68 because it is faster. It is possible because registers references
69 contain a fairly small number, the register number, taken from
70 a contiguously allocated series, and two register references are
71 identical if they have the same number. General expressions
72 do not have any such thing, so the only way to retrieve the
73 information recorded on an expression other than a register
74 is to keep it in a hash table.
76 Registers and "quantity numbers":
78 At the start of each basic block, all of the (hardware and pseudo)
79 registers used in the function are given distinct quantity
80 numbers to indicate their contents. During scan, when the code
81 copies one register into another, we copy the quantity number.
82 When a register is loaded in any other way, we allocate a new
83 quantity number to describe the value generated by this operation.
84 `REG_QTY (N)' records what quantity register N is currently thought
85 of as containing.
87 All real quantity numbers are greater than or equal to zero.
88 If register N has not been assigned a quantity, `REG_QTY (N)' will
89 equal -N - 1, which is always negative.
91 Quantity numbers below zero do not exist and none of the `qty_table'
92 entries should be referenced with a negative index.
94 We also maintain a bidirectional chain of registers for each
95 quantity number. The `qty_table` members `first_reg' and `last_reg',
96 and `reg_eqv_table' members `next' and `prev' hold these chains.
98 The first register in a chain is the one whose lifespan is least local.
99 Among equals, it is the one that was seen first.
100 We replace any equivalent register with that one.
102 If two registers have the same quantity number, it must be true that
103 REG expressions with qty_table `mode' must be in the hash table for both
104 registers and must be in the same class.
106 The converse is not true. Since hard registers may be referenced in
107 any mode, two REG expressions might be equivalent in the hash table
108 but not have the same quantity number if the quantity number of one
109 of the registers is not the same mode as those expressions.
111 Constants and quantity numbers
113 When a quantity has a known constant value, that value is stored
114 in the appropriate qty_table `const_rtx'. This is in addition to
115 putting the constant in the hash table as is usual for non-regs.
117 Whether a reg or a constant is preferred is determined by the configuration
118 macro CONST_COSTS and will often depend on the constant value. In any
119 event, expressions containing constants can be simplified, by fold_rtx.
121 When a quantity has a known nearly constant value (such as an address
122 of a stack slot), that value is stored in the appropriate qty_table
123 `const_rtx'.
125 Integer constants don't have a machine mode. However, cse
126 determines the intended machine mode from the destination
127 of the instruction that moves the constant. The machine mode
128 is recorded in the hash table along with the actual RTL
129 constant expression so that different modes are kept separate.
131 Other expressions:
133 To record known equivalences among expressions in general
134 we use a hash table called `table'. It has a fixed number of buckets
135 that contain chains of `struct table_elt' elements for expressions.
136 These chains connect the elements whose expressions have the same
137 hash codes.
139 Other chains through the same elements connect the elements which
140 currently have equivalent values.
142 Register references in an expression are canonicalized before hashing
143 the expression. This is done using `reg_qty' and qty_table `first_reg'.
144 The hash code of a register reference is computed using the quantity
145 number, not the register number.
147 When the value of an expression changes, it is necessary to remove from the
148 hash table not just that expression but all expressions whose values
149 could be different as a result.
151 1. If the value changing is in memory, except in special cases
152 ANYTHING referring to memory could be changed. That is because
153 nobody knows where a pointer does not point.
154 The function `invalidate_memory' removes what is necessary.
156 The special cases are when the address is constant or is
157 a constant plus a fixed register such as the frame pointer
158 or a static chain pointer. When such addresses are stored in,
159 we can tell exactly which other such addresses must be invalidated
160 due to overlap. `invalidate' does this.
161 All expressions that refer to non-constant
162 memory addresses are also invalidated. `invalidate_memory' does this.
164 2. If the value changing is a register, all expressions
165 containing references to that register, and only those,
166 must be removed.
168 Because searching the entire hash table for expressions that contain
169 a register is very slow, we try to figure out when it isn't necessary.
170 Precisely, this is necessary only when expressions have been
171 entered in the hash table using this register, and then the value has
172 changed, and then another expression wants to be added to refer to
173 the register's new value. This sequence of circumstances is rare
174 within any one basic block.
176 `REG_TICK' and `REG_IN_TABLE', accessors for members of
177 cse_reg_info, are used to detect this case. REG_TICK (i) is
178 incremented whenever a value is stored in register i.
179 REG_IN_TABLE (i) holds -1 if no references to register i have been
180 entered in the table; otherwise, it contains the value REG_TICK (i)
181 had when the references were entered. If we want to enter a
182 reference and REG_IN_TABLE (i) != REG_TICK (i), we must scan and
183 remove old references. Until we want to enter a new entry, the
184 mere fact that the two vectors don't match makes the entries be
185 ignored if anyone tries to match them.
187 Registers themselves are entered in the hash table as well as in
188 the equivalent-register chains. However, `REG_TICK' and
189 `REG_IN_TABLE' do not apply to expressions which are simple
190 register references. These expressions are removed from the table
191 immediately when they become invalid, and this can be done even if
192 we do not immediately search for all the expressions that refer to
193 the register.
195 A CLOBBER rtx in an instruction invalidates its operand for further
196 reuse. A CLOBBER or SET rtx whose operand is a MEM:BLK
197 invalidates everything that resides in memory.
199 Related expressions:
201 Constant expressions that differ only by an additive integer
202 are called related. When a constant expression is put in
203 the table, the related expression with no constant term
204 is also entered. These are made to point at each other
205 so that it is possible to find out if there exists any
206 register equivalent to an expression related to a given expression. */
208 /* Length of qty_table vector. We know in advance we will not need
209 a quantity number this big. */
211 static int max_qty;
213 /* Next quantity number to be allocated.
214 This is 1 + the largest number needed so far. */
216 static int next_qty;
218 /* Per-qty information tracking.
220 `first_reg' and `last_reg' track the head and tail of the
221 chain of registers which currently contain this quantity.
223 `mode' contains the machine mode of this quantity.
225 `const_rtx' holds the rtx of the constant value of this
226 quantity, if known. A summations of the frame/arg pointer
227 and a constant can also be entered here. When this holds
228 a known value, `const_insn' is the insn which stored the
229 constant value.
231 `comparison_{code,const,qty}' are used to track when a
232 comparison between a quantity and some constant or register has
233 been passed. In such a case, we know the results of the comparison
234 in case we see it again. These members record a comparison that
235 is known to be true. `comparison_code' holds the rtx code of such
236 a comparison, else it is set to UNKNOWN and the other two
237 comparison members are undefined. `comparison_const' holds
238 the constant being compared against, or zero if the comparison
239 is not against a constant. `comparison_qty' holds the quantity
240 being compared against when the result is known. If the comparison
241 is not with a register, `comparison_qty' is -1. */
243 struct qty_table_elem
245 rtx const_rtx;
246 rtx const_insn;
247 rtx comparison_const;
248 int comparison_qty;
249 unsigned int first_reg, last_reg;
250 /* The sizes of these fields should match the sizes of the
251 code and mode fields of struct rtx_def (see rtl.h). */
252 ENUM_BITFIELD(rtx_code) comparison_code : 16;
253 ENUM_BITFIELD(machine_mode) mode : 8;
256 /* The table of all qtys, indexed by qty number. */
257 static struct qty_table_elem *qty_table;
259 /* Structure used to pass arguments via for_each_rtx to function
260 cse_change_cc_mode. */
261 struct change_cc_mode_args
263 rtx insn;
264 rtx newreg;
267 #ifdef HAVE_cc0
268 /* For machines that have a CC0, we do not record its value in the hash
269 table since its use is guaranteed to be the insn immediately following
270 its definition and any other insn is presumed to invalidate it.
272 Instead, we store below the current and last value assigned to CC0.
273 If it should happen to be a constant, it is stored in preference
274 to the actual assigned value. In case it is a constant, we store
275 the mode in which the constant should be interpreted. */
277 static rtx this_insn_cc0, prev_insn_cc0;
278 static enum machine_mode this_insn_cc0_mode, prev_insn_cc0_mode;
279 #endif
281 /* Insn being scanned. */
283 static rtx this_insn;
284 static bool optimize_this_for_speed_p;
286 /* Index by register number, gives the number of the next (or
287 previous) register in the chain of registers sharing the same
288 value.
290 Or -1 if this register is at the end of the chain.
292 If REG_QTY (N) == -N - 1, reg_eqv_table[N].next is undefined. */
294 /* Per-register equivalence chain. */
295 struct reg_eqv_elem
297 int next, prev;
300 /* The table of all register equivalence chains. */
301 static struct reg_eqv_elem *reg_eqv_table;
303 struct cse_reg_info
305 /* The timestamp at which this register is initialized. */
306 unsigned int timestamp;
308 /* The quantity number of the register's current contents. */
309 int reg_qty;
311 /* The number of times the register has been altered in the current
312 basic block. */
313 int reg_tick;
315 /* The REG_TICK value at which rtx's containing this register are
316 valid in the hash table. If this does not equal the current
317 reg_tick value, such expressions existing in the hash table are
318 invalid. */
319 int reg_in_table;
321 /* The SUBREG that was set when REG_TICK was last incremented. Set
322 to -1 if the last store was to the whole register, not a subreg. */
323 unsigned int subreg_ticked;
326 /* A table of cse_reg_info indexed by register numbers. */
327 static struct cse_reg_info *cse_reg_info_table;
329 /* The size of the above table. */
330 static unsigned int cse_reg_info_table_size;
332 /* The index of the first entry that has not been initialized. */
333 static unsigned int cse_reg_info_table_first_uninitialized;
335 /* The timestamp at the beginning of the current run of
336 cse_extended_basic_block. We increment this variable at the beginning of
337 the current run of cse_extended_basic_block. The timestamp field of a
338 cse_reg_info entry matches the value of this variable if and only
339 if the entry has been initialized during the current run of
340 cse_extended_basic_block. */
341 static unsigned int cse_reg_info_timestamp;
343 /* A HARD_REG_SET containing all the hard registers for which there is
344 currently a REG expression in the hash table. Note the difference
345 from the above variables, which indicate if the REG is mentioned in some
346 expression in the table. */
348 static HARD_REG_SET hard_regs_in_table;
350 /* True if CSE has altered the CFG. */
351 static bool cse_cfg_altered;
353 /* True if CSE has altered conditional jump insns in such a way
354 that jump optimization should be redone. */
355 static bool cse_jumps_altered;
357 /* True if we put a LABEL_REF into the hash table for an INSN
358 without a REG_LABEL_OPERAND, we have to rerun jump after CSE
359 to put in the note. */
360 static bool recorded_label_ref;
362 /* canon_hash stores 1 in do_not_record
363 if it notices a reference to CC0, PC, or some other volatile
364 subexpression. */
366 static int do_not_record;
368 /* canon_hash stores 1 in hash_arg_in_memory
369 if it notices a reference to memory within the expression being hashed. */
371 static int hash_arg_in_memory;
373 /* The hash table contains buckets which are chains of `struct table_elt's,
374 each recording one expression's information.
375 That expression is in the `exp' field.
377 The canon_exp field contains a canonical (from the point of view of
378 alias analysis) version of the `exp' field.
380 Those elements with the same hash code are chained in both directions
381 through the `next_same_hash' and `prev_same_hash' fields.
383 Each set of expressions with equivalent values
384 are on a two-way chain through the `next_same_value'
385 and `prev_same_value' fields, and all point with
386 the `first_same_value' field at the first element in
387 that chain. The chain is in order of increasing cost.
388 Each element's cost value is in its `cost' field.
390 The `in_memory' field is nonzero for elements that
391 involve any reference to memory. These elements are removed
392 whenever a write is done to an unidentified location in memory.
393 To be safe, we assume that a memory address is unidentified unless
394 the address is either a symbol constant or a constant plus
395 the frame pointer or argument pointer.
397 The `related_value' field is used to connect related expressions
398 (that differ by adding an integer).
399 The related expressions are chained in a circular fashion.
400 `related_value' is zero for expressions for which this
401 chain is not useful.
403 The `cost' field stores the cost of this element's expression.
404 The `regcost' field stores the value returned by approx_reg_cost for
405 this element's expression.
407 The `is_const' flag is set if the element is a constant (including
408 a fixed address).
410 The `flag' field is used as a temporary during some search routines.
412 The `mode' field is usually the same as GET_MODE (`exp'), but
413 if `exp' is a CONST_INT and has no machine mode then the `mode'
414 field is the mode it was being used as. Each constant is
415 recorded separately for each mode it is used with. */
417 struct table_elt
419 rtx exp;
420 rtx canon_exp;
421 struct table_elt *next_same_hash;
422 struct table_elt *prev_same_hash;
423 struct table_elt *next_same_value;
424 struct table_elt *prev_same_value;
425 struct table_elt *first_same_value;
426 struct table_elt *related_value;
427 int cost;
428 int regcost;
429 /* The size of this field should match the size
430 of the mode field of struct rtx_def (see rtl.h). */
431 ENUM_BITFIELD(machine_mode) mode : 8;
432 char in_memory;
433 char is_const;
434 char flag;
437 /* We don't want a lot of buckets, because we rarely have very many
438 things stored in the hash table, and a lot of buckets slows
439 down a lot of loops that happen frequently. */
440 #define HASH_SHIFT 5
441 #define HASH_SIZE (1 << HASH_SHIFT)
442 #define HASH_MASK (HASH_SIZE - 1)
444 /* Compute hash code of X in mode M. Special-case case where X is a pseudo
445 register (hard registers may require `do_not_record' to be set). */
447 #define HASH(X, M) \
448 ((REG_P (X) && REGNO (X) >= FIRST_PSEUDO_REGISTER \
449 ? (((unsigned) REG << 7) + (unsigned) REG_QTY (REGNO (X))) \
450 : canon_hash (X, M)) & HASH_MASK)
452 /* Like HASH, but without side-effects. */
453 #define SAFE_HASH(X, M) \
454 ((REG_P (X) && REGNO (X) >= FIRST_PSEUDO_REGISTER \
455 ? (((unsigned) REG << 7) + (unsigned) REG_QTY (REGNO (X))) \
456 : safe_hash (X, M)) & HASH_MASK)
458 /* Determine whether register number N is considered a fixed register for the
459 purpose of approximating register costs.
460 It is desirable to replace other regs with fixed regs, to reduce need for
461 non-fixed hard regs.
462 A reg wins if it is either the frame pointer or designated as fixed. */
463 #define FIXED_REGNO_P(N) \
464 ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
465 || fixed_regs[N] || global_regs[N])
467 /* Compute cost of X, as stored in the `cost' field of a table_elt. Fixed
468 hard registers and pointers into the frame are the cheapest with a cost
469 of 0. Next come pseudos with a cost of one and other hard registers with
470 a cost of 2. Aside from these special cases, call `rtx_cost'. */
472 #define CHEAP_REGNO(N) \
473 (REGNO_PTR_FRAME_P(N) \
474 || (HARD_REGISTER_NUM_P (N) \
475 && FIXED_REGNO_P (N) && REGNO_REG_CLASS (N) != NO_REGS))
477 #define COST(X) (REG_P (X) ? 0 : notreg_cost (X, SET))
478 #define COST_IN(X,OUTER) (REG_P (X) ? 0 : notreg_cost (X, OUTER))
480 /* Get the number of times this register has been updated in this
481 basic block. */
483 #define REG_TICK(N) (get_cse_reg_info (N)->reg_tick)
485 /* Get the point at which REG was recorded in the table. */
487 #define REG_IN_TABLE(N) (get_cse_reg_info (N)->reg_in_table)
489 /* Get the SUBREG set at the last increment to REG_TICK (-1 if not a
490 SUBREG). */
492 #define SUBREG_TICKED(N) (get_cse_reg_info (N)->subreg_ticked)
494 /* Get the quantity number for REG. */
496 #define REG_QTY(N) (get_cse_reg_info (N)->reg_qty)
498 /* Determine if the quantity number for register X represents a valid index
499 into the qty_table. */
501 #define REGNO_QTY_VALID_P(N) (REG_QTY (N) >= 0)
503 /* Compare table_elt X and Y and return true iff X is cheaper than Y. */
505 #define CHEAPER(X, Y) \
506 (preferable ((X)->cost, (X)->regcost, (Y)->cost, (Y)->regcost) < 0)
508 static struct table_elt *table[HASH_SIZE];
510 /* Chain of `struct table_elt's made so far for this function
511 but currently removed from the table. */
513 static struct table_elt *free_element_chain;
515 /* Set to the cost of a constant pool reference if one was found for a
516 symbolic constant. If this was found, it means we should try to
517 convert constants into constant pool entries if they don't fit in
518 the insn. */
520 static int constant_pool_entries_cost;
521 static int constant_pool_entries_regcost;
523 /* Trace a patch through the CFG. */
525 struct branch_path
527 /* The basic block for this path entry. */
528 basic_block bb;
531 /* This data describes a block that will be processed by
532 cse_extended_basic_block. */
534 struct cse_basic_block_data
536 /* Total number of SETs in block. */
537 int nsets;
538 /* Size of current branch path, if any. */
539 int path_size;
540 /* Current path, indicating which basic_blocks will be processed. */
541 struct branch_path *path;
545 /* Pointers to the live in/live out bitmaps for the boundaries of the
546 current EBB. */
547 static bitmap cse_ebb_live_in, cse_ebb_live_out;
549 /* A simple bitmap to track which basic blocks have been visited
550 already as part of an already processed extended basic block. */
551 static sbitmap cse_visited_basic_blocks;
553 static bool fixed_base_plus_p (rtx x);
554 static int notreg_cost (rtx, enum rtx_code);
555 static int approx_reg_cost_1 (rtx *, void *);
556 static int approx_reg_cost (rtx);
557 static int preferable (int, int, int, int);
558 static void new_basic_block (void);
559 static void make_new_qty (unsigned int, enum machine_mode);
560 static void make_regs_eqv (unsigned int, unsigned int);
561 static void delete_reg_equiv (unsigned int);
562 static int mention_regs (rtx);
563 static int insert_regs (rtx, struct table_elt *, int);
564 static void remove_from_table (struct table_elt *, unsigned);
565 static void remove_pseudo_from_table (rtx, unsigned);
566 static struct table_elt *lookup (rtx, unsigned, enum machine_mode);
567 static struct table_elt *lookup_for_remove (rtx, unsigned, enum machine_mode);
568 static rtx lookup_as_function (rtx, enum rtx_code);
569 static struct table_elt *insert_with_costs (rtx, struct table_elt *, unsigned,
570 enum machine_mode, int, int);
571 static struct table_elt *insert (rtx, struct table_elt *, unsigned,
572 enum machine_mode);
573 static void merge_equiv_classes (struct table_elt *, struct table_elt *);
574 static void invalidate (rtx, enum machine_mode);
575 static bool cse_rtx_varies_p (const_rtx, bool);
576 static void remove_invalid_refs (unsigned int);
577 static void remove_invalid_subreg_refs (unsigned int, unsigned int,
578 enum machine_mode);
579 static void rehash_using_reg (rtx);
580 static void invalidate_memory (void);
581 static void invalidate_for_call (void);
582 static rtx use_related_value (rtx, struct table_elt *);
584 static inline unsigned canon_hash (rtx, enum machine_mode);
585 static inline unsigned safe_hash (rtx, enum machine_mode);
586 static inline unsigned hash_rtx_string (const char *);
588 static rtx canon_reg (rtx, rtx);
589 static enum rtx_code find_comparison_args (enum rtx_code, rtx *, rtx *,
590 enum machine_mode *,
591 enum machine_mode *);
592 static rtx fold_rtx (rtx, rtx);
593 static rtx equiv_constant (rtx);
594 static void record_jump_equiv (rtx, bool);
595 static void record_jump_cond (enum rtx_code, enum machine_mode, rtx, rtx,
596 int);
597 static void cse_insn (rtx);
598 static void cse_prescan_path (struct cse_basic_block_data *);
599 static void invalidate_from_clobbers (rtx);
600 static rtx cse_process_notes (rtx, rtx, bool *);
601 static void cse_extended_basic_block (struct cse_basic_block_data *);
602 static void count_reg_usage (rtx, int *, rtx, int);
603 static int check_for_label_ref (rtx *, void *);
604 extern void dump_class (struct table_elt*);
605 static void get_cse_reg_info_1 (unsigned int regno);
606 static struct cse_reg_info * get_cse_reg_info (unsigned int regno);
607 static int check_dependence (rtx *, void *);
609 static void flush_hash_table (void);
610 static bool insn_live_p (rtx, int *);
611 static bool set_live_p (rtx, rtx, int *);
612 static int cse_change_cc_mode (rtx *, void *);
613 static void cse_change_cc_mode_insn (rtx, rtx);
614 static void cse_change_cc_mode_insns (rtx, rtx, rtx);
615 static enum machine_mode cse_cc_succs (basic_block, basic_block, rtx, rtx,
616 bool);
619 #undef RTL_HOOKS_GEN_LOWPART
620 #define RTL_HOOKS_GEN_LOWPART gen_lowpart_if_possible
622 static const struct rtl_hooks cse_rtl_hooks = RTL_HOOKS_INITIALIZER;
624 /* Nonzero if X has the form (PLUS frame-pointer integer). We check for
625 virtual regs here because the simplify_*_operation routines are called
626 by integrate.c, which is called before virtual register instantiation. */
628 static bool
629 fixed_base_plus_p (rtx x)
631 switch (GET_CODE (x))
633 case REG:
634 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx)
635 return true;
636 if (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])
637 return true;
638 if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
639 && REGNO (x) <= LAST_VIRTUAL_REGISTER)
640 return true;
641 return false;
643 case PLUS:
644 if (!CONST_INT_P (XEXP (x, 1)))
645 return false;
646 return fixed_base_plus_p (XEXP (x, 0));
648 default:
649 return false;
653 /* Dump the expressions in the equivalence class indicated by CLASSP.
654 This function is used only for debugging. */
655 void
656 dump_class (struct table_elt *classp)
658 struct table_elt *elt;
660 fprintf (stderr, "Equivalence chain for ");
661 print_rtl (stderr, classp->exp);
662 fprintf (stderr, ": \n");
664 for (elt = classp->first_same_value; elt; elt = elt->next_same_value)
666 print_rtl (stderr, elt->exp);
667 fprintf (stderr, "\n");
671 /* Subroutine of approx_reg_cost; called through for_each_rtx. */
673 static int
674 approx_reg_cost_1 (rtx *xp, void *data)
676 rtx x = *xp;
677 int *cost_p = (int *) data;
679 if (x && REG_P (x))
681 unsigned int regno = REGNO (x);
683 if (! CHEAP_REGNO (regno))
685 if (regno < FIRST_PSEUDO_REGISTER)
687 if (targetm.small_register_classes_for_mode_p (GET_MODE (x)))
688 return 1;
689 *cost_p += 2;
691 else
692 *cost_p += 1;
696 return 0;
699 /* Return an estimate of the cost of the registers used in an rtx.
700 This is mostly the number of different REG expressions in the rtx;
701 however for some exceptions like fixed registers we use a cost of
702 0. If any other hard register reference occurs, return MAX_COST. */
704 static int
705 approx_reg_cost (rtx x)
707 int cost = 0;
709 if (for_each_rtx (&x, approx_reg_cost_1, (void *) &cost))
710 return MAX_COST;
712 return cost;
715 /* Return a negative value if an rtx A, whose costs are given by COST_A
716 and REGCOST_A, is more desirable than an rtx B.
717 Return a positive value if A is less desirable, or 0 if the two are
718 equally good. */
719 static int
720 preferable (int cost_a, int regcost_a, int cost_b, int regcost_b)
722 /* First, get rid of cases involving expressions that are entirely
723 unwanted. */
724 if (cost_a != cost_b)
726 if (cost_a == MAX_COST)
727 return 1;
728 if (cost_b == MAX_COST)
729 return -1;
732 /* Avoid extending lifetimes of hardregs. */
733 if (regcost_a != regcost_b)
735 if (regcost_a == MAX_COST)
736 return 1;
737 if (regcost_b == MAX_COST)
738 return -1;
741 /* Normal operation costs take precedence. */
742 if (cost_a != cost_b)
743 return cost_a - cost_b;
744 /* Only if these are identical consider effects on register pressure. */
745 if (regcost_a != regcost_b)
746 return regcost_a - regcost_b;
747 return 0;
750 /* Internal function, to compute cost when X is not a register; called
751 from COST macro to keep it simple. */
753 static int
754 notreg_cost (rtx x, enum rtx_code outer)
756 return ((GET_CODE (x) == SUBREG
757 && REG_P (SUBREG_REG (x))
758 && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
759 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (x))) == MODE_INT
760 && (GET_MODE_SIZE (GET_MODE (x))
761 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
762 && subreg_lowpart_p (x)
763 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (GET_MODE (x)),
764 GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))))
766 : rtx_cost (x, outer, optimize_this_for_speed_p) * 2);
770 /* Initialize CSE_REG_INFO_TABLE. */
772 static void
773 init_cse_reg_info (unsigned int nregs)
775 /* Do we need to grow the table? */
776 if (nregs > cse_reg_info_table_size)
778 unsigned int new_size;
780 if (cse_reg_info_table_size < 2048)
782 /* Compute a new size that is a power of 2 and no smaller
783 than the large of NREGS and 64. */
784 new_size = (cse_reg_info_table_size
785 ? cse_reg_info_table_size : 64);
787 while (new_size < nregs)
788 new_size *= 2;
790 else
792 /* If we need a big table, allocate just enough to hold
793 NREGS registers. */
794 new_size = nregs;
797 /* Reallocate the table with NEW_SIZE entries. */
798 if (cse_reg_info_table)
799 free (cse_reg_info_table);
800 cse_reg_info_table = XNEWVEC (struct cse_reg_info, new_size);
801 cse_reg_info_table_size = new_size;
802 cse_reg_info_table_first_uninitialized = 0;
805 /* Do we have all of the first NREGS entries initialized? */
806 if (cse_reg_info_table_first_uninitialized < nregs)
808 unsigned int old_timestamp = cse_reg_info_timestamp - 1;
809 unsigned int i;
811 /* Put the old timestamp on newly allocated entries so that they
812 will all be considered out of date. We do not touch those
813 entries beyond the first NREGS entries to be nice to the
814 virtual memory. */
815 for (i = cse_reg_info_table_first_uninitialized; i < nregs; i++)
816 cse_reg_info_table[i].timestamp = old_timestamp;
818 cse_reg_info_table_first_uninitialized = nregs;
822 /* Given REGNO, initialize the cse_reg_info entry for REGNO. */
824 static void
825 get_cse_reg_info_1 (unsigned int regno)
827 /* Set TIMESTAMP field to CSE_REG_INFO_TIMESTAMP so that this
828 entry will be considered to have been initialized. */
829 cse_reg_info_table[regno].timestamp = cse_reg_info_timestamp;
831 /* Initialize the rest of the entry. */
832 cse_reg_info_table[regno].reg_tick = 1;
833 cse_reg_info_table[regno].reg_in_table = -1;
834 cse_reg_info_table[regno].subreg_ticked = -1;
835 cse_reg_info_table[regno].reg_qty = -regno - 1;
838 /* Find a cse_reg_info entry for REGNO. */
840 static inline struct cse_reg_info *
841 get_cse_reg_info (unsigned int regno)
843 struct cse_reg_info *p = &cse_reg_info_table[regno];
845 /* If this entry has not been initialized, go ahead and initialize
846 it. */
847 if (p->timestamp != cse_reg_info_timestamp)
848 get_cse_reg_info_1 (regno);
850 return p;
853 /* Clear the hash table and initialize each register with its own quantity,
854 for a new basic block. */
856 static void
857 new_basic_block (void)
859 int i;
861 next_qty = 0;
863 /* Invalidate cse_reg_info_table. */
864 cse_reg_info_timestamp++;
866 /* Clear out hash table state for this pass. */
867 CLEAR_HARD_REG_SET (hard_regs_in_table);
869 /* The per-quantity values used to be initialized here, but it is
870 much faster to initialize each as it is made in `make_new_qty'. */
872 for (i = 0; i < HASH_SIZE; i++)
874 struct table_elt *first;
876 first = table[i];
877 if (first != NULL)
879 struct table_elt *last = first;
881 table[i] = NULL;
883 while (last->next_same_hash != NULL)
884 last = last->next_same_hash;
886 /* Now relink this hash entire chain into
887 the free element list. */
889 last->next_same_hash = free_element_chain;
890 free_element_chain = first;
894 #ifdef HAVE_cc0
895 prev_insn_cc0 = 0;
896 #endif
899 /* Say that register REG contains a quantity in mode MODE not in any
900 register before and initialize that quantity. */
902 static void
903 make_new_qty (unsigned int reg, enum machine_mode mode)
905 int q;
906 struct qty_table_elem *ent;
907 struct reg_eqv_elem *eqv;
909 gcc_assert (next_qty < max_qty);
911 q = REG_QTY (reg) = next_qty++;
912 ent = &qty_table[q];
913 ent->first_reg = reg;
914 ent->last_reg = reg;
915 ent->mode = mode;
916 ent->const_rtx = ent->const_insn = NULL_RTX;
917 ent->comparison_code = UNKNOWN;
919 eqv = &reg_eqv_table[reg];
920 eqv->next = eqv->prev = -1;
923 /* Make reg NEW equivalent to reg OLD.
924 OLD is not changing; NEW is. */
926 static void
927 make_regs_eqv (unsigned int new_reg, unsigned int old_reg)
929 unsigned int lastr, firstr;
930 int q = REG_QTY (old_reg);
931 struct qty_table_elem *ent;
933 ent = &qty_table[q];
935 /* Nothing should become eqv until it has a "non-invalid" qty number. */
936 gcc_assert (REGNO_QTY_VALID_P (old_reg));
938 REG_QTY (new_reg) = q;
939 firstr = ent->first_reg;
940 lastr = ent->last_reg;
942 /* Prefer fixed hard registers to anything. Prefer pseudo regs to other
943 hard regs. Among pseudos, if NEW will live longer than any other reg
944 of the same qty, and that is beyond the current basic block,
945 make it the new canonical replacement for this qty. */
946 if (! (firstr < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (firstr))
947 /* Certain fixed registers might be of the class NO_REGS. This means
948 that not only can they not be allocated by the compiler, but
949 they cannot be used in substitutions or canonicalizations
950 either. */
951 && (new_reg >= FIRST_PSEUDO_REGISTER || REGNO_REG_CLASS (new_reg) != NO_REGS)
952 && ((new_reg < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (new_reg))
953 || (new_reg >= FIRST_PSEUDO_REGISTER
954 && (firstr < FIRST_PSEUDO_REGISTER
955 || (bitmap_bit_p (cse_ebb_live_out, new_reg)
956 && !bitmap_bit_p (cse_ebb_live_out, firstr))
957 || (bitmap_bit_p (cse_ebb_live_in, new_reg)
958 && !bitmap_bit_p (cse_ebb_live_in, firstr))))))
960 reg_eqv_table[firstr].prev = new_reg;
961 reg_eqv_table[new_reg].next = firstr;
962 reg_eqv_table[new_reg].prev = -1;
963 ent->first_reg = new_reg;
965 else
967 /* If NEW is a hard reg (known to be non-fixed), insert at end.
968 Otherwise, insert before any non-fixed hard regs that are at the
969 end. Registers of class NO_REGS cannot be used as an
970 equivalent for anything. */
971 while (lastr < FIRST_PSEUDO_REGISTER && reg_eqv_table[lastr].prev >= 0
972 && (REGNO_REG_CLASS (lastr) == NO_REGS || ! FIXED_REGNO_P (lastr))
973 && new_reg >= FIRST_PSEUDO_REGISTER)
974 lastr = reg_eqv_table[lastr].prev;
975 reg_eqv_table[new_reg].next = reg_eqv_table[lastr].next;
976 if (reg_eqv_table[lastr].next >= 0)
977 reg_eqv_table[reg_eqv_table[lastr].next].prev = new_reg;
978 else
979 qty_table[q].last_reg = new_reg;
980 reg_eqv_table[lastr].next = new_reg;
981 reg_eqv_table[new_reg].prev = lastr;
985 /* Remove REG from its equivalence class. */
987 static void
988 delete_reg_equiv (unsigned int reg)
990 struct qty_table_elem *ent;
991 int q = REG_QTY (reg);
992 int p, n;
994 /* If invalid, do nothing. */
995 if (! REGNO_QTY_VALID_P (reg))
996 return;
998 ent = &qty_table[q];
1000 p = reg_eqv_table[reg].prev;
1001 n = reg_eqv_table[reg].next;
1003 if (n != -1)
1004 reg_eqv_table[n].prev = p;
1005 else
1006 ent->last_reg = p;
1007 if (p != -1)
1008 reg_eqv_table[p].next = n;
1009 else
1010 ent->first_reg = n;
1012 REG_QTY (reg) = -reg - 1;
1015 /* Remove any invalid expressions from the hash table
1016 that refer to any of the registers contained in expression X.
1018 Make sure that newly inserted references to those registers
1019 as subexpressions will be considered valid.
1021 mention_regs is not called when a register itself
1022 is being stored in the table.
1024 Return 1 if we have done something that may have changed the hash code
1025 of X. */
1027 static int
1028 mention_regs (rtx x)
1030 enum rtx_code code;
1031 int i, j;
1032 const char *fmt;
1033 int changed = 0;
1035 if (x == 0)
1036 return 0;
1038 code = GET_CODE (x);
1039 if (code == REG)
1041 unsigned int regno = REGNO (x);
1042 unsigned int endregno = END_REGNO (x);
1043 unsigned int i;
1045 for (i = regno; i < endregno; i++)
1047 if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
1048 remove_invalid_refs (i);
1050 REG_IN_TABLE (i) = REG_TICK (i);
1051 SUBREG_TICKED (i) = -1;
1054 return 0;
1057 /* If this is a SUBREG, we don't want to discard other SUBREGs of the same
1058 pseudo if they don't use overlapping words. We handle only pseudos
1059 here for simplicity. */
1060 if (code == SUBREG && REG_P (SUBREG_REG (x))
1061 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER)
1063 unsigned int i = REGNO (SUBREG_REG (x));
1065 if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
1067 /* If REG_IN_TABLE (i) differs from REG_TICK (i) by one, and
1068 the last store to this register really stored into this
1069 subreg, then remove the memory of this subreg.
1070 Otherwise, remove any memory of the entire register and
1071 all its subregs from the table. */
1072 if (REG_TICK (i) - REG_IN_TABLE (i) > 1
1073 || SUBREG_TICKED (i) != REGNO (SUBREG_REG (x)))
1074 remove_invalid_refs (i);
1075 else
1076 remove_invalid_subreg_refs (i, SUBREG_BYTE (x), GET_MODE (x));
1079 REG_IN_TABLE (i) = REG_TICK (i);
1080 SUBREG_TICKED (i) = REGNO (SUBREG_REG (x));
1081 return 0;
1084 /* If X is a comparison or a COMPARE and either operand is a register
1085 that does not have a quantity, give it one. This is so that a later
1086 call to record_jump_equiv won't cause X to be assigned a different
1087 hash code and not found in the table after that call.
1089 It is not necessary to do this here, since rehash_using_reg can
1090 fix up the table later, but doing this here eliminates the need to
1091 call that expensive function in the most common case where the only
1092 use of the register is in the comparison. */
1094 if (code == COMPARE || COMPARISON_P (x))
1096 if (REG_P (XEXP (x, 0))
1097 && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
1098 if (insert_regs (XEXP (x, 0), NULL, 0))
1100 rehash_using_reg (XEXP (x, 0));
1101 changed = 1;
1104 if (REG_P (XEXP (x, 1))
1105 && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
1106 if (insert_regs (XEXP (x, 1), NULL, 0))
1108 rehash_using_reg (XEXP (x, 1));
1109 changed = 1;
1113 fmt = GET_RTX_FORMAT (code);
1114 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1115 if (fmt[i] == 'e')
1116 changed |= mention_regs (XEXP (x, i));
1117 else if (fmt[i] == 'E')
1118 for (j = 0; j < XVECLEN (x, i); j++)
1119 changed |= mention_regs (XVECEXP (x, i, j));
1121 return changed;
1124 /* Update the register quantities for inserting X into the hash table
1125 with a value equivalent to CLASSP.
1126 (If the class does not contain a REG, it is irrelevant.)
1127 If MODIFIED is nonzero, X is a destination; it is being modified.
1128 Note that delete_reg_equiv should be called on a register
1129 before insert_regs is done on that register with MODIFIED != 0.
1131 Nonzero value means that elements of reg_qty have changed
1132 so X's hash code may be different. */
1134 static int
1135 insert_regs (rtx x, struct table_elt *classp, int modified)
1137 if (REG_P (x))
1139 unsigned int regno = REGNO (x);
1140 int qty_valid;
1142 /* If REGNO is in the equivalence table already but is of the
1143 wrong mode for that equivalence, don't do anything here. */
1145 qty_valid = REGNO_QTY_VALID_P (regno);
1146 if (qty_valid)
1148 struct qty_table_elem *ent = &qty_table[REG_QTY (regno)];
1150 if (ent->mode != GET_MODE (x))
1151 return 0;
1154 if (modified || ! qty_valid)
1156 if (classp)
1157 for (classp = classp->first_same_value;
1158 classp != 0;
1159 classp = classp->next_same_value)
1160 if (REG_P (classp->exp)
1161 && GET_MODE (classp->exp) == GET_MODE (x))
1163 unsigned c_regno = REGNO (classp->exp);
1165 gcc_assert (REGNO_QTY_VALID_P (c_regno));
1167 /* Suppose that 5 is hard reg and 100 and 101 are
1168 pseudos. Consider
1170 (set (reg:si 100) (reg:si 5))
1171 (set (reg:si 5) (reg:si 100))
1172 (set (reg:di 101) (reg:di 5))
1174 We would now set REG_QTY (101) = REG_QTY (5), but the
1175 entry for 5 is in SImode. When we use this later in
1176 copy propagation, we get the register in wrong mode. */
1177 if (qty_table[REG_QTY (c_regno)].mode != GET_MODE (x))
1178 continue;
1180 make_regs_eqv (regno, c_regno);
1181 return 1;
1184 /* Mention_regs for a SUBREG checks if REG_TICK is exactly one larger
1185 than REG_IN_TABLE to find out if there was only a single preceding
1186 invalidation - for the SUBREG - or another one, which would be
1187 for the full register. However, if we find here that REG_TICK
1188 indicates that the register is invalid, it means that it has
1189 been invalidated in a separate operation. The SUBREG might be used
1190 now (then this is a recursive call), or we might use the full REG
1191 now and a SUBREG of it later. So bump up REG_TICK so that
1192 mention_regs will do the right thing. */
1193 if (! modified
1194 && REG_IN_TABLE (regno) >= 0
1195 && REG_TICK (regno) == REG_IN_TABLE (regno) + 1)
1196 REG_TICK (regno)++;
1197 make_new_qty (regno, GET_MODE (x));
1198 return 1;
1201 return 0;
1204 /* If X is a SUBREG, we will likely be inserting the inner register in the
1205 table. If that register doesn't have an assigned quantity number at
1206 this point but does later, the insertion that we will be doing now will
1207 not be accessible because its hash code will have changed. So assign
1208 a quantity number now. */
1210 else if (GET_CODE (x) == SUBREG && REG_P (SUBREG_REG (x))
1211 && ! REGNO_QTY_VALID_P (REGNO (SUBREG_REG (x))))
1213 insert_regs (SUBREG_REG (x), NULL, 0);
1214 mention_regs (x);
1215 return 1;
1217 else
1218 return mention_regs (x);
1222 /* Compute upper and lower anchors for CST. Also compute the offset of CST
1223 from these anchors/bases such that *_BASE + *_OFFS = CST. Return false iff
1224 CST is equal to an anchor. */
1226 static bool
1227 compute_const_anchors (rtx cst,
1228 HOST_WIDE_INT *lower_base, HOST_WIDE_INT *lower_offs,
1229 HOST_WIDE_INT *upper_base, HOST_WIDE_INT *upper_offs)
1231 HOST_WIDE_INT n = INTVAL (cst);
1233 *lower_base = n & ~(targetm.const_anchor - 1);
1234 if (*lower_base == n)
1235 return false;
1237 *upper_base =
1238 (n + (targetm.const_anchor - 1)) & ~(targetm.const_anchor - 1);
1239 *upper_offs = n - *upper_base;
1240 *lower_offs = n - *lower_base;
1241 return true;
1244 /* Insert the equivalence between ANCHOR and (REG + OFF) in mode MODE. */
1246 static void
1247 insert_const_anchor (HOST_WIDE_INT anchor, rtx reg, HOST_WIDE_INT offs,
1248 enum machine_mode mode)
1250 struct table_elt *elt;
1251 unsigned hash;
1252 rtx anchor_exp;
1253 rtx exp;
1255 anchor_exp = GEN_INT (anchor);
1256 hash = HASH (anchor_exp, mode);
1257 elt = lookup (anchor_exp, hash, mode);
1258 if (!elt)
1259 elt = insert (anchor_exp, NULL, hash, mode);
1261 exp = plus_constant (reg, offs);
1262 /* REG has just been inserted and the hash codes recomputed. */
1263 mention_regs (exp);
1264 hash = HASH (exp, mode);
1266 /* Use the cost of the register rather than the whole expression. When
1267 looking up constant anchors we will further offset the corresponding
1268 expression therefore it does not make sense to prefer REGs over
1269 reg-immediate additions. Prefer instead the oldest expression. Also
1270 don't prefer pseudos over hard regs so that we derive constants in
1271 argument registers from other argument registers rather than from the
1272 original pseudo that was used to synthesize the constant. */
1273 insert_with_costs (exp, elt, hash, mode, COST (reg), 1);
1276 /* The constant CST is equivalent to the register REG. Create
1277 equivalences between the two anchors of CST and the corresponding
1278 register-offset expressions using REG. */
1280 static void
1281 insert_const_anchors (rtx reg, rtx cst, enum machine_mode mode)
1283 HOST_WIDE_INT lower_base, lower_offs, upper_base, upper_offs;
1285 if (!compute_const_anchors (cst, &lower_base, &lower_offs,
1286 &upper_base, &upper_offs))
1287 return;
1289 /* Ignore anchors of value 0. Constants accessible from zero are
1290 simple. */
1291 if (lower_base != 0)
1292 insert_const_anchor (lower_base, reg, -lower_offs, mode);
1294 if (upper_base != 0)
1295 insert_const_anchor (upper_base, reg, -upper_offs, mode);
1298 /* We need to express ANCHOR_ELT->exp + OFFS. Walk the equivalence list of
1299 ANCHOR_ELT and see if offsetting any of the entries by OFFS would create a
1300 valid expression. Return the cheapest and oldest of such expressions. In
1301 *OLD, return how old the resulting expression is compared to the other
1302 equivalent expressions. */
1304 static rtx
1305 find_reg_offset_for_const (struct table_elt *anchor_elt, HOST_WIDE_INT offs,
1306 unsigned *old)
1308 struct table_elt *elt;
1309 unsigned idx;
1310 struct table_elt *match_elt;
1311 rtx match;
1313 /* Find the cheapest and *oldest* expression to maximize the chance of
1314 reusing the same pseudo. */
1316 match_elt = NULL;
1317 match = NULL_RTX;
1318 for (elt = anchor_elt->first_same_value, idx = 0;
1319 elt;
1320 elt = elt->next_same_value, idx++)
1322 if (match_elt && CHEAPER (match_elt, elt))
1323 return match;
1325 if (REG_P (elt->exp)
1326 || (GET_CODE (elt->exp) == PLUS
1327 && REG_P (XEXP (elt->exp, 0))
1328 && GET_CODE (XEXP (elt->exp, 1)) == CONST_INT))
1330 rtx x;
1332 /* Ignore expressions that are no longer valid. */
1333 if (!REG_P (elt->exp) && !exp_equiv_p (elt->exp, elt->exp, 1, false))
1334 continue;
1336 x = plus_constant (elt->exp, offs);
1337 if (REG_P (x)
1338 || (GET_CODE (x) == PLUS
1339 && IN_RANGE (INTVAL (XEXP (x, 1)),
1340 -targetm.const_anchor,
1341 targetm.const_anchor - 1)))
1343 match = x;
1344 match_elt = elt;
1345 *old = idx;
1350 return match;
1353 /* Try to express the constant SRC_CONST using a register+offset expression
1354 derived from a constant anchor. Return it if successful or NULL_RTX,
1355 otherwise. */
1357 static rtx
1358 try_const_anchors (rtx src_const, enum machine_mode mode)
1360 struct table_elt *lower_elt, *upper_elt;
1361 HOST_WIDE_INT lower_base, lower_offs, upper_base, upper_offs;
1362 rtx lower_anchor_rtx, upper_anchor_rtx;
1363 rtx lower_exp = NULL_RTX, upper_exp = NULL_RTX;
1364 unsigned lower_old, upper_old;
1366 if (!compute_const_anchors (src_const, &lower_base, &lower_offs,
1367 &upper_base, &upper_offs))
1368 return NULL_RTX;
1370 lower_anchor_rtx = GEN_INT (lower_base);
1371 upper_anchor_rtx = GEN_INT (upper_base);
1372 lower_elt = lookup (lower_anchor_rtx, HASH (lower_anchor_rtx, mode), mode);
1373 upper_elt = lookup (upper_anchor_rtx, HASH (upper_anchor_rtx, mode), mode);
1375 if (lower_elt)
1376 lower_exp = find_reg_offset_for_const (lower_elt, lower_offs, &lower_old);
1377 if (upper_elt)
1378 upper_exp = find_reg_offset_for_const (upper_elt, upper_offs, &upper_old);
1380 if (!lower_exp)
1381 return upper_exp;
1382 if (!upper_exp)
1383 return lower_exp;
1385 /* Return the older expression. */
1386 return (upper_old > lower_old ? upper_exp : lower_exp);
1389 /* Look in or update the hash table. */
1391 /* Remove table element ELT from use in the table.
1392 HASH is its hash code, made using the HASH macro.
1393 It's an argument because often that is known in advance
1394 and we save much time not recomputing it. */
1396 static void
1397 remove_from_table (struct table_elt *elt, unsigned int hash)
1399 if (elt == 0)
1400 return;
1402 /* Mark this element as removed. See cse_insn. */
1403 elt->first_same_value = 0;
1405 /* Remove the table element from its equivalence class. */
1408 struct table_elt *prev = elt->prev_same_value;
1409 struct table_elt *next = elt->next_same_value;
1411 if (next)
1412 next->prev_same_value = prev;
1414 if (prev)
1415 prev->next_same_value = next;
1416 else
1418 struct table_elt *newfirst = next;
1419 while (next)
1421 next->first_same_value = newfirst;
1422 next = next->next_same_value;
1427 /* Remove the table element from its hash bucket. */
1430 struct table_elt *prev = elt->prev_same_hash;
1431 struct table_elt *next = elt->next_same_hash;
1433 if (next)
1434 next->prev_same_hash = prev;
1436 if (prev)
1437 prev->next_same_hash = next;
1438 else if (table[hash] == elt)
1439 table[hash] = next;
1440 else
1442 /* This entry is not in the proper hash bucket. This can happen
1443 when two classes were merged by `merge_equiv_classes'. Search
1444 for the hash bucket that it heads. This happens only very
1445 rarely, so the cost is acceptable. */
1446 for (hash = 0; hash < HASH_SIZE; hash++)
1447 if (table[hash] == elt)
1448 table[hash] = next;
1452 /* Remove the table element from its related-value circular chain. */
1454 if (elt->related_value != 0 && elt->related_value != elt)
1456 struct table_elt *p = elt->related_value;
1458 while (p->related_value != elt)
1459 p = p->related_value;
1460 p->related_value = elt->related_value;
1461 if (p->related_value == p)
1462 p->related_value = 0;
1465 /* Now add it to the free element chain. */
1466 elt->next_same_hash = free_element_chain;
1467 free_element_chain = elt;
1470 /* Same as above, but X is a pseudo-register. */
1472 static void
1473 remove_pseudo_from_table (rtx x, unsigned int hash)
1475 struct table_elt *elt;
1477 /* Because a pseudo-register can be referenced in more than one
1478 mode, we might have to remove more than one table entry. */
1479 while ((elt = lookup_for_remove (x, hash, VOIDmode)))
1480 remove_from_table (elt, hash);
1483 /* Look up X in the hash table and return its table element,
1484 or 0 if X is not in the table.
1486 MODE is the machine-mode of X, or if X is an integer constant
1487 with VOIDmode then MODE is the mode with which X will be used.
1489 Here we are satisfied to find an expression whose tree structure
1490 looks like X. */
1492 static struct table_elt *
1493 lookup (rtx x, unsigned int hash, enum machine_mode mode)
1495 struct table_elt *p;
1497 for (p = table[hash]; p; p = p->next_same_hash)
1498 if (mode == p->mode && ((x == p->exp && REG_P (x))
1499 || exp_equiv_p (x, p->exp, !REG_P (x), false)))
1500 return p;
1502 return 0;
1505 /* Like `lookup' but don't care whether the table element uses invalid regs.
1506 Also ignore discrepancies in the machine mode of a register. */
1508 static struct table_elt *
1509 lookup_for_remove (rtx x, unsigned int hash, enum machine_mode mode)
1511 struct table_elt *p;
1513 if (REG_P (x))
1515 unsigned int regno = REGNO (x);
1517 /* Don't check the machine mode when comparing registers;
1518 invalidating (REG:SI 0) also invalidates (REG:DF 0). */
1519 for (p = table[hash]; p; p = p->next_same_hash)
1520 if (REG_P (p->exp)
1521 && REGNO (p->exp) == regno)
1522 return p;
1524 else
1526 for (p = table[hash]; p; p = p->next_same_hash)
1527 if (mode == p->mode
1528 && (x == p->exp || exp_equiv_p (x, p->exp, 0, false)))
1529 return p;
1532 return 0;
1535 /* Look for an expression equivalent to X and with code CODE.
1536 If one is found, return that expression. */
1538 static rtx
1539 lookup_as_function (rtx x, enum rtx_code code)
1541 struct table_elt *p
1542 = lookup (x, SAFE_HASH (x, VOIDmode), GET_MODE (x));
1544 if (p == 0)
1545 return 0;
1547 for (p = p->first_same_value; p; p = p->next_same_value)
1548 if (GET_CODE (p->exp) == code
1549 /* Make sure this is a valid entry in the table. */
1550 && exp_equiv_p (p->exp, p->exp, 1, false))
1551 return p->exp;
1553 return 0;
1556 /* Insert X in the hash table, assuming HASH is its hash code and
1557 CLASSP is an element of the class it should go in (or 0 if a new
1558 class should be made). COST is the code of X and reg_cost is the
1559 cost of registers in X. It is inserted at the proper position to
1560 keep the class in the order cheapest first.
1562 MODE is the machine-mode of X, or if X is an integer constant
1563 with VOIDmode then MODE is the mode with which X will be used.
1565 For elements of equal cheapness, the most recent one
1566 goes in front, except that the first element in the list
1567 remains first unless a cheaper element is added. The order of
1568 pseudo-registers does not matter, as canon_reg will be called to
1569 find the cheapest when a register is retrieved from the table.
1571 The in_memory field in the hash table element is set to 0.
1572 The caller must set it nonzero if appropriate.
1574 You should call insert_regs (X, CLASSP, MODIFY) before calling here,
1575 and if insert_regs returns a nonzero value
1576 you must then recompute its hash code before calling here.
1578 If necessary, update table showing constant values of quantities. */
1580 static struct table_elt *
1581 insert_with_costs (rtx x, struct table_elt *classp, unsigned int hash,
1582 enum machine_mode mode, int cost, int reg_cost)
1584 struct table_elt *elt;
1586 /* If X is a register and we haven't made a quantity for it,
1587 something is wrong. */
1588 gcc_assert (!REG_P (x) || REGNO_QTY_VALID_P (REGNO (x)));
1590 /* If X is a hard register, show it is being put in the table. */
1591 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
1592 add_to_hard_reg_set (&hard_regs_in_table, GET_MODE (x), REGNO (x));
1594 /* Put an element for X into the right hash bucket. */
1596 elt = free_element_chain;
1597 if (elt)
1598 free_element_chain = elt->next_same_hash;
1599 else
1600 elt = XNEW (struct table_elt);
1602 elt->exp = x;
1603 elt->canon_exp = NULL_RTX;
1604 elt->cost = cost;
1605 elt->regcost = reg_cost;
1606 elt->next_same_value = 0;
1607 elt->prev_same_value = 0;
1608 elt->next_same_hash = table[hash];
1609 elt->prev_same_hash = 0;
1610 elt->related_value = 0;
1611 elt->in_memory = 0;
1612 elt->mode = mode;
1613 elt->is_const = (CONSTANT_P (x) || fixed_base_plus_p (x));
1615 if (table[hash])
1616 table[hash]->prev_same_hash = elt;
1617 table[hash] = elt;
1619 /* Put it into the proper value-class. */
1620 if (classp)
1622 classp = classp->first_same_value;
1623 if (CHEAPER (elt, classp))
1624 /* Insert at the head of the class. */
1626 struct table_elt *p;
1627 elt->next_same_value = classp;
1628 classp->prev_same_value = elt;
1629 elt->first_same_value = elt;
1631 for (p = classp; p; p = p->next_same_value)
1632 p->first_same_value = elt;
1634 else
1636 /* Insert not at head of the class. */
1637 /* Put it after the last element cheaper than X. */
1638 struct table_elt *p, *next;
1640 for (p = classp; (next = p->next_same_value) && CHEAPER (next, elt);
1641 p = next);
1643 /* Put it after P and before NEXT. */
1644 elt->next_same_value = next;
1645 if (next)
1646 next->prev_same_value = elt;
1648 elt->prev_same_value = p;
1649 p->next_same_value = elt;
1650 elt->first_same_value = classp;
1653 else
1654 elt->first_same_value = elt;
1656 /* If this is a constant being set equivalent to a register or a register
1657 being set equivalent to a constant, note the constant equivalence.
1659 If this is a constant, it cannot be equivalent to a different constant,
1660 and a constant is the only thing that can be cheaper than a register. So
1661 we know the register is the head of the class (before the constant was
1662 inserted).
1664 If this is a register that is not already known equivalent to a
1665 constant, we must check the entire class.
1667 If this is a register that is already known equivalent to an insn,
1668 update the qtys `const_insn' to show that `this_insn' is the latest
1669 insn making that quantity equivalent to the constant. */
1671 if (elt->is_const && classp && REG_P (classp->exp)
1672 && !REG_P (x))
1674 int exp_q = REG_QTY (REGNO (classp->exp));
1675 struct qty_table_elem *exp_ent = &qty_table[exp_q];
1677 exp_ent->const_rtx = gen_lowpart (exp_ent->mode, x);
1678 exp_ent->const_insn = this_insn;
1681 else if (REG_P (x)
1682 && classp
1683 && ! qty_table[REG_QTY (REGNO (x))].const_rtx
1684 && ! elt->is_const)
1686 struct table_elt *p;
1688 for (p = classp; p != 0; p = p->next_same_value)
1690 if (p->is_const && !REG_P (p->exp))
1692 int x_q = REG_QTY (REGNO (x));
1693 struct qty_table_elem *x_ent = &qty_table[x_q];
1695 x_ent->const_rtx
1696 = gen_lowpart (GET_MODE (x), p->exp);
1697 x_ent->const_insn = this_insn;
1698 break;
1703 else if (REG_P (x)
1704 && qty_table[REG_QTY (REGNO (x))].const_rtx
1705 && GET_MODE (x) == qty_table[REG_QTY (REGNO (x))].mode)
1706 qty_table[REG_QTY (REGNO (x))].const_insn = this_insn;
1708 /* If this is a constant with symbolic value,
1709 and it has a term with an explicit integer value,
1710 link it up with related expressions. */
1711 if (GET_CODE (x) == CONST)
1713 rtx subexp = get_related_value (x);
1714 unsigned subhash;
1715 struct table_elt *subelt, *subelt_prev;
1717 if (subexp != 0)
1719 /* Get the integer-free subexpression in the hash table. */
1720 subhash = SAFE_HASH (subexp, mode);
1721 subelt = lookup (subexp, subhash, mode);
1722 if (subelt == 0)
1723 subelt = insert (subexp, NULL, subhash, mode);
1724 /* Initialize SUBELT's circular chain if it has none. */
1725 if (subelt->related_value == 0)
1726 subelt->related_value = subelt;
1727 /* Find the element in the circular chain that precedes SUBELT. */
1728 subelt_prev = subelt;
1729 while (subelt_prev->related_value != subelt)
1730 subelt_prev = subelt_prev->related_value;
1731 /* Put new ELT into SUBELT's circular chain just before SUBELT.
1732 This way the element that follows SUBELT is the oldest one. */
1733 elt->related_value = subelt_prev->related_value;
1734 subelt_prev->related_value = elt;
1738 return elt;
1741 /* Wrap insert_with_costs by passing the default costs. */
1743 static struct table_elt *
1744 insert (rtx x, struct table_elt *classp, unsigned int hash,
1745 enum machine_mode mode)
1747 return
1748 insert_with_costs (x, classp, hash, mode, COST (x), approx_reg_cost (x));
1752 /* Given two equivalence classes, CLASS1 and CLASS2, put all the entries from
1753 CLASS2 into CLASS1. This is done when we have reached an insn which makes
1754 the two classes equivalent.
1756 CLASS1 will be the surviving class; CLASS2 should not be used after this
1757 call.
1759 Any invalid entries in CLASS2 will not be copied. */
1761 static void
1762 merge_equiv_classes (struct table_elt *class1, struct table_elt *class2)
1764 struct table_elt *elt, *next, *new_elt;
1766 /* Ensure we start with the head of the classes. */
1767 class1 = class1->first_same_value;
1768 class2 = class2->first_same_value;
1770 /* If they were already equal, forget it. */
1771 if (class1 == class2)
1772 return;
1774 for (elt = class2; elt; elt = next)
1776 unsigned int hash;
1777 rtx exp = elt->exp;
1778 enum machine_mode mode = elt->mode;
1780 next = elt->next_same_value;
1782 /* Remove old entry, make a new one in CLASS1's class.
1783 Don't do this for invalid entries as we cannot find their
1784 hash code (it also isn't necessary). */
1785 if (REG_P (exp) || exp_equiv_p (exp, exp, 1, false))
1787 bool need_rehash = false;
1789 hash_arg_in_memory = 0;
1790 hash = HASH (exp, mode);
1792 if (REG_P (exp))
1794 need_rehash = REGNO_QTY_VALID_P (REGNO (exp));
1795 delete_reg_equiv (REGNO (exp));
1798 if (REG_P (exp) && REGNO (exp) >= FIRST_PSEUDO_REGISTER)
1799 remove_pseudo_from_table (exp, hash);
1800 else
1801 remove_from_table (elt, hash);
1803 if (insert_regs (exp, class1, 0) || need_rehash)
1805 rehash_using_reg (exp);
1806 hash = HASH (exp, mode);
1808 new_elt = insert (exp, class1, hash, mode);
1809 new_elt->in_memory = hash_arg_in_memory;
1814 /* Flush the entire hash table. */
1816 static void
1817 flush_hash_table (void)
1819 int i;
1820 struct table_elt *p;
1822 for (i = 0; i < HASH_SIZE; i++)
1823 for (p = table[i]; p; p = table[i])
1825 /* Note that invalidate can remove elements
1826 after P in the current hash chain. */
1827 if (REG_P (p->exp))
1828 invalidate (p->exp, VOIDmode);
1829 else
1830 remove_from_table (p, i);
1834 /* Function called for each rtx to check whether true dependence exist. */
1835 struct check_dependence_data
1837 enum machine_mode mode;
1838 rtx exp;
1839 rtx addr;
1842 static int
1843 check_dependence (rtx *x, void *data)
1845 struct check_dependence_data *d = (struct check_dependence_data *) data;
1846 if (*x && MEM_P (*x))
1847 return canon_true_dependence (d->exp, d->mode, d->addr, *x, NULL_RTX,
1848 cse_rtx_varies_p);
1849 else
1850 return 0;
1853 /* Remove from the hash table, or mark as invalid, all expressions whose
1854 values could be altered by storing in X. X is a register, a subreg, or
1855 a memory reference with nonvarying address (because, when a memory
1856 reference with a varying address is stored in, all memory references are
1857 removed by invalidate_memory so specific invalidation is superfluous).
1858 FULL_MODE, if not VOIDmode, indicates that this much should be
1859 invalidated instead of just the amount indicated by the mode of X. This
1860 is only used for bitfield stores into memory.
1862 A nonvarying address may be just a register or just a symbol reference,
1863 or it may be either of those plus a numeric offset. */
1865 static void
1866 invalidate (rtx x, enum machine_mode full_mode)
1868 int i;
1869 struct table_elt *p;
1870 rtx addr;
1872 switch (GET_CODE (x))
1874 case REG:
1876 /* If X is a register, dependencies on its contents are recorded
1877 through the qty number mechanism. Just change the qty number of
1878 the register, mark it as invalid for expressions that refer to it,
1879 and remove it itself. */
1880 unsigned int regno = REGNO (x);
1881 unsigned int hash = HASH (x, GET_MODE (x));
1883 /* Remove REGNO from any quantity list it might be on and indicate
1884 that its value might have changed. If it is a pseudo, remove its
1885 entry from the hash table.
1887 For a hard register, we do the first two actions above for any
1888 additional hard registers corresponding to X. Then, if any of these
1889 registers are in the table, we must remove any REG entries that
1890 overlap these registers. */
1892 delete_reg_equiv (regno);
1893 REG_TICK (regno)++;
1894 SUBREG_TICKED (regno) = -1;
1896 if (regno >= FIRST_PSEUDO_REGISTER)
1897 remove_pseudo_from_table (x, hash);
1898 else
1900 HOST_WIDE_INT in_table
1901 = TEST_HARD_REG_BIT (hard_regs_in_table, regno);
1902 unsigned int endregno = END_HARD_REGNO (x);
1903 unsigned int tregno, tendregno, rn;
1904 struct table_elt *p, *next;
1906 CLEAR_HARD_REG_BIT (hard_regs_in_table, regno);
1908 for (rn = regno + 1; rn < endregno; rn++)
1910 in_table |= TEST_HARD_REG_BIT (hard_regs_in_table, rn);
1911 CLEAR_HARD_REG_BIT (hard_regs_in_table, rn);
1912 delete_reg_equiv (rn);
1913 REG_TICK (rn)++;
1914 SUBREG_TICKED (rn) = -1;
1917 if (in_table)
1918 for (hash = 0; hash < HASH_SIZE; hash++)
1919 for (p = table[hash]; p; p = next)
1921 next = p->next_same_hash;
1923 if (!REG_P (p->exp)
1924 || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
1925 continue;
1927 tregno = REGNO (p->exp);
1928 tendregno = END_HARD_REGNO (p->exp);
1929 if (tendregno > regno && tregno < endregno)
1930 remove_from_table (p, hash);
1934 return;
1936 case SUBREG:
1937 invalidate (SUBREG_REG (x), VOIDmode);
1938 return;
1940 case PARALLEL:
1941 for (i = XVECLEN (x, 0) - 1; i >= 0; --i)
1942 invalidate (XVECEXP (x, 0, i), VOIDmode);
1943 return;
1945 case EXPR_LIST:
1946 /* This is part of a disjoint return value; extract the location in
1947 question ignoring the offset. */
1948 invalidate (XEXP (x, 0), VOIDmode);
1949 return;
1951 case MEM:
1952 addr = canon_rtx (get_addr (XEXP (x, 0)));
1953 /* Calculate the canonical version of X here so that
1954 true_dependence doesn't generate new RTL for X on each call. */
1955 x = canon_rtx (x);
1957 /* Remove all hash table elements that refer to overlapping pieces of
1958 memory. */
1959 if (full_mode == VOIDmode)
1960 full_mode = GET_MODE (x);
1962 for (i = 0; i < HASH_SIZE; i++)
1964 struct table_elt *next;
1966 for (p = table[i]; p; p = next)
1968 next = p->next_same_hash;
1969 if (p->in_memory)
1971 struct check_dependence_data d;
1973 /* Just canonicalize the expression once;
1974 otherwise each time we call invalidate
1975 true_dependence will canonicalize the
1976 expression again. */
1977 if (!p->canon_exp)
1978 p->canon_exp = canon_rtx (p->exp);
1979 d.exp = x;
1980 d.addr = addr;
1981 d.mode = full_mode;
1982 if (for_each_rtx (&p->canon_exp, check_dependence, &d))
1983 remove_from_table (p, i);
1987 return;
1989 default:
1990 gcc_unreachable ();
1994 /* Remove all expressions that refer to register REGNO,
1995 since they are already invalid, and we are about to
1996 mark that register valid again and don't want the old
1997 expressions to reappear as valid. */
1999 static void
2000 remove_invalid_refs (unsigned int regno)
2002 unsigned int i;
2003 struct table_elt *p, *next;
2005 for (i = 0; i < HASH_SIZE; i++)
2006 for (p = table[i]; p; p = next)
2008 next = p->next_same_hash;
2009 if (!REG_P (p->exp)
2010 && refers_to_regno_p (regno, regno + 1, p->exp, (rtx *) 0))
2011 remove_from_table (p, i);
2015 /* Likewise for a subreg with subreg_reg REGNO, subreg_byte OFFSET,
2016 and mode MODE. */
2017 static void
2018 remove_invalid_subreg_refs (unsigned int regno, unsigned int offset,
2019 enum machine_mode mode)
2021 unsigned int i;
2022 struct table_elt *p, *next;
2023 unsigned int end = offset + (GET_MODE_SIZE (mode) - 1);
2025 for (i = 0; i < HASH_SIZE; i++)
2026 for (p = table[i]; p; p = next)
2028 rtx exp = p->exp;
2029 next = p->next_same_hash;
2031 if (!REG_P (exp)
2032 && (GET_CODE (exp) != SUBREG
2033 || !REG_P (SUBREG_REG (exp))
2034 || REGNO (SUBREG_REG (exp)) != regno
2035 || (((SUBREG_BYTE (exp)
2036 + (GET_MODE_SIZE (GET_MODE (exp)) - 1)) >= offset)
2037 && SUBREG_BYTE (exp) <= end))
2038 && refers_to_regno_p (regno, regno + 1, p->exp, (rtx *) 0))
2039 remove_from_table (p, i);
2043 /* Recompute the hash codes of any valid entries in the hash table that
2044 reference X, if X is a register, or SUBREG_REG (X) if X is a SUBREG.
2046 This is called when we make a jump equivalence. */
2048 static void
2049 rehash_using_reg (rtx x)
2051 unsigned int i;
2052 struct table_elt *p, *next;
2053 unsigned hash;
2055 if (GET_CODE (x) == SUBREG)
2056 x = SUBREG_REG (x);
2058 /* If X is not a register or if the register is known not to be in any
2059 valid entries in the table, we have no work to do. */
2061 if (!REG_P (x)
2062 || REG_IN_TABLE (REGNO (x)) < 0
2063 || REG_IN_TABLE (REGNO (x)) != REG_TICK (REGNO (x)))
2064 return;
2066 /* Scan all hash chains looking for valid entries that mention X.
2067 If we find one and it is in the wrong hash chain, move it. */
2069 for (i = 0; i < HASH_SIZE; i++)
2070 for (p = table[i]; p; p = next)
2072 next = p->next_same_hash;
2073 if (reg_mentioned_p (x, p->exp)
2074 && exp_equiv_p (p->exp, p->exp, 1, false)
2075 && i != (hash = SAFE_HASH (p->exp, p->mode)))
2077 if (p->next_same_hash)
2078 p->next_same_hash->prev_same_hash = p->prev_same_hash;
2080 if (p->prev_same_hash)
2081 p->prev_same_hash->next_same_hash = p->next_same_hash;
2082 else
2083 table[i] = p->next_same_hash;
2085 p->next_same_hash = table[hash];
2086 p->prev_same_hash = 0;
2087 if (table[hash])
2088 table[hash]->prev_same_hash = p;
2089 table[hash] = p;
2094 /* Remove from the hash table any expression that is a call-clobbered
2095 register. Also update their TICK values. */
2097 static void
2098 invalidate_for_call (void)
2100 unsigned int regno, endregno;
2101 unsigned int i;
2102 unsigned hash;
2103 struct table_elt *p, *next;
2104 int in_table = 0;
2106 /* Go through all the hard registers. For each that is clobbered in
2107 a CALL_INSN, remove the register from quantity chains and update
2108 reg_tick if defined. Also see if any of these registers is currently
2109 in the table. */
2111 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2112 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
2114 delete_reg_equiv (regno);
2115 if (REG_TICK (regno) >= 0)
2117 REG_TICK (regno)++;
2118 SUBREG_TICKED (regno) = -1;
2121 in_table |= (TEST_HARD_REG_BIT (hard_regs_in_table, regno) != 0);
2124 /* In the case where we have no call-clobbered hard registers in the
2125 table, we are done. Otherwise, scan the table and remove any
2126 entry that overlaps a call-clobbered register. */
2128 if (in_table)
2129 for (hash = 0; hash < HASH_SIZE; hash++)
2130 for (p = table[hash]; p; p = next)
2132 next = p->next_same_hash;
2134 if (!REG_P (p->exp)
2135 || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
2136 continue;
2138 regno = REGNO (p->exp);
2139 endregno = END_HARD_REGNO (p->exp);
2141 for (i = regno; i < endregno; i++)
2142 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
2144 remove_from_table (p, hash);
2145 break;
2150 /* Given an expression X of type CONST,
2151 and ELT which is its table entry (or 0 if it
2152 is not in the hash table),
2153 return an alternate expression for X as a register plus integer.
2154 If none can be found, return 0. */
2156 static rtx
2157 use_related_value (rtx x, struct table_elt *elt)
2159 struct table_elt *relt = 0;
2160 struct table_elt *p, *q;
2161 HOST_WIDE_INT offset;
2163 /* First, is there anything related known?
2164 If we have a table element, we can tell from that.
2165 Otherwise, must look it up. */
2167 if (elt != 0 && elt->related_value != 0)
2168 relt = elt;
2169 else if (elt == 0 && GET_CODE (x) == CONST)
2171 rtx subexp = get_related_value (x);
2172 if (subexp != 0)
2173 relt = lookup (subexp,
2174 SAFE_HASH (subexp, GET_MODE (subexp)),
2175 GET_MODE (subexp));
2178 if (relt == 0)
2179 return 0;
2181 /* Search all related table entries for one that has an
2182 equivalent register. */
2184 p = relt;
2185 while (1)
2187 /* This loop is strange in that it is executed in two different cases.
2188 The first is when X is already in the table. Then it is searching
2189 the RELATED_VALUE list of X's class (RELT). The second case is when
2190 X is not in the table. Then RELT points to a class for the related
2191 value.
2193 Ensure that, whatever case we are in, that we ignore classes that have
2194 the same value as X. */
2196 if (rtx_equal_p (x, p->exp))
2197 q = 0;
2198 else
2199 for (q = p->first_same_value; q; q = q->next_same_value)
2200 if (REG_P (q->exp))
2201 break;
2203 if (q)
2204 break;
2206 p = p->related_value;
2208 /* We went all the way around, so there is nothing to be found.
2209 Alternatively, perhaps RELT was in the table for some other reason
2210 and it has no related values recorded. */
2211 if (p == relt || p == 0)
2212 break;
2215 if (q == 0)
2216 return 0;
2218 offset = (get_integer_term (x) - get_integer_term (p->exp));
2219 /* Note: OFFSET may be 0 if P->xexp and X are related by commutativity. */
2220 return plus_constant (q->exp, offset);
2224 /* Hash a string. Just add its bytes up. */
2225 static inline unsigned
2226 hash_rtx_string (const char *ps)
2228 unsigned hash = 0;
2229 const unsigned char *p = (const unsigned char *) ps;
2231 if (p)
2232 while (*p)
2233 hash += *p++;
2235 return hash;
2238 /* Same as hash_rtx, but call CB on each rtx if it is not NULL.
2239 When the callback returns true, we continue with the new rtx. */
2241 unsigned
2242 hash_rtx_cb (const_rtx x, enum machine_mode mode,
2243 int *do_not_record_p, int *hash_arg_in_memory_p,
2244 bool have_reg_qty, hash_rtx_callback_function cb)
2246 int i, j;
2247 unsigned hash = 0;
2248 enum rtx_code code;
2249 const char *fmt;
2250 enum machine_mode newmode;
2251 rtx newx;
2253 /* Used to turn recursion into iteration. We can't rely on GCC's
2254 tail-recursion elimination since we need to keep accumulating values
2255 in HASH. */
2256 repeat:
2257 if (x == 0)
2258 return hash;
2260 /* Invoke the callback first. */
2261 if (cb != NULL
2262 && ((*cb) (x, mode, &newx, &newmode)))
2264 hash += hash_rtx_cb (newx, newmode, do_not_record_p,
2265 hash_arg_in_memory_p, have_reg_qty, cb);
2266 return hash;
2269 code = GET_CODE (x);
2270 switch (code)
2272 case REG:
2274 unsigned int regno = REGNO (x);
2276 if (do_not_record_p && !reload_completed)
2278 /* On some machines, we can't record any non-fixed hard register,
2279 because extending its life will cause reload problems. We
2280 consider ap, fp, sp, gp to be fixed for this purpose.
2282 We also consider CCmode registers to be fixed for this purpose;
2283 failure to do so leads to failure to simplify 0<100 type of
2284 conditionals.
2286 On all machines, we can't record any global registers.
2287 Nor should we record any register that is in a small
2288 class, as defined by CLASS_LIKELY_SPILLED_P. */
2289 bool record;
2291 if (regno >= FIRST_PSEUDO_REGISTER)
2292 record = true;
2293 else if (x == frame_pointer_rtx
2294 || x == hard_frame_pointer_rtx
2295 || x == arg_pointer_rtx
2296 || x == stack_pointer_rtx
2297 || x == pic_offset_table_rtx)
2298 record = true;
2299 else if (global_regs[regno])
2300 record = false;
2301 else if (fixed_regs[regno])
2302 record = true;
2303 else if (GET_MODE_CLASS (GET_MODE (x)) == MODE_CC)
2304 record = true;
2305 else if (targetm.small_register_classes_for_mode_p (GET_MODE (x)))
2306 record = false;
2307 else if (CLASS_LIKELY_SPILLED_P (REGNO_REG_CLASS (regno)))
2308 record = false;
2309 else
2310 record = true;
2312 if (!record)
2314 *do_not_record_p = 1;
2315 return 0;
2319 hash += ((unsigned int) REG << 7);
2320 hash += (have_reg_qty ? (unsigned) REG_QTY (regno) : regno);
2321 return hash;
2324 /* We handle SUBREG of a REG specially because the underlying
2325 reg changes its hash value with every value change; we don't
2326 want to have to forget unrelated subregs when one subreg changes. */
2327 case SUBREG:
2329 if (REG_P (SUBREG_REG (x)))
2331 hash += (((unsigned int) SUBREG << 7)
2332 + REGNO (SUBREG_REG (x))
2333 + (SUBREG_BYTE (x) / UNITS_PER_WORD));
2334 return hash;
2336 break;
2339 case CONST_INT:
2340 hash += (((unsigned int) CONST_INT << 7) + (unsigned int) mode
2341 + (unsigned int) INTVAL (x));
2342 return hash;
2344 case CONST_DOUBLE:
2345 /* This is like the general case, except that it only counts
2346 the integers representing the constant. */
2347 hash += (unsigned int) code + (unsigned int) GET_MODE (x);
2348 if (GET_MODE (x) != VOIDmode)
2349 hash += real_hash (CONST_DOUBLE_REAL_VALUE (x));
2350 else
2351 hash += ((unsigned int) CONST_DOUBLE_LOW (x)
2352 + (unsigned int) CONST_DOUBLE_HIGH (x));
2353 return hash;
2355 case CONST_FIXED:
2356 hash += (unsigned int) code + (unsigned int) GET_MODE (x);
2357 hash += fixed_hash (CONST_FIXED_VALUE (x));
2358 return hash;
2360 case CONST_VECTOR:
2362 int units;
2363 rtx elt;
2365 units = CONST_VECTOR_NUNITS (x);
2367 for (i = 0; i < units; ++i)
2369 elt = CONST_VECTOR_ELT (x, i);
2370 hash += hash_rtx_cb (elt, GET_MODE (elt),
2371 do_not_record_p, hash_arg_in_memory_p,
2372 have_reg_qty, cb);
2375 return hash;
2378 /* Assume there is only one rtx object for any given label. */
2379 case LABEL_REF:
2380 /* We don't hash on the address of the CODE_LABEL to avoid bootstrap
2381 differences and differences between each stage's debugging dumps. */
2382 hash += (((unsigned int) LABEL_REF << 7)
2383 + CODE_LABEL_NUMBER (XEXP (x, 0)));
2384 return hash;
2386 case SYMBOL_REF:
2388 /* Don't hash on the symbol's address to avoid bootstrap differences.
2389 Different hash values may cause expressions to be recorded in
2390 different orders and thus different registers to be used in the
2391 final assembler. This also avoids differences in the dump files
2392 between various stages. */
2393 unsigned int h = 0;
2394 const unsigned char *p = (const unsigned char *) XSTR (x, 0);
2396 while (*p)
2397 h += (h << 7) + *p++; /* ??? revisit */
2399 hash += ((unsigned int) SYMBOL_REF << 7) + h;
2400 return hash;
2403 case MEM:
2404 /* We don't record if marked volatile or if BLKmode since we don't
2405 know the size of the move. */
2406 if (do_not_record_p && (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode))
2408 *do_not_record_p = 1;
2409 return 0;
2411 if (hash_arg_in_memory_p && !MEM_READONLY_P (x))
2412 *hash_arg_in_memory_p = 1;
2414 /* Now that we have already found this special case,
2415 might as well speed it up as much as possible. */
2416 hash += (unsigned) MEM;
2417 x = XEXP (x, 0);
2418 goto repeat;
2420 case USE:
2421 /* A USE that mentions non-volatile memory needs special
2422 handling since the MEM may be BLKmode which normally
2423 prevents an entry from being made. Pure calls are
2424 marked by a USE which mentions BLKmode memory.
2425 See calls.c:emit_call_1. */
2426 if (MEM_P (XEXP (x, 0))
2427 && ! MEM_VOLATILE_P (XEXP (x, 0)))
2429 hash += (unsigned) USE;
2430 x = XEXP (x, 0);
2432 if (hash_arg_in_memory_p && !MEM_READONLY_P (x))
2433 *hash_arg_in_memory_p = 1;
2435 /* Now that we have already found this special case,
2436 might as well speed it up as much as possible. */
2437 hash += (unsigned) MEM;
2438 x = XEXP (x, 0);
2439 goto repeat;
2441 break;
2443 case PRE_DEC:
2444 case PRE_INC:
2445 case POST_DEC:
2446 case POST_INC:
2447 case PRE_MODIFY:
2448 case POST_MODIFY:
2449 case PC:
2450 case CC0:
2451 case CALL:
2452 case UNSPEC_VOLATILE:
2453 if (do_not_record_p) {
2454 *do_not_record_p = 1;
2455 return 0;
2457 else
2458 return hash;
2459 break;
2461 case ASM_OPERANDS:
2462 if (do_not_record_p && MEM_VOLATILE_P (x))
2464 *do_not_record_p = 1;
2465 return 0;
2467 else
2469 /* We don't want to take the filename and line into account. */
2470 hash += (unsigned) code + (unsigned) GET_MODE (x)
2471 + hash_rtx_string (ASM_OPERANDS_TEMPLATE (x))
2472 + hash_rtx_string (ASM_OPERANDS_OUTPUT_CONSTRAINT (x))
2473 + (unsigned) ASM_OPERANDS_OUTPUT_IDX (x);
2475 if (ASM_OPERANDS_INPUT_LENGTH (x))
2477 for (i = 1; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
2479 hash += (hash_rtx_cb (ASM_OPERANDS_INPUT (x, i),
2480 GET_MODE (ASM_OPERANDS_INPUT (x, i)),
2481 do_not_record_p, hash_arg_in_memory_p,
2482 have_reg_qty, cb)
2483 + hash_rtx_string
2484 (ASM_OPERANDS_INPUT_CONSTRAINT (x, i)));
2487 hash += hash_rtx_string (ASM_OPERANDS_INPUT_CONSTRAINT (x, 0));
2488 x = ASM_OPERANDS_INPUT (x, 0);
2489 mode = GET_MODE (x);
2490 goto repeat;
2493 return hash;
2495 break;
2497 default:
2498 break;
2501 i = GET_RTX_LENGTH (code) - 1;
2502 hash += (unsigned) code + (unsigned) GET_MODE (x);
2503 fmt = GET_RTX_FORMAT (code);
2504 for (; i >= 0; i--)
2506 switch (fmt[i])
2508 case 'e':
2509 /* If we are about to do the last recursive call
2510 needed at this level, change it into iteration.
2511 This function is called enough to be worth it. */
2512 if (i == 0)
2514 x = XEXP (x, i);
2515 goto repeat;
2518 hash += hash_rtx_cb (XEXP (x, i), VOIDmode, do_not_record_p,
2519 hash_arg_in_memory_p,
2520 have_reg_qty, cb);
2521 break;
2523 case 'E':
2524 for (j = 0; j < XVECLEN (x, i); j++)
2525 hash += hash_rtx_cb (XVECEXP (x, i, j), VOIDmode, do_not_record_p,
2526 hash_arg_in_memory_p,
2527 have_reg_qty, cb);
2528 break;
2530 case 's':
2531 hash += hash_rtx_string (XSTR (x, i));
2532 break;
2534 case 'i':
2535 hash += (unsigned int) XINT (x, i);
2536 break;
2538 case '0': case 't':
2539 /* Unused. */
2540 break;
2542 default:
2543 gcc_unreachable ();
2547 return hash;
2550 /* Hash an rtx. We are careful to make sure the value is never negative.
2551 Equivalent registers hash identically.
2552 MODE is used in hashing for CONST_INTs only;
2553 otherwise the mode of X is used.
2555 Store 1 in DO_NOT_RECORD_P if any subexpression is volatile.
2557 If HASH_ARG_IN_MEMORY_P is not NULL, store 1 in it if X contains
2558 a MEM rtx which does not have the RTX_UNCHANGING_P bit set.
2560 Note that cse_insn knows that the hash code of a MEM expression
2561 is just (int) MEM plus the hash code of the address. */
2563 unsigned
2564 hash_rtx (const_rtx x, enum machine_mode mode, int *do_not_record_p,
2565 int *hash_arg_in_memory_p, bool have_reg_qty)
2567 return hash_rtx_cb (x, mode, do_not_record_p,
2568 hash_arg_in_memory_p, have_reg_qty, NULL);
2571 /* Hash an rtx X for cse via hash_rtx.
2572 Stores 1 in do_not_record if any subexpression is volatile.
2573 Stores 1 in hash_arg_in_memory if X contains a mem rtx which
2574 does not have the RTX_UNCHANGING_P bit set. */
2576 static inline unsigned
2577 canon_hash (rtx x, enum machine_mode mode)
2579 return hash_rtx (x, mode, &do_not_record, &hash_arg_in_memory, true);
2582 /* Like canon_hash but with no side effects, i.e. do_not_record
2583 and hash_arg_in_memory are not changed. */
2585 static inline unsigned
2586 safe_hash (rtx x, enum machine_mode mode)
2588 int dummy_do_not_record;
2589 return hash_rtx (x, mode, &dummy_do_not_record, NULL, true);
2592 /* Return 1 iff X and Y would canonicalize into the same thing,
2593 without actually constructing the canonicalization of either one.
2594 If VALIDATE is nonzero,
2595 we assume X is an expression being processed from the rtl
2596 and Y was found in the hash table. We check register refs
2597 in Y for being marked as valid.
2599 If FOR_GCSE is true, we compare X and Y for equivalence for GCSE. */
2602 exp_equiv_p (const_rtx x, const_rtx y, int validate, bool for_gcse)
2604 int i, j;
2605 enum rtx_code code;
2606 const char *fmt;
2608 /* Note: it is incorrect to assume an expression is equivalent to itself
2609 if VALIDATE is nonzero. */
2610 if (x == y && !validate)
2611 return 1;
2613 if (x == 0 || y == 0)
2614 return x == y;
2616 code = GET_CODE (x);
2617 if (code != GET_CODE (y))
2618 return 0;
2620 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
2621 if (GET_MODE (x) != GET_MODE (y))
2622 return 0;
2624 /* MEMs refering to different address space are not equivalent. */
2625 if (code == MEM && MEM_ADDR_SPACE (x) != MEM_ADDR_SPACE (y))
2626 return 0;
2628 switch (code)
2630 case PC:
2631 case CC0:
2632 case CONST_INT:
2633 case CONST_DOUBLE:
2634 case CONST_FIXED:
2635 return x == y;
2637 case LABEL_REF:
2638 return XEXP (x, 0) == XEXP (y, 0);
2640 case SYMBOL_REF:
2641 return XSTR (x, 0) == XSTR (y, 0);
2643 case REG:
2644 if (for_gcse)
2645 return REGNO (x) == REGNO (y);
2646 else
2648 unsigned int regno = REGNO (y);
2649 unsigned int i;
2650 unsigned int endregno = END_REGNO (y);
2652 /* If the quantities are not the same, the expressions are not
2653 equivalent. If there are and we are not to validate, they
2654 are equivalent. Otherwise, ensure all regs are up-to-date. */
2656 if (REG_QTY (REGNO (x)) != REG_QTY (regno))
2657 return 0;
2659 if (! validate)
2660 return 1;
2662 for (i = regno; i < endregno; i++)
2663 if (REG_IN_TABLE (i) != REG_TICK (i))
2664 return 0;
2666 return 1;
2669 case MEM:
2670 if (for_gcse)
2672 /* A volatile mem should not be considered equivalent to any
2673 other. */
2674 if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
2675 return 0;
2677 /* Can't merge two expressions in different alias sets, since we
2678 can decide that the expression is transparent in a block when
2679 it isn't, due to it being set with the different alias set.
2681 Also, can't merge two expressions with different MEM_ATTRS.
2682 They could e.g. be two different entities allocated into the
2683 same space on the stack (see e.g. PR25130). In that case, the
2684 MEM addresses can be the same, even though the two MEMs are
2685 absolutely not equivalent.
2687 But because really all MEM attributes should be the same for
2688 equivalent MEMs, we just use the invariant that MEMs that have
2689 the same attributes share the same mem_attrs data structure. */
2690 if (MEM_ATTRS (x) != MEM_ATTRS (y))
2691 return 0;
2693 break;
2695 /* For commutative operations, check both orders. */
2696 case PLUS:
2697 case MULT:
2698 case AND:
2699 case IOR:
2700 case XOR:
2701 case NE:
2702 case EQ:
2703 return ((exp_equiv_p (XEXP (x, 0), XEXP (y, 0),
2704 validate, for_gcse)
2705 && exp_equiv_p (XEXP (x, 1), XEXP (y, 1),
2706 validate, for_gcse))
2707 || (exp_equiv_p (XEXP (x, 0), XEXP (y, 1),
2708 validate, for_gcse)
2709 && exp_equiv_p (XEXP (x, 1), XEXP (y, 0),
2710 validate, for_gcse)));
2712 case ASM_OPERANDS:
2713 /* We don't use the generic code below because we want to
2714 disregard filename and line numbers. */
2716 /* A volatile asm isn't equivalent to any other. */
2717 if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
2718 return 0;
2720 if (GET_MODE (x) != GET_MODE (y)
2721 || strcmp (ASM_OPERANDS_TEMPLATE (x), ASM_OPERANDS_TEMPLATE (y))
2722 || strcmp (ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
2723 ASM_OPERANDS_OUTPUT_CONSTRAINT (y))
2724 || ASM_OPERANDS_OUTPUT_IDX (x) != ASM_OPERANDS_OUTPUT_IDX (y)
2725 || ASM_OPERANDS_INPUT_LENGTH (x) != ASM_OPERANDS_INPUT_LENGTH (y))
2726 return 0;
2728 if (ASM_OPERANDS_INPUT_LENGTH (x))
2730 for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
2731 if (! exp_equiv_p (ASM_OPERANDS_INPUT (x, i),
2732 ASM_OPERANDS_INPUT (y, i),
2733 validate, for_gcse)
2734 || strcmp (ASM_OPERANDS_INPUT_CONSTRAINT (x, i),
2735 ASM_OPERANDS_INPUT_CONSTRAINT (y, i)))
2736 return 0;
2739 return 1;
2741 default:
2742 break;
2745 /* Compare the elements. If any pair of corresponding elements
2746 fail to match, return 0 for the whole thing. */
2748 fmt = GET_RTX_FORMAT (code);
2749 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2751 switch (fmt[i])
2753 case 'e':
2754 if (! exp_equiv_p (XEXP (x, i), XEXP (y, i),
2755 validate, for_gcse))
2756 return 0;
2757 break;
2759 case 'E':
2760 if (XVECLEN (x, i) != XVECLEN (y, i))
2761 return 0;
2762 for (j = 0; j < XVECLEN (x, i); j++)
2763 if (! exp_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j),
2764 validate, for_gcse))
2765 return 0;
2766 break;
2768 case 's':
2769 if (strcmp (XSTR (x, i), XSTR (y, i)))
2770 return 0;
2771 break;
2773 case 'i':
2774 if (XINT (x, i) != XINT (y, i))
2775 return 0;
2776 break;
2778 case 'w':
2779 if (XWINT (x, i) != XWINT (y, i))
2780 return 0;
2781 break;
2783 case '0':
2784 case 't':
2785 break;
2787 default:
2788 gcc_unreachable ();
2792 return 1;
2795 /* Return 1 if X has a value that can vary even between two
2796 executions of the program. 0 means X can be compared reliably
2797 against certain constants or near-constants. */
2799 static bool
2800 cse_rtx_varies_p (const_rtx x, bool from_alias)
2802 /* We need not check for X and the equivalence class being of the same
2803 mode because if X is equivalent to a constant in some mode, it
2804 doesn't vary in any mode. */
2806 if (REG_P (x)
2807 && REGNO_QTY_VALID_P (REGNO (x)))
2809 int x_q = REG_QTY (REGNO (x));
2810 struct qty_table_elem *x_ent = &qty_table[x_q];
2812 if (GET_MODE (x) == x_ent->mode
2813 && x_ent->const_rtx != NULL_RTX)
2814 return 0;
2817 if (GET_CODE (x) == PLUS
2818 && CONST_INT_P (XEXP (x, 1))
2819 && REG_P (XEXP (x, 0))
2820 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
2822 int x0_q = REG_QTY (REGNO (XEXP (x, 0)));
2823 struct qty_table_elem *x0_ent = &qty_table[x0_q];
2825 if ((GET_MODE (XEXP (x, 0)) == x0_ent->mode)
2826 && x0_ent->const_rtx != NULL_RTX)
2827 return 0;
2830 /* This can happen as the result of virtual register instantiation, if
2831 the initial constant is too large to be a valid address. This gives
2832 us a three instruction sequence, load large offset into a register,
2833 load fp minus a constant into a register, then a MEM which is the
2834 sum of the two `constant' registers. */
2835 if (GET_CODE (x) == PLUS
2836 && REG_P (XEXP (x, 0))
2837 && REG_P (XEXP (x, 1))
2838 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0)))
2839 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
2841 int x0_q = REG_QTY (REGNO (XEXP (x, 0)));
2842 int x1_q = REG_QTY (REGNO (XEXP (x, 1)));
2843 struct qty_table_elem *x0_ent = &qty_table[x0_q];
2844 struct qty_table_elem *x1_ent = &qty_table[x1_q];
2846 if ((GET_MODE (XEXP (x, 0)) == x0_ent->mode)
2847 && x0_ent->const_rtx != NULL_RTX
2848 && (GET_MODE (XEXP (x, 1)) == x1_ent->mode)
2849 && x1_ent->const_rtx != NULL_RTX)
2850 return 0;
2853 return rtx_varies_p (x, from_alias);
2856 /* Subroutine of canon_reg. Pass *XLOC through canon_reg, and validate
2857 the result if necessary. INSN is as for canon_reg. */
2859 static void
2860 validate_canon_reg (rtx *xloc, rtx insn)
2862 if (*xloc)
2864 rtx new_rtx = canon_reg (*xloc, insn);
2866 /* If replacing pseudo with hard reg or vice versa, ensure the
2867 insn remains valid. Likewise if the insn has MATCH_DUPs. */
2868 gcc_assert (insn && new_rtx);
2869 validate_change (insn, xloc, new_rtx, 1);
2873 /* Canonicalize an expression:
2874 replace each register reference inside it
2875 with the "oldest" equivalent register.
2877 If INSN is nonzero validate_change is used to ensure that INSN remains valid
2878 after we make our substitution. The calls are made with IN_GROUP nonzero
2879 so apply_change_group must be called upon the outermost return from this
2880 function (unless INSN is zero). The result of apply_change_group can
2881 generally be discarded since the changes we are making are optional. */
2883 static rtx
2884 canon_reg (rtx x, rtx insn)
2886 int i;
2887 enum rtx_code code;
2888 const char *fmt;
2890 if (x == 0)
2891 return x;
2893 code = GET_CODE (x);
2894 switch (code)
2896 case PC:
2897 case CC0:
2898 case CONST:
2899 case CONST_INT:
2900 case CONST_DOUBLE:
2901 case CONST_FIXED:
2902 case CONST_VECTOR:
2903 case SYMBOL_REF:
2904 case LABEL_REF:
2905 case ADDR_VEC:
2906 case ADDR_DIFF_VEC:
2907 return x;
2909 case REG:
2911 int first;
2912 int q;
2913 struct qty_table_elem *ent;
2915 /* Never replace a hard reg, because hard regs can appear
2916 in more than one machine mode, and we must preserve the mode
2917 of each occurrence. Also, some hard regs appear in
2918 MEMs that are shared and mustn't be altered. Don't try to
2919 replace any reg that maps to a reg of class NO_REGS. */
2920 if (REGNO (x) < FIRST_PSEUDO_REGISTER
2921 || ! REGNO_QTY_VALID_P (REGNO (x)))
2922 return x;
2924 q = REG_QTY (REGNO (x));
2925 ent = &qty_table[q];
2926 first = ent->first_reg;
2927 return (first >= FIRST_PSEUDO_REGISTER ? regno_reg_rtx[first]
2928 : REGNO_REG_CLASS (first) == NO_REGS ? x
2929 : gen_rtx_REG (ent->mode, first));
2932 default:
2933 break;
2936 fmt = GET_RTX_FORMAT (code);
2937 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2939 int j;
2941 if (fmt[i] == 'e')
2942 validate_canon_reg (&XEXP (x, i), insn);
2943 else if (fmt[i] == 'E')
2944 for (j = 0; j < XVECLEN (x, i); j++)
2945 validate_canon_reg (&XVECEXP (x, i, j), insn);
2948 return x;
2951 /* Given an operation (CODE, *PARG1, *PARG2), where code is a comparison
2952 operation (EQ, NE, GT, etc.), follow it back through the hash table and
2953 what values are being compared.
2955 *PARG1 and *PARG2 are updated to contain the rtx representing the values
2956 actually being compared. For example, if *PARG1 was (cc0) and *PARG2
2957 was (const_int 0), *PARG1 and *PARG2 will be set to the objects that were
2958 compared to produce cc0.
2960 The return value is the comparison operator and is either the code of
2961 A or the code corresponding to the inverse of the comparison. */
2963 static enum rtx_code
2964 find_comparison_args (enum rtx_code code, rtx *parg1, rtx *parg2,
2965 enum machine_mode *pmode1, enum machine_mode *pmode2)
2967 rtx arg1, arg2;
2969 arg1 = *parg1, arg2 = *parg2;
2971 /* If ARG2 is const0_rtx, see what ARG1 is equivalent to. */
2973 while (arg2 == CONST0_RTX (GET_MODE (arg1)))
2975 /* Set nonzero when we find something of interest. */
2976 rtx x = 0;
2977 int reverse_code = 0;
2978 struct table_elt *p = 0;
2980 /* If arg1 is a COMPARE, extract the comparison arguments from it.
2981 On machines with CC0, this is the only case that can occur, since
2982 fold_rtx will return the COMPARE or item being compared with zero
2983 when given CC0. */
2985 if (GET_CODE (arg1) == COMPARE && arg2 == const0_rtx)
2986 x = arg1;
2988 /* If ARG1 is a comparison operator and CODE is testing for
2989 STORE_FLAG_VALUE, get the inner arguments. */
2991 else if (COMPARISON_P (arg1))
2993 #ifdef FLOAT_STORE_FLAG_VALUE
2994 REAL_VALUE_TYPE fsfv;
2995 #endif
2997 if (code == NE
2998 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
2999 && code == LT && STORE_FLAG_VALUE == -1)
3000 #ifdef FLOAT_STORE_FLAG_VALUE
3001 || (SCALAR_FLOAT_MODE_P (GET_MODE (arg1))
3002 && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
3003 REAL_VALUE_NEGATIVE (fsfv)))
3004 #endif
3006 x = arg1;
3007 else if (code == EQ
3008 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
3009 && code == GE && STORE_FLAG_VALUE == -1)
3010 #ifdef FLOAT_STORE_FLAG_VALUE
3011 || (SCALAR_FLOAT_MODE_P (GET_MODE (arg1))
3012 && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
3013 REAL_VALUE_NEGATIVE (fsfv)))
3014 #endif
3016 x = arg1, reverse_code = 1;
3019 /* ??? We could also check for
3021 (ne (and (eq (...) (const_int 1))) (const_int 0))
3023 and related forms, but let's wait until we see them occurring. */
3025 if (x == 0)
3026 /* Look up ARG1 in the hash table and see if it has an equivalence
3027 that lets us see what is being compared. */
3028 p = lookup (arg1, SAFE_HASH (arg1, GET_MODE (arg1)), GET_MODE (arg1));
3029 if (p)
3031 p = p->first_same_value;
3033 /* If what we compare is already known to be constant, that is as
3034 good as it gets.
3035 We need to break the loop in this case, because otherwise we
3036 can have an infinite loop when looking at a reg that is known
3037 to be a constant which is the same as a comparison of a reg
3038 against zero which appears later in the insn stream, which in
3039 turn is constant and the same as the comparison of the first reg
3040 against zero... */
3041 if (p->is_const)
3042 break;
3045 for (; p; p = p->next_same_value)
3047 enum machine_mode inner_mode = GET_MODE (p->exp);
3048 #ifdef FLOAT_STORE_FLAG_VALUE
3049 REAL_VALUE_TYPE fsfv;
3050 #endif
3052 /* If the entry isn't valid, skip it. */
3053 if (! exp_equiv_p (p->exp, p->exp, 1, false))
3054 continue;
3056 if (GET_CODE (p->exp) == COMPARE
3057 /* Another possibility is that this machine has a compare insn
3058 that includes the comparison code. In that case, ARG1 would
3059 be equivalent to a comparison operation that would set ARG1 to
3060 either STORE_FLAG_VALUE or zero. If this is an NE operation,
3061 ORIG_CODE is the actual comparison being done; if it is an EQ,
3062 we must reverse ORIG_CODE. On machine with a negative value
3063 for STORE_FLAG_VALUE, also look at LT and GE operations. */
3064 || ((code == NE
3065 || (code == LT
3066 && GET_MODE_CLASS (inner_mode) == MODE_INT
3067 && (GET_MODE_BITSIZE (inner_mode)
3068 <= HOST_BITS_PER_WIDE_INT)
3069 && (STORE_FLAG_VALUE
3070 & ((HOST_WIDE_INT) 1
3071 << (GET_MODE_BITSIZE (inner_mode) - 1))))
3072 #ifdef FLOAT_STORE_FLAG_VALUE
3073 || (code == LT
3074 && SCALAR_FLOAT_MODE_P (inner_mode)
3075 && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
3076 REAL_VALUE_NEGATIVE (fsfv)))
3077 #endif
3079 && COMPARISON_P (p->exp)))
3081 x = p->exp;
3082 break;
3084 else if ((code == EQ
3085 || (code == GE
3086 && GET_MODE_CLASS (inner_mode) == MODE_INT
3087 && (GET_MODE_BITSIZE (inner_mode)
3088 <= HOST_BITS_PER_WIDE_INT)
3089 && (STORE_FLAG_VALUE
3090 & ((HOST_WIDE_INT) 1
3091 << (GET_MODE_BITSIZE (inner_mode) - 1))))
3092 #ifdef FLOAT_STORE_FLAG_VALUE
3093 || (code == GE
3094 && SCALAR_FLOAT_MODE_P (inner_mode)
3095 && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
3096 REAL_VALUE_NEGATIVE (fsfv)))
3097 #endif
3099 && COMPARISON_P (p->exp))
3101 reverse_code = 1;
3102 x = p->exp;
3103 break;
3106 /* If this non-trapping address, e.g. fp + constant, the
3107 equivalent is a better operand since it may let us predict
3108 the value of the comparison. */
3109 else if (!rtx_addr_can_trap_p (p->exp))
3111 arg1 = p->exp;
3112 continue;
3116 /* If we didn't find a useful equivalence for ARG1, we are done.
3117 Otherwise, set up for the next iteration. */
3118 if (x == 0)
3119 break;
3121 /* If we need to reverse the comparison, make sure that that is
3122 possible -- we can't necessarily infer the value of GE from LT
3123 with floating-point operands. */
3124 if (reverse_code)
3126 enum rtx_code reversed = reversed_comparison_code (x, NULL_RTX);
3127 if (reversed == UNKNOWN)
3128 break;
3129 else
3130 code = reversed;
3132 else if (COMPARISON_P (x))
3133 code = GET_CODE (x);
3134 arg1 = XEXP (x, 0), arg2 = XEXP (x, 1);
3137 /* Return our results. Return the modes from before fold_rtx
3138 because fold_rtx might produce const_int, and then it's too late. */
3139 *pmode1 = GET_MODE (arg1), *pmode2 = GET_MODE (arg2);
3140 *parg1 = fold_rtx (arg1, 0), *parg2 = fold_rtx (arg2, 0);
3142 return code;
3145 /* If X is a nontrivial arithmetic operation on an argument for which
3146 a constant value can be determined, return the result of operating
3147 on that value, as a constant. Otherwise, return X, possibly with
3148 one or more operands changed to a forward-propagated constant.
3150 If X is a register whose contents are known, we do NOT return
3151 those contents here; equiv_constant is called to perform that task.
3152 For SUBREGs and MEMs, we do that both here and in equiv_constant.
3154 INSN is the insn that we may be modifying. If it is 0, make a copy
3155 of X before modifying it. */
3157 static rtx
3158 fold_rtx (rtx x, rtx insn)
3160 enum rtx_code code;
3161 enum machine_mode mode;
3162 const char *fmt;
3163 int i;
3164 rtx new_rtx = 0;
3165 int changed = 0;
3167 /* Operands of X. */
3168 rtx folded_arg0;
3169 rtx folded_arg1;
3171 /* Constant equivalents of first three operands of X;
3172 0 when no such equivalent is known. */
3173 rtx const_arg0;
3174 rtx const_arg1;
3175 rtx const_arg2;
3177 /* The mode of the first operand of X. We need this for sign and zero
3178 extends. */
3179 enum machine_mode mode_arg0;
3181 if (x == 0)
3182 return x;
3184 /* Try to perform some initial simplifications on X. */
3185 code = GET_CODE (x);
3186 switch (code)
3188 case MEM:
3189 case SUBREG:
3190 if ((new_rtx = equiv_constant (x)) != NULL_RTX)
3191 return new_rtx;
3192 return x;
3194 case CONST:
3195 case CONST_INT:
3196 case CONST_DOUBLE:
3197 case CONST_FIXED:
3198 case CONST_VECTOR:
3199 case SYMBOL_REF:
3200 case LABEL_REF:
3201 case REG:
3202 case PC:
3203 /* No use simplifying an EXPR_LIST
3204 since they are used only for lists of args
3205 in a function call's REG_EQUAL note. */
3206 case EXPR_LIST:
3207 return x;
3209 #ifdef HAVE_cc0
3210 case CC0:
3211 return prev_insn_cc0;
3212 #endif
3214 case ASM_OPERANDS:
3215 if (insn)
3217 for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
3218 validate_change (insn, &ASM_OPERANDS_INPUT (x, i),
3219 fold_rtx (ASM_OPERANDS_INPUT (x, i), insn), 0);
3221 return x;
3223 #ifdef NO_FUNCTION_CSE
3224 case CALL:
3225 if (CONSTANT_P (XEXP (XEXP (x, 0), 0)))
3226 return x;
3227 break;
3228 #endif
3230 /* Anything else goes through the loop below. */
3231 default:
3232 break;
3235 mode = GET_MODE (x);
3236 const_arg0 = 0;
3237 const_arg1 = 0;
3238 const_arg2 = 0;
3239 mode_arg0 = VOIDmode;
3241 /* Try folding our operands.
3242 Then see which ones have constant values known. */
3244 fmt = GET_RTX_FORMAT (code);
3245 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3246 if (fmt[i] == 'e')
3248 rtx folded_arg = XEXP (x, i), const_arg;
3249 enum machine_mode mode_arg = GET_MODE (folded_arg);
3251 switch (GET_CODE (folded_arg))
3253 case MEM:
3254 case REG:
3255 case SUBREG:
3256 const_arg = equiv_constant (folded_arg);
3257 break;
3259 case CONST:
3260 case CONST_INT:
3261 case SYMBOL_REF:
3262 case LABEL_REF:
3263 case CONST_DOUBLE:
3264 case CONST_FIXED:
3265 case CONST_VECTOR:
3266 const_arg = folded_arg;
3267 break;
3269 #ifdef HAVE_cc0
3270 case CC0:
3271 folded_arg = prev_insn_cc0;
3272 mode_arg = prev_insn_cc0_mode;
3273 const_arg = equiv_constant (folded_arg);
3274 break;
3275 #endif
3277 default:
3278 folded_arg = fold_rtx (folded_arg, insn);
3279 const_arg = equiv_constant (folded_arg);
3280 break;
3283 /* For the first three operands, see if the operand
3284 is constant or equivalent to a constant. */
3285 switch (i)
3287 case 0:
3288 folded_arg0 = folded_arg;
3289 const_arg0 = const_arg;
3290 mode_arg0 = mode_arg;
3291 break;
3292 case 1:
3293 folded_arg1 = folded_arg;
3294 const_arg1 = const_arg;
3295 break;
3296 case 2:
3297 const_arg2 = const_arg;
3298 break;
3301 /* Pick the least expensive of the argument and an equivalent constant
3302 argument. */
3303 if (const_arg != 0
3304 && const_arg != folded_arg
3305 && COST_IN (const_arg, code) <= COST_IN (folded_arg, code)
3307 /* It's not safe to substitute the operand of a conversion
3308 operator with a constant, as the conversion's identity
3309 depends upon the mode of its operand. This optimization
3310 is handled by the call to simplify_unary_operation. */
3311 && (GET_RTX_CLASS (code) != RTX_UNARY
3312 || GET_MODE (const_arg) == mode_arg0
3313 || (code != ZERO_EXTEND
3314 && code != SIGN_EXTEND
3315 && code != TRUNCATE
3316 && code != FLOAT_TRUNCATE
3317 && code != FLOAT_EXTEND
3318 && code != FLOAT
3319 && code != FIX
3320 && code != UNSIGNED_FLOAT
3321 && code != UNSIGNED_FIX)))
3322 folded_arg = const_arg;
3324 if (folded_arg == XEXP (x, i))
3325 continue;
3327 if (insn == NULL_RTX && !changed)
3328 x = copy_rtx (x);
3329 changed = 1;
3330 validate_unshare_change (insn, &XEXP (x, i), folded_arg, 1);
3333 if (changed)
3335 /* Canonicalize X if necessary, and keep const_argN and folded_argN
3336 consistent with the order in X. */
3337 if (canonicalize_change_group (insn, x))
3339 rtx tem;
3340 tem = const_arg0, const_arg0 = const_arg1, const_arg1 = tem;
3341 tem = folded_arg0, folded_arg0 = folded_arg1, folded_arg1 = tem;
3344 apply_change_group ();
3347 /* If X is an arithmetic operation, see if we can simplify it. */
3349 switch (GET_RTX_CLASS (code))
3351 case RTX_UNARY:
3353 /* We can't simplify extension ops unless we know the
3354 original mode. */
3355 if ((code == ZERO_EXTEND || code == SIGN_EXTEND)
3356 && mode_arg0 == VOIDmode)
3357 break;
3359 new_rtx = simplify_unary_operation (code, mode,
3360 const_arg0 ? const_arg0 : folded_arg0,
3361 mode_arg0);
3363 break;
3365 case RTX_COMPARE:
3366 case RTX_COMM_COMPARE:
3367 /* See what items are actually being compared and set FOLDED_ARG[01]
3368 to those values and CODE to the actual comparison code. If any are
3369 constant, set CONST_ARG0 and CONST_ARG1 appropriately. We needn't
3370 do anything if both operands are already known to be constant. */
3372 /* ??? Vector mode comparisons are not supported yet. */
3373 if (VECTOR_MODE_P (mode))
3374 break;
3376 if (const_arg0 == 0 || const_arg1 == 0)
3378 struct table_elt *p0, *p1;
3379 rtx true_rtx, false_rtx;
3380 enum machine_mode mode_arg1;
3382 if (SCALAR_FLOAT_MODE_P (mode))
3384 #ifdef FLOAT_STORE_FLAG_VALUE
3385 true_rtx = (CONST_DOUBLE_FROM_REAL_VALUE
3386 (FLOAT_STORE_FLAG_VALUE (mode), mode));
3387 #else
3388 true_rtx = NULL_RTX;
3389 #endif
3390 false_rtx = CONST0_RTX (mode);
3392 else
3394 true_rtx = const_true_rtx;
3395 false_rtx = const0_rtx;
3398 code = find_comparison_args (code, &folded_arg0, &folded_arg1,
3399 &mode_arg0, &mode_arg1);
3401 /* If the mode is VOIDmode or a MODE_CC mode, we don't know
3402 what kinds of things are being compared, so we can't do
3403 anything with this comparison. */
3405 if (mode_arg0 == VOIDmode || GET_MODE_CLASS (mode_arg0) == MODE_CC)
3406 break;
3408 const_arg0 = equiv_constant (folded_arg0);
3409 const_arg1 = equiv_constant (folded_arg1);
3411 /* If we do not now have two constants being compared, see
3412 if we can nevertheless deduce some things about the
3413 comparison. */
3414 if (const_arg0 == 0 || const_arg1 == 0)
3416 if (const_arg1 != NULL)
3418 rtx cheapest_simplification;
3419 int cheapest_cost;
3420 rtx simp_result;
3421 struct table_elt *p;
3423 /* See if we can find an equivalent of folded_arg0
3424 that gets us a cheaper expression, possibly a
3425 constant through simplifications. */
3426 p = lookup (folded_arg0, SAFE_HASH (folded_arg0, mode_arg0),
3427 mode_arg0);
3429 if (p != NULL)
3431 cheapest_simplification = x;
3432 cheapest_cost = COST (x);
3434 for (p = p->first_same_value; p != NULL; p = p->next_same_value)
3436 int cost;
3438 /* If the entry isn't valid, skip it. */
3439 if (! exp_equiv_p (p->exp, p->exp, 1, false))
3440 continue;
3442 /* Try to simplify using this equivalence. */
3443 simp_result
3444 = simplify_relational_operation (code, mode,
3445 mode_arg0,
3446 p->exp,
3447 const_arg1);
3449 if (simp_result == NULL)
3450 continue;
3452 cost = COST (simp_result);
3453 if (cost < cheapest_cost)
3455 cheapest_cost = cost;
3456 cheapest_simplification = simp_result;
3460 /* If we have a cheaper expression now, use that
3461 and try folding it further, from the top. */
3462 if (cheapest_simplification != x)
3463 return fold_rtx (copy_rtx (cheapest_simplification),
3464 insn);
3468 /* See if the two operands are the same. */
3470 if ((REG_P (folded_arg0)
3471 && REG_P (folded_arg1)
3472 && (REG_QTY (REGNO (folded_arg0))
3473 == REG_QTY (REGNO (folded_arg1))))
3474 || ((p0 = lookup (folded_arg0,
3475 SAFE_HASH (folded_arg0, mode_arg0),
3476 mode_arg0))
3477 && (p1 = lookup (folded_arg1,
3478 SAFE_HASH (folded_arg1, mode_arg0),
3479 mode_arg0))
3480 && p0->first_same_value == p1->first_same_value))
3481 folded_arg1 = folded_arg0;
3483 /* If FOLDED_ARG0 is a register, see if the comparison we are
3484 doing now is either the same as we did before or the reverse
3485 (we only check the reverse if not floating-point). */
3486 else if (REG_P (folded_arg0))
3488 int qty = REG_QTY (REGNO (folded_arg0));
3490 if (REGNO_QTY_VALID_P (REGNO (folded_arg0)))
3492 struct qty_table_elem *ent = &qty_table[qty];
3494 if ((comparison_dominates_p (ent->comparison_code, code)
3495 || (! FLOAT_MODE_P (mode_arg0)
3496 && comparison_dominates_p (ent->comparison_code,
3497 reverse_condition (code))))
3498 && (rtx_equal_p (ent->comparison_const, folded_arg1)
3499 || (const_arg1
3500 && rtx_equal_p (ent->comparison_const,
3501 const_arg1))
3502 || (REG_P (folded_arg1)
3503 && (REG_QTY (REGNO (folded_arg1)) == ent->comparison_qty))))
3505 if (comparison_dominates_p (ent->comparison_code, code))
3507 if (true_rtx)
3508 return true_rtx;
3509 else
3510 break;
3512 else
3513 return false_rtx;
3520 /* If we are comparing against zero, see if the first operand is
3521 equivalent to an IOR with a constant. If so, we may be able to
3522 determine the result of this comparison. */
3523 if (const_arg1 == const0_rtx && !const_arg0)
3525 rtx y = lookup_as_function (folded_arg0, IOR);
3526 rtx inner_const;
3528 if (y != 0
3529 && (inner_const = equiv_constant (XEXP (y, 1))) != 0
3530 && CONST_INT_P (inner_const)
3531 && INTVAL (inner_const) != 0)
3532 folded_arg0 = gen_rtx_IOR (mode_arg0, XEXP (y, 0), inner_const);
3536 rtx op0 = const_arg0 ? const_arg0 : folded_arg0;
3537 rtx op1 = const_arg1 ? const_arg1 : folded_arg1;
3538 new_rtx = simplify_relational_operation (code, mode, mode_arg0, op0, op1);
3540 break;
3542 case RTX_BIN_ARITH:
3543 case RTX_COMM_ARITH:
3544 switch (code)
3546 case PLUS:
3547 /* If the second operand is a LABEL_REF, see if the first is a MINUS
3548 with that LABEL_REF as its second operand. If so, the result is
3549 the first operand of that MINUS. This handles switches with an
3550 ADDR_DIFF_VEC table. */
3551 if (const_arg1 && GET_CODE (const_arg1) == LABEL_REF)
3553 rtx y
3554 = GET_CODE (folded_arg0) == MINUS ? folded_arg0
3555 : lookup_as_function (folded_arg0, MINUS);
3557 if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
3558 && XEXP (XEXP (y, 1), 0) == XEXP (const_arg1, 0))
3559 return XEXP (y, 0);
3561 /* Now try for a CONST of a MINUS like the above. */
3562 if ((y = (GET_CODE (folded_arg0) == CONST ? folded_arg0
3563 : lookup_as_function (folded_arg0, CONST))) != 0
3564 && GET_CODE (XEXP (y, 0)) == MINUS
3565 && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
3566 && XEXP (XEXP (XEXP (y, 0), 1), 0) == XEXP (const_arg1, 0))
3567 return XEXP (XEXP (y, 0), 0);
3570 /* Likewise if the operands are in the other order. */
3571 if (const_arg0 && GET_CODE (const_arg0) == LABEL_REF)
3573 rtx y
3574 = GET_CODE (folded_arg1) == MINUS ? folded_arg1
3575 : lookup_as_function (folded_arg1, MINUS);
3577 if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
3578 && XEXP (XEXP (y, 1), 0) == XEXP (const_arg0, 0))
3579 return XEXP (y, 0);
3581 /* Now try for a CONST of a MINUS like the above. */
3582 if ((y = (GET_CODE (folded_arg1) == CONST ? folded_arg1
3583 : lookup_as_function (folded_arg1, CONST))) != 0
3584 && GET_CODE (XEXP (y, 0)) == MINUS
3585 && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
3586 && XEXP (XEXP (XEXP (y, 0), 1), 0) == XEXP (const_arg0, 0))
3587 return XEXP (XEXP (y, 0), 0);
3590 /* If second operand is a register equivalent to a negative
3591 CONST_INT, see if we can find a register equivalent to the
3592 positive constant. Make a MINUS if so. Don't do this for
3593 a non-negative constant since we might then alternate between
3594 choosing positive and negative constants. Having the positive
3595 constant previously-used is the more common case. Be sure
3596 the resulting constant is non-negative; if const_arg1 were
3597 the smallest negative number this would overflow: depending
3598 on the mode, this would either just be the same value (and
3599 hence not save anything) or be incorrect. */
3600 if (const_arg1 != 0 && CONST_INT_P (const_arg1)
3601 && INTVAL (const_arg1) < 0
3602 /* This used to test
3604 -INTVAL (const_arg1) >= 0
3606 But The Sun V5.0 compilers mis-compiled that test. So
3607 instead we test for the problematic value in a more direct
3608 manner and hope the Sun compilers get it correct. */
3609 && INTVAL (const_arg1) !=
3610 ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1))
3611 && REG_P (folded_arg1))
3613 rtx new_const = GEN_INT (-INTVAL (const_arg1));
3614 struct table_elt *p
3615 = lookup (new_const, SAFE_HASH (new_const, mode), mode);
3617 if (p)
3618 for (p = p->first_same_value; p; p = p->next_same_value)
3619 if (REG_P (p->exp))
3620 return simplify_gen_binary (MINUS, mode, folded_arg0,
3621 canon_reg (p->exp, NULL_RTX));
3623 goto from_plus;
3625 case MINUS:
3626 /* If we have (MINUS Y C), see if Y is known to be (PLUS Z C2).
3627 If so, produce (PLUS Z C2-C). */
3628 if (const_arg1 != 0 && CONST_INT_P (const_arg1))
3630 rtx y = lookup_as_function (XEXP (x, 0), PLUS);
3631 if (y && CONST_INT_P (XEXP (y, 1)))
3632 return fold_rtx (plus_constant (copy_rtx (y),
3633 -INTVAL (const_arg1)),
3634 NULL_RTX);
3637 /* Fall through. */
3639 from_plus:
3640 case SMIN: case SMAX: case UMIN: case UMAX:
3641 case IOR: case AND: case XOR:
3642 case MULT:
3643 case ASHIFT: case LSHIFTRT: case ASHIFTRT:
3644 /* If we have (<op> <reg> <const_int>) for an associative OP and REG
3645 is known to be of similar form, we may be able to replace the
3646 operation with a combined operation. This may eliminate the
3647 intermediate operation if every use is simplified in this way.
3648 Note that the similar optimization done by combine.c only works
3649 if the intermediate operation's result has only one reference. */
3651 if (REG_P (folded_arg0)
3652 && const_arg1 && CONST_INT_P (const_arg1))
3654 int is_shift
3655 = (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT);
3656 rtx y, inner_const, new_const;
3657 rtx canon_const_arg1 = const_arg1;
3658 enum rtx_code associate_code;
3660 if (is_shift
3661 && (INTVAL (const_arg1) >= GET_MODE_BITSIZE (mode)
3662 || INTVAL (const_arg1) < 0))
3664 if (SHIFT_COUNT_TRUNCATED)
3665 canon_const_arg1 = GEN_INT (INTVAL (const_arg1)
3666 & (GET_MODE_BITSIZE (mode)
3667 - 1));
3668 else
3669 break;
3672 y = lookup_as_function (folded_arg0, code);
3673 if (y == 0)
3674 break;
3676 /* If we have compiled a statement like
3677 "if (x == (x & mask1))", and now are looking at
3678 "x & mask2", we will have a case where the first operand
3679 of Y is the same as our first operand. Unless we detect
3680 this case, an infinite loop will result. */
3681 if (XEXP (y, 0) == folded_arg0)
3682 break;
3684 inner_const = equiv_constant (fold_rtx (XEXP (y, 1), 0));
3685 if (!inner_const || !CONST_INT_P (inner_const))
3686 break;
3688 /* Don't associate these operations if they are a PLUS with the
3689 same constant and it is a power of two. These might be doable
3690 with a pre- or post-increment. Similarly for two subtracts of
3691 identical powers of two with post decrement. */
3693 if (code == PLUS && const_arg1 == inner_const
3694 && ((HAVE_PRE_INCREMENT
3695 && exact_log2 (INTVAL (const_arg1)) >= 0)
3696 || (HAVE_POST_INCREMENT
3697 && exact_log2 (INTVAL (const_arg1)) >= 0)
3698 || (HAVE_PRE_DECREMENT
3699 && exact_log2 (- INTVAL (const_arg1)) >= 0)
3700 || (HAVE_POST_DECREMENT
3701 && exact_log2 (- INTVAL (const_arg1)) >= 0)))
3702 break;
3704 /* ??? Vector mode shifts by scalar
3705 shift operand are not supported yet. */
3706 if (is_shift && VECTOR_MODE_P (mode))
3707 break;
3709 if (is_shift
3710 && (INTVAL (inner_const) >= GET_MODE_BITSIZE (mode)
3711 || INTVAL (inner_const) < 0))
3713 if (SHIFT_COUNT_TRUNCATED)
3714 inner_const = GEN_INT (INTVAL (inner_const)
3715 & (GET_MODE_BITSIZE (mode) - 1));
3716 else
3717 break;
3720 /* Compute the code used to compose the constants. For example,
3721 A-C1-C2 is A-(C1 + C2), so if CODE == MINUS, we want PLUS. */
3723 associate_code = (is_shift || code == MINUS ? PLUS : code);
3725 new_const = simplify_binary_operation (associate_code, mode,
3726 canon_const_arg1,
3727 inner_const);
3729 if (new_const == 0)
3730 break;
3732 /* If we are associating shift operations, don't let this
3733 produce a shift of the size of the object or larger.
3734 This could occur when we follow a sign-extend by a right
3735 shift on a machine that does a sign-extend as a pair
3736 of shifts. */
3738 if (is_shift
3739 && CONST_INT_P (new_const)
3740 && INTVAL (new_const) >= GET_MODE_BITSIZE (mode))
3742 /* As an exception, we can turn an ASHIFTRT of this
3743 form into a shift of the number of bits - 1. */
3744 if (code == ASHIFTRT)
3745 new_const = GEN_INT (GET_MODE_BITSIZE (mode) - 1);
3746 else if (!side_effects_p (XEXP (y, 0)))
3747 return CONST0_RTX (mode);
3748 else
3749 break;
3752 y = copy_rtx (XEXP (y, 0));
3754 /* If Y contains our first operand (the most common way this
3755 can happen is if Y is a MEM), we would do into an infinite
3756 loop if we tried to fold it. So don't in that case. */
3758 if (! reg_mentioned_p (folded_arg0, y))
3759 y = fold_rtx (y, insn);
3761 return simplify_gen_binary (code, mode, y, new_const);
3763 break;
3765 case DIV: case UDIV:
3766 /* ??? The associative optimization performed immediately above is
3767 also possible for DIV and UDIV using associate_code of MULT.
3768 However, we would need extra code to verify that the
3769 multiplication does not overflow, that is, there is no overflow
3770 in the calculation of new_const. */
3771 break;
3773 default:
3774 break;
3777 new_rtx = simplify_binary_operation (code, mode,
3778 const_arg0 ? const_arg0 : folded_arg0,
3779 const_arg1 ? const_arg1 : folded_arg1);
3780 break;
3782 case RTX_OBJ:
3783 /* (lo_sum (high X) X) is simply X. */
3784 if (code == LO_SUM && const_arg0 != 0
3785 && GET_CODE (const_arg0) == HIGH
3786 && rtx_equal_p (XEXP (const_arg0, 0), const_arg1))
3787 return const_arg1;
3788 break;
3790 case RTX_TERNARY:
3791 case RTX_BITFIELD_OPS:
3792 new_rtx = simplify_ternary_operation (code, mode, mode_arg0,
3793 const_arg0 ? const_arg0 : folded_arg0,
3794 const_arg1 ? const_arg1 : folded_arg1,
3795 const_arg2 ? const_arg2 : XEXP (x, 2));
3796 break;
3798 default:
3799 break;
3802 return new_rtx ? new_rtx : x;
3805 /* Return a constant value currently equivalent to X.
3806 Return 0 if we don't know one. */
3808 static rtx
3809 equiv_constant (rtx x)
3811 if (REG_P (x)
3812 && REGNO_QTY_VALID_P (REGNO (x)))
3814 int x_q = REG_QTY (REGNO (x));
3815 struct qty_table_elem *x_ent = &qty_table[x_q];
3817 if (x_ent->const_rtx)
3818 x = gen_lowpart (GET_MODE (x), x_ent->const_rtx);
3821 if (x == 0 || CONSTANT_P (x))
3822 return x;
3824 if (GET_CODE (x) == SUBREG)
3826 enum machine_mode mode = GET_MODE (x);
3827 enum machine_mode imode = GET_MODE (SUBREG_REG (x));
3828 rtx new_rtx;
3830 /* See if we previously assigned a constant value to this SUBREG. */
3831 if ((new_rtx = lookup_as_function (x, CONST_INT)) != 0
3832 || (new_rtx = lookup_as_function (x, CONST_DOUBLE)) != 0
3833 || (new_rtx = lookup_as_function (x, CONST_FIXED)) != 0)
3834 return new_rtx;
3836 /* If we didn't and if doing so makes sense, see if we previously
3837 assigned a constant value to the enclosing word mode SUBREG. */
3838 if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode)
3839 && GET_MODE_SIZE (word_mode) < GET_MODE_SIZE (imode))
3841 int byte = SUBREG_BYTE (x) - subreg_lowpart_offset (mode, word_mode);
3842 if (byte >= 0 && (byte % UNITS_PER_WORD) == 0)
3844 rtx y = gen_rtx_SUBREG (word_mode, SUBREG_REG (x), byte);
3845 new_rtx = lookup_as_function (y, CONST_INT);
3846 if (new_rtx)
3847 return gen_lowpart (mode, new_rtx);
3851 /* Otherwise see if we already have a constant for the inner REG. */
3852 if (REG_P (SUBREG_REG (x))
3853 && (new_rtx = equiv_constant (SUBREG_REG (x))) != 0)
3854 return simplify_subreg (mode, new_rtx, imode, SUBREG_BYTE (x));
3856 return 0;
3859 /* If X is a MEM, see if it is a constant-pool reference, or look it up in
3860 the hash table in case its value was seen before. */
3862 if (MEM_P (x))
3864 struct table_elt *elt;
3866 x = avoid_constant_pool_reference (x);
3867 if (CONSTANT_P (x))
3868 return x;
3870 elt = lookup (x, SAFE_HASH (x, GET_MODE (x)), GET_MODE (x));
3871 if (elt == 0)
3872 return 0;
3874 for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
3875 if (elt->is_const && CONSTANT_P (elt->exp))
3876 return elt->exp;
3879 return 0;
3882 /* Given INSN, a jump insn, TAKEN indicates if we are following the
3883 "taken" branch.
3885 In certain cases, this can cause us to add an equivalence. For example,
3886 if we are following the taken case of
3887 if (i == 2)
3888 we can add the fact that `i' and '2' are now equivalent.
3890 In any case, we can record that this comparison was passed. If the same
3891 comparison is seen later, we will know its value. */
3893 static void
3894 record_jump_equiv (rtx insn, bool taken)
3896 int cond_known_true;
3897 rtx op0, op1;
3898 rtx set;
3899 enum machine_mode mode, mode0, mode1;
3900 int reversed_nonequality = 0;
3901 enum rtx_code code;
3903 /* Ensure this is the right kind of insn. */
3904 gcc_assert (any_condjump_p (insn));
3906 set = pc_set (insn);
3908 /* See if this jump condition is known true or false. */
3909 if (taken)
3910 cond_known_true = (XEXP (SET_SRC (set), 2) == pc_rtx);
3911 else
3912 cond_known_true = (XEXP (SET_SRC (set), 1) == pc_rtx);
3914 /* Get the type of comparison being done and the operands being compared.
3915 If we had to reverse a non-equality condition, record that fact so we
3916 know that it isn't valid for floating-point. */
3917 code = GET_CODE (XEXP (SET_SRC (set), 0));
3918 op0 = fold_rtx (XEXP (XEXP (SET_SRC (set), 0), 0), insn);
3919 op1 = fold_rtx (XEXP (XEXP (SET_SRC (set), 0), 1), insn);
3921 code = find_comparison_args (code, &op0, &op1, &mode0, &mode1);
3922 if (! cond_known_true)
3924 code = reversed_comparison_code_parts (code, op0, op1, insn);
3926 /* Don't remember if we can't find the inverse. */
3927 if (code == UNKNOWN)
3928 return;
3931 /* The mode is the mode of the non-constant. */
3932 mode = mode0;
3933 if (mode1 != VOIDmode)
3934 mode = mode1;
3936 record_jump_cond (code, mode, op0, op1, reversed_nonequality);
3939 /* Yet another form of subreg creation. In this case, we want something in
3940 MODE, and we should assume OP has MODE iff it is naturally modeless. */
3942 static rtx
3943 record_jump_cond_subreg (enum machine_mode mode, rtx op)
3945 enum machine_mode op_mode = GET_MODE (op);
3946 if (op_mode == mode || op_mode == VOIDmode)
3947 return op;
3948 return lowpart_subreg (mode, op, op_mode);
3951 /* We know that comparison CODE applied to OP0 and OP1 in MODE is true.
3952 REVERSED_NONEQUALITY is nonzero if CODE had to be swapped.
3953 Make any useful entries we can with that information. Called from
3954 above function and called recursively. */
3956 static void
3957 record_jump_cond (enum rtx_code code, enum machine_mode mode, rtx op0,
3958 rtx op1, int reversed_nonequality)
3960 unsigned op0_hash, op1_hash;
3961 int op0_in_memory, op1_in_memory;
3962 struct table_elt *op0_elt, *op1_elt;
3964 /* If OP0 and OP1 are known equal, and either is a paradoxical SUBREG,
3965 we know that they are also equal in the smaller mode (this is also
3966 true for all smaller modes whether or not there is a SUBREG, but
3967 is not worth testing for with no SUBREG). */
3969 /* Note that GET_MODE (op0) may not equal MODE. */
3970 if (code == EQ && GET_CODE (op0) == SUBREG
3971 && (GET_MODE_SIZE (GET_MODE (op0))
3972 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
3974 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
3975 rtx tem = record_jump_cond_subreg (inner_mode, op1);
3976 if (tem)
3977 record_jump_cond (code, mode, SUBREG_REG (op0), tem,
3978 reversed_nonequality);
3981 if (code == EQ && GET_CODE (op1) == SUBREG
3982 && (GET_MODE_SIZE (GET_MODE (op1))
3983 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
3985 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
3986 rtx tem = record_jump_cond_subreg (inner_mode, op0);
3987 if (tem)
3988 record_jump_cond (code, mode, SUBREG_REG (op1), tem,
3989 reversed_nonequality);
3992 /* Similarly, if this is an NE comparison, and either is a SUBREG
3993 making a smaller mode, we know the whole thing is also NE. */
3995 /* Note that GET_MODE (op0) may not equal MODE;
3996 if we test MODE instead, we can get an infinite recursion
3997 alternating between two modes each wider than MODE. */
3999 if (code == NE && GET_CODE (op0) == SUBREG
4000 && subreg_lowpart_p (op0)
4001 && (GET_MODE_SIZE (GET_MODE (op0))
4002 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
4004 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
4005 rtx tem = record_jump_cond_subreg (inner_mode, op1);
4006 if (tem)
4007 record_jump_cond (code, mode, SUBREG_REG (op0), tem,
4008 reversed_nonequality);
4011 if (code == NE && GET_CODE (op1) == SUBREG
4012 && subreg_lowpart_p (op1)
4013 && (GET_MODE_SIZE (GET_MODE (op1))
4014 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
4016 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
4017 rtx tem = record_jump_cond_subreg (inner_mode, op0);
4018 if (tem)
4019 record_jump_cond (code, mode, SUBREG_REG (op1), tem,
4020 reversed_nonequality);
4023 /* Hash both operands. */
4025 do_not_record = 0;
4026 hash_arg_in_memory = 0;
4027 op0_hash = HASH (op0, mode);
4028 op0_in_memory = hash_arg_in_memory;
4030 if (do_not_record)
4031 return;
4033 do_not_record = 0;
4034 hash_arg_in_memory = 0;
4035 op1_hash = HASH (op1, mode);
4036 op1_in_memory = hash_arg_in_memory;
4038 if (do_not_record)
4039 return;
4041 /* Look up both operands. */
4042 op0_elt = lookup (op0, op0_hash, mode);
4043 op1_elt = lookup (op1, op1_hash, mode);
4045 /* If both operands are already equivalent or if they are not in the
4046 table but are identical, do nothing. */
4047 if ((op0_elt != 0 && op1_elt != 0
4048 && op0_elt->first_same_value == op1_elt->first_same_value)
4049 || op0 == op1 || rtx_equal_p (op0, op1))
4050 return;
4052 /* If we aren't setting two things equal all we can do is save this
4053 comparison. Similarly if this is floating-point. In the latter
4054 case, OP1 might be zero and both -0.0 and 0.0 are equal to it.
4055 If we record the equality, we might inadvertently delete code
4056 whose intent was to change -0 to +0. */
4058 if (code != EQ || FLOAT_MODE_P (GET_MODE (op0)))
4060 struct qty_table_elem *ent;
4061 int qty;
4063 /* If we reversed a floating-point comparison, if OP0 is not a
4064 register, or if OP1 is neither a register or constant, we can't
4065 do anything. */
4067 if (!REG_P (op1))
4068 op1 = equiv_constant (op1);
4070 if ((reversed_nonequality && FLOAT_MODE_P (mode))
4071 || !REG_P (op0) || op1 == 0)
4072 return;
4074 /* Put OP0 in the hash table if it isn't already. This gives it a
4075 new quantity number. */
4076 if (op0_elt == 0)
4078 if (insert_regs (op0, NULL, 0))
4080 rehash_using_reg (op0);
4081 op0_hash = HASH (op0, mode);
4083 /* If OP0 is contained in OP1, this changes its hash code
4084 as well. Faster to rehash than to check, except
4085 for the simple case of a constant. */
4086 if (! CONSTANT_P (op1))
4087 op1_hash = HASH (op1,mode);
4090 op0_elt = insert (op0, NULL, op0_hash, mode);
4091 op0_elt->in_memory = op0_in_memory;
4094 qty = REG_QTY (REGNO (op0));
4095 ent = &qty_table[qty];
4097 ent->comparison_code = code;
4098 if (REG_P (op1))
4100 /* Look it up again--in case op0 and op1 are the same. */
4101 op1_elt = lookup (op1, op1_hash, mode);
4103 /* Put OP1 in the hash table so it gets a new quantity number. */
4104 if (op1_elt == 0)
4106 if (insert_regs (op1, NULL, 0))
4108 rehash_using_reg (op1);
4109 op1_hash = HASH (op1, mode);
4112 op1_elt = insert (op1, NULL, op1_hash, mode);
4113 op1_elt->in_memory = op1_in_memory;
4116 ent->comparison_const = NULL_RTX;
4117 ent->comparison_qty = REG_QTY (REGNO (op1));
4119 else
4121 ent->comparison_const = op1;
4122 ent->comparison_qty = -1;
4125 return;
4128 /* If either side is still missing an equivalence, make it now,
4129 then merge the equivalences. */
4131 if (op0_elt == 0)
4133 if (insert_regs (op0, NULL, 0))
4135 rehash_using_reg (op0);
4136 op0_hash = HASH (op0, mode);
4139 op0_elt = insert (op0, NULL, op0_hash, mode);
4140 op0_elt->in_memory = op0_in_memory;
4143 if (op1_elt == 0)
4145 if (insert_regs (op1, NULL, 0))
4147 rehash_using_reg (op1);
4148 op1_hash = HASH (op1, mode);
4151 op1_elt = insert (op1, NULL, op1_hash, mode);
4152 op1_elt->in_memory = op1_in_memory;
4155 merge_equiv_classes (op0_elt, op1_elt);
4158 /* CSE processing for one instruction.
4159 First simplify sources and addresses of all assignments
4160 in the instruction, using previously-computed equivalents values.
4161 Then install the new sources and destinations in the table
4162 of available values. */
4164 /* Data on one SET contained in the instruction. */
4166 struct set
4168 /* The SET rtx itself. */
4169 rtx rtl;
4170 /* The SET_SRC of the rtx (the original value, if it is changing). */
4171 rtx src;
4172 /* The hash-table element for the SET_SRC of the SET. */
4173 struct table_elt *src_elt;
4174 /* Hash value for the SET_SRC. */
4175 unsigned src_hash;
4176 /* Hash value for the SET_DEST. */
4177 unsigned dest_hash;
4178 /* The SET_DEST, with SUBREG, etc., stripped. */
4179 rtx inner_dest;
4180 /* Nonzero if the SET_SRC is in memory. */
4181 char src_in_memory;
4182 /* Nonzero if the SET_SRC contains something
4183 whose value cannot be predicted and understood. */
4184 char src_volatile;
4185 /* Original machine mode, in case it becomes a CONST_INT.
4186 The size of this field should match the size of the mode
4187 field of struct rtx_def (see rtl.h). */
4188 ENUM_BITFIELD(machine_mode) mode : 8;
4189 /* A constant equivalent for SET_SRC, if any. */
4190 rtx src_const;
4191 /* Hash value of constant equivalent for SET_SRC. */
4192 unsigned src_const_hash;
4193 /* Table entry for constant equivalent for SET_SRC, if any. */
4194 struct table_elt *src_const_elt;
4195 /* Table entry for the destination address. */
4196 struct table_elt *dest_addr_elt;
4199 static void
4200 cse_insn (rtx insn)
4202 rtx x = PATTERN (insn);
4203 int i;
4204 rtx tem;
4205 int n_sets = 0;
4207 rtx src_eqv = 0;
4208 struct table_elt *src_eqv_elt = 0;
4209 int src_eqv_volatile = 0;
4210 int src_eqv_in_memory = 0;
4211 unsigned src_eqv_hash = 0;
4213 struct set *sets = (struct set *) 0;
4215 this_insn = insn;
4216 #ifdef HAVE_cc0
4217 /* Records what this insn does to set CC0. */
4218 this_insn_cc0 = 0;
4219 this_insn_cc0_mode = VOIDmode;
4220 #endif
4222 /* Find all the SETs and CLOBBERs in this instruction.
4223 Record all the SETs in the array `set' and count them.
4224 Also determine whether there is a CLOBBER that invalidates
4225 all memory references, or all references at varying addresses. */
4227 if (CALL_P (insn))
4229 for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
4231 if (GET_CODE (XEXP (tem, 0)) == CLOBBER)
4232 invalidate (SET_DEST (XEXP (tem, 0)), VOIDmode);
4233 XEXP (tem, 0) = canon_reg (XEXP (tem, 0), insn);
4237 if (GET_CODE (x) == SET)
4239 sets = XALLOCA (struct set);
4240 sets[0].rtl = x;
4242 /* Ignore SETs that are unconditional jumps.
4243 They never need cse processing, so this does not hurt.
4244 The reason is not efficiency but rather
4245 so that we can test at the end for instructions
4246 that have been simplified to unconditional jumps
4247 and not be misled by unchanged instructions
4248 that were unconditional jumps to begin with. */
4249 if (SET_DEST (x) == pc_rtx
4250 && GET_CODE (SET_SRC (x)) == LABEL_REF)
4253 /* Don't count call-insns, (set (reg 0) (call ...)), as a set.
4254 The hard function value register is used only once, to copy to
4255 someplace else, so it isn't worth cse'ing (and on 80386 is unsafe)!
4256 Ensure we invalidate the destination register. On the 80386 no
4257 other code would invalidate it since it is a fixed_reg.
4258 We need not check the return of apply_change_group; see canon_reg. */
4260 else if (GET_CODE (SET_SRC (x)) == CALL)
4262 canon_reg (SET_SRC (x), insn);
4263 apply_change_group ();
4264 fold_rtx (SET_SRC (x), insn);
4265 invalidate (SET_DEST (x), VOIDmode);
4267 else
4268 n_sets = 1;
4270 else if (GET_CODE (x) == PARALLEL)
4272 int lim = XVECLEN (x, 0);
4274 sets = XALLOCAVEC (struct set, lim);
4276 /* Find all regs explicitly clobbered in this insn,
4277 and ensure they are not replaced with any other regs
4278 elsewhere in this insn.
4279 When a reg that is clobbered is also used for input,
4280 we should presume that that is for a reason,
4281 and we should not substitute some other register
4282 which is not supposed to be clobbered.
4283 Therefore, this loop cannot be merged into the one below
4284 because a CALL may precede a CLOBBER and refer to the
4285 value clobbered. We must not let a canonicalization do
4286 anything in that case. */
4287 for (i = 0; i < lim; i++)
4289 rtx y = XVECEXP (x, 0, i);
4290 if (GET_CODE (y) == CLOBBER)
4292 rtx clobbered = XEXP (y, 0);
4294 if (REG_P (clobbered)
4295 || GET_CODE (clobbered) == SUBREG)
4296 invalidate (clobbered, VOIDmode);
4297 else if (GET_CODE (clobbered) == STRICT_LOW_PART
4298 || GET_CODE (clobbered) == ZERO_EXTRACT)
4299 invalidate (XEXP (clobbered, 0), GET_MODE (clobbered));
4303 for (i = 0; i < lim; i++)
4305 rtx y = XVECEXP (x, 0, i);
4306 if (GET_CODE (y) == SET)
4308 /* As above, we ignore unconditional jumps and call-insns and
4309 ignore the result of apply_change_group. */
4310 if (GET_CODE (SET_SRC (y)) == CALL)
4312 canon_reg (SET_SRC (y), insn);
4313 apply_change_group ();
4314 fold_rtx (SET_SRC (y), insn);
4315 invalidate (SET_DEST (y), VOIDmode);
4317 else if (SET_DEST (y) == pc_rtx
4318 && GET_CODE (SET_SRC (y)) == LABEL_REF)
4320 else
4321 sets[n_sets++].rtl = y;
4323 else if (GET_CODE (y) == CLOBBER)
4325 /* If we clobber memory, canon the address.
4326 This does nothing when a register is clobbered
4327 because we have already invalidated the reg. */
4328 if (MEM_P (XEXP (y, 0)))
4329 canon_reg (XEXP (y, 0), insn);
4331 else if (GET_CODE (y) == USE
4332 && ! (REG_P (XEXP (y, 0))
4333 && REGNO (XEXP (y, 0)) < FIRST_PSEUDO_REGISTER))
4334 canon_reg (y, insn);
4335 else if (GET_CODE (y) == CALL)
4337 /* The result of apply_change_group can be ignored; see
4338 canon_reg. */
4339 canon_reg (y, insn);
4340 apply_change_group ();
4341 fold_rtx (y, insn);
4345 else if (GET_CODE (x) == CLOBBER)
4347 if (MEM_P (XEXP (x, 0)))
4348 canon_reg (XEXP (x, 0), insn);
4351 /* Canonicalize a USE of a pseudo register or memory location. */
4352 else if (GET_CODE (x) == USE
4353 && ! (REG_P (XEXP (x, 0))
4354 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER))
4355 canon_reg (XEXP (x, 0), insn);
4356 else if (GET_CODE (x) == CALL)
4358 /* The result of apply_change_group can be ignored; see canon_reg. */
4359 canon_reg (x, insn);
4360 apply_change_group ();
4361 fold_rtx (x, insn);
4363 else if (DEBUG_INSN_P (insn))
4364 canon_reg (PATTERN (insn), insn);
4366 /* Store the equivalent value in SRC_EQV, if different, or if the DEST
4367 is a STRICT_LOW_PART. The latter condition is necessary because SRC_EQV
4368 is handled specially for this case, and if it isn't set, then there will
4369 be no equivalence for the destination. */
4370 if (n_sets == 1 && REG_NOTES (insn) != 0
4371 && (tem = find_reg_note (insn, REG_EQUAL, NULL_RTX)) != 0
4372 && (! rtx_equal_p (XEXP (tem, 0), SET_SRC (sets[0].rtl))
4373 || GET_CODE (SET_DEST (sets[0].rtl)) == STRICT_LOW_PART))
4375 /* The result of apply_change_group can be ignored; see canon_reg. */
4376 canon_reg (XEXP (tem, 0), insn);
4377 apply_change_group ();
4378 src_eqv = fold_rtx (XEXP (tem, 0), insn);
4379 XEXP (tem, 0) = copy_rtx (src_eqv);
4380 df_notes_rescan (insn);
4383 /* Canonicalize sources and addresses of destinations.
4384 We do this in a separate pass to avoid problems when a MATCH_DUP is
4385 present in the insn pattern. In that case, we want to ensure that
4386 we don't break the duplicate nature of the pattern. So we will replace
4387 both operands at the same time. Otherwise, we would fail to find an
4388 equivalent substitution in the loop calling validate_change below.
4390 We used to suppress canonicalization of DEST if it appears in SRC,
4391 but we don't do this any more. */
4393 for (i = 0; i < n_sets; i++)
4395 rtx dest = SET_DEST (sets[i].rtl);
4396 rtx src = SET_SRC (sets[i].rtl);
4397 rtx new_rtx = canon_reg (src, insn);
4399 validate_change (insn, &SET_SRC (sets[i].rtl), new_rtx, 1);
4401 if (GET_CODE (dest) == ZERO_EXTRACT)
4403 validate_change (insn, &XEXP (dest, 1),
4404 canon_reg (XEXP (dest, 1), insn), 1);
4405 validate_change (insn, &XEXP (dest, 2),
4406 canon_reg (XEXP (dest, 2), insn), 1);
4409 while (GET_CODE (dest) == SUBREG
4410 || GET_CODE (dest) == ZERO_EXTRACT
4411 || GET_CODE (dest) == STRICT_LOW_PART)
4412 dest = XEXP (dest, 0);
4414 if (MEM_P (dest))
4415 canon_reg (dest, insn);
4418 /* Now that we have done all the replacements, we can apply the change
4419 group and see if they all work. Note that this will cause some
4420 canonicalizations that would have worked individually not to be applied
4421 because some other canonicalization didn't work, but this should not
4422 occur often.
4424 The result of apply_change_group can be ignored; see canon_reg. */
4426 apply_change_group ();
4428 /* Set sets[i].src_elt to the class each source belongs to.
4429 Detect assignments from or to volatile things
4430 and set set[i] to zero so they will be ignored
4431 in the rest of this function.
4433 Nothing in this loop changes the hash table or the register chains. */
4435 for (i = 0; i < n_sets; i++)
4437 bool repeat = false;
4438 rtx src, dest;
4439 rtx src_folded;
4440 struct table_elt *elt = 0, *p;
4441 enum machine_mode mode;
4442 rtx src_eqv_here;
4443 rtx src_const = 0;
4444 rtx src_related = 0;
4445 bool src_related_is_const_anchor = false;
4446 struct table_elt *src_const_elt = 0;
4447 int src_cost = MAX_COST;
4448 int src_eqv_cost = MAX_COST;
4449 int src_folded_cost = MAX_COST;
4450 int src_related_cost = MAX_COST;
4451 int src_elt_cost = MAX_COST;
4452 int src_regcost = MAX_COST;
4453 int src_eqv_regcost = MAX_COST;
4454 int src_folded_regcost = MAX_COST;
4455 int src_related_regcost = MAX_COST;
4456 int src_elt_regcost = MAX_COST;
4457 /* Set nonzero if we need to call force_const_mem on with the
4458 contents of src_folded before using it. */
4459 int src_folded_force_flag = 0;
4461 dest = SET_DEST (sets[i].rtl);
4462 src = SET_SRC (sets[i].rtl);
4464 /* If SRC is a constant that has no machine mode,
4465 hash it with the destination's machine mode.
4466 This way we can keep different modes separate. */
4468 mode = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
4469 sets[i].mode = mode;
4471 if (src_eqv)
4473 enum machine_mode eqvmode = mode;
4474 if (GET_CODE (dest) == STRICT_LOW_PART)
4475 eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
4476 do_not_record = 0;
4477 hash_arg_in_memory = 0;
4478 src_eqv_hash = HASH (src_eqv, eqvmode);
4480 /* Find the equivalence class for the equivalent expression. */
4482 if (!do_not_record)
4483 src_eqv_elt = lookup (src_eqv, src_eqv_hash, eqvmode);
4485 src_eqv_volatile = do_not_record;
4486 src_eqv_in_memory = hash_arg_in_memory;
4489 /* If this is a STRICT_LOW_PART assignment, src_eqv corresponds to the
4490 value of the INNER register, not the destination. So it is not
4491 a valid substitution for the source. But save it for later. */
4492 if (GET_CODE (dest) == STRICT_LOW_PART)
4493 src_eqv_here = 0;
4494 else
4495 src_eqv_here = src_eqv;
4497 /* Simplify and foldable subexpressions in SRC. Then get the fully-
4498 simplified result, which may not necessarily be valid. */
4499 src_folded = fold_rtx (src, insn);
4501 #if 0
4502 /* ??? This caused bad code to be generated for the m68k port with -O2.
4503 Suppose src is (CONST_INT -1), and that after truncation src_folded
4504 is (CONST_INT 3). Suppose src_folded is then used for src_const.
4505 At the end we will add src and src_const to the same equivalence
4506 class. We now have 3 and -1 on the same equivalence class. This
4507 causes later instructions to be mis-optimized. */
4508 /* If storing a constant in a bitfield, pre-truncate the constant
4509 so we will be able to record it later. */
4510 if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT)
4512 rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
4514 if (CONST_INT_P (src)
4515 && CONST_INT_P (width)
4516 && INTVAL (width) < HOST_BITS_PER_WIDE_INT
4517 && (INTVAL (src) & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
4518 src_folded
4519 = GEN_INT (INTVAL (src) & (((HOST_WIDE_INT) 1
4520 << INTVAL (width)) - 1));
4522 #endif
4524 /* Compute SRC's hash code, and also notice if it
4525 should not be recorded at all. In that case,
4526 prevent any further processing of this assignment. */
4527 do_not_record = 0;
4528 hash_arg_in_memory = 0;
4530 sets[i].src = src;
4531 sets[i].src_hash = HASH (src, mode);
4532 sets[i].src_volatile = do_not_record;
4533 sets[i].src_in_memory = hash_arg_in_memory;
4535 /* If SRC is a MEM, there is a REG_EQUIV note for SRC, and DEST is
4536 a pseudo, do not record SRC. Using SRC as a replacement for
4537 anything else will be incorrect in that situation. Note that
4538 this usually occurs only for stack slots, in which case all the
4539 RTL would be referring to SRC, so we don't lose any optimization
4540 opportunities by not having SRC in the hash table. */
4542 if (MEM_P (src)
4543 && find_reg_note (insn, REG_EQUIV, NULL_RTX) != 0
4544 && REG_P (dest)
4545 && REGNO (dest) >= FIRST_PSEUDO_REGISTER)
4546 sets[i].src_volatile = 1;
4548 #if 0
4549 /* It is no longer clear why we used to do this, but it doesn't
4550 appear to still be needed. So let's try without it since this
4551 code hurts cse'ing widened ops. */
4552 /* If source is a paradoxical subreg (such as QI treated as an SI),
4553 treat it as volatile. It may do the work of an SI in one context
4554 where the extra bits are not being used, but cannot replace an SI
4555 in general. */
4556 if (GET_CODE (src) == SUBREG
4557 && (GET_MODE_SIZE (GET_MODE (src))
4558 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))))
4559 sets[i].src_volatile = 1;
4560 #endif
4562 /* Locate all possible equivalent forms for SRC. Try to replace
4563 SRC in the insn with each cheaper equivalent.
4565 We have the following types of equivalents: SRC itself, a folded
4566 version, a value given in a REG_EQUAL note, or a value related
4567 to a constant.
4569 Each of these equivalents may be part of an additional class
4570 of equivalents (if more than one is in the table, they must be in
4571 the same class; we check for this).
4573 If the source is volatile, we don't do any table lookups.
4575 We note any constant equivalent for possible later use in a
4576 REG_NOTE. */
4578 if (!sets[i].src_volatile)
4579 elt = lookup (src, sets[i].src_hash, mode);
4581 sets[i].src_elt = elt;
4583 if (elt && src_eqv_here && src_eqv_elt)
4585 if (elt->first_same_value != src_eqv_elt->first_same_value)
4587 /* The REG_EQUAL is indicating that two formerly distinct
4588 classes are now equivalent. So merge them. */
4589 merge_equiv_classes (elt, src_eqv_elt);
4590 src_eqv_hash = HASH (src_eqv, elt->mode);
4591 src_eqv_elt = lookup (src_eqv, src_eqv_hash, elt->mode);
4594 src_eqv_here = 0;
4597 else if (src_eqv_elt)
4598 elt = src_eqv_elt;
4600 /* Try to find a constant somewhere and record it in `src_const'.
4601 Record its table element, if any, in `src_const_elt'. Look in
4602 any known equivalences first. (If the constant is not in the
4603 table, also set `sets[i].src_const_hash'). */
4604 if (elt)
4605 for (p = elt->first_same_value; p; p = p->next_same_value)
4606 if (p->is_const)
4608 src_const = p->exp;
4609 src_const_elt = elt;
4610 break;
4613 if (src_const == 0
4614 && (CONSTANT_P (src_folded)
4615 /* Consider (minus (label_ref L1) (label_ref L2)) as
4616 "constant" here so we will record it. This allows us
4617 to fold switch statements when an ADDR_DIFF_VEC is used. */
4618 || (GET_CODE (src_folded) == MINUS
4619 && GET_CODE (XEXP (src_folded, 0)) == LABEL_REF
4620 && GET_CODE (XEXP (src_folded, 1)) == LABEL_REF)))
4621 src_const = src_folded, src_const_elt = elt;
4622 else if (src_const == 0 && src_eqv_here && CONSTANT_P (src_eqv_here))
4623 src_const = src_eqv_here, src_const_elt = src_eqv_elt;
4625 /* If we don't know if the constant is in the table, get its
4626 hash code and look it up. */
4627 if (src_const && src_const_elt == 0)
4629 sets[i].src_const_hash = HASH (src_const, mode);
4630 src_const_elt = lookup (src_const, sets[i].src_const_hash, mode);
4633 sets[i].src_const = src_const;
4634 sets[i].src_const_elt = src_const_elt;
4636 /* If the constant and our source are both in the table, mark them as
4637 equivalent. Otherwise, if a constant is in the table but the source
4638 isn't, set ELT to it. */
4639 if (src_const_elt && elt
4640 && src_const_elt->first_same_value != elt->first_same_value)
4641 merge_equiv_classes (elt, src_const_elt);
4642 else if (src_const_elt && elt == 0)
4643 elt = src_const_elt;
4645 /* See if there is a register linearly related to a constant
4646 equivalent of SRC. */
4647 if (src_const
4648 && (GET_CODE (src_const) == CONST
4649 || (src_const_elt && src_const_elt->related_value != 0)))
4651 src_related = use_related_value (src_const, src_const_elt);
4652 if (src_related)
4654 struct table_elt *src_related_elt
4655 = lookup (src_related, HASH (src_related, mode), mode);
4656 if (src_related_elt && elt)
4658 if (elt->first_same_value
4659 != src_related_elt->first_same_value)
4660 /* This can occur when we previously saw a CONST
4661 involving a SYMBOL_REF and then see the SYMBOL_REF
4662 twice. Merge the involved classes. */
4663 merge_equiv_classes (elt, src_related_elt);
4665 src_related = 0;
4666 src_related_elt = 0;
4668 else if (src_related_elt && elt == 0)
4669 elt = src_related_elt;
4673 /* See if we have a CONST_INT that is already in a register in a
4674 wider mode. */
4676 if (src_const && src_related == 0 && CONST_INT_P (src_const)
4677 && GET_MODE_CLASS (mode) == MODE_INT
4678 && GET_MODE_BITSIZE (mode) < BITS_PER_WORD)
4680 enum machine_mode wider_mode;
4682 for (wider_mode = GET_MODE_WIDER_MODE (mode);
4683 wider_mode != VOIDmode
4684 && GET_MODE_BITSIZE (wider_mode) <= BITS_PER_WORD
4685 && src_related == 0;
4686 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
4688 struct table_elt *const_elt
4689 = lookup (src_const, HASH (src_const, wider_mode), wider_mode);
4691 if (const_elt == 0)
4692 continue;
4694 for (const_elt = const_elt->first_same_value;
4695 const_elt; const_elt = const_elt->next_same_value)
4696 if (REG_P (const_elt->exp))
4698 src_related = gen_lowpart (mode, const_elt->exp);
4699 break;
4704 /* Another possibility is that we have an AND with a constant in
4705 a mode narrower than a word. If so, it might have been generated
4706 as part of an "if" which would narrow the AND. If we already
4707 have done the AND in a wider mode, we can use a SUBREG of that
4708 value. */
4710 if (flag_expensive_optimizations && ! src_related
4711 && GET_CODE (src) == AND && CONST_INT_P (XEXP (src, 1))
4712 && GET_MODE_SIZE (mode) < UNITS_PER_WORD)
4714 enum machine_mode tmode;
4715 rtx new_and = gen_rtx_AND (VOIDmode, NULL_RTX, XEXP (src, 1));
4717 for (tmode = GET_MODE_WIDER_MODE (mode);
4718 GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
4719 tmode = GET_MODE_WIDER_MODE (tmode))
4721 rtx inner = gen_lowpart (tmode, XEXP (src, 0));
4722 struct table_elt *larger_elt;
4724 if (inner)
4726 PUT_MODE (new_and, tmode);
4727 XEXP (new_and, 0) = inner;
4728 larger_elt = lookup (new_and, HASH (new_and, tmode), tmode);
4729 if (larger_elt == 0)
4730 continue;
4732 for (larger_elt = larger_elt->first_same_value;
4733 larger_elt; larger_elt = larger_elt->next_same_value)
4734 if (REG_P (larger_elt->exp))
4736 src_related
4737 = gen_lowpart (mode, larger_elt->exp);
4738 break;
4741 if (src_related)
4742 break;
4747 #ifdef LOAD_EXTEND_OP
4748 /* See if a MEM has already been loaded with a widening operation;
4749 if it has, we can use a subreg of that. Many CISC machines
4750 also have such operations, but this is only likely to be
4751 beneficial on these machines. */
4753 if (flag_expensive_optimizations && src_related == 0
4754 && (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
4755 && GET_MODE_CLASS (mode) == MODE_INT
4756 && MEM_P (src) && ! do_not_record
4757 && LOAD_EXTEND_OP (mode) != UNKNOWN)
4759 struct rtx_def memory_extend_buf;
4760 rtx memory_extend_rtx = &memory_extend_buf;
4761 enum machine_mode tmode;
4763 /* Set what we are trying to extend and the operation it might
4764 have been extended with. */
4765 memset (memory_extend_rtx, 0, sizeof(*memory_extend_rtx));
4766 PUT_CODE (memory_extend_rtx, LOAD_EXTEND_OP (mode));
4767 XEXP (memory_extend_rtx, 0) = src;
4769 for (tmode = GET_MODE_WIDER_MODE (mode);
4770 GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
4771 tmode = GET_MODE_WIDER_MODE (tmode))
4773 struct table_elt *larger_elt;
4775 PUT_MODE (memory_extend_rtx, tmode);
4776 larger_elt = lookup (memory_extend_rtx,
4777 HASH (memory_extend_rtx, tmode), tmode);
4778 if (larger_elt == 0)
4779 continue;
4781 for (larger_elt = larger_elt->first_same_value;
4782 larger_elt; larger_elt = larger_elt->next_same_value)
4783 if (REG_P (larger_elt->exp))
4785 src_related = gen_lowpart (mode, larger_elt->exp);
4786 break;
4789 if (src_related)
4790 break;
4793 #endif /* LOAD_EXTEND_OP */
4795 /* Try to express the constant using a register+offset expression
4796 derived from a constant anchor. */
4798 if (targetm.const_anchor
4799 && !src_related
4800 && src_const
4801 && GET_CODE (src_const) == CONST_INT)
4803 src_related = try_const_anchors (src_const, mode);
4804 src_related_is_const_anchor = src_related != NULL_RTX;
4808 if (src == src_folded)
4809 src_folded = 0;
4811 /* At this point, ELT, if nonzero, points to a class of expressions
4812 equivalent to the source of this SET and SRC, SRC_EQV, SRC_FOLDED,
4813 and SRC_RELATED, if nonzero, each contain additional equivalent
4814 expressions. Prune these latter expressions by deleting expressions
4815 already in the equivalence class.
4817 Check for an equivalent identical to the destination. If found,
4818 this is the preferred equivalent since it will likely lead to
4819 elimination of the insn. Indicate this by placing it in
4820 `src_related'. */
4822 if (elt)
4823 elt = elt->first_same_value;
4824 for (p = elt; p; p = p->next_same_value)
4826 enum rtx_code code = GET_CODE (p->exp);
4828 /* If the expression is not valid, ignore it. Then we do not
4829 have to check for validity below. In most cases, we can use
4830 `rtx_equal_p', since canonicalization has already been done. */
4831 if (code != REG && ! exp_equiv_p (p->exp, p->exp, 1, false))
4832 continue;
4834 /* Also skip paradoxical subregs, unless that's what we're
4835 looking for. */
4836 if (code == SUBREG
4837 && (GET_MODE_SIZE (GET_MODE (p->exp))
4838 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))
4839 && ! (src != 0
4840 && GET_CODE (src) == SUBREG
4841 && GET_MODE (src) == GET_MODE (p->exp)
4842 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
4843 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))))
4844 continue;
4846 if (src && GET_CODE (src) == code && rtx_equal_p (src, p->exp))
4847 src = 0;
4848 else if (src_folded && GET_CODE (src_folded) == code
4849 && rtx_equal_p (src_folded, p->exp))
4850 src_folded = 0;
4851 else if (src_eqv_here && GET_CODE (src_eqv_here) == code
4852 && rtx_equal_p (src_eqv_here, p->exp))
4853 src_eqv_here = 0;
4854 else if (src_related && GET_CODE (src_related) == code
4855 && rtx_equal_p (src_related, p->exp))
4856 src_related = 0;
4858 /* This is the same as the destination of the insns, we want
4859 to prefer it. Copy it to src_related. The code below will
4860 then give it a negative cost. */
4861 if (GET_CODE (dest) == code && rtx_equal_p (p->exp, dest))
4862 src_related = dest;
4865 /* Find the cheapest valid equivalent, trying all the available
4866 possibilities. Prefer items not in the hash table to ones
4867 that are when they are equal cost. Note that we can never
4868 worsen an insn as the current contents will also succeed.
4869 If we find an equivalent identical to the destination, use it as best,
4870 since this insn will probably be eliminated in that case. */
4871 if (src)
4873 if (rtx_equal_p (src, dest))
4874 src_cost = src_regcost = -1;
4875 else
4877 src_cost = COST (src);
4878 src_regcost = approx_reg_cost (src);
4882 if (src_eqv_here)
4884 if (rtx_equal_p (src_eqv_here, dest))
4885 src_eqv_cost = src_eqv_regcost = -1;
4886 else
4888 src_eqv_cost = COST (src_eqv_here);
4889 src_eqv_regcost = approx_reg_cost (src_eqv_here);
4893 if (src_folded)
4895 if (rtx_equal_p (src_folded, dest))
4896 src_folded_cost = src_folded_regcost = -1;
4897 else
4899 src_folded_cost = COST (src_folded);
4900 src_folded_regcost = approx_reg_cost (src_folded);
4904 if (src_related)
4906 if (rtx_equal_p (src_related, dest))
4907 src_related_cost = src_related_regcost = -1;
4908 else
4910 src_related_cost = COST (src_related);
4911 src_related_regcost = approx_reg_cost (src_related);
4913 /* If a const-anchor is used to synthesize a constant that
4914 normally requires multiple instructions then slightly prefer
4915 it over the original sequence. These instructions are likely
4916 to become redundant now. We can't compare against the cost
4917 of src_eqv_here because, on MIPS for example, multi-insn
4918 constants have zero cost; they are assumed to be hoisted from
4919 loops. */
4920 if (src_related_is_const_anchor
4921 && src_related_cost == src_cost
4922 && src_eqv_here)
4923 src_related_cost--;
4927 /* If this was an indirect jump insn, a known label will really be
4928 cheaper even though it looks more expensive. */
4929 if (dest == pc_rtx && src_const && GET_CODE (src_const) == LABEL_REF)
4930 src_folded = src_const, src_folded_cost = src_folded_regcost = -1;
4932 /* Terminate loop when replacement made. This must terminate since
4933 the current contents will be tested and will always be valid. */
4934 while (1)
4936 rtx trial;
4938 /* Skip invalid entries. */
4939 while (elt && !REG_P (elt->exp)
4940 && ! exp_equiv_p (elt->exp, elt->exp, 1, false))
4941 elt = elt->next_same_value;
4943 /* A paradoxical subreg would be bad here: it'll be the right
4944 size, but later may be adjusted so that the upper bits aren't
4945 what we want. So reject it. */
4946 if (elt != 0
4947 && GET_CODE (elt->exp) == SUBREG
4948 && (GET_MODE_SIZE (GET_MODE (elt->exp))
4949 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))
4950 /* It is okay, though, if the rtx we're trying to match
4951 will ignore any of the bits we can't predict. */
4952 && ! (src != 0
4953 && GET_CODE (src) == SUBREG
4954 && GET_MODE (src) == GET_MODE (elt->exp)
4955 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
4956 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))))
4958 elt = elt->next_same_value;
4959 continue;
4962 if (elt)
4964 src_elt_cost = elt->cost;
4965 src_elt_regcost = elt->regcost;
4968 /* Find cheapest and skip it for the next time. For items
4969 of equal cost, use this order:
4970 src_folded, src, src_eqv, src_related and hash table entry. */
4971 if (src_folded
4972 && preferable (src_folded_cost, src_folded_regcost,
4973 src_cost, src_regcost) <= 0
4974 && preferable (src_folded_cost, src_folded_regcost,
4975 src_eqv_cost, src_eqv_regcost) <= 0
4976 && preferable (src_folded_cost, src_folded_regcost,
4977 src_related_cost, src_related_regcost) <= 0
4978 && preferable (src_folded_cost, src_folded_regcost,
4979 src_elt_cost, src_elt_regcost) <= 0)
4981 trial = src_folded, src_folded_cost = MAX_COST;
4982 if (src_folded_force_flag)
4984 rtx forced = force_const_mem (mode, trial);
4985 if (forced)
4986 trial = forced;
4989 else if (src
4990 && preferable (src_cost, src_regcost,
4991 src_eqv_cost, src_eqv_regcost) <= 0
4992 && preferable (src_cost, src_regcost,
4993 src_related_cost, src_related_regcost) <= 0
4994 && preferable (src_cost, src_regcost,
4995 src_elt_cost, src_elt_regcost) <= 0)
4996 trial = src, src_cost = MAX_COST;
4997 else if (src_eqv_here
4998 && preferable (src_eqv_cost, src_eqv_regcost,
4999 src_related_cost, src_related_regcost) <= 0
5000 && preferable (src_eqv_cost, src_eqv_regcost,
5001 src_elt_cost, src_elt_regcost) <= 0)
5002 trial = src_eqv_here, src_eqv_cost = MAX_COST;
5003 else if (src_related
5004 && preferable (src_related_cost, src_related_regcost,
5005 src_elt_cost, src_elt_regcost) <= 0)
5006 trial = src_related, src_related_cost = MAX_COST;
5007 else
5009 trial = elt->exp;
5010 elt = elt->next_same_value;
5011 src_elt_cost = MAX_COST;
5014 /* Avoid creation of overlapping memory moves. */
5015 if (MEM_P (trial) && MEM_P (SET_DEST (sets[i].rtl)))
5017 rtx src, dest;
5019 /* BLKmode moves are not handled by cse anyway. */
5020 if (GET_MODE (trial) == BLKmode)
5021 break;
5023 src = canon_rtx (trial);
5024 dest = canon_rtx (SET_DEST (sets[i].rtl));
5026 if (!MEM_P (src) || !MEM_P (dest)
5027 || !nonoverlapping_memrefs_p (src, dest))
5028 break;
5031 /* Try to optimize
5032 (set (reg:M N) (const_int A))
5033 (set (reg:M2 O) (const_int B))
5034 (set (zero_extract:M2 (reg:M N) (const_int C) (const_int D))
5035 (reg:M2 O)). */
5036 if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
5037 && CONST_INT_P (trial)
5038 && CONST_INT_P (XEXP (SET_DEST (sets[i].rtl), 1))
5039 && CONST_INT_P (XEXP (SET_DEST (sets[i].rtl), 2))
5040 && REG_P (XEXP (SET_DEST (sets[i].rtl), 0))
5041 && (GET_MODE_BITSIZE (GET_MODE (SET_DEST (sets[i].rtl)))
5042 >= INTVAL (XEXP (SET_DEST (sets[i].rtl), 1)))
5043 && ((unsigned) INTVAL (XEXP (SET_DEST (sets[i].rtl), 1))
5044 + (unsigned) INTVAL (XEXP (SET_DEST (sets[i].rtl), 2))
5045 <= HOST_BITS_PER_WIDE_INT))
5047 rtx dest_reg = XEXP (SET_DEST (sets[i].rtl), 0);
5048 rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
5049 rtx pos = XEXP (SET_DEST (sets[i].rtl), 2);
5050 unsigned int dest_hash = HASH (dest_reg, GET_MODE (dest_reg));
5051 struct table_elt *dest_elt
5052 = lookup (dest_reg, dest_hash, GET_MODE (dest_reg));
5053 rtx dest_cst = NULL;
5055 if (dest_elt)
5056 for (p = dest_elt->first_same_value; p; p = p->next_same_value)
5057 if (p->is_const && CONST_INT_P (p->exp))
5059 dest_cst = p->exp;
5060 break;
5062 if (dest_cst)
5064 HOST_WIDE_INT val = INTVAL (dest_cst);
5065 HOST_WIDE_INT mask;
5066 unsigned int shift;
5067 if (BITS_BIG_ENDIAN)
5068 shift = GET_MODE_BITSIZE (GET_MODE (dest_reg))
5069 - INTVAL (pos) - INTVAL (width);
5070 else
5071 shift = INTVAL (pos);
5072 if (INTVAL (width) == HOST_BITS_PER_WIDE_INT)
5073 mask = ~(HOST_WIDE_INT) 0;
5074 else
5075 mask = ((HOST_WIDE_INT) 1 << INTVAL (width)) - 1;
5076 val &= ~(mask << shift);
5077 val |= (INTVAL (trial) & mask) << shift;
5078 val = trunc_int_for_mode (val, GET_MODE (dest_reg));
5079 validate_unshare_change (insn, &SET_DEST (sets[i].rtl),
5080 dest_reg, 1);
5081 validate_unshare_change (insn, &SET_SRC (sets[i].rtl),
5082 GEN_INT (val), 1);
5083 if (apply_change_group ())
5085 rtx note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
5086 if (note)
5088 remove_note (insn, note);
5089 df_notes_rescan (insn);
5091 src_eqv = NULL_RTX;
5092 src_eqv_elt = NULL;
5093 src_eqv_volatile = 0;
5094 src_eqv_in_memory = 0;
5095 src_eqv_hash = 0;
5096 repeat = true;
5097 break;
5102 /* We don't normally have an insn matching (set (pc) (pc)), so
5103 check for this separately here. We will delete such an
5104 insn below.
5106 For other cases such as a table jump or conditional jump
5107 where we know the ultimate target, go ahead and replace the
5108 operand. While that may not make a valid insn, we will
5109 reemit the jump below (and also insert any necessary
5110 barriers). */
5111 if (n_sets == 1 && dest == pc_rtx
5112 && (trial == pc_rtx
5113 || (GET_CODE (trial) == LABEL_REF
5114 && ! condjump_p (insn))))
5116 /* Don't substitute non-local labels, this confuses CFG. */
5117 if (GET_CODE (trial) == LABEL_REF
5118 && LABEL_REF_NONLOCAL_P (trial))
5119 continue;
5121 SET_SRC (sets[i].rtl) = trial;
5122 cse_jumps_altered = true;
5123 break;
5126 /* Reject certain invalid forms of CONST that we create. */
5127 else if (CONSTANT_P (trial)
5128 && GET_CODE (trial) == CONST
5129 /* Reject cases that will cause decode_rtx_const to
5130 die. On the alpha when simplifying a switch, we
5131 get (const (truncate (minus (label_ref)
5132 (label_ref)))). */
5133 && (GET_CODE (XEXP (trial, 0)) == TRUNCATE
5134 /* Likewise on IA-64, except without the
5135 truncate. */
5136 || (GET_CODE (XEXP (trial, 0)) == MINUS
5137 && GET_CODE (XEXP (XEXP (trial, 0), 0)) == LABEL_REF
5138 && GET_CODE (XEXP (XEXP (trial, 0), 1)) == LABEL_REF)))
5139 /* Do nothing for this case. */
5142 /* Look for a substitution that makes a valid insn. */
5143 else if (validate_unshare_change
5144 (insn, &SET_SRC (sets[i].rtl), trial, 0))
5146 rtx new_rtx = canon_reg (SET_SRC (sets[i].rtl), insn);
5148 /* The result of apply_change_group can be ignored; see
5149 canon_reg. */
5151 validate_change (insn, &SET_SRC (sets[i].rtl), new_rtx, 1);
5152 apply_change_group ();
5154 break;
5157 /* If we previously found constant pool entries for
5158 constants and this is a constant, try making a
5159 pool entry. Put it in src_folded unless we already have done
5160 this since that is where it likely came from. */
5162 else if (constant_pool_entries_cost
5163 && CONSTANT_P (trial)
5164 && (src_folded == 0
5165 || (!MEM_P (src_folded)
5166 && ! src_folded_force_flag))
5167 && GET_MODE_CLASS (mode) != MODE_CC
5168 && mode != VOIDmode)
5170 src_folded_force_flag = 1;
5171 src_folded = trial;
5172 src_folded_cost = constant_pool_entries_cost;
5173 src_folded_regcost = constant_pool_entries_regcost;
5177 /* If we changed the insn too much, handle this set from scratch. */
5178 if (repeat)
5180 i--;
5181 continue;
5184 src = SET_SRC (sets[i].rtl);
5186 /* In general, it is good to have a SET with SET_SRC == SET_DEST.
5187 However, there is an important exception: If both are registers
5188 that are not the head of their equivalence class, replace SET_SRC
5189 with the head of the class. If we do not do this, we will have
5190 both registers live over a portion of the basic block. This way,
5191 their lifetimes will likely abut instead of overlapping. */
5192 if (REG_P (dest)
5193 && REGNO_QTY_VALID_P (REGNO (dest)))
5195 int dest_q = REG_QTY (REGNO (dest));
5196 struct qty_table_elem *dest_ent = &qty_table[dest_q];
5198 if (dest_ent->mode == GET_MODE (dest)
5199 && dest_ent->first_reg != REGNO (dest)
5200 && REG_P (src) && REGNO (src) == REGNO (dest)
5201 /* Don't do this if the original insn had a hard reg as
5202 SET_SRC or SET_DEST. */
5203 && (!REG_P (sets[i].src)
5204 || REGNO (sets[i].src) >= FIRST_PSEUDO_REGISTER)
5205 && (!REG_P (dest) || REGNO (dest) >= FIRST_PSEUDO_REGISTER))
5206 /* We can't call canon_reg here because it won't do anything if
5207 SRC is a hard register. */
5209 int src_q = REG_QTY (REGNO (src));
5210 struct qty_table_elem *src_ent = &qty_table[src_q];
5211 int first = src_ent->first_reg;
5212 rtx new_src
5213 = (first >= FIRST_PSEUDO_REGISTER
5214 ? regno_reg_rtx[first] : gen_rtx_REG (GET_MODE (src), first));
5216 /* We must use validate-change even for this, because this
5217 might be a special no-op instruction, suitable only to
5218 tag notes onto. */
5219 if (validate_change (insn, &SET_SRC (sets[i].rtl), new_src, 0))
5221 src = new_src;
5222 /* If we had a constant that is cheaper than what we are now
5223 setting SRC to, use that constant. We ignored it when we
5224 thought we could make this into a no-op. */
5225 if (src_const && COST (src_const) < COST (src)
5226 && validate_change (insn, &SET_SRC (sets[i].rtl),
5227 src_const, 0))
5228 src = src_const;
5233 /* If we made a change, recompute SRC values. */
5234 if (src != sets[i].src)
5236 do_not_record = 0;
5237 hash_arg_in_memory = 0;
5238 sets[i].src = src;
5239 sets[i].src_hash = HASH (src, mode);
5240 sets[i].src_volatile = do_not_record;
5241 sets[i].src_in_memory = hash_arg_in_memory;
5242 sets[i].src_elt = lookup (src, sets[i].src_hash, mode);
5245 /* If this is a single SET, we are setting a register, and we have an
5246 equivalent constant, we want to add a REG_NOTE. We don't want
5247 to write a REG_EQUAL note for a constant pseudo since verifying that
5248 that pseudo hasn't been eliminated is a pain. Such a note also
5249 won't help anything.
5251 Avoid a REG_EQUAL note for (CONST (MINUS (LABEL_REF) (LABEL_REF)))
5252 which can be created for a reference to a compile time computable
5253 entry in a jump table. */
5255 if (n_sets == 1 && src_const && REG_P (dest)
5256 && !REG_P (src_const)
5257 && ! (GET_CODE (src_const) == CONST
5258 && GET_CODE (XEXP (src_const, 0)) == MINUS
5259 && GET_CODE (XEXP (XEXP (src_const, 0), 0)) == LABEL_REF
5260 && GET_CODE (XEXP (XEXP (src_const, 0), 1)) == LABEL_REF))
5262 /* We only want a REG_EQUAL note if src_const != src. */
5263 if (! rtx_equal_p (src, src_const))
5265 /* Make sure that the rtx is not shared. */
5266 src_const = copy_rtx (src_const);
5268 /* Record the actual constant value in a REG_EQUAL note,
5269 making a new one if one does not already exist. */
5270 set_unique_reg_note (insn, REG_EQUAL, src_const);
5271 df_notes_rescan (insn);
5275 /* Now deal with the destination. */
5276 do_not_record = 0;
5278 /* Look within any ZERO_EXTRACT to the MEM or REG within it. */
5279 while (GET_CODE (dest) == SUBREG
5280 || GET_CODE (dest) == ZERO_EXTRACT
5281 || GET_CODE (dest) == STRICT_LOW_PART)
5282 dest = XEXP (dest, 0);
5284 sets[i].inner_dest = dest;
5286 if (MEM_P (dest))
5288 #ifdef PUSH_ROUNDING
5289 /* Stack pushes invalidate the stack pointer. */
5290 rtx addr = XEXP (dest, 0);
5291 if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC
5292 && XEXP (addr, 0) == stack_pointer_rtx)
5293 invalidate (stack_pointer_rtx, VOIDmode);
5294 #endif
5295 dest = fold_rtx (dest, insn);
5298 /* Compute the hash code of the destination now,
5299 before the effects of this instruction are recorded,
5300 since the register values used in the address computation
5301 are those before this instruction. */
5302 sets[i].dest_hash = HASH (dest, mode);
5304 /* Don't enter a bit-field in the hash table
5305 because the value in it after the store
5306 may not equal what was stored, due to truncation. */
5308 if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT)
5310 rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
5312 if (src_const != 0 && CONST_INT_P (src_const)
5313 && CONST_INT_P (width)
5314 && INTVAL (width) < HOST_BITS_PER_WIDE_INT
5315 && ! (INTVAL (src_const)
5316 & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
5317 /* Exception: if the value is constant,
5318 and it won't be truncated, record it. */
5320 else
5322 /* This is chosen so that the destination will be invalidated
5323 but no new value will be recorded.
5324 We must invalidate because sometimes constant
5325 values can be recorded for bitfields. */
5326 sets[i].src_elt = 0;
5327 sets[i].src_volatile = 1;
5328 src_eqv = 0;
5329 src_eqv_elt = 0;
5333 /* If only one set in a JUMP_INSN and it is now a no-op, we can delete
5334 the insn. */
5335 else if (n_sets == 1 && dest == pc_rtx && src == pc_rtx)
5337 /* One less use of the label this insn used to jump to. */
5338 delete_insn_and_edges (insn);
5339 cse_jumps_altered = true;
5340 /* No more processing for this set. */
5341 sets[i].rtl = 0;
5344 /* If this SET is now setting PC to a label, we know it used to
5345 be a conditional or computed branch. */
5346 else if (dest == pc_rtx && GET_CODE (src) == LABEL_REF
5347 && !LABEL_REF_NONLOCAL_P (src))
5349 /* We reemit the jump in as many cases as possible just in
5350 case the form of an unconditional jump is significantly
5351 different than a computed jump or conditional jump.
5353 If this insn has multiple sets, then reemitting the
5354 jump is nontrivial. So instead we just force rerecognition
5355 and hope for the best. */
5356 if (n_sets == 1)
5358 rtx new_rtx, note;
5360 new_rtx = emit_jump_insn_before (gen_jump (XEXP (src, 0)), insn);
5361 JUMP_LABEL (new_rtx) = XEXP (src, 0);
5362 LABEL_NUSES (XEXP (src, 0))++;
5364 /* Make sure to copy over REG_NON_LOCAL_GOTO. */
5365 note = find_reg_note (insn, REG_NON_LOCAL_GOTO, 0);
5366 if (note)
5368 XEXP (note, 1) = NULL_RTX;
5369 REG_NOTES (new_rtx) = note;
5372 delete_insn_and_edges (insn);
5373 insn = new_rtx;
5375 else
5376 INSN_CODE (insn) = -1;
5378 /* Do not bother deleting any unreachable code, let jump do it. */
5379 cse_jumps_altered = true;
5380 sets[i].rtl = 0;
5383 /* If destination is volatile, invalidate it and then do no further
5384 processing for this assignment. */
5386 else if (do_not_record)
5388 if (REG_P (dest) || GET_CODE (dest) == SUBREG)
5389 invalidate (dest, VOIDmode);
5390 else if (MEM_P (dest))
5391 invalidate (dest, VOIDmode);
5392 else if (GET_CODE (dest) == STRICT_LOW_PART
5393 || GET_CODE (dest) == ZERO_EXTRACT)
5394 invalidate (XEXP (dest, 0), GET_MODE (dest));
5395 sets[i].rtl = 0;
5398 if (sets[i].rtl != 0 && dest != SET_DEST (sets[i].rtl))
5399 sets[i].dest_hash = HASH (SET_DEST (sets[i].rtl), mode);
5401 #ifdef HAVE_cc0
5402 /* If setting CC0, record what it was set to, or a constant, if it
5403 is equivalent to a constant. If it is being set to a floating-point
5404 value, make a COMPARE with the appropriate constant of 0. If we
5405 don't do this, later code can interpret this as a test against
5406 const0_rtx, which can cause problems if we try to put it into an
5407 insn as a floating-point operand. */
5408 if (dest == cc0_rtx)
5410 this_insn_cc0 = src_const && mode != VOIDmode ? src_const : src;
5411 this_insn_cc0_mode = mode;
5412 if (FLOAT_MODE_P (mode))
5413 this_insn_cc0 = gen_rtx_COMPARE (VOIDmode, this_insn_cc0,
5414 CONST0_RTX (mode));
5416 #endif
5419 /* Now enter all non-volatile source expressions in the hash table
5420 if they are not already present.
5421 Record their equivalence classes in src_elt.
5422 This way we can insert the corresponding destinations into
5423 the same classes even if the actual sources are no longer in them
5424 (having been invalidated). */
5426 if (src_eqv && src_eqv_elt == 0 && sets[0].rtl != 0 && ! src_eqv_volatile
5427 && ! rtx_equal_p (src_eqv, SET_DEST (sets[0].rtl)))
5429 struct table_elt *elt;
5430 struct table_elt *classp = sets[0].src_elt;
5431 rtx dest = SET_DEST (sets[0].rtl);
5432 enum machine_mode eqvmode = GET_MODE (dest);
5434 if (GET_CODE (dest) == STRICT_LOW_PART)
5436 eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
5437 classp = 0;
5439 if (insert_regs (src_eqv, classp, 0))
5441 rehash_using_reg (src_eqv);
5442 src_eqv_hash = HASH (src_eqv, eqvmode);
5444 elt = insert (src_eqv, classp, src_eqv_hash, eqvmode);
5445 elt->in_memory = src_eqv_in_memory;
5446 src_eqv_elt = elt;
5448 /* Check to see if src_eqv_elt is the same as a set source which
5449 does not yet have an elt, and if so set the elt of the set source
5450 to src_eqv_elt. */
5451 for (i = 0; i < n_sets; i++)
5452 if (sets[i].rtl && sets[i].src_elt == 0
5453 && rtx_equal_p (SET_SRC (sets[i].rtl), src_eqv))
5454 sets[i].src_elt = src_eqv_elt;
5457 for (i = 0; i < n_sets; i++)
5458 if (sets[i].rtl && ! sets[i].src_volatile
5459 && ! rtx_equal_p (SET_SRC (sets[i].rtl), SET_DEST (sets[i].rtl)))
5461 if (GET_CODE (SET_DEST (sets[i].rtl)) == STRICT_LOW_PART)
5463 /* REG_EQUAL in setting a STRICT_LOW_PART
5464 gives an equivalent for the entire destination register,
5465 not just for the subreg being stored in now.
5466 This is a more interesting equivalence, so we arrange later
5467 to treat the entire reg as the destination. */
5468 sets[i].src_elt = src_eqv_elt;
5469 sets[i].src_hash = src_eqv_hash;
5471 else
5473 /* Insert source and constant equivalent into hash table, if not
5474 already present. */
5475 struct table_elt *classp = src_eqv_elt;
5476 rtx src = sets[i].src;
5477 rtx dest = SET_DEST (sets[i].rtl);
5478 enum machine_mode mode
5479 = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
5481 /* It's possible that we have a source value known to be
5482 constant but don't have a REG_EQUAL note on the insn.
5483 Lack of a note will mean src_eqv_elt will be NULL. This
5484 can happen where we've generated a SUBREG to access a
5485 CONST_INT that is already in a register in a wider mode.
5486 Ensure that the source expression is put in the proper
5487 constant class. */
5488 if (!classp)
5489 classp = sets[i].src_const_elt;
5491 if (sets[i].src_elt == 0)
5493 struct table_elt *elt;
5495 /* Note that these insert_regs calls cannot remove
5496 any of the src_elt's, because they would have failed to
5497 match if not still valid. */
5498 if (insert_regs (src, classp, 0))
5500 rehash_using_reg (src);
5501 sets[i].src_hash = HASH (src, mode);
5503 elt = insert (src, classp, sets[i].src_hash, mode);
5504 elt->in_memory = sets[i].src_in_memory;
5505 sets[i].src_elt = classp = elt;
5507 if (sets[i].src_const && sets[i].src_const_elt == 0
5508 && src != sets[i].src_const
5509 && ! rtx_equal_p (sets[i].src_const, src))
5510 sets[i].src_elt = insert (sets[i].src_const, classp,
5511 sets[i].src_const_hash, mode);
5514 else if (sets[i].src_elt == 0)
5515 /* If we did not insert the source into the hash table (e.g., it was
5516 volatile), note the equivalence class for the REG_EQUAL value, if any,
5517 so that the destination goes into that class. */
5518 sets[i].src_elt = src_eqv_elt;
5520 /* Record destination addresses in the hash table. This allows us to
5521 check if they are invalidated by other sets. */
5522 for (i = 0; i < n_sets; i++)
5524 if (sets[i].rtl)
5526 rtx x = sets[i].inner_dest;
5527 struct table_elt *elt;
5528 enum machine_mode mode;
5529 unsigned hash;
5531 if (MEM_P (x))
5533 x = XEXP (x, 0);
5534 mode = GET_MODE (x);
5535 hash = HASH (x, mode);
5536 elt = lookup (x, hash, mode);
5537 if (!elt)
5539 if (insert_regs (x, NULL, 0))
5541 rtx dest = SET_DEST (sets[i].rtl);
5543 rehash_using_reg (x);
5544 hash = HASH (x, mode);
5545 sets[i].dest_hash = HASH (dest, GET_MODE (dest));
5547 elt = insert (x, NULL, hash, mode);
5550 sets[i].dest_addr_elt = elt;
5552 else
5553 sets[i].dest_addr_elt = NULL;
5557 invalidate_from_clobbers (x);
5559 /* Some registers are invalidated by subroutine calls. Memory is
5560 invalidated by non-constant calls. */
5562 if (CALL_P (insn))
5564 if (!(RTL_CONST_OR_PURE_CALL_P (insn)))
5565 invalidate_memory ();
5566 invalidate_for_call ();
5569 /* Now invalidate everything set by this instruction.
5570 If a SUBREG or other funny destination is being set,
5571 sets[i].rtl is still nonzero, so here we invalidate the reg
5572 a part of which is being set. */
5574 for (i = 0; i < n_sets; i++)
5575 if (sets[i].rtl)
5577 /* We can't use the inner dest, because the mode associated with
5578 a ZERO_EXTRACT is significant. */
5579 rtx dest = SET_DEST (sets[i].rtl);
5581 /* Needed for registers to remove the register from its
5582 previous quantity's chain.
5583 Needed for memory if this is a nonvarying address, unless
5584 we have just done an invalidate_memory that covers even those. */
5585 if (REG_P (dest) || GET_CODE (dest) == SUBREG)
5586 invalidate (dest, VOIDmode);
5587 else if (MEM_P (dest))
5588 invalidate (dest, VOIDmode);
5589 else if (GET_CODE (dest) == STRICT_LOW_PART
5590 || GET_CODE (dest) == ZERO_EXTRACT)
5591 invalidate (XEXP (dest, 0), GET_MODE (dest));
5594 /* A volatile ASM invalidates everything. */
5595 if (NONJUMP_INSN_P (insn)
5596 && GET_CODE (PATTERN (insn)) == ASM_OPERANDS
5597 && MEM_VOLATILE_P (PATTERN (insn)))
5598 flush_hash_table ();
5600 /* Don't cse over a call to setjmp; on some machines (eg VAX)
5601 the regs restored by the longjmp come from a later time
5602 than the setjmp. */
5603 if (CALL_P (insn) && find_reg_note (insn, REG_SETJMP, NULL))
5605 flush_hash_table ();
5606 goto done;
5609 /* Make sure registers mentioned in destinations
5610 are safe for use in an expression to be inserted.
5611 This removes from the hash table
5612 any invalid entry that refers to one of these registers.
5614 We don't care about the return value from mention_regs because
5615 we are going to hash the SET_DEST values unconditionally. */
5617 for (i = 0; i < n_sets; i++)
5619 if (sets[i].rtl)
5621 rtx x = SET_DEST (sets[i].rtl);
5623 if (!REG_P (x))
5624 mention_regs (x);
5625 else
5627 /* We used to rely on all references to a register becoming
5628 inaccessible when a register changes to a new quantity,
5629 since that changes the hash code. However, that is not
5630 safe, since after HASH_SIZE new quantities we get a
5631 hash 'collision' of a register with its own invalid
5632 entries. And since SUBREGs have been changed not to
5633 change their hash code with the hash code of the register,
5634 it wouldn't work any longer at all. So we have to check
5635 for any invalid references lying around now.
5636 This code is similar to the REG case in mention_regs,
5637 but it knows that reg_tick has been incremented, and
5638 it leaves reg_in_table as -1 . */
5639 unsigned int regno = REGNO (x);
5640 unsigned int endregno = END_REGNO (x);
5641 unsigned int i;
5643 for (i = regno; i < endregno; i++)
5645 if (REG_IN_TABLE (i) >= 0)
5647 remove_invalid_refs (i);
5648 REG_IN_TABLE (i) = -1;
5655 /* We may have just removed some of the src_elt's from the hash table.
5656 So replace each one with the current head of the same class.
5657 Also check if destination addresses have been removed. */
5659 for (i = 0; i < n_sets; i++)
5660 if (sets[i].rtl)
5662 if (sets[i].dest_addr_elt
5663 && sets[i].dest_addr_elt->first_same_value == 0)
5665 /* The elt was removed, which means this destination is not
5666 valid after this instruction. */
5667 sets[i].rtl = NULL_RTX;
5669 else if (sets[i].src_elt && sets[i].src_elt->first_same_value == 0)
5670 /* If elt was removed, find current head of same class,
5671 or 0 if nothing remains of that class. */
5673 struct table_elt *elt = sets[i].src_elt;
5675 while (elt && elt->prev_same_value)
5676 elt = elt->prev_same_value;
5678 while (elt && elt->first_same_value == 0)
5679 elt = elt->next_same_value;
5680 sets[i].src_elt = elt ? elt->first_same_value : 0;
5684 /* Now insert the destinations into their equivalence classes. */
5686 for (i = 0; i < n_sets; i++)
5687 if (sets[i].rtl)
5689 rtx dest = SET_DEST (sets[i].rtl);
5690 struct table_elt *elt;
5692 /* Don't record value if we are not supposed to risk allocating
5693 floating-point values in registers that might be wider than
5694 memory. */
5695 if ((flag_float_store
5696 && MEM_P (dest)
5697 && FLOAT_MODE_P (GET_MODE (dest)))
5698 /* Don't record BLKmode values, because we don't know the
5699 size of it, and can't be sure that other BLKmode values
5700 have the same or smaller size. */
5701 || GET_MODE (dest) == BLKmode
5702 /* If we didn't put a REG_EQUAL value or a source into the hash
5703 table, there is no point is recording DEST. */
5704 || sets[i].src_elt == 0
5705 /* If DEST is a paradoxical SUBREG and SRC is a ZERO_EXTEND
5706 or SIGN_EXTEND, don't record DEST since it can cause
5707 some tracking to be wrong.
5709 ??? Think about this more later. */
5710 || (GET_CODE (dest) == SUBREG
5711 && (GET_MODE_SIZE (GET_MODE (dest))
5712 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
5713 && (GET_CODE (sets[i].src) == SIGN_EXTEND
5714 || GET_CODE (sets[i].src) == ZERO_EXTEND)))
5715 continue;
5717 /* STRICT_LOW_PART isn't part of the value BEING set,
5718 and neither is the SUBREG inside it.
5719 Note that in this case SETS[I].SRC_ELT is really SRC_EQV_ELT. */
5720 if (GET_CODE (dest) == STRICT_LOW_PART)
5721 dest = SUBREG_REG (XEXP (dest, 0));
5723 if (REG_P (dest) || GET_CODE (dest) == SUBREG)
5724 /* Registers must also be inserted into chains for quantities. */
5725 if (insert_regs (dest, sets[i].src_elt, 1))
5727 /* If `insert_regs' changes something, the hash code must be
5728 recalculated. */
5729 rehash_using_reg (dest);
5730 sets[i].dest_hash = HASH (dest, GET_MODE (dest));
5733 elt = insert (dest, sets[i].src_elt,
5734 sets[i].dest_hash, GET_MODE (dest));
5736 /* If this is a constant, insert the constant anchors with the
5737 equivalent register-offset expressions using register DEST. */
5738 if (targetm.const_anchor
5739 && REG_P (dest)
5740 && SCALAR_INT_MODE_P (GET_MODE (dest))
5741 && GET_CODE (sets[i].src_elt->exp) == CONST_INT)
5742 insert_const_anchors (dest, sets[i].src_elt->exp, GET_MODE (dest));
5744 elt->in_memory = (MEM_P (sets[i].inner_dest)
5745 && !MEM_READONLY_P (sets[i].inner_dest));
5747 /* If we have (set (subreg:m1 (reg:m2 foo) 0) (bar:m1)), M1 is no
5748 narrower than M2, and both M1 and M2 are the same number of words,
5749 we are also doing (set (reg:m2 foo) (subreg:m2 (bar:m1) 0)) so
5750 make that equivalence as well.
5752 However, BAR may have equivalences for which gen_lowpart
5753 will produce a simpler value than gen_lowpart applied to
5754 BAR (e.g., if BAR was ZERO_EXTENDed from M2), so we will scan all
5755 BAR's equivalences. If we don't get a simplified form, make
5756 the SUBREG. It will not be used in an equivalence, but will
5757 cause two similar assignments to be detected.
5759 Note the loop below will find SUBREG_REG (DEST) since we have
5760 already entered SRC and DEST of the SET in the table. */
5762 if (GET_CODE (dest) == SUBREG
5763 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) - 1)
5764 / UNITS_PER_WORD)
5765 == (GET_MODE_SIZE (GET_MODE (dest)) - 1) / UNITS_PER_WORD)
5766 && (GET_MODE_SIZE (GET_MODE (dest))
5767 >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
5768 && sets[i].src_elt != 0)
5770 enum machine_mode new_mode = GET_MODE (SUBREG_REG (dest));
5771 struct table_elt *elt, *classp = 0;
5773 for (elt = sets[i].src_elt->first_same_value; elt;
5774 elt = elt->next_same_value)
5776 rtx new_src = 0;
5777 unsigned src_hash;
5778 struct table_elt *src_elt;
5779 int byte = 0;
5781 /* Ignore invalid entries. */
5782 if (!REG_P (elt->exp)
5783 && ! exp_equiv_p (elt->exp, elt->exp, 1, false))
5784 continue;
5786 /* We may have already been playing subreg games. If the
5787 mode is already correct for the destination, use it. */
5788 if (GET_MODE (elt->exp) == new_mode)
5789 new_src = elt->exp;
5790 else
5792 /* Calculate big endian correction for the SUBREG_BYTE.
5793 We have already checked that M1 (GET_MODE (dest))
5794 is not narrower than M2 (new_mode). */
5795 if (BYTES_BIG_ENDIAN)
5796 byte = (GET_MODE_SIZE (GET_MODE (dest))
5797 - GET_MODE_SIZE (new_mode));
5799 new_src = simplify_gen_subreg (new_mode, elt->exp,
5800 GET_MODE (dest), byte);
5803 /* The call to simplify_gen_subreg fails if the value
5804 is VOIDmode, yet we can't do any simplification, e.g.
5805 for EXPR_LISTs denoting function call results.
5806 It is invalid to construct a SUBREG with a VOIDmode
5807 SUBREG_REG, hence a zero new_src means we can't do
5808 this substitution. */
5809 if (! new_src)
5810 continue;
5812 src_hash = HASH (new_src, new_mode);
5813 src_elt = lookup (new_src, src_hash, new_mode);
5815 /* Put the new source in the hash table is if isn't
5816 already. */
5817 if (src_elt == 0)
5819 if (insert_regs (new_src, classp, 0))
5821 rehash_using_reg (new_src);
5822 src_hash = HASH (new_src, new_mode);
5824 src_elt = insert (new_src, classp, src_hash, new_mode);
5825 src_elt->in_memory = elt->in_memory;
5827 else if (classp && classp != src_elt->first_same_value)
5828 /* Show that two things that we've seen before are
5829 actually the same. */
5830 merge_equiv_classes (src_elt, classp);
5832 classp = src_elt->first_same_value;
5833 /* Ignore invalid entries. */
5834 while (classp
5835 && !REG_P (classp->exp)
5836 && ! exp_equiv_p (classp->exp, classp->exp, 1, false))
5837 classp = classp->next_same_value;
5842 /* Special handling for (set REG0 REG1) where REG0 is the
5843 "cheapest", cheaper than REG1. After cse, REG1 will probably not
5844 be used in the sequel, so (if easily done) change this insn to
5845 (set REG1 REG0) and replace REG1 with REG0 in the previous insn
5846 that computed their value. Then REG1 will become a dead store
5847 and won't cloud the situation for later optimizations.
5849 Do not make this change if REG1 is a hard register, because it will
5850 then be used in the sequel and we may be changing a two-operand insn
5851 into a three-operand insn.
5853 Also do not do this if we are operating on a copy of INSN. */
5855 if (n_sets == 1 && sets[0].rtl && REG_P (SET_DEST (sets[0].rtl))
5856 && NEXT_INSN (PREV_INSN (insn)) == insn
5857 && REG_P (SET_SRC (sets[0].rtl))
5858 && REGNO (SET_SRC (sets[0].rtl)) >= FIRST_PSEUDO_REGISTER
5859 && REGNO_QTY_VALID_P (REGNO (SET_SRC (sets[0].rtl))))
5861 int src_q = REG_QTY (REGNO (SET_SRC (sets[0].rtl)));
5862 struct qty_table_elem *src_ent = &qty_table[src_q];
5864 if (src_ent->first_reg == REGNO (SET_DEST (sets[0].rtl)))
5866 /* Scan for the previous nonnote insn, but stop at a basic
5867 block boundary. */
5868 rtx prev = insn;
5869 rtx bb_head = BB_HEAD (BLOCK_FOR_INSN (insn));
5872 prev = PREV_INSN (prev);
5874 while (prev != bb_head && (NOTE_P (prev) || DEBUG_INSN_P (prev)));
5876 /* Do not swap the registers around if the previous instruction
5877 attaches a REG_EQUIV note to REG1.
5879 ??? It's not entirely clear whether we can transfer a REG_EQUIV
5880 from the pseudo that originally shadowed an incoming argument
5881 to another register. Some uses of REG_EQUIV might rely on it
5882 being attached to REG1 rather than REG2.
5884 This section previously turned the REG_EQUIV into a REG_EQUAL
5885 note. We cannot do that because REG_EQUIV may provide an
5886 uninitialized stack slot when REG_PARM_STACK_SPACE is used. */
5887 if (NONJUMP_INSN_P (prev)
5888 && GET_CODE (PATTERN (prev)) == SET
5889 && SET_DEST (PATTERN (prev)) == SET_SRC (sets[0].rtl)
5890 && ! find_reg_note (prev, REG_EQUIV, NULL_RTX))
5892 rtx dest = SET_DEST (sets[0].rtl);
5893 rtx src = SET_SRC (sets[0].rtl);
5894 rtx note;
5896 validate_change (prev, &SET_DEST (PATTERN (prev)), dest, 1);
5897 validate_change (insn, &SET_DEST (sets[0].rtl), src, 1);
5898 validate_change (insn, &SET_SRC (sets[0].rtl), dest, 1);
5899 apply_change_group ();
5901 /* If INSN has a REG_EQUAL note, and this note mentions
5902 REG0, then we must delete it, because the value in
5903 REG0 has changed. If the note's value is REG1, we must
5904 also delete it because that is now this insn's dest. */
5905 note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
5906 if (note != 0
5907 && (reg_mentioned_p (dest, XEXP (note, 0))
5908 || rtx_equal_p (src, XEXP (note, 0))))
5909 remove_note (insn, note);
5914 done:;
5917 /* Remove from the hash table all expressions that reference memory. */
5919 static void
5920 invalidate_memory (void)
5922 int i;
5923 struct table_elt *p, *next;
5925 for (i = 0; i < HASH_SIZE; i++)
5926 for (p = table[i]; p; p = next)
5928 next = p->next_same_hash;
5929 if (p->in_memory)
5930 remove_from_table (p, i);
5934 /* Perform invalidation on the basis of everything about an insn
5935 except for invalidating the actual places that are SET in it.
5936 This includes the places CLOBBERed, and anything that might
5937 alias with something that is SET or CLOBBERed.
5939 X is the pattern of the insn. */
5941 static void
5942 invalidate_from_clobbers (rtx x)
5944 if (GET_CODE (x) == CLOBBER)
5946 rtx ref = XEXP (x, 0);
5947 if (ref)
5949 if (REG_P (ref) || GET_CODE (ref) == SUBREG
5950 || MEM_P (ref))
5951 invalidate (ref, VOIDmode);
5952 else if (GET_CODE (ref) == STRICT_LOW_PART
5953 || GET_CODE (ref) == ZERO_EXTRACT)
5954 invalidate (XEXP (ref, 0), GET_MODE (ref));
5957 else if (GET_CODE (x) == PARALLEL)
5959 int i;
5960 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
5962 rtx y = XVECEXP (x, 0, i);
5963 if (GET_CODE (y) == CLOBBER)
5965 rtx ref = XEXP (y, 0);
5966 if (REG_P (ref) || GET_CODE (ref) == SUBREG
5967 || MEM_P (ref))
5968 invalidate (ref, VOIDmode);
5969 else if (GET_CODE (ref) == STRICT_LOW_PART
5970 || GET_CODE (ref) == ZERO_EXTRACT)
5971 invalidate (XEXP (ref, 0), GET_MODE (ref));
5977 /* Process X, part of the REG_NOTES of an insn. Look at any REG_EQUAL notes
5978 and replace any registers in them with either an equivalent constant
5979 or the canonical form of the register. If we are inside an address,
5980 only do this if the address remains valid.
5982 OBJECT is 0 except when within a MEM in which case it is the MEM.
5984 Return the replacement for X. */
5986 static rtx
5987 cse_process_notes_1 (rtx x, rtx object, bool *changed)
5989 enum rtx_code code = GET_CODE (x);
5990 const char *fmt = GET_RTX_FORMAT (code);
5991 int i;
5993 switch (code)
5995 case CONST_INT:
5996 case CONST:
5997 case SYMBOL_REF:
5998 case LABEL_REF:
5999 case CONST_DOUBLE:
6000 case CONST_FIXED:
6001 case CONST_VECTOR:
6002 case PC:
6003 case CC0:
6004 case LO_SUM:
6005 return x;
6007 case MEM:
6008 validate_change (x, &XEXP (x, 0),
6009 cse_process_notes (XEXP (x, 0), x, changed), 0);
6010 return x;
6012 case EXPR_LIST:
6013 case INSN_LIST:
6014 if (REG_NOTE_KIND (x) == REG_EQUAL)
6015 XEXP (x, 0) = cse_process_notes (XEXP (x, 0), NULL_RTX, changed);
6016 if (XEXP (x, 1))
6017 XEXP (x, 1) = cse_process_notes (XEXP (x, 1), NULL_RTX, changed);
6018 return x;
6020 case SIGN_EXTEND:
6021 case ZERO_EXTEND:
6022 case SUBREG:
6024 rtx new_rtx = cse_process_notes (XEXP (x, 0), object, changed);
6025 /* We don't substitute VOIDmode constants into these rtx,
6026 since they would impede folding. */
6027 if (GET_MODE (new_rtx) != VOIDmode)
6028 validate_change (object, &XEXP (x, 0), new_rtx, 0);
6029 return x;
6032 case REG:
6033 i = REG_QTY (REGNO (x));
6035 /* Return a constant or a constant register. */
6036 if (REGNO_QTY_VALID_P (REGNO (x)))
6038 struct qty_table_elem *ent = &qty_table[i];
6040 if (ent->const_rtx != NULL_RTX
6041 && (CONSTANT_P (ent->const_rtx)
6042 || REG_P (ent->const_rtx)))
6044 rtx new_rtx = gen_lowpart (GET_MODE (x), ent->const_rtx);
6045 if (new_rtx)
6046 return copy_rtx (new_rtx);
6050 /* Otherwise, canonicalize this register. */
6051 return canon_reg (x, NULL_RTX);
6053 default:
6054 break;
6057 for (i = 0; i < GET_RTX_LENGTH (code); i++)
6058 if (fmt[i] == 'e')
6059 validate_change (object, &XEXP (x, i),
6060 cse_process_notes (XEXP (x, i), object, changed), 0);
6062 return x;
6065 static rtx
6066 cse_process_notes (rtx x, rtx object, bool *changed)
6068 rtx new_rtx = cse_process_notes_1 (x, object, changed);
6069 if (new_rtx != x)
6070 *changed = true;
6071 return new_rtx;
6075 /* Find a path in the CFG, starting with FIRST_BB to perform CSE on.
6077 DATA is a pointer to a struct cse_basic_block_data, that is used to
6078 describe the path.
6079 It is filled with a queue of basic blocks, starting with FIRST_BB
6080 and following a trace through the CFG.
6082 If all paths starting at FIRST_BB have been followed, or no new path
6083 starting at FIRST_BB can be constructed, this function returns FALSE.
6084 Otherwise, DATA->path is filled and the function returns TRUE indicating
6085 that a path to follow was found.
6087 If FOLLOW_JUMPS is false, the maximum path length is 1 and the only
6088 block in the path will be FIRST_BB. */
6090 static bool
6091 cse_find_path (basic_block first_bb, struct cse_basic_block_data *data,
6092 int follow_jumps)
6094 basic_block bb;
6095 edge e;
6096 int path_size;
6098 SET_BIT (cse_visited_basic_blocks, first_bb->index);
6100 /* See if there is a previous path. */
6101 path_size = data->path_size;
6103 /* There is a previous path. Make sure it started with FIRST_BB. */
6104 if (path_size)
6105 gcc_assert (data->path[0].bb == first_bb);
6107 /* There was only one basic block in the last path. Clear the path and
6108 return, so that paths starting at another basic block can be tried. */
6109 if (path_size == 1)
6111 path_size = 0;
6112 goto done;
6115 /* If the path was empty from the beginning, construct a new path. */
6116 if (path_size == 0)
6117 data->path[path_size++].bb = first_bb;
6118 else
6120 /* Otherwise, path_size must be equal to or greater than 2, because
6121 a previous path exists that is at least two basic blocks long.
6123 Update the previous branch path, if any. If the last branch was
6124 previously along the branch edge, take the fallthrough edge now. */
6125 while (path_size >= 2)
6127 basic_block last_bb_in_path, previous_bb_in_path;
6128 edge e;
6130 --path_size;
6131 last_bb_in_path = data->path[path_size].bb;
6132 previous_bb_in_path = data->path[path_size - 1].bb;
6134 /* If we previously followed a path along the branch edge, try
6135 the fallthru edge now. */
6136 if (EDGE_COUNT (previous_bb_in_path->succs) == 2
6137 && any_condjump_p (BB_END (previous_bb_in_path))
6138 && (e = find_edge (previous_bb_in_path, last_bb_in_path))
6139 && e == BRANCH_EDGE (previous_bb_in_path))
6141 bb = FALLTHRU_EDGE (previous_bb_in_path)->dest;
6142 if (bb != EXIT_BLOCK_PTR
6143 && single_pred_p (bb)
6144 /* We used to assert here that we would only see blocks
6145 that we have not visited yet. But we may end up
6146 visiting basic blocks twice if the CFG has changed
6147 in this run of cse_main, because when the CFG changes
6148 the topological sort of the CFG also changes. A basic
6149 blocks that previously had more than two predecessors
6150 may now have a single predecessor, and become part of
6151 a path that starts at another basic block.
6153 We still want to visit each basic block only once, so
6154 halt the path here if we have already visited BB. */
6155 && !TEST_BIT (cse_visited_basic_blocks, bb->index))
6157 SET_BIT (cse_visited_basic_blocks, bb->index);
6158 data->path[path_size++].bb = bb;
6159 break;
6163 data->path[path_size].bb = NULL;
6166 /* If only one block remains in the path, bail. */
6167 if (path_size == 1)
6169 path_size = 0;
6170 goto done;
6174 /* Extend the path if possible. */
6175 if (follow_jumps)
6177 bb = data->path[path_size - 1].bb;
6178 while (bb && path_size < PARAM_VALUE (PARAM_MAX_CSE_PATH_LENGTH))
6180 if (single_succ_p (bb))
6181 e = single_succ_edge (bb);
6182 else if (EDGE_COUNT (bb->succs) == 2
6183 && any_condjump_p (BB_END (bb)))
6185 /* First try to follow the branch. If that doesn't lead
6186 to a useful path, follow the fallthru edge. */
6187 e = BRANCH_EDGE (bb);
6188 if (!single_pred_p (e->dest))
6189 e = FALLTHRU_EDGE (bb);
6191 else
6192 e = NULL;
6194 if (e && e->dest != EXIT_BLOCK_PTR
6195 && single_pred_p (e->dest)
6196 /* Avoid visiting basic blocks twice. The large comment
6197 above explains why this can happen. */
6198 && !TEST_BIT (cse_visited_basic_blocks, e->dest->index))
6200 basic_block bb2 = e->dest;
6201 SET_BIT (cse_visited_basic_blocks, bb2->index);
6202 data->path[path_size++].bb = bb2;
6203 bb = bb2;
6205 else
6206 bb = NULL;
6210 done:
6211 data->path_size = path_size;
6212 return path_size != 0;
6215 /* Dump the path in DATA to file F. NSETS is the number of sets
6216 in the path. */
6218 static void
6219 cse_dump_path (struct cse_basic_block_data *data, int nsets, FILE *f)
6221 int path_entry;
6223 fprintf (f, ";; Following path with %d sets: ", nsets);
6224 for (path_entry = 0; path_entry < data->path_size; path_entry++)
6225 fprintf (f, "%d ", (data->path[path_entry].bb)->index);
6226 fputc ('\n', dump_file);
6227 fflush (f);
6231 /* Return true if BB has exception handling successor edges. */
6233 static bool
6234 have_eh_succ_edges (basic_block bb)
6236 edge e;
6237 edge_iterator ei;
6239 FOR_EACH_EDGE (e, ei, bb->succs)
6240 if (e->flags & EDGE_EH)
6241 return true;
6243 return false;
6247 /* Scan to the end of the path described by DATA. Return an estimate of
6248 the total number of SETs of all insns in the path. */
6250 static void
6251 cse_prescan_path (struct cse_basic_block_data *data)
6253 int nsets = 0;
6254 int path_size = data->path_size;
6255 int path_entry;
6257 /* Scan to end of each basic block in the path. */
6258 for (path_entry = 0; path_entry < path_size; path_entry++)
6260 basic_block bb;
6261 rtx insn;
6263 bb = data->path[path_entry].bb;
6265 FOR_BB_INSNS (bb, insn)
6267 if (!INSN_P (insn))
6268 continue;
6270 /* A PARALLEL can have lots of SETs in it,
6271 especially if it is really an ASM_OPERANDS. */
6272 if (GET_CODE (PATTERN (insn)) == PARALLEL)
6273 nsets += XVECLEN (PATTERN (insn), 0);
6274 else
6275 nsets += 1;
6279 data->nsets = nsets;
6282 /* Process a single extended basic block described by EBB_DATA. */
6284 static void
6285 cse_extended_basic_block (struct cse_basic_block_data *ebb_data)
6287 int path_size = ebb_data->path_size;
6288 int path_entry;
6289 int num_insns = 0;
6291 /* Allocate the space needed by qty_table. */
6292 qty_table = XNEWVEC (struct qty_table_elem, max_qty);
6294 new_basic_block ();
6295 cse_ebb_live_in = df_get_live_in (ebb_data->path[0].bb);
6296 cse_ebb_live_out = df_get_live_out (ebb_data->path[path_size - 1].bb);
6297 for (path_entry = 0; path_entry < path_size; path_entry++)
6299 basic_block bb;
6300 rtx insn;
6302 bb = ebb_data->path[path_entry].bb;
6304 /* Invalidate recorded information for eh regs if there is an EH
6305 edge pointing to that bb. */
6306 if (bb_has_eh_pred (bb))
6308 df_ref *def_rec;
6310 for (def_rec = df_get_artificial_defs (bb->index); *def_rec; def_rec++)
6312 df_ref def = *def_rec;
6313 if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
6314 invalidate (DF_REF_REG (def), GET_MODE (DF_REF_REG (def)));
6318 FOR_BB_INSNS (bb, insn)
6320 optimize_this_for_speed_p = optimize_bb_for_speed_p (bb);
6321 /* If we have processed 1,000 insns, flush the hash table to
6322 avoid extreme quadratic behavior. We must not include NOTEs
6323 in the count since there may be more of them when generating
6324 debugging information. If we clear the table at different
6325 times, code generated with -g -O might be different than code
6326 generated with -O but not -g.
6328 FIXME: This is a real kludge and needs to be done some other
6329 way. */
6330 if (NONDEBUG_INSN_P (insn)
6331 && num_insns++ > PARAM_VALUE (PARAM_MAX_CSE_INSNS))
6333 flush_hash_table ();
6334 num_insns = 0;
6337 if (INSN_P (insn))
6339 /* Process notes first so we have all notes in canonical forms
6340 when looking for duplicate operations. */
6341 if (REG_NOTES (insn))
6343 bool changed = false;
6344 REG_NOTES (insn) = cse_process_notes (REG_NOTES (insn),
6345 NULL_RTX, &changed);
6346 if (changed)
6347 df_notes_rescan (insn);
6350 cse_insn (insn);
6352 /* If we haven't already found an insn where we added a LABEL_REF,
6353 check this one. */
6354 if (INSN_P (insn) && !recorded_label_ref
6355 && for_each_rtx (&PATTERN (insn), check_for_label_ref,
6356 (void *) insn))
6357 recorded_label_ref = true;
6359 #ifdef HAVE_cc0
6360 /* If the previous insn set CC0 and this insn no longer
6361 references CC0, delete the previous insn. Here we use
6362 fact that nothing expects CC0 to be valid over an insn,
6363 which is true until the final pass. */
6365 rtx prev_insn, tem;
6367 prev_insn = PREV_INSN (insn);
6368 if (prev_insn && NONJUMP_INSN_P (prev_insn)
6369 && (tem = single_set (prev_insn)) != 0
6370 && SET_DEST (tem) == cc0_rtx
6371 && ! reg_mentioned_p (cc0_rtx, PATTERN (insn)))
6372 delete_insn (prev_insn);
6375 /* If this insn is not the last insn in the basic block,
6376 it will be PREV_INSN(insn) in the next iteration. If
6377 we recorded any CC0-related information for this insn,
6378 remember it. */
6379 if (insn != BB_END (bb))
6381 prev_insn_cc0 = this_insn_cc0;
6382 prev_insn_cc0_mode = this_insn_cc0_mode;
6384 #endif
6388 /* With non-call exceptions, we are not always able to update
6389 the CFG properly inside cse_insn. So clean up possibly
6390 redundant EH edges here. */
6391 if (cfun->can_throw_non_call_exceptions && have_eh_succ_edges (bb))
6392 cse_cfg_altered |= purge_dead_edges (bb);
6394 /* If we changed a conditional jump, we may have terminated
6395 the path we are following. Check that by verifying that
6396 the edge we would take still exists. If the edge does
6397 not exist anymore, purge the remainder of the path.
6398 Note that this will cause us to return to the caller. */
6399 if (path_entry < path_size - 1)
6401 basic_block next_bb = ebb_data->path[path_entry + 1].bb;
6402 if (!find_edge (bb, next_bb))
6406 path_size--;
6408 /* If we truncate the path, we must also reset the
6409 visited bit on the remaining blocks in the path,
6410 or we will never visit them at all. */
6411 RESET_BIT (cse_visited_basic_blocks,
6412 ebb_data->path[path_size].bb->index);
6413 ebb_data->path[path_size].bb = NULL;
6415 while (path_size - 1 != path_entry);
6416 ebb_data->path_size = path_size;
6420 /* If this is a conditional jump insn, record any known
6421 equivalences due to the condition being tested. */
6422 insn = BB_END (bb);
6423 if (path_entry < path_size - 1
6424 && JUMP_P (insn)
6425 && single_set (insn)
6426 && any_condjump_p (insn))
6428 basic_block next_bb = ebb_data->path[path_entry + 1].bb;
6429 bool taken = (next_bb == BRANCH_EDGE (bb)->dest);
6430 record_jump_equiv (insn, taken);
6433 #ifdef HAVE_cc0
6434 /* Clear the CC0-tracking related insns, they can't provide
6435 useful information across basic block boundaries. */
6436 prev_insn_cc0 = 0;
6437 #endif
6440 gcc_assert (next_qty <= max_qty);
6442 free (qty_table);
6446 /* Perform cse on the instructions of a function.
6447 F is the first instruction.
6448 NREGS is one plus the highest pseudo-reg number used in the instruction.
6450 Return 2 if jump optimizations should be redone due to simplifications
6451 in conditional jump instructions.
6452 Return 1 if the CFG should be cleaned up because it has been modified.
6453 Return 0 otherwise. */
6456 cse_main (rtx f ATTRIBUTE_UNUSED, int nregs)
6458 struct cse_basic_block_data ebb_data;
6459 basic_block bb;
6460 int *rc_order = XNEWVEC (int, last_basic_block);
6461 int i, n_blocks;
6463 df_set_flags (DF_LR_RUN_DCE);
6464 df_analyze ();
6465 df_set_flags (DF_DEFER_INSN_RESCAN);
6467 reg_scan (get_insns (), max_reg_num ());
6468 init_cse_reg_info (nregs);
6470 ebb_data.path = XNEWVEC (struct branch_path,
6471 PARAM_VALUE (PARAM_MAX_CSE_PATH_LENGTH));
6473 cse_cfg_altered = false;
6474 cse_jumps_altered = false;
6475 recorded_label_ref = false;
6476 constant_pool_entries_cost = 0;
6477 constant_pool_entries_regcost = 0;
6478 ebb_data.path_size = 0;
6479 ebb_data.nsets = 0;
6480 rtl_hooks = cse_rtl_hooks;
6482 init_recog ();
6483 init_alias_analysis ();
6485 reg_eqv_table = XNEWVEC (struct reg_eqv_elem, nregs);
6487 /* Set up the table of already visited basic blocks. */
6488 cse_visited_basic_blocks = sbitmap_alloc (last_basic_block);
6489 sbitmap_zero (cse_visited_basic_blocks);
6491 /* Loop over basic blocks in reverse completion order (RPO),
6492 excluding the ENTRY and EXIT blocks. */
6493 n_blocks = pre_and_rev_post_order_compute (NULL, rc_order, false);
6494 i = 0;
6495 while (i < n_blocks)
6497 /* Find the first block in the RPO queue that we have not yet
6498 processed before. */
6501 bb = BASIC_BLOCK (rc_order[i++]);
6503 while (TEST_BIT (cse_visited_basic_blocks, bb->index)
6504 && i < n_blocks);
6506 /* Find all paths starting with BB, and process them. */
6507 while (cse_find_path (bb, &ebb_data, flag_cse_follow_jumps))
6509 /* Pre-scan the path. */
6510 cse_prescan_path (&ebb_data);
6512 /* If this basic block has no sets, skip it. */
6513 if (ebb_data.nsets == 0)
6514 continue;
6516 /* Get a reasonable estimate for the maximum number of qty's
6517 needed for this path. For this, we take the number of sets
6518 and multiply that by MAX_RECOG_OPERANDS. */
6519 max_qty = ebb_data.nsets * MAX_RECOG_OPERANDS;
6521 /* Dump the path we're about to process. */
6522 if (dump_file)
6523 cse_dump_path (&ebb_data, ebb_data.nsets, dump_file);
6525 cse_extended_basic_block (&ebb_data);
6529 /* Clean up. */
6530 end_alias_analysis ();
6531 free (reg_eqv_table);
6532 free (ebb_data.path);
6533 sbitmap_free (cse_visited_basic_blocks);
6534 free (rc_order);
6535 rtl_hooks = general_rtl_hooks;
6537 if (cse_jumps_altered || recorded_label_ref)
6538 return 2;
6539 else if (cse_cfg_altered)
6540 return 1;
6541 else
6542 return 0;
6545 /* Called via for_each_rtx to see if an insn is using a LABEL_REF for
6546 which there isn't a REG_LABEL_OPERAND note.
6547 Return one if so. DATA is the insn. */
6549 static int
6550 check_for_label_ref (rtx *rtl, void *data)
6552 rtx insn = (rtx) data;
6554 /* If this insn uses a LABEL_REF and there isn't a REG_LABEL_OPERAND
6555 note for it, we must rerun jump since it needs to place the note. If
6556 this is a LABEL_REF for a CODE_LABEL that isn't in the insn chain,
6557 don't do this since no REG_LABEL_OPERAND will be added. */
6558 return (GET_CODE (*rtl) == LABEL_REF
6559 && ! LABEL_REF_NONLOCAL_P (*rtl)
6560 && (!JUMP_P (insn)
6561 || !label_is_jump_target_p (XEXP (*rtl, 0), insn))
6562 && LABEL_P (XEXP (*rtl, 0))
6563 && INSN_UID (XEXP (*rtl, 0)) != 0
6564 && ! find_reg_note (insn, REG_LABEL_OPERAND, XEXP (*rtl, 0)));
6567 /* Count the number of times registers are used (not set) in X.
6568 COUNTS is an array in which we accumulate the count, INCR is how much
6569 we count each register usage.
6571 Don't count a usage of DEST, which is the SET_DEST of a SET which
6572 contains X in its SET_SRC. This is because such a SET does not
6573 modify the liveness of DEST.
6574 DEST is set to pc_rtx for a trapping insn, which means that we must count
6575 uses of a SET_DEST regardless because the insn can't be deleted here. */
6577 static void
6578 count_reg_usage (rtx x, int *counts, rtx dest, int incr)
6580 enum rtx_code code;
6581 rtx note;
6582 const char *fmt;
6583 int i, j;
6585 if (x == 0)
6586 return;
6588 switch (code = GET_CODE (x))
6590 case REG:
6591 if (x != dest)
6592 counts[REGNO (x)] += incr;
6593 return;
6595 case PC:
6596 case CC0:
6597 case CONST:
6598 case CONST_INT:
6599 case CONST_DOUBLE:
6600 case CONST_FIXED:
6601 case CONST_VECTOR:
6602 case SYMBOL_REF:
6603 case LABEL_REF:
6604 return;
6606 case CLOBBER:
6607 /* If we are clobbering a MEM, mark any registers inside the address
6608 as being used. */
6609 if (MEM_P (XEXP (x, 0)))
6610 count_reg_usage (XEXP (XEXP (x, 0), 0), counts, NULL_RTX, incr);
6611 return;
6613 case SET:
6614 /* Unless we are setting a REG, count everything in SET_DEST. */
6615 if (!REG_P (SET_DEST (x)))
6616 count_reg_usage (SET_DEST (x), counts, NULL_RTX, incr);
6617 count_reg_usage (SET_SRC (x), counts,
6618 dest ? dest : SET_DEST (x),
6619 incr);
6620 return;
6622 case DEBUG_INSN:
6623 return;
6625 case CALL_INSN:
6626 case INSN:
6627 case JUMP_INSN:
6628 /* We expect dest to be NULL_RTX here. If the insn may trap, mark
6629 this fact by setting DEST to pc_rtx. */
6630 if (insn_could_throw_p (x))
6631 dest = pc_rtx;
6632 if (code == CALL_INSN)
6633 count_reg_usage (CALL_INSN_FUNCTION_USAGE (x), counts, dest, incr);
6634 count_reg_usage (PATTERN (x), counts, dest, incr);
6636 /* Things used in a REG_EQUAL note aren't dead since loop may try to
6637 use them. */
6639 note = find_reg_equal_equiv_note (x);
6640 if (note)
6642 rtx eqv = XEXP (note, 0);
6644 if (GET_CODE (eqv) == EXPR_LIST)
6645 /* This REG_EQUAL note describes the result of a function call.
6646 Process all the arguments. */
6649 count_reg_usage (XEXP (eqv, 0), counts, dest, incr);
6650 eqv = XEXP (eqv, 1);
6652 while (eqv && GET_CODE (eqv) == EXPR_LIST);
6653 else
6654 count_reg_usage (eqv, counts, dest, incr);
6656 return;
6658 case EXPR_LIST:
6659 if (REG_NOTE_KIND (x) == REG_EQUAL
6660 || (REG_NOTE_KIND (x) != REG_NONNEG && GET_CODE (XEXP (x,0)) == USE)
6661 /* FUNCTION_USAGE expression lists may include (CLOBBER (mem /u)),
6662 involving registers in the address. */
6663 || GET_CODE (XEXP (x, 0)) == CLOBBER)
6664 count_reg_usage (XEXP (x, 0), counts, NULL_RTX, incr);
6666 count_reg_usage (XEXP (x, 1), counts, NULL_RTX, incr);
6667 return;
6669 case ASM_OPERANDS:
6670 /* If the asm is volatile, then this insn cannot be deleted,
6671 and so the inputs *must* be live. */
6672 if (MEM_VOLATILE_P (x))
6673 dest = NULL_RTX;
6674 /* Iterate over just the inputs, not the constraints as well. */
6675 for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
6676 count_reg_usage (ASM_OPERANDS_INPUT (x, i), counts, dest, incr);
6677 return;
6679 case INSN_LIST:
6680 gcc_unreachable ();
6682 default:
6683 break;
6686 fmt = GET_RTX_FORMAT (code);
6687 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6689 if (fmt[i] == 'e')
6690 count_reg_usage (XEXP (x, i), counts, dest, incr);
6691 else if (fmt[i] == 'E')
6692 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
6693 count_reg_usage (XVECEXP (x, i, j), counts, dest, incr);
6697 /* Return true if a register is dead. Can be used in for_each_rtx. */
6699 static int
6700 is_dead_reg (rtx *loc, void *data)
6702 rtx x = *loc;
6703 int *counts = (int *)data;
6705 return (REG_P (x)
6706 && REGNO (x) >= FIRST_PSEUDO_REGISTER
6707 && counts[REGNO (x)] == 0);
6710 /* Return true if set is live. */
6711 static bool
6712 set_live_p (rtx set, rtx insn ATTRIBUTE_UNUSED, /* Only used with HAVE_cc0. */
6713 int *counts)
6715 #ifdef HAVE_cc0
6716 rtx tem;
6717 #endif
6719 if (set_noop_p (set))
6722 #ifdef HAVE_cc0
6723 else if (GET_CODE (SET_DEST (set)) == CC0
6724 && !side_effects_p (SET_SRC (set))
6725 && ((tem = next_nonnote_insn (insn)) == 0
6726 || !INSN_P (tem)
6727 || !reg_referenced_p (cc0_rtx, PATTERN (tem))))
6728 return false;
6729 #endif
6730 else if (!is_dead_reg (&SET_DEST (set), counts)
6731 || side_effects_p (SET_SRC (set)))
6732 return true;
6733 return false;
6736 /* Return true if insn is live. */
6738 static bool
6739 insn_live_p (rtx insn, int *counts)
6741 int i;
6742 if (insn_could_throw_p (insn))
6743 return true;
6744 else if (GET_CODE (PATTERN (insn)) == SET)
6745 return set_live_p (PATTERN (insn), insn, counts);
6746 else if (GET_CODE (PATTERN (insn)) == PARALLEL)
6748 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
6750 rtx elt = XVECEXP (PATTERN (insn), 0, i);
6752 if (GET_CODE (elt) == SET)
6754 if (set_live_p (elt, insn, counts))
6755 return true;
6757 else if (GET_CODE (elt) != CLOBBER && GET_CODE (elt) != USE)
6758 return true;
6760 return false;
6762 else if (DEBUG_INSN_P (insn))
6764 rtx next;
6766 for (next = NEXT_INSN (insn); next; next = NEXT_INSN (next))
6767 if (NOTE_P (next))
6768 continue;
6769 else if (!DEBUG_INSN_P (next))
6770 return true;
6771 else if (INSN_VAR_LOCATION_DECL (insn) == INSN_VAR_LOCATION_DECL (next))
6772 return false;
6774 /* If this debug insn references a dead register, drop the
6775 location expression for now. ??? We could try to find the
6776 def and see if propagation is possible. */
6777 if (for_each_rtx (&INSN_VAR_LOCATION_LOC (insn), is_dead_reg, counts))
6779 INSN_VAR_LOCATION_LOC (insn) = gen_rtx_UNKNOWN_VAR_LOC ();
6780 df_insn_rescan (insn);
6783 return true;
6785 else
6786 return true;
6789 /* Scan all the insns and delete any that are dead; i.e., they store a register
6790 that is never used or they copy a register to itself.
6792 This is used to remove insns made obviously dead by cse, loop or other
6793 optimizations. It improves the heuristics in loop since it won't try to
6794 move dead invariants out of loops or make givs for dead quantities. The
6795 remaining passes of the compilation are also sped up. */
6798 delete_trivially_dead_insns (rtx insns, int nreg)
6800 int *counts;
6801 rtx insn, prev;
6802 int ndead = 0;
6804 timevar_push (TV_DELETE_TRIVIALLY_DEAD);
6805 /* First count the number of times each register is used. */
6806 counts = XCNEWVEC (int, nreg);
6807 for (insn = insns; insn; insn = NEXT_INSN (insn))
6808 if (INSN_P (insn))
6809 count_reg_usage (insn, counts, NULL_RTX, 1);
6811 /* Go from the last insn to the first and delete insns that only set unused
6812 registers or copy a register to itself. As we delete an insn, remove
6813 usage counts for registers it uses.
6815 The first jump optimization pass may leave a real insn as the last
6816 insn in the function. We must not skip that insn or we may end
6817 up deleting code that is not really dead. */
6818 for (insn = get_last_insn (); insn; insn = prev)
6820 int live_insn = 0;
6822 prev = PREV_INSN (insn);
6823 if (!INSN_P (insn))
6824 continue;
6826 live_insn = insn_live_p (insn, counts);
6828 /* If this is a dead insn, delete it and show registers in it aren't
6829 being used. */
6831 if (! live_insn && dbg_cnt (delete_trivial_dead))
6833 count_reg_usage (insn, counts, NULL_RTX, -1);
6834 delete_insn_and_edges (insn);
6835 ndead++;
6839 if (dump_file && ndead)
6840 fprintf (dump_file, "Deleted %i trivially dead insns\n",
6841 ndead);
6842 /* Clean up. */
6843 free (counts);
6844 timevar_pop (TV_DELETE_TRIVIALLY_DEAD);
6845 return ndead;
6848 /* This function is called via for_each_rtx. The argument, NEWREG, is
6849 a condition code register with the desired mode. If we are looking
6850 at the same register in a different mode, replace it with
6851 NEWREG. */
6853 static int
6854 cse_change_cc_mode (rtx *loc, void *data)
6856 struct change_cc_mode_args* args = (struct change_cc_mode_args*)data;
6858 if (*loc
6859 && REG_P (*loc)
6860 && REGNO (*loc) == REGNO (args->newreg)
6861 && GET_MODE (*loc) != GET_MODE (args->newreg))
6863 validate_change (args->insn, loc, args->newreg, 1);
6865 return -1;
6867 return 0;
6870 /* Change the mode of any reference to the register REGNO (NEWREG) to
6871 GET_MODE (NEWREG) in INSN. */
6873 static void
6874 cse_change_cc_mode_insn (rtx insn, rtx newreg)
6876 struct change_cc_mode_args args;
6877 int success;
6879 if (!INSN_P (insn))
6880 return;
6882 args.insn = insn;
6883 args.newreg = newreg;
6885 for_each_rtx (&PATTERN (insn), cse_change_cc_mode, &args);
6886 for_each_rtx (&REG_NOTES (insn), cse_change_cc_mode, &args);
6888 /* If the following assertion was triggered, there is most probably
6889 something wrong with the cc_modes_compatible back end function.
6890 CC modes only can be considered compatible if the insn - with the mode
6891 replaced by any of the compatible modes - can still be recognized. */
6892 success = apply_change_group ();
6893 gcc_assert (success);
6896 /* Change the mode of any reference to the register REGNO (NEWREG) to
6897 GET_MODE (NEWREG), starting at START. Stop before END. Stop at
6898 any instruction which modifies NEWREG. */
6900 static void
6901 cse_change_cc_mode_insns (rtx start, rtx end, rtx newreg)
6903 rtx insn;
6905 for (insn = start; insn != end; insn = NEXT_INSN (insn))
6907 if (! INSN_P (insn))
6908 continue;
6910 if (reg_set_p (newreg, insn))
6911 return;
6913 cse_change_cc_mode_insn (insn, newreg);
6917 /* BB is a basic block which finishes with CC_REG as a condition code
6918 register which is set to CC_SRC. Look through the successors of BB
6919 to find blocks which have a single predecessor (i.e., this one),
6920 and look through those blocks for an assignment to CC_REG which is
6921 equivalent to CC_SRC. CAN_CHANGE_MODE indicates whether we are
6922 permitted to change the mode of CC_SRC to a compatible mode. This
6923 returns VOIDmode if no equivalent assignments were found.
6924 Otherwise it returns the mode which CC_SRC should wind up with.
6925 ORIG_BB should be the same as BB in the outermost cse_cc_succs call,
6926 but is passed unmodified down to recursive calls in order to prevent
6927 endless recursion.
6929 The main complexity in this function is handling the mode issues.
6930 We may have more than one duplicate which we can eliminate, and we
6931 try to find a mode which will work for multiple duplicates. */
6933 static enum machine_mode
6934 cse_cc_succs (basic_block bb, basic_block orig_bb, rtx cc_reg, rtx cc_src,
6935 bool can_change_mode)
6937 bool found_equiv;
6938 enum machine_mode mode;
6939 unsigned int insn_count;
6940 edge e;
6941 rtx insns[2];
6942 enum machine_mode modes[2];
6943 rtx last_insns[2];
6944 unsigned int i;
6945 rtx newreg;
6946 edge_iterator ei;
6948 /* We expect to have two successors. Look at both before picking
6949 the final mode for the comparison. If we have more successors
6950 (i.e., some sort of table jump, although that seems unlikely),
6951 then we require all beyond the first two to use the same
6952 mode. */
6954 found_equiv = false;
6955 mode = GET_MODE (cc_src);
6956 insn_count = 0;
6957 FOR_EACH_EDGE (e, ei, bb->succs)
6959 rtx insn;
6960 rtx end;
6962 if (e->flags & EDGE_COMPLEX)
6963 continue;
6965 if (EDGE_COUNT (e->dest->preds) != 1
6966 || e->dest == EXIT_BLOCK_PTR
6967 /* Avoid endless recursion on unreachable blocks. */
6968 || e->dest == orig_bb)
6969 continue;
6971 end = NEXT_INSN (BB_END (e->dest));
6972 for (insn = BB_HEAD (e->dest); insn != end; insn = NEXT_INSN (insn))
6974 rtx set;
6976 if (! INSN_P (insn))
6977 continue;
6979 /* If CC_SRC is modified, we have to stop looking for
6980 something which uses it. */
6981 if (modified_in_p (cc_src, insn))
6982 break;
6984 /* Check whether INSN sets CC_REG to CC_SRC. */
6985 set = single_set (insn);
6986 if (set
6987 && REG_P (SET_DEST (set))
6988 && REGNO (SET_DEST (set)) == REGNO (cc_reg))
6990 bool found;
6991 enum machine_mode set_mode;
6992 enum machine_mode comp_mode;
6994 found = false;
6995 set_mode = GET_MODE (SET_SRC (set));
6996 comp_mode = set_mode;
6997 if (rtx_equal_p (cc_src, SET_SRC (set)))
6998 found = true;
6999 else if (GET_CODE (cc_src) == COMPARE
7000 && GET_CODE (SET_SRC (set)) == COMPARE
7001 && mode != set_mode
7002 && rtx_equal_p (XEXP (cc_src, 0),
7003 XEXP (SET_SRC (set), 0))
7004 && rtx_equal_p (XEXP (cc_src, 1),
7005 XEXP (SET_SRC (set), 1)))
7008 comp_mode = targetm.cc_modes_compatible (mode, set_mode);
7009 if (comp_mode != VOIDmode
7010 && (can_change_mode || comp_mode == mode))
7011 found = true;
7014 if (found)
7016 found_equiv = true;
7017 if (insn_count < ARRAY_SIZE (insns))
7019 insns[insn_count] = insn;
7020 modes[insn_count] = set_mode;
7021 last_insns[insn_count] = end;
7022 ++insn_count;
7024 if (mode != comp_mode)
7026 gcc_assert (can_change_mode);
7027 mode = comp_mode;
7029 /* The modified insn will be re-recognized later. */
7030 PUT_MODE (cc_src, mode);
7033 else
7035 if (set_mode != mode)
7037 /* We found a matching expression in the
7038 wrong mode, but we don't have room to
7039 store it in the array. Punt. This case
7040 should be rare. */
7041 break;
7043 /* INSN sets CC_REG to a value equal to CC_SRC
7044 with the right mode. We can simply delete
7045 it. */
7046 delete_insn (insn);
7049 /* We found an instruction to delete. Keep looking,
7050 in the hopes of finding a three-way jump. */
7051 continue;
7054 /* We found an instruction which sets the condition
7055 code, so don't look any farther. */
7056 break;
7059 /* If INSN sets CC_REG in some other way, don't look any
7060 farther. */
7061 if (reg_set_p (cc_reg, insn))
7062 break;
7065 /* If we fell off the bottom of the block, we can keep looking
7066 through successors. We pass CAN_CHANGE_MODE as false because
7067 we aren't prepared to handle compatibility between the
7068 further blocks and this block. */
7069 if (insn == end)
7071 enum machine_mode submode;
7073 submode = cse_cc_succs (e->dest, orig_bb, cc_reg, cc_src, false);
7074 if (submode != VOIDmode)
7076 gcc_assert (submode == mode);
7077 found_equiv = true;
7078 can_change_mode = false;
7083 if (! found_equiv)
7084 return VOIDmode;
7086 /* Now INSN_COUNT is the number of instructions we found which set
7087 CC_REG to a value equivalent to CC_SRC. The instructions are in
7088 INSNS. The modes used by those instructions are in MODES. */
7090 newreg = NULL_RTX;
7091 for (i = 0; i < insn_count; ++i)
7093 if (modes[i] != mode)
7095 /* We need to change the mode of CC_REG in INSNS[i] and
7096 subsequent instructions. */
7097 if (! newreg)
7099 if (GET_MODE (cc_reg) == mode)
7100 newreg = cc_reg;
7101 else
7102 newreg = gen_rtx_REG (mode, REGNO (cc_reg));
7104 cse_change_cc_mode_insns (NEXT_INSN (insns[i]), last_insns[i],
7105 newreg);
7108 delete_insn_and_edges (insns[i]);
7111 return mode;
7114 /* If we have a fixed condition code register (or two), walk through
7115 the instructions and try to eliminate duplicate assignments. */
7117 static void
7118 cse_condition_code_reg (void)
7120 unsigned int cc_regno_1;
7121 unsigned int cc_regno_2;
7122 rtx cc_reg_1;
7123 rtx cc_reg_2;
7124 basic_block bb;
7126 if (! targetm.fixed_condition_code_regs (&cc_regno_1, &cc_regno_2))
7127 return;
7129 cc_reg_1 = gen_rtx_REG (CCmode, cc_regno_1);
7130 if (cc_regno_2 != INVALID_REGNUM)
7131 cc_reg_2 = gen_rtx_REG (CCmode, cc_regno_2);
7132 else
7133 cc_reg_2 = NULL_RTX;
7135 FOR_EACH_BB (bb)
7137 rtx last_insn;
7138 rtx cc_reg;
7139 rtx insn;
7140 rtx cc_src_insn;
7141 rtx cc_src;
7142 enum machine_mode mode;
7143 enum machine_mode orig_mode;
7145 /* Look for blocks which end with a conditional jump based on a
7146 condition code register. Then look for the instruction which
7147 sets the condition code register. Then look through the
7148 successor blocks for instructions which set the condition
7149 code register to the same value. There are other possible
7150 uses of the condition code register, but these are by far the
7151 most common and the ones which we are most likely to be able
7152 to optimize. */
7154 last_insn = BB_END (bb);
7155 if (!JUMP_P (last_insn))
7156 continue;
7158 if (reg_referenced_p (cc_reg_1, PATTERN (last_insn)))
7159 cc_reg = cc_reg_1;
7160 else if (cc_reg_2 && reg_referenced_p (cc_reg_2, PATTERN (last_insn)))
7161 cc_reg = cc_reg_2;
7162 else
7163 continue;
7165 cc_src_insn = NULL_RTX;
7166 cc_src = NULL_RTX;
7167 for (insn = PREV_INSN (last_insn);
7168 insn && insn != PREV_INSN (BB_HEAD (bb));
7169 insn = PREV_INSN (insn))
7171 rtx set;
7173 if (! INSN_P (insn))
7174 continue;
7175 set = single_set (insn);
7176 if (set
7177 && REG_P (SET_DEST (set))
7178 && REGNO (SET_DEST (set)) == REGNO (cc_reg))
7180 cc_src_insn = insn;
7181 cc_src = SET_SRC (set);
7182 break;
7184 else if (reg_set_p (cc_reg, insn))
7185 break;
7188 if (! cc_src_insn)
7189 continue;
7191 if (modified_between_p (cc_src, cc_src_insn, NEXT_INSN (last_insn)))
7192 continue;
7194 /* Now CC_REG is a condition code register used for a
7195 conditional jump at the end of the block, and CC_SRC, in
7196 CC_SRC_INSN, is the value to which that condition code
7197 register is set, and CC_SRC is still meaningful at the end of
7198 the basic block. */
7200 orig_mode = GET_MODE (cc_src);
7201 mode = cse_cc_succs (bb, bb, cc_reg, cc_src, true);
7202 if (mode != VOIDmode)
7204 gcc_assert (mode == GET_MODE (cc_src));
7205 if (mode != orig_mode)
7207 rtx newreg = gen_rtx_REG (mode, REGNO (cc_reg));
7209 cse_change_cc_mode_insn (cc_src_insn, newreg);
7211 /* Do the same in the following insns that use the
7212 current value of CC_REG within BB. */
7213 cse_change_cc_mode_insns (NEXT_INSN (cc_src_insn),
7214 NEXT_INSN (last_insn),
7215 newreg);
7222 /* Perform common subexpression elimination. Nonzero value from
7223 `cse_main' means that jumps were simplified and some code may now
7224 be unreachable, so do jump optimization again. */
7225 static bool
7226 gate_handle_cse (void)
7228 return optimize > 0;
7231 static unsigned int
7232 rest_of_handle_cse (void)
7234 int tem;
7236 if (dump_file)
7237 dump_flow_info (dump_file, dump_flags);
7239 tem = cse_main (get_insns (), max_reg_num ());
7241 /* If we are not running more CSE passes, then we are no longer
7242 expecting CSE to be run. But always rerun it in a cheap mode. */
7243 cse_not_expected = !flag_rerun_cse_after_loop && !flag_gcse;
7245 if (tem == 2)
7247 timevar_push (TV_JUMP);
7248 rebuild_jump_labels (get_insns ());
7249 cleanup_cfg (0);
7250 timevar_pop (TV_JUMP);
7252 else if (tem == 1 || optimize > 1)
7253 cleanup_cfg (0);
7255 return 0;
7258 struct rtl_opt_pass pass_cse =
7261 RTL_PASS,
7262 "cse1", /* name */
7263 gate_handle_cse, /* gate */
7264 rest_of_handle_cse, /* execute */
7265 NULL, /* sub */
7266 NULL, /* next */
7267 0, /* static_pass_number */
7268 TV_CSE, /* tv_id */
7269 0, /* properties_required */
7270 0, /* properties_provided */
7271 0, /* properties_destroyed */
7272 0, /* todo_flags_start */
7273 TODO_df_finish | TODO_verify_rtl_sharing |
7274 TODO_dump_func |
7275 TODO_ggc_collect |
7276 TODO_verify_flow, /* todo_flags_finish */
7281 static bool
7282 gate_handle_cse2 (void)
7284 return optimize > 0 && flag_rerun_cse_after_loop;
7287 /* Run second CSE pass after loop optimizations. */
7288 static unsigned int
7289 rest_of_handle_cse2 (void)
7291 int tem;
7293 if (dump_file)
7294 dump_flow_info (dump_file, dump_flags);
7296 tem = cse_main (get_insns (), max_reg_num ());
7298 /* Run a pass to eliminate duplicated assignments to condition code
7299 registers. We have to run this after bypass_jumps, because it
7300 makes it harder for that pass to determine whether a jump can be
7301 bypassed safely. */
7302 cse_condition_code_reg ();
7304 delete_trivially_dead_insns (get_insns (), max_reg_num ());
7306 if (tem == 2)
7308 timevar_push (TV_JUMP);
7309 rebuild_jump_labels (get_insns ());
7310 cleanup_cfg (0);
7311 timevar_pop (TV_JUMP);
7313 else if (tem == 1)
7314 cleanup_cfg (0);
7316 cse_not_expected = 1;
7317 return 0;
7321 struct rtl_opt_pass pass_cse2 =
7324 RTL_PASS,
7325 "cse2", /* name */
7326 gate_handle_cse2, /* gate */
7327 rest_of_handle_cse2, /* execute */
7328 NULL, /* sub */
7329 NULL, /* next */
7330 0, /* static_pass_number */
7331 TV_CSE2, /* tv_id */
7332 0, /* properties_required */
7333 0, /* properties_provided */
7334 0, /* properties_destroyed */
7335 0, /* todo_flags_start */
7336 TODO_df_finish | TODO_verify_rtl_sharing |
7337 TODO_dump_func |
7338 TODO_ggc_collect |
7339 TODO_verify_flow /* todo_flags_finish */
7343 static bool
7344 gate_handle_cse_after_global_opts (void)
7346 return optimize > 0 && flag_rerun_cse_after_global_opts;
7349 /* Run second CSE pass after loop optimizations. */
7350 static unsigned int
7351 rest_of_handle_cse_after_global_opts (void)
7353 int save_cfj;
7354 int tem;
7356 /* We only want to do local CSE, so don't follow jumps. */
7357 save_cfj = flag_cse_follow_jumps;
7358 flag_cse_follow_jumps = 0;
7360 rebuild_jump_labels (get_insns ());
7361 tem = cse_main (get_insns (), max_reg_num ());
7362 purge_all_dead_edges ();
7363 delete_trivially_dead_insns (get_insns (), max_reg_num ());
7365 cse_not_expected = !flag_rerun_cse_after_loop;
7367 /* If cse altered any jumps, rerun jump opts to clean things up. */
7368 if (tem == 2)
7370 timevar_push (TV_JUMP);
7371 rebuild_jump_labels (get_insns ());
7372 cleanup_cfg (0);
7373 timevar_pop (TV_JUMP);
7375 else if (tem == 1)
7376 cleanup_cfg (0);
7378 flag_cse_follow_jumps = save_cfj;
7379 return 0;
7382 struct rtl_opt_pass pass_cse_after_global_opts =
7385 RTL_PASS,
7386 "cse_local", /* name */
7387 gate_handle_cse_after_global_opts, /* gate */
7388 rest_of_handle_cse_after_global_opts, /* execute */
7389 NULL, /* sub */
7390 NULL, /* next */
7391 0, /* static_pass_number */
7392 TV_CSE, /* tv_id */
7393 0, /* properties_required */
7394 0, /* properties_provided */
7395 0, /* properties_destroyed */
7396 0, /* todo_flags_start */
7397 TODO_df_finish | TODO_verify_rtl_sharing |
7398 TODO_dump_func |
7399 TODO_ggc_collect |
7400 TODO_verify_flow /* todo_flags_finish */