/* Common subexpression elimination for GNU compiler.
   Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998
   1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

#include "config.h"
/* stdio.h must precede rtl.h for FFS.  */
#include "system.h"
#include "coretypes.h"
#include "tm.h"

#include "rtl.h"
#include "tm_p.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "flags.h"
#include "real.h"
#include "insn-config.h"
#include "recog.h"
#include "function.h"
#include "expr.h"
#include "toplev.h"
#include "output.h"
#include "ggc.h"
#include "timevar.h"
#include "except.h"
#include "target.h"
#include "params.h"

/* The basic idea of common subexpression elimination is to go
   through the code, keeping a record of expressions that would
   have the same value at the current scan point, and replacing
   expressions encountered with the cheapest equivalent expression.

   It is too complicated to keep track of the different possibilities
   when control paths merge in this code; so, at each label, we forget all
   that is known and start fresh.  This can be described as processing each
   extended basic block separately.  We have a separate pass to perform
   global CSE.

   Note CSE can turn a conditional or computed jump into a nop or
   an unconditional jump.  When this occurs we arrange to run the jump
   optimizer after CSE to delete the unreachable code.

   We use two data structures to record the equivalent expressions:
   a hash table for most expressions, and a vector of "quantity
   numbers" to record equivalent (pseudo) registers.

   The use of the special data structure for registers is desirable
   because it is faster.  It is possible because register references
   contain a fairly small number, the register number, taken from
   a contiguously allocated series, and two register references are
   identical if they have the same number.  General expressions
   do not have any such thing, so the only way to retrieve the
   information recorded on an expression other than a register
   is to keep it in a hash table.

Registers and "quantity numbers":

   At the start of each basic block, all of the (hardware and pseudo)
   registers used in the function are given distinct quantity
   numbers to indicate their contents.  During scan, when the code
   copies one register into another, we copy the quantity number.
   When a register is loaded in any other way, we allocate a new
   quantity number to describe the value generated by this operation.
   `reg_qty' records what quantity a register is currently thought
   of as containing.
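
   For illustration only (hypothetical register numbers): given

	(set (reg 101) (reg 100))			;; copy: reg 101 gets reg 100's quantity
	(set (reg 102) (plus (reg 100) (const_int 4)))	;; new value: fresh quantity

   after the first insn reg_qty[101] == reg_qty[100], so either register
   can stand for the other, while reg 102 receives a new quantity number
   of its own.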

   All real quantity numbers are greater than or equal to `max_reg'.
   If register N has not been assigned a quantity, reg_qty[N] will equal N.

   Quantity numbers below `max_reg' do not exist and none of the `qty_table'
   entries should be referenced with an index below `max_reg'.

   We also maintain a bidirectional chain of registers for each
   quantity number.  The `qty_table' members `first_reg' and `last_reg',
   and `reg_eqv_table' members `next' and `prev' hold these chains.

   The first register in a chain is the one whose lifespan is least local.
   Among equals, it is the one that was seen first.
   We replace any equivalent register with that one.

   If two registers have the same quantity number, then REG expressions
   for both of them, in the quantity's `mode', must be in the hash table
   and must be in the same equivalence class.

   The converse is not true.  Since hard registers may be referenced in
   any mode, two REG expressions might be equivalent in the hash table
   but not have the same quantity number if the mode of the quantity
   for one of the registers does not match the mode of those expressions.

Constants and quantity numbers

   When a quantity has a known constant value, that value is stored
   in the appropriate qty_table `const_rtx'.  This is in addition to
   putting the constant in the hash table as is usual for non-regs.

   Whether a reg or a constant is preferred is determined by the configuration
   macro CONST_COSTS and will often depend on the constant value.  In any
   event, expressions containing constants can be simplified, by fold_rtx.

   When a quantity has a known nearly constant value (such as an address
   of a stack slot), that value is stored in the appropriate qty_table
   `const_rtx'.

   Integer constants don't have a machine mode.  However, cse
   determines the intended machine mode from the destination
   of the instruction that moves the constant.  The machine mode
   is recorded in the hash table along with the actual RTL
   constant expression so that different modes are kept separate.
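
   For example, if (const_int 4) is first moved into an SImode register
   and later into a DImode register, the constant is entered twice, once
   recorded with SImode and once with DImode, and the two entries are
   kept separate.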

Other expressions:

   To record known equivalences among expressions in general
   we use a hash table called `table'.  It has a fixed number of buckets
   that contain chains of `struct table_elt' elements for expressions.
   These chains connect the elements whose expressions have the same
   hash codes.

   Other chains through the same elements connect the elements which
   currently have equivalent values.

   Register references in an expression are canonicalized before hashing
   the expression.  This is done using `reg_qty' and qty_table `first_reg'.
   The hash code of a register reference is computed using the quantity
   number, not the register number.
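
   For illustration: once reg 100 and reg 101 share a quantity number
   (say, after a copy), (plus (reg 100) (reg 103)) and
   (plus (reg 101) (reg 103)) canonicalize to the same form and thus
   receive the same hash code, so the second lookup finds the first
   entry.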

   When the value of an expression changes, it is necessary to remove from the
   hash table not just that expression but all expressions whose values
   could be different as a result.

     1. If the value changing is in memory, except in special cases
     ANYTHING referring to memory could be changed.  That is because
     nobody knows where a pointer does not point.
     The function `invalidate_memory' removes what is necessary.

     The special cases are when the address is constant or is
     a constant plus a fixed register such as the frame pointer
     or a static chain pointer.  When such addresses are stored in,
     we can tell exactly which other such addresses must be invalidated
     due to overlap.  `invalidate' does this.
     All expressions that refer to non-constant
     memory addresses are also invalidated.  `invalidate_memory' does this.

     2. If the value changing is a register, all expressions
     containing references to that register, and only those,
     must be removed.

   Because searching the entire hash table for expressions that contain
   a register is very slow, we try to figure out when it isn't necessary.
   Precisely, this is necessary only when expressions have been
   entered in the hash table using this register, and then the value has
   changed, and then another expression wants to be added to refer to
   the register's new value.  This sequence of circumstances is rare
   within any one basic block.

   The vectors `reg_tick' and `reg_in_table' are used to detect this case.
   reg_tick[i] is incremented whenever a value is stored in register i.
   reg_in_table[i] holds -1 if no references to register i have been
   entered in the table; otherwise, it contains the value reg_tick[i] had
   when the references were entered.  If we want to enter a reference
   and reg_in_table[i] != reg_tick[i], we must scan and remove old references.
   Until we want to enter a new entry, the mere fact that the two vectors
   don't match causes the entries to be ignored if anyone tries to match them.
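
   A concrete (illustrative) sequence: suppose reg_tick[5] is 2 when an
   expression using reg 5 is entered, so reg_in_table[5] becomes 2.  A
   later store into reg 5 makes reg_tick[5] equal 3; the stale entries
   are only scanned and removed if some new expression referring to
   reg 5 is about to be entered while reg_in_table[5] != reg_tick[5].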

   Registers themselves are entered in the hash table as well as in
   the equivalent-register chains.  However, the vectors `reg_tick'
   and `reg_in_table' do not apply to expressions which are simple
   register references.  These expressions are removed from the table
   immediately when they become invalid, and this can be done even if
   we do not immediately search for all the expressions that refer to
   the register.

   A CLOBBER rtx in an instruction invalidates its operand for further
   reuse.  A CLOBBER or SET rtx whose operand is a MEM:BLK
   invalidates everything that resides in memory.

Related expressions:

   Constant expressions that differ only by an additive integer
   are called related.  When a constant expression is put in
   the table, the related expression with no constant term
   is also entered.  These are made to point at each other
   so that it is possible to find out if there exists any
   register equivalent to an expression related to a given expression.  */
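
/* An illustrative example of related expressions (not code from this
   file): entering (const (plus (symbol_ref "x") (const_int 4))) also
   enters (symbol_ref "x"), and the two elements are linked through
   `related_value'.  A later lookup of, say,
   (const (plus (symbol_ref "x") (const_int 8))) can walk that chain to
   find a register known to hold a related address; see
   use_related_value.  */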

/* One plus largest register number used in this function.  */

static int max_reg;

/* One plus largest instruction UID used in this function at time of
   cse_main call.  */

static int max_insn_uid;

/* Length of qty_table vector.  We know in advance we will not need
   a quantity number this big.  */

static int max_qty;

/* Next quantity number to be allocated.
   This is 1 + the largest number needed so far.  */

static int next_qty;

/* Per-qty information tracking.

   `first_reg' and `last_reg' track the head and tail of the
   chain of registers which currently contain this quantity.

   `mode' contains the machine mode of this quantity.

   `const_rtx' holds the rtx of the constant value of this
   quantity, if known.  A summation of the frame/arg pointer
   and a constant can also be entered here.  When this holds
   a known value, `const_insn' is the insn which stored the
   constant value.

   `comparison_{code,const,qty}' are used to track when a
   comparison between a quantity and some constant or register has
   been passed.  In such a case, we know the results of the comparison
   in case we see it again.  These members record a comparison that
   is known to be true.  `comparison_code' holds the rtx code of such
   a comparison, else it is set to UNKNOWN and the other two
   comparison members are undefined.  `comparison_const' holds
   the constant being compared against, or zero if the comparison
   is not against a constant.  `comparison_qty' holds the quantity
   being compared against when the result is known.  If the comparison
   is not with a register, `comparison_qty' is -1.  */

struct qty_table_elem
{
  rtx const_rtx;
  rtx const_insn;
  rtx comparison_const;
  int comparison_qty;
  unsigned int first_reg, last_reg;
  /* The sizes of these fields should match the sizes of the
     code and mode fields of struct rtx_def (see rtl.h).  */
  ENUM_BITFIELD(rtx_code) comparison_code : 16;
  ENUM_BITFIELD(machine_mode) mode : 8;
};

/* The table of all qtys, indexed by qty number.  */
static struct qty_table_elem *qty_table;

#ifdef HAVE_cc0
/* For machines that have a CC0, we do not record its value in the hash
   table since its use is guaranteed to be the insn immediately following
   its definition and any other insn is presumed to invalidate it.

   Instead, we store below the value last assigned to CC0.  If it should
   happen to be a constant, it is stored in preference to the actual
   assigned value.  In case it is a constant, we store the mode in which
   the constant should be interpreted.  */

static rtx prev_insn_cc0;
static enum machine_mode prev_insn_cc0_mode;

/* Previous actual insn.  0 if at first insn of basic block.  */

static rtx prev_insn;
#endif

/* Insn being scanned.  */

static rtx this_insn;

/* Index by register number, gives the number of the next (or
   previous) register in the chain of registers sharing the same
   value.

   Or -1 if this register is at the end of the chain.

   If reg_qty[N] == N, reg_eqv_table[N].next is undefined.  */

/* Per-register equivalence chain.  */
struct reg_eqv_elem
{
  int next, prev;
};

/* The table of all register equivalence chains.  */
static struct reg_eqv_elem *reg_eqv_table;

struct cse_reg_info
{
  /* Next in hash chain.  */
  struct cse_reg_info *hash_next;

  /* The next cse_reg_info structure in the free or used list.  */
  struct cse_reg_info *next;

  /* Search key.  */
  unsigned int regno;

  /* The quantity number of the register's current contents.  */
  int reg_qty;

  /* The number of times the register has been altered in the current
     basic block.  */
  int reg_tick;

  /* The REG_TICK value at which rtx's containing this register are
     valid in the hash table.  If this does not equal the current
     reg_tick value, such expressions existing in the hash table are
     invalid.  */
  int reg_in_table;

  /* The SUBREG that was set when REG_TICK was last incremented.  Set
     to -1 if the last store was to the whole register, not a subreg.  */
  unsigned int subreg_ticked;
};

/* A free list of cse_reg_info entries.  */
static struct cse_reg_info *cse_reg_info_free_list;

/* A used list of cse_reg_info entries.  */
static struct cse_reg_info *cse_reg_info_used_list;
static struct cse_reg_info *cse_reg_info_used_list_end;

/* A mapping from registers to cse_reg_info data structures.  */
#define REGHASH_SHIFT	7
#define REGHASH_SIZE	(1 << REGHASH_SHIFT)
#define REGHASH_MASK	(REGHASH_SIZE - 1)
static struct cse_reg_info *reg_hash[REGHASH_SIZE];

#define REGHASH_FN(REGNO)	\
	(((REGNO) ^ ((REGNO) >> REGHASH_SHIFT)) & REGHASH_MASK)

/* The last lookup we did into the cse_reg_info hash table.  This
   allows us to cache repeated lookups.  */
static unsigned int cached_regno;
static struct cse_reg_info *cached_cse_reg_info;

/* A HARD_REG_SET containing all the hard registers for which there is
   currently a REG expression in the hash table.  Note the difference
   from the above variables, which indicate if the REG is mentioned in some
   expression in the table.  */

static HARD_REG_SET hard_regs_in_table;

/* CUID of insn that starts the basic block currently being cse-processed.  */

static int cse_basic_block_start;

/* CUID of insn that ends the basic block currently being cse-processed.  */

static int cse_basic_block_end;

/* Vector mapping INSN_UIDs to cuids.
   The cuids are like uids but always increase monotonically.
   We use them to see whether a reg is used outside a given basic block.  */

static int *uid_cuid;

/* Highest UID in UID_CUID.  */
static int max_uid;

/* Get the cuid of an insn.  */

#define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])

/* Nonzero if this pass has made changes, and therefore it's
   worthwhile to run the garbage collector.  */

static int cse_altered;

/* Nonzero if cse has altered conditional jump insns
   in such a way that jump optimization should be redone.  */

static int cse_jumps_altered;

/* Nonzero if we put a LABEL_REF into the hash table for an INSN without a
   REG_LABEL; in that case, we have to rerun jump after CSE to put in the
   note.  */
static int recorded_label_ref;

/* canon_hash stores 1 in do_not_record
   if it notices a reference to CC0, PC, or some other volatile
   subexpression.  */

static int do_not_record;

#ifdef LOAD_EXTEND_OP

/* Scratch rtl used when looking for load-extended copy of a MEM.  */
static rtx memory_extend_rtx;
#endif

/* canon_hash stores 1 in hash_arg_in_memory
   if it notices a reference to memory within the expression being hashed.  */

static int hash_arg_in_memory;

/* The hash table contains buckets which are chains of `struct table_elt's,
   each recording one expression's information.
   That expression is in the `exp' field.

   The canon_exp field contains a canonical (from the point of view of
   alias analysis) version of the `exp' field.

   Those elements with the same hash code are chained in both directions
   through the `next_same_hash' and `prev_same_hash' fields.

   Each set of expressions with equivalent values
   is on a two-way chain through the `next_same_value'
   and `prev_same_value' fields, and all point with
   the `first_same_value' field at the first element in
   that chain.  The chain is in order of increasing cost.
   Each element's cost value is in its `cost' field.

   The `in_memory' field is nonzero for elements that
   involve any reference to memory.  These elements are removed
   whenever a write is done to an unidentified location in memory.
   To be safe, we assume that a memory address is unidentified unless
   the address is either a symbol constant or a constant plus
   the frame pointer or argument pointer.

   The `related_value' field is used to connect related expressions
   (that differ by adding an integer).
   The related expressions are chained in a circular fashion.
   `related_value' is zero for expressions for which this
   chain is not useful.

   The `cost' field stores the cost of this element's expression.
   The `regcost' field stores the value returned by approx_reg_cost for
   this element's expression.

   The `is_const' flag is set if the element is a constant (including
   a fixed address).

   The `flag' field is used as a temporary during some search routines.

   The `mode' field is usually the same as GET_MODE (`exp'), but
   if `exp' is a CONST_INT and has no machine mode then the `mode'
   field is the mode it was being used as.  Each constant is
   recorded separately for each mode it is used with.  */

struct table_elt
{
  rtx exp;
  rtx canon_exp;
  struct table_elt *next_same_hash;
  struct table_elt *prev_same_hash;
  struct table_elt *next_same_value;
  struct table_elt *prev_same_value;
  struct table_elt *first_same_value;
  struct table_elt *related_value;
  int cost;
  int regcost;
  /* The size of this field should match the size
     of the mode field of struct rtx_def (see rtl.h).  */
  ENUM_BITFIELD(machine_mode) mode : 8;
  char in_memory;
  char is_const;
  char flag;
};

/* We don't want a lot of buckets, because we rarely have very many
   things stored in the hash table, and a lot of buckets slows
   down a lot of loops that happen frequently.  */
#define HASH_SHIFT	5
#define HASH_SIZE	(1 << HASH_SHIFT)
#define HASH_MASK	(HASH_SIZE - 1)

/* Compute hash code of X in mode M.  Special-case the case where X is
   a pseudo register (hard registers may require `do_not_record' to be
   set).  */

#define HASH(X, M)	\
 ((GET_CODE (X) == REG && REGNO (X) >= FIRST_PSEUDO_REGISTER	\
   ? (((unsigned) REG << 7) + (unsigned) REG_QTY (REGNO (X)))	\
   : canon_hash (X, M)) & HASH_MASK)
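
/* Illustrative use of HASH, following the pattern used elsewhere in
   this file (e.g. merge_equiv_classes below):

	hash = HASH (exp, mode);
	elt = lookup (exp, hash, mode);

   so a pseudo register is always hashed by its quantity number, and an
   expression is found under the same bucket it was inserted into.  */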

/* Determine whether register number N is considered a fixed register for the
   purpose of approximating register costs.
   It is desirable to replace other regs with fixed regs, to reduce need for
   non-fixed hard regs.
   A reg wins if it is either the frame pointer or designated as fixed.  */
#define FIXED_REGNO_P(N)  \
  ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
   || fixed_regs[N] || global_regs[N])

/* Compute cost of X, as stored in the `cost' field of a table_elt.  Fixed
   hard registers and pointers into the frame are the cheapest with a cost
   of 0.  Next come pseudos with a cost of one and other hard registers with
   a cost of 2.  Aside from these special cases, call `rtx_cost'.  */

#define CHEAP_REGNO(N) \
  ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM	\
   || (N) == STACK_POINTER_REGNUM || (N) == ARG_POINTER_REGNUM		\
   || ((N) >= FIRST_VIRTUAL_REGISTER && (N) <= LAST_VIRTUAL_REGISTER)	\
   || ((N) < FIRST_PSEUDO_REGISTER					\
       && FIXED_REGNO_P (N) && REGNO_REG_CLASS (N) != NO_REGS))

#define COST(X) (GET_CODE (X) == REG ? 0 : notreg_cost (X, SET))
#define COST_IN(X,OUTER) (GET_CODE (X) == REG ? 0 : notreg_cost (X, OUTER))

/* Get the info associated with register N.  */

#define GET_CSE_REG_INFO(N)			\
  (((N) == cached_regno && cached_cse_reg_info)	\
   ? cached_cse_reg_info : get_cse_reg_info ((N)))

/* Get the number of times this register has been updated in this
   basic block.  */

#define REG_TICK(N) ((GET_CSE_REG_INFO (N))->reg_tick)

/* Get the point at which REG was recorded in the table.  */

#define REG_IN_TABLE(N) ((GET_CSE_REG_INFO (N))->reg_in_table)

/* Get the SUBREG set at the last increment to REG_TICK (-1 if not a
   SUBREG).  */

#define SUBREG_TICKED(N) ((GET_CSE_REG_INFO (N))->subreg_ticked)

/* Get the quantity number for REG.  */

#define REG_QTY(N) ((GET_CSE_REG_INFO (N))->reg_qty)

/* Determine if the quantity number for register N represents a valid index
   into the qty_table.  */

#define REGNO_QTY_VALID_P(N) (REG_QTY (N) != (int) (N))
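
/* For example: get_cse_reg_info initializes reg_qty to the register's
   own number and new_basic_block resets next_qty, so at the start of a
   basic block REGNO_QTY_VALID_P (N) is false for every N; it becomes
   true once make_new_qty or make_regs_eqv assigns N a real quantity
   (all of which are >= max_reg).  */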

static struct table_elt *table[HASH_SIZE];

/* Chain of `struct table_elt's made so far for this function
   but currently removed from the table.  */

static struct table_elt *free_element_chain;

/* Number of `struct table_elt' structures made so far for this function.  */

static int n_elements_made;

/* Maximum value `n_elements_made' has had so far in this compilation
   for functions previously processed.  */

static int max_elements_made;

/* Surviving equivalence class when two equivalence classes are merged
   by recording the effects of a jump in the last insn.  Zero if the
   last insn was not a conditional jump.  */

static struct table_elt *last_jump_equiv_class;

/* Set to the cost of a constant pool reference if one was found for a
   symbolic constant.  If this was found, it means we should try to
   convert constants into constant pool entries if they don't fit in
   the insn.  */

static int constant_pool_entries_cost;
static int constant_pool_entries_regcost;

/* This data describes a block that will be processed by cse_basic_block.  */

struct cse_basic_block_data
{
  /* Lowest CUID value of insns in block.  */
  int low_cuid;
  /* Highest CUID value of insns in block.  */
  int high_cuid;
  /* Total number of SETs in block.  */
  int nsets;
  /* Last insn in the block.  */
  rtx last;
  /* Size of current branch path, if any.  */
  int path_size;
  /* Current branch path, indicating which branches will be taken.  */
  struct branch_path
    {
      /* The branch insn.  */
      rtx branch;
      /* Whether it should be taken or not.  AROUND is the same as TAKEN,
	 except that it is used when the destination label is not preceded
	 by a BARRIER.  */
      enum taken {TAKEN, NOT_TAKEN, AROUND} status;
    } *path;
};

static bool fixed_base_plus_p (rtx x);
static int notreg_cost (rtx, enum rtx_code);
static int approx_reg_cost_1 (rtx *, void *);
static int approx_reg_cost (rtx);
static int preferable (int, int, int, int);
static void new_basic_block (void);
static void make_new_qty (unsigned int, enum machine_mode);
static void make_regs_eqv (unsigned int, unsigned int);
static void delete_reg_equiv (unsigned int);
static int mention_regs (rtx);
static int insert_regs (rtx, struct table_elt *, int);
static void remove_from_table (struct table_elt *, unsigned);
static struct table_elt *lookup (rtx, unsigned, enum machine_mode);
static struct table_elt *lookup_for_remove (rtx, unsigned, enum machine_mode);
static rtx lookup_as_function (rtx, enum rtx_code);
static struct table_elt *insert (rtx, struct table_elt *, unsigned,
				 enum machine_mode);
static void merge_equiv_classes (struct table_elt *, struct table_elt *);
static void invalidate (rtx, enum machine_mode);
static int cse_rtx_varies_p (rtx, int);
static void remove_invalid_refs (unsigned int);
static void remove_invalid_subreg_refs (unsigned int, unsigned int,
					enum machine_mode);
static void rehash_using_reg (rtx);
static void invalidate_memory (void);
static void invalidate_for_call (void);
static rtx use_related_value (rtx, struct table_elt *);
static unsigned canon_hash (rtx, enum machine_mode);
static unsigned canon_hash_string (const char *);
static unsigned safe_hash (rtx, enum machine_mode);
static int exp_equiv_p (rtx, rtx, int, int);
static rtx canon_reg (rtx, rtx);
static void find_best_addr (rtx, rtx *, enum machine_mode);
static enum rtx_code find_comparison_args (enum rtx_code, rtx *, rtx *,
					   enum machine_mode *,
					   enum machine_mode *);
static rtx fold_rtx (rtx, rtx);
static rtx equiv_constant (rtx);
static void record_jump_equiv (rtx, int);
static void record_jump_cond (enum rtx_code, enum machine_mode, rtx, rtx,
			      int);
static void cse_insn (rtx, rtx);
static int addr_affects_sp_p (rtx);
static void invalidate_from_clobbers (rtx);
static rtx cse_process_notes (rtx, rtx);
static void cse_around_loop (rtx);
static void invalidate_skipped_set (rtx, rtx, void *);
static void invalidate_skipped_block (rtx);
static void cse_check_loop_start (rtx, rtx, void *);
static void cse_set_around_loop (rtx, rtx, rtx);
static rtx cse_basic_block (rtx, rtx, struct branch_path *, int);
static void count_reg_usage (rtx, int *, int);
static int check_for_label_ref (rtx *, void *);
extern void dump_class (struct table_elt*);
static struct cse_reg_info * get_cse_reg_info (unsigned int);
static int check_dependence (rtx *, void *);

static void flush_hash_table (void);
static bool insn_live_p (rtx, int *);
static bool set_live_p (rtx, rtx, int *);
static bool dead_libcall_p (rtx, int *);
static int cse_change_cc_mode (rtx *, void *);
static void cse_change_cc_mode_insns (rtx, rtx, rtx);
static enum machine_mode cse_cc_succs (basic_block, rtx, rtx, bool);

/* Nonzero if X has the form (PLUS frame-pointer integer).  We check for
   virtual regs here because the simplify_*_operation routines are called
   by integrate.c, which is called before virtual register instantiation.  */

static bool
fixed_base_plus_p (rtx x)
{
  switch (GET_CODE (x))
    {
    case REG:
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx)
	return true;
      if (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])
	return true;
      if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
	  && REGNO (x) <= LAST_VIRTUAL_REGISTER)
	return true;
      return false;

    case PLUS:
      if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	return false;
      return fixed_base_plus_p (XEXP (x, 0));

    case ADDRESSOF:
      return true;

    default:
      return false;
    }
}

/* Dump the expressions in the equivalence class indicated by CLASSP.
   This function is used only for debugging.  */
void
dump_class (struct table_elt *classp)
{
  struct table_elt *elt;

  fprintf (stderr, "Equivalence chain for ");
  print_rtl (stderr, classp->exp);
  fprintf (stderr, ": \n");

  for (elt = classp->first_same_value; elt; elt = elt->next_same_value)
    {
      print_rtl (stderr, elt->exp);
      fprintf (stderr, "\n");
    }
}

/* Subroutine of approx_reg_cost; called through for_each_rtx.  */

static int
approx_reg_cost_1 (rtx *xp, void *data)
{
  rtx x = *xp;
  int *cost_p = data;

  if (x && GET_CODE (x) == REG)
    {
      unsigned int regno = REGNO (x);

      if (! CHEAP_REGNO (regno))
	{
	  if (regno < FIRST_PSEUDO_REGISTER)
	    {
	      if (SMALL_REGISTER_CLASSES)
		return 1;
	      *cost_p += 2;
	    }
	  else
	    *cost_p += 1;
	}
    }

  return 0;
}

/* Return an estimate of the cost of the registers used in an rtx.
   This is mostly the number of different REG expressions in the rtx;
   however for some exceptions like fixed registers we use a cost of
   0.  If any other hard register reference occurs, return MAX_COST.  */

static int
approx_reg_cost (rtx x)
{
  int cost = 0;

  if (for_each_rtx (&x, approx_reg_cost_1, (void *) &cost))
    return MAX_COST;

  return cost;
}

/* Return a negative value if an rtx A, whose costs are given by COST_A
   and REGCOST_A, is more desirable than an rtx B.
   Return a positive value if A is less desirable, or 0 if the two are
   equally good.  */
static int
preferable (int cost_a, int regcost_a, int cost_b, int regcost_b)
{
  /* First, get rid of cases involving expressions that are entirely
     unwanted.  */
  if (cost_a != cost_b)
    {
      if (cost_a == MAX_COST)
	return 1;
      if (cost_b == MAX_COST)
	return -1;
    }

  /* Avoid extending lifetimes of hardregs.  */
  if (regcost_a != regcost_b)
    {
      if (regcost_a == MAX_COST)
	return 1;
      if (regcost_b == MAX_COST)
	return -1;
    }

  /* Normal operation costs take precedence.  */
  if (cost_a != cost_b)
    return cost_a - cost_b;
  /* Only if these are identical consider effects on register pressure.  */
  if (regcost_a != regcost_b)
    return regcost_a - regcost_b;
  return 0;
}
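
/* For example: preferable (MAX_COST, 0, 1, 2) is positive, so an
   expression whose cost is MAX_COST loses to any other; only when the
   `cost' values tie does the comparison fall through to `regcost'.  */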

/* Internal function, to compute cost when X is not a register; called
   from COST macro to keep it simple.  */

static int
notreg_cost (rtx x, enum rtx_code outer)
{
  return ((GET_CODE (x) == SUBREG
	   && GET_CODE (SUBREG_REG (x)) == REG
	   && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
	   && GET_MODE_CLASS (GET_MODE (SUBREG_REG (x))) == MODE_INT
	   && (GET_MODE_SIZE (GET_MODE (x))
	       < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
	   && subreg_lowpart_p (x)
	   && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (GET_MODE (x)),
				     GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))))
	  ? 0
	  : rtx_cost (x, outer) * 2);
}
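
/* For example: on a target where the truncation is a no-op,
   (subreg:QI (reg:SI 100) 0) gets cost 0 here, since the low part of
   the wider register can be used directly; anything else costs twice
   its rtx_cost.  */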

/* Return an estimate of the cost of computing rtx X.
   One use is in cse, to decide which expression to keep in the hash table.
   Another is in rtl generation, to pick the cheapest way to multiply.
   Other uses like the latter are expected in the future.  */

int
rtx_cost (rtx x, enum rtx_code outer_code ATTRIBUTE_UNUSED)
{
  int i, j;
  enum rtx_code code;
  const char *fmt;
  int total;

  if (x == 0)
    return 0;

  /* Compute the default costs of certain things.
     Note that targetm.rtx_costs can override the defaults.  */

  code = GET_CODE (x);
  switch (code)
    {
    case MULT:
      total = COSTS_N_INSNS (5);
      break;
    case DIV:
    case UDIV:
    case MOD:
    case UMOD:
      total = COSTS_N_INSNS (7);
      break;
    case USE:
      /* Used in loop.c and combine.c as a marker.  */
      total = 0;
      break;
    default:
      total = COSTS_N_INSNS (1);
    }

  switch (code)
    {
    case REG:
      return 0;

    case SUBREG:
      /* If we can't tie these modes, make this expensive.  The larger
	 the mode, the more expensive it is.  */
      if (! MODES_TIEABLE_P (GET_MODE (x), GET_MODE (SUBREG_REG (x))))
	return COSTS_N_INSNS (2
			      + GET_MODE_SIZE (GET_MODE (x)) / UNITS_PER_WORD);
      break;

    default:
      if ((*targetm.rtx_costs) (x, code, outer_code, &total))
	return total;
      break;
    }

  /* Sum the costs of the sub-rtx's, plus cost of this operation,
     which is already in total.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      total += rtx_cost (XEXP (x, i), code);
    else if (fmt[i] == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
	total += rtx_cost (XVECEXP (x, i, j), code);

  return total;
}

/* Return cost of address expression X.
   Expect that X is a properly formed address reference.  */

int
address_cost (rtx x, enum machine_mode mode)
{
  /* The address_cost target hook does not deal with ADDRESSOF nodes.  But,
     during CSE, such nodes are present.  Using an ADDRESSOF node which
     refers to the address of a REG is a good thing because we can then
     turn (MEM (ADDRESSOF (REG))) into just plain REG.  */

  if (GET_CODE (x) == ADDRESSOF && REG_P (XEXP ((x), 0)))
    return -1;

  /* We may be asked for cost of various unusual addresses, such as operands
     of push instruction.  It is not worthwhile to complicate writing
     of the target hook by such cases.  */

  if (!memory_address_p (mode, x))
    return 1000;

  return (*targetm.address_cost) (x);
}

/* If the target doesn't override, compute the cost as with arithmetic.  */

int
default_address_cost (rtx x)
{
  return rtx_cost (x, MEM);
}

static struct cse_reg_info *
get_cse_reg_info (unsigned int regno)
{
  struct cse_reg_info **hash_head = &reg_hash[REGHASH_FN (regno)];
  struct cse_reg_info *p;

  for (p = *hash_head; p != NULL; p = p->hash_next)
    if (p->regno == regno)
      break;

  if (p == NULL)
    {
      /* Get a new cse_reg_info structure.  */
      if (cse_reg_info_free_list)
	{
	  p = cse_reg_info_free_list;
	  cse_reg_info_free_list = p->next;
	}
      else
	p = xmalloc (sizeof (struct cse_reg_info));

      /* Insert into hash table.  */
      p->hash_next = *hash_head;
      *hash_head = p;

      /* Initialize it.  */
      p->reg_tick = 1;
      p->reg_in_table = -1;
      p->subreg_ticked = -1;
      p->reg_qty = regno;
      p->regno = regno;
      p->next = cse_reg_info_used_list;
      cse_reg_info_used_list = p;
      if (!cse_reg_info_used_list_end)
	cse_reg_info_used_list_end = p;
    }

  /* Cache this lookup; we tend to be looking up information about the
     same register several times in a row.  */
  cached_regno = regno;
  cached_cse_reg_info = p;

  return p;
}

/* Clear the hash table and initialize each register with its own quantity,
   for a new basic block.  */

static void
new_basic_block (void)
{
  int i;

  next_qty = max_reg;

  /* Clear out hash table state for this pass.  */

  memset (reg_hash, 0, sizeof reg_hash);

  if (cse_reg_info_used_list)
    {
      cse_reg_info_used_list_end->next = cse_reg_info_free_list;
      cse_reg_info_free_list = cse_reg_info_used_list;
      cse_reg_info_used_list = cse_reg_info_used_list_end = 0;
    }
  cached_cse_reg_info = 0;

  CLEAR_HARD_REG_SET (hard_regs_in_table);

  /* The per-quantity values used to be initialized here, but it is
     much faster to initialize each as it is made in `make_new_qty'.  */

  for (i = 0; i < HASH_SIZE; i++)
    {
      struct table_elt *first;

      first = table[i];
      if (first != NULL)
	{
	  struct table_elt *last = first;

	  table[i] = NULL;

	  while (last->next_same_hash != NULL)
	    last = last->next_same_hash;

	  /* Now relink this entire hash chain into
	     the free element list.  */

	  last->next_same_hash = free_element_chain;
	  free_element_chain = first;
	}
    }

#ifdef HAVE_cc0
  prev_insn = 0;
  prev_insn_cc0 = 0;
#endif
}

/* Say that register REG contains a quantity in mode MODE not in any
   register before and initialize that quantity.  */

static void
make_new_qty (unsigned int reg, enum machine_mode mode)
{
  int q;
  struct qty_table_elem *ent;
  struct reg_eqv_elem *eqv;

  if (next_qty >= max_qty)
    abort ();

  q = REG_QTY (reg) = next_qty++;
  ent = &qty_table[q];
  ent->first_reg = reg;
  ent->last_reg = reg;
  ent->mode = mode;
  ent->const_rtx = ent->const_insn = NULL_RTX;
  ent->comparison_code = UNKNOWN;

  eqv = &reg_eqv_table[reg];
  eqv->next = eqv->prev = -1;
}

/* Make reg NEW equivalent to reg OLD.
   OLD is not changing; NEW is.  */

static void
make_regs_eqv (unsigned int new, unsigned int old)
{
  unsigned int lastr, firstr;
  int q = REG_QTY (old);
  struct qty_table_elem *ent;

  ent = &qty_table[q];

  /* Nothing should become eqv until it has a "non-invalid" qty number.  */
  if (! REGNO_QTY_VALID_P (old))
    abort ();

  REG_QTY (new) = q;
  firstr = ent->first_reg;
  lastr = ent->last_reg;

  /* Prefer fixed hard registers to anything.  Prefer pseudo regs to other
     hard regs.  Among pseudos, if NEW will live longer than any other reg
     of the same qty, and that is beyond the current basic block,
     make it the new canonical replacement for this qty.  */
  if (! (firstr < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (firstr))
      /* Certain fixed registers might be of the class NO_REGS.  This means
	 that not only can they not be allocated by the compiler, but
	 they cannot be used in substitutions or canonicalizations
	 either.  */
      && (new >= FIRST_PSEUDO_REGISTER || REGNO_REG_CLASS (new) != NO_REGS)
      && ((new < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (new))
	  || (new >= FIRST_PSEUDO_REGISTER
	      && (firstr < FIRST_PSEUDO_REGISTER
		  || ((uid_cuid[REGNO_LAST_UID (new)] > cse_basic_block_end
		       || (uid_cuid[REGNO_FIRST_UID (new)]
			   < cse_basic_block_start))
		      && (uid_cuid[REGNO_LAST_UID (new)]
			  > uid_cuid[REGNO_LAST_UID (firstr)]))))))
    {
      reg_eqv_table[firstr].prev = new;
      reg_eqv_table[new].next = firstr;
      reg_eqv_table[new].prev = -1;
      ent->first_reg = new;
    }
  else
    {
      /* If NEW is a hard reg (known to be non-fixed), insert at end.
	 Otherwise, insert before any non-fixed hard regs that are at the
	 end.  Registers of class NO_REGS cannot be used as an
	 equivalent for anything.  */
      while (lastr < FIRST_PSEUDO_REGISTER && reg_eqv_table[lastr].prev >= 0
	     && (REGNO_REG_CLASS (lastr) == NO_REGS || ! FIXED_REGNO_P (lastr))
	     && new >= FIRST_PSEUDO_REGISTER)
	lastr = reg_eqv_table[lastr].prev;
      reg_eqv_table[new].next = reg_eqv_table[lastr].next;
      if (reg_eqv_table[lastr].next >= 0)
	reg_eqv_table[reg_eqv_table[lastr].next].prev = new;
      else
	qty_table[q].last_reg = new;
      reg_eqv_table[lastr].next = new;
      reg_eqv_table[new].prev = lastr;
    }
}

/* Remove REG from its equivalence class.  */

static void
delete_reg_equiv (unsigned int reg)
{
  struct qty_table_elem *ent;
  int q = REG_QTY (reg);
  int p, n;

  /* If invalid, do nothing.  */
  if (q == (int) reg)
    return;

  ent = &qty_table[q];

  p = reg_eqv_table[reg].prev;
  n = reg_eqv_table[reg].next;

  if (n != -1)
    reg_eqv_table[n].prev = p;
  else
    ent->last_reg = p;
  if (p != -1)
    reg_eqv_table[p].next = n;
  else
    ent->first_reg = n;

  REG_QTY (reg) = reg;
}

/* Remove any invalid expressions from the hash table
   that refer to any of the registers contained in expression X.

   Make sure that newly inserted references to those registers
   as subexpressions will be considered valid.

   mention_regs is not called when a register itself
   is being stored in the table.

   Return 1 if we have done something that may have changed the hash code
   of X.  */

static int
mention_regs (rtx x)
{
  enum rtx_code code;
  int i, j;
  const char *fmt;
  int changed = 0;

  if (x == 0)
    return 0;

  code = GET_CODE (x);
  if (code == REG)
    {
      unsigned int regno = REGNO (x);
      unsigned int endregno
	= regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
		   : hard_regno_nregs[regno][GET_MODE (x)]);
      unsigned int i;

      for (i = regno; i < endregno; i++)
	{
	  if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
	    remove_invalid_refs (i);

	  REG_IN_TABLE (i) = REG_TICK (i);
	  SUBREG_TICKED (i) = -1;
	}

      return 0;
    }

  /* If this is a SUBREG, we don't want to discard other SUBREGs of the same
     pseudo if they don't use overlapping words.  We handle only pseudos
     here for simplicity.  */
  if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == REG
      && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER)
    {
      unsigned int i = REGNO (SUBREG_REG (x));

      if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
	{
	  /* If REG_IN_TABLE (i) differs from REG_TICK (i) by one, and
	     the last store to this register really stored into this
	     subreg, then remove the memory of this subreg.
	     Otherwise, remove any memory of the entire register and
	     all its subregs from the table.  */
	  if (REG_TICK (i) - REG_IN_TABLE (i) > 1
	      || SUBREG_TICKED (i) != REGNO (SUBREG_REG (x)))
	    remove_invalid_refs (i);
	  else
	    remove_invalid_subreg_refs (i, SUBREG_BYTE (x), GET_MODE (x));
	}

      REG_IN_TABLE (i) = REG_TICK (i);
      SUBREG_TICKED (i) = REGNO (SUBREG_REG (x));
      return 0;
    }

  /* If X is a comparison or a COMPARE and either operand is a register
     that does not have a quantity, give it one.  This is so that a later
     call to record_jump_equiv won't cause X to be assigned a different
     hash code and not found in the table after that call.

     It is not necessary to do this here, since rehash_using_reg can
     fix up the table later, but doing this here eliminates the need to
     call that expensive function in the most common case where the only
     use of the register is in the comparison.  */

  if (code == COMPARE || COMPARISON_P (x))
    {
      if (GET_CODE (XEXP (x, 0)) == REG
	  && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
	if (insert_regs (XEXP (x, 0), NULL, 0))
	  {
	    rehash_using_reg (XEXP (x, 0));
	    changed = 1;
	  }

      if (GET_CODE (XEXP (x, 1)) == REG
	  && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
	if (insert_regs (XEXP (x, 1), NULL, 0))
	  {
	    rehash_using_reg (XEXP (x, 1));
	    changed = 1;
	  }
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      changed |= mention_regs (XEXP (x, i));
    else if (fmt[i] == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
	changed |= mention_regs (XVECEXP (x, i, j));

  return changed;
}

/* Update the register quantities for inserting X into the hash table
   with a value equivalent to CLASSP.
   (If the class does not contain a REG, it is irrelevant.)
   If MODIFIED is nonzero, X is a destination; it is being modified.
   Note that delete_reg_equiv should be called on a register
   before insert_regs is done on that register with MODIFIED != 0.

   Nonzero value means that elements of reg_qty have changed
   so X's hash code may be different.  */

static int
insert_regs (rtx x, struct table_elt *classp, int modified)
{
  if (GET_CODE (x) == REG)
    {
      unsigned int regno = REGNO (x);
      int qty_valid;

      /* If REGNO is in the equivalence table already but is of the
	 wrong mode for that equivalence, don't do anything here.  */

      qty_valid = REGNO_QTY_VALID_P (regno);
      if (qty_valid)
	{
	  struct qty_table_elem *ent = &qty_table[REG_QTY (regno)];

	  if (ent->mode != GET_MODE (x))
	    return 0;
	}

      if (modified || ! qty_valid)
	{
	  if (classp)
	    for (classp = classp->first_same_value;
		 classp != 0;
		 classp = classp->next_same_value)
	      if (GET_CODE (classp->exp) == REG
		  && GET_MODE (classp->exp) == GET_MODE (x))
		{
		  make_regs_eqv (regno, REGNO (classp->exp));
		  return 1;
		}

	  /* Mention_regs for a SUBREG checks if REG_TICK is exactly one larger
	     than REG_IN_TABLE to find out if there was only a single preceding
	     invalidation - for the SUBREG - or another one, which would be
	     for the full register.  However, if we find here that REG_TICK
	     indicates that the register is invalid, it means that it has
	     been invalidated in a separate operation.  The SUBREG might be used
	     now (then this is a recursive call), or we might use the full REG
	     now and a SUBREG of it later.  So bump up REG_TICK so that
	     mention_regs will do the right thing.  */
	  if (! modified
	      && REG_IN_TABLE (regno) >= 0
	      && REG_TICK (regno) == REG_IN_TABLE (regno) + 1)
	    REG_TICK (regno)++;
	  make_new_qty (regno, GET_MODE (x));
	  return 1;
	}

      return 0;
    }

  /* If X is a SUBREG, we will likely be inserting the inner register in the
     table.  If that register doesn't have an assigned quantity number at
     this point but does later, the insertion that we will be doing now will
     not be accessible because its hash code will have changed.  So assign
     a quantity number now.  */

  else if (GET_CODE (x) == SUBREG && GET_CODE (SUBREG_REG (x)) == REG
	   && ! REGNO_QTY_VALID_P (REGNO (SUBREG_REG (x))))
    {
      insert_regs (SUBREG_REG (x), NULL, 0);
      mention_regs (x);
      return 1;
    }
  else
    return mention_regs (x);
}

/* Look in or update the hash table.  */

/* Remove table element ELT from use in the table.
   HASH is its hash code, made using the HASH macro.
   It's an argument because often that is known in advance
   and we save much time not recomputing it.  */

static void
remove_from_table (struct table_elt *elt, unsigned int hash)
{
  if (elt == 0)
    return;

  /* Mark this element as removed.  See cse_insn.  */
  elt->first_same_value = 0;

  /* Remove the table element from its equivalence class.  */

  {
    struct table_elt *prev = elt->prev_same_value;
    struct table_elt *next = elt->next_same_value;

    if (next)
      next->prev_same_value = prev;

    if (prev)
      prev->next_same_value = next;
    else
      {
	struct table_elt *newfirst = next;
	while (next)
	  {
	    next->first_same_value = newfirst;
	    next = next->next_same_value;
	  }
      }
  }

  /* Remove the table element from its hash bucket.  */

  {
    struct table_elt *prev = elt->prev_same_hash;
    struct table_elt *next = elt->next_same_hash;

    if (next)
      next->prev_same_hash = prev;

    if (prev)
      prev->next_same_hash = next;
    else if (table[hash] == elt)
      table[hash] = next;
    else
      {
	/* This entry is not in the proper hash bucket.  This can happen
	   when two classes were merged by `merge_equiv_classes'.  Search
	   for the hash bucket that it heads.  This happens only very
	   rarely, so the cost is acceptable.  */
	for (hash = 0; hash < HASH_SIZE; hash++)
	  if (table[hash] == elt)
	    table[hash] = next;
      }
  }

  /* Remove the table element from its related-value circular chain.  */

  if (elt->related_value != 0 && elt->related_value != elt)
    {
      struct table_elt *p = elt->related_value;

      while (p->related_value != elt)
	p = p->related_value;
      p->related_value = elt->related_value;
      if (p->related_value == p)
	p->related_value = 0;
    }

  /* Now add it to the free element chain.  */
  elt->next_same_hash = free_element_chain;
  free_element_chain = elt;
}

/* Look up X in the hash table and return its table element,
   or 0 if X is not in the table.

   MODE is the machine-mode of X, or if X is an integer constant
   with VOIDmode then MODE is the mode with which X will be used.

   Here we are satisfied to find an expression whose tree structure
   looks like X.  */

static struct table_elt *
lookup (rtx x, unsigned int hash, enum machine_mode mode)
{
  struct table_elt *p;

  for (p = table[hash]; p; p = p->next_same_hash)
    if (mode == p->mode && ((x == p->exp && GET_CODE (x) == REG)
			    || exp_equiv_p (x, p->exp, GET_CODE (x) != REG, 0)))
      return p;

  return 0;
}

/* Like `lookup' but don't care whether the table element uses invalid regs.
   Also ignore discrepancies in the machine mode of a register.  */

static struct table_elt *
lookup_for_remove (rtx x, unsigned int hash, enum machine_mode mode)
{
  struct table_elt *p;

  if (GET_CODE (x) == REG)
    {
      unsigned int regno = REGNO (x);

      /* Don't check the machine mode when comparing registers;
	 invalidating (REG:SI 0) also invalidates (REG:DF 0).  */
      for (p = table[hash]; p; p = p->next_same_hash)
	if (GET_CODE (p->exp) == REG
	    && REGNO (p->exp) == regno)
	  return p;
    }
  else
    {
      for (p = table[hash]; p; p = p->next_same_hash)
	if (mode == p->mode && (x == p->exp || exp_equiv_p (x, p->exp, 0, 0)))
	  return p;
    }

  return 0;
}

/* Look for an expression equivalent to X and with code CODE.
   If one is found, return that expression.  */

static rtx
lookup_as_function (rtx x, enum rtx_code code)
{
  struct table_elt *p
    = lookup (x, safe_hash (x, VOIDmode) & HASH_MASK, GET_MODE (x));

  /* If we are looking for a CONST_INT, the mode doesn't really matter, as
     long as we are narrowing.  So if we looked in vain for a mode narrower
     than word_mode before, look for word_mode now.  */
  if (p == 0 && code == CONST_INT
      && GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (word_mode))
    {
      x = copy_rtx (x);
      PUT_MODE (x, word_mode);
      p = lookup (x, safe_hash (x, VOIDmode) & HASH_MASK, word_mode);
    }

  if (p == 0)
    return 0;

  for (p = p->first_same_value; p; p = p->next_same_value)
    if (GET_CODE (p->exp) == code
	/* Make sure this is a valid entry in the table.  */
	&& exp_equiv_p (p->exp, p->exp, 1, 0))
      return p->exp;

  return 0;
}

/* Insert X in the hash table, assuming HASH is its hash code
   and CLASSP is an element of the class it should go in
   (or 0 if a new class should be made).
   It is inserted at the proper position to keep the class in
   the order cheapest first.

   MODE is the machine-mode of X, or if X is an integer constant
   with VOIDmode then MODE is the mode with which X will be used.

   For elements of equal cheapness, the most recent one
   goes in front, except that the first element in the list
   remains first unless a cheaper element is added.  The order of
   pseudo-registers does not matter, as canon_reg will be called to
   find the cheapest when a register is retrieved from the table.

   The in_memory field in the hash table element is set to 0.
   The caller must set it nonzero if appropriate.

   You should call insert_regs (X, CLASSP, MODIFY) before calling here,
   and if insert_regs returns a nonzero value
   you must then recompute its hash code before calling here.

   If necessary, update table showing constant values of quantities.  */

#define CHEAPER(X, Y) \
 (preferable ((X)->cost, (X)->regcost, (Y)->cost, (Y)->regcost) < 0)

static struct table_elt *
insert (rtx x, struct table_elt *classp, unsigned int hash, enum machine_mode mode)
{
  struct table_elt *elt;

  /* If X is a register and we haven't made a quantity for it,
     something is wrong.  */
  if (GET_CODE (x) == REG && ! REGNO_QTY_VALID_P (REGNO (x)))
    abort ();

  /* If X is a hard register, show it is being put in the table.  */
  if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
    {
      unsigned int regno = REGNO (x);
      unsigned int endregno = regno + hard_regno_nregs[regno][GET_MODE (x)];
      unsigned int i;

      for (i = regno; i < endregno; i++)
	SET_HARD_REG_BIT (hard_regs_in_table, i);
    }

  /* Put an element for X into the right hash bucket.  */

  elt = free_element_chain;
  if (elt)
    free_element_chain = elt->next_same_hash;
  else
    {
      n_elements_made++;
      elt = xmalloc (sizeof (struct table_elt));
    }

  elt->exp = x;
  elt->canon_exp = NULL_RTX;
  elt->cost = COST (x);
  elt->regcost = approx_reg_cost (x);
  elt->next_same_value = 0;
  elt->prev_same_value = 0;
  elt->next_same_hash = table[hash];
  elt->prev_same_hash = 0;
  elt->related_value = 0;
  elt->in_memory = 0;
  elt->mode = mode;
  elt->is_const = (CONSTANT_P (x)
		   /* GNU C++ takes advantage of this for `this'
		      (and other const values).  */
		   || (GET_CODE (x) == REG
		       && RTX_UNCHANGING_P (x)
		       && REGNO (x) >= FIRST_PSEUDO_REGISTER)
		   || fixed_base_plus_p (x));

  if (table[hash])
    table[hash]->prev_same_hash = elt;
  table[hash] = elt;

  /* Put it into the proper value-class.  */
  if (classp)
    {
      classp = classp->first_same_value;
      if (CHEAPER (elt, classp))
	/* Insert at the head of the class.  */
	{
	  struct table_elt *p;
	  elt->next_same_value = classp;
	  classp->prev_same_value = elt;
	  elt->first_same_value = elt;

	  for (p = classp; p; p = p->next_same_value)
	    p->first_same_value = elt;
	}
      else
	{
	  /* Insert not at head of the class.  */
	  /* Put it after the last element cheaper than X.  */
	  struct table_elt *p, *next;

	  for (p = classp; (next = p->next_same_value) && CHEAPER (next, elt);
	       p = next);

	  /* Put it after P and before NEXT.  */
	  elt->next_same_value = next;
	  if (next)
	    next->prev_same_value = elt;

	  elt->prev_same_value = p;
	  p->next_same_value = elt;
	  elt->first_same_value = classp;
	}
    }
  else
    elt->first_same_value = elt;

  /* If this is a constant being set equivalent to a register or a register
     being set equivalent to a constant, note the constant equivalence.

     If this is a constant, it cannot be equivalent to a different constant,
     and a constant is the only thing that can be cheaper than a register.  So
     we know the register is the head of the class (before the constant was
     inserted).

     If this is a register that is not already known equivalent to a
     constant, we must check the entire class.

     If this is a register that is already known equivalent to an insn,
     update the qtys `const_insn' to show that `this_insn' is the latest
     insn making that quantity equivalent to the constant.  */

  if (elt->is_const && classp && GET_CODE (classp->exp) == REG
      && GET_CODE (x) != REG)
    {
      int exp_q = REG_QTY (REGNO (classp->exp));
      struct qty_table_elem *exp_ent = &qty_table[exp_q];

      exp_ent->const_rtx = gen_lowpart (exp_ent->mode, x);
      exp_ent->const_insn = this_insn;
    }

  else if (GET_CODE (x) == REG
	   && classp
	   && ! qty_table[REG_QTY (REGNO (x))].const_rtx
	   && ! elt->is_const)
    {
      struct table_elt *p;

      for (p = classp; p != 0; p = p->next_same_value)
	{
	  if (p->is_const && GET_CODE (p->exp) != REG)
	    {
	      int x_q = REG_QTY (REGNO (x));
	      struct qty_table_elem *x_ent = &qty_table[x_q];

	      x_ent->const_rtx
		= gen_lowpart (GET_MODE (x), p->exp);
	      x_ent->const_insn = this_insn;
	      break;
	    }
	}
    }

  else if (GET_CODE (x) == REG
	   && qty_table[REG_QTY (REGNO (x))].const_rtx
	   && GET_MODE (x) == qty_table[REG_QTY (REGNO (x))].mode)
    qty_table[REG_QTY (REGNO (x))].const_insn = this_insn;

  /* If this is a constant with symbolic value,
     and it has a term with an explicit integer value,
     link it up with related expressions.  */
  if (GET_CODE (x) == CONST)
    {
      rtx subexp = get_related_value (x);
      unsigned subhash;
      struct table_elt *subelt, *subelt_prev;

      if (subexp != 0)
	{
	  /* Get the integer-free subexpression in the hash table.  */
	  subhash = safe_hash (subexp, mode) & HASH_MASK;
	  subelt = lookup (subexp, subhash, mode);
	  if (subelt == 0)
	    subelt = insert (subexp, NULL, subhash, mode);
	  /* Initialize SUBELT's circular chain if it has none.  */
	  if (subelt->related_value == 0)
	    subelt->related_value = subelt;
	  /* Find the element in the circular chain that precedes SUBELT.  */
	  subelt_prev = subelt;
	  while (subelt_prev->related_value != subelt)
	    subelt_prev = subelt_prev->related_value;
	  /* Put new ELT into SUBELT's circular chain just before SUBELT.
	     This way the element that follows SUBELT is the oldest one.  */
	  elt->related_value = subelt_prev->related_value;
	  subelt_prev->related_value = elt;
	}
    }

  return elt;
}

/* Given two equivalence classes, CLASS1 and CLASS2, put all the entries from
   CLASS2 into CLASS1.  This is done when we have reached an insn which makes
   the two classes equivalent.

   CLASS1 will be the surviving class; CLASS2 should not be used after this
   call.

   Any invalid entries in CLASS2 will not be copied.  */

static void
merge_equiv_classes (struct table_elt *class1, struct table_elt *class2)
{
  struct table_elt *elt, *next, *new;

  /* Ensure we start with the head of the classes.  */
  class1 = class1->first_same_value;
  class2 = class2->first_same_value;

  /* If they were already equal, forget it.  */
  if (class1 == class2)
    return;

  for (elt = class2; elt; elt = next)
    {
      unsigned int hash;
      rtx exp = elt->exp;
      enum machine_mode mode = elt->mode;

      next = elt->next_same_value;

      /* Remove old entry, make a new one in CLASS1's class.
	 Don't do this for invalid entries as we cannot find their
	 hash code (it also isn't necessary).  */
      if (GET_CODE (exp) == REG || exp_equiv_p (exp, exp, 1, 0))
	{
	  hash_arg_in_memory = 0;
	  hash = HASH (exp, mode);

	  if (GET_CODE (exp) == REG)
	    delete_reg_equiv (REGNO (exp));

	  remove_from_table (elt, hash);

	  if (insert_regs (exp, class1, 0))
	    {
	      rehash_using_reg (exp);
	      hash = HASH (exp, mode);
	    }
	  new = insert (exp, class1, hash, mode);
	  new->in_memory = hash_arg_in_memory;
	}
    }
}

/* Flush the entire hash table.  */

static void
flush_hash_table (void)
{
  int i;
  struct table_elt *p;

  for (i = 0; i < HASH_SIZE; i++)
    for (p = table[i]; p; p = table[i])
      {
	/* Note that invalidate can remove elements
	   after P in the current hash chain.  */
	if (GET_CODE (p->exp) == REG)
	  invalidate (p->exp, p->mode);
	else
	  remove_from_table (p, i);
      }
}
1769 /* Function called for each rtx to check whether a true dependence exists.  */

1770 struct check_dependence_data
1772 enum machine_mode mode;
1773 rtx exp;
1774 rtx addr;
1777 static int
1778 check_dependence (rtx *x, void *data)
1780 struct check_dependence_data *d = (struct check_dependence_data *) data;
1781 if (*x && GET_CODE (*x) == MEM)
1782 return canon_true_dependence (d->exp, d->mode, d->addr, *x,
1783 cse_rtx_varies_p);
1784 else
1785 return 0;
1788 /* Remove from the hash table, or mark as invalid, all expressions whose
1789 values could be altered by storing in X. X is a register, a subreg, or
1790 a memory reference with nonvarying address (because, when a memory
1791 reference with a varying address is stored in, all memory references are
1792 removed by invalidate_memory so specific invalidation is superfluous).
1793 FULL_MODE, if not VOIDmode, indicates that this much should be
1794 invalidated instead of just the amount indicated by the mode of X. This
1795 is only used for bitfield stores into memory.
1797 A nonvarying address may be just a register or just a symbol reference,
1798 or it may be either of those plus a numeric offset. */
1800 static void
1801 invalidate (rtx x, enum machine_mode full_mode)
1803 int i;
1804 struct table_elt *p;
1805 rtx addr;
1807 switch (GET_CODE (x))
1809 case REG:
1811 /* If X is a register, dependencies on its contents are recorded
1812 through the qty number mechanism. Just change the qty number of
1813 the register, mark it as invalid for expressions that refer to it,
1814 and remove it itself. */
1815 unsigned int regno = REGNO (x);
1816 unsigned int hash = HASH (x, GET_MODE (x));
1818 /* Remove REGNO from any quantity list it might be on and indicate
1819 that its value might have changed. If it is a pseudo, remove its
1820 entry from the hash table.
1822 For a hard register, we do the first two actions above for any
1823 additional hard registers corresponding to X. Then, if any of these
1824 registers are in the table, we must remove any REG entries that
1825 overlap these registers. */
1827 delete_reg_equiv (regno);
1828 REG_TICK (regno)++;
1829 SUBREG_TICKED (regno) = -1;
1831 if (regno >= FIRST_PSEUDO_REGISTER)
1833 /* Because a register can be referenced in more than one mode,
1834 we might have to remove more than one table entry. */
1835 struct table_elt *elt;
1837 while ((elt = lookup_for_remove (x, hash, GET_MODE (x))))
1838 remove_from_table (elt, hash);
1840 else
1842 HOST_WIDE_INT in_table
1843 = TEST_HARD_REG_BIT (hard_regs_in_table, regno);
1844 unsigned int endregno
1845 = regno + hard_regno_nregs[regno][GET_MODE (x)];
1846 unsigned int tregno, tendregno, rn;
1847 struct table_elt *p, *next;
1849 CLEAR_HARD_REG_BIT (hard_regs_in_table, regno);
1851 for (rn = regno + 1; rn < endregno; rn++)
1853 in_table |= TEST_HARD_REG_BIT (hard_regs_in_table, rn);
1854 CLEAR_HARD_REG_BIT (hard_regs_in_table, rn);
1855 delete_reg_equiv (rn);
1856 REG_TICK (rn)++;
1857 SUBREG_TICKED (rn) = -1;
1860 if (in_table)
1861 for (hash = 0; hash < HASH_SIZE; hash++)
1862 for (p = table[hash]; p; p = next)
1864 next = p->next_same_hash;
1866 if (GET_CODE (p->exp) != REG
1867 || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
1868 continue;
1870 tregno = REGNO (p->exp);
1871 tendregno
1872 = tregno + hard_regno_nregs[tregno][GET_MODE (p->exp)];
1873 if (tendregno > regno && tregno < endregno)
1874 remove_from_table (p, hash);
1878 return;
1880 case SUBREG:
1881 invalidate (SUBREG_REG (x), VOIDmode);
1882 return;
1884 case PARALLEL:
1885 for (i = XVECLEN (x, 0) - 1; i >= 0; --i)
1886 invalidate (XVECEXP (x, 0, i), VOIDmode);
1887 return;
1889 case EXPR_LIST:
1890 /* This is part of a disjoint return value; extract the location in
1891 question ignoring the offset. */
1892 invalidate (XEXP (x, 0), VOIDmode);
1893 return;
1895 case MEM:
1896 addr = canon_rtx (get_addr (XEXP (x, 0)));
1897 /* Calculate the canonical version of X here so that
1898 true_dependence doesn't generate new RTL for X on each call. */
1899 x = canon_rtx (x);
1901 /* Remove all hash table elements that refer to overlapping pieces of
1902 memory. */
1903 if (full_mode == VOIDmode)
1904 full_mode = GET_MODE (x);
1906 for (i = 0; i < HASH_SIZE; i++)
1908 struct table_elt *next;
1910 for (p = table[i]; p; p = next)
1912 next = p->next_same_hash;
1913 if (p->in_memory)
1915 struct check_dependence_data d;
1917 /* Just canonicalize the expression once;
1918 otherwise each time we call invalidate
1919 true_dependence will canonicalize the
1920 expression again. */
1921 if (!p->canon_exp)
1922 p->canon_exp = canon_rtx (p->exp);
1923 d.exp = x;
1924 d.addr = addr;
1925 d.mode = full_mode;
1926 if (for_each_rtx (&p->canon_exp, check_dependence, &d))
1927 remove_from_table (p, i);
1931 return;
1933 default:
1934 abort ();
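/* Illustration of the REG case (hypothetical): invalidate ((reg:SI 100),
   VOIDmode) deletes the register's quantity equivalence, bumps
   REG_TICK (100) so that entries mentioning it now fail the
   REG_IN_TABLE == REG_TICK validity check, and, since 100 is a pseudo,
   also removes the register's own entries from the table directly.  */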
1938 /* Remove all expressions that refer to register REGNO,
1939 since they are already invalid, and we are about to
1940 mark that register valid again and don't want the old
1941 expressions to reappear as valid. */
1943 static void
1944 remove_invalid_refs (unsigned int regno)
1946 unsigned int i;
1947 struct table_elt *p, *next;
1949 for (i = 0; i < HASH_SIZE; i++)
1950 for (p = table[i]; p; p = next)
1952 next = p->next_same_hash;
1953 if (GET_CODE (p->exp) != REG
1954 && refers_to_regno_p (regno, regno + 1, p->exp, (rtx *) 0))
1955 remove_from_table (p, i);
1959 /* Likewise for a subreg with subreg_reg REGNO, subreg_byte OFFSET,
1960 and mode MODE. */
1961 static void
1962 remove_invalid_subreg_refs (unsigned int regno, unsigned int offset,
1963 enum machine_mode mode)
1965 unsigned int i;
1966 struct table_elt *p, *next;
1967 unsigned int end = offset + (GET_MODE_SIZE (mode) - 1);
1969 for (i = 0; i < HASH_SIZE; i++)
1970 for (p = table[i]; p; p = next)
1972 rtx exp = p->exp;
1973 next = p->next_same_hash;
1975 if (GET_CODE (exp) != REG
1976 && (GET_CODE (exp) != SUBREG
1977 || GET_CODE (SUBREG_REG (exp)) != REG
1978 || REGNO (SUBREG_REG (exp)) != regno
1979 || (((SUBREG_BYTE (exp)
1980 + (GET_MODE_SIZE (GET_MODE (exp)) - 1)) >= offset)
1981 && SUBREG_BYTE (exp) <= end))
1982 && refers_to_regno_p (regno, regno + 1, p->exp, (rtx *) 0))
1983 remove_from_table (p, i);
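/* Illustration (hypothetical): called with REGNO 100, OFFSET 0 and MODE
   SImode -- say only bytes 0..3 of (reg:DI 100) changed -- END is 3.
   An entry such as (subreg:SI (reg:DI 100) 4) covers bytes 4..7, does
   not overlap the invalidated range, and survives; any other expression
   referring to register 100 is removed.  */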
1987 /* Recompute the hash codes of any valid entries in the hash table that
1988 reference X, if X is a register, or SUBREG_REG (X) if X is a SUBREG.
1990 This is called when we make a jump equivalence. */
1992 static void
1993 rehash_using_reg (rtx x)
1995 unsigned int i;
1996 struct table_elt *p, *next;
1997 unsigned hash;
1999 if (GET_CODE (x) == SUBREG)
2000 x = SUBREG_REG (x);
2002 /* If X is not a register or if the register is known not to be in any
2003 valid entries in the table, we have no work to do. */
2005 if (GET_CODE (x) != REG
2006 || REG_IN_TABLE (REGNO (x)) < 0
2007 || REG_IN_TABLE (REGNO (x)) != REG_TICK (REGNO (x)))
2008 return;
2010 /* Scan all hash chains looking for valid entries that mention X.
2011 If we find one and it is in the wrong hash chain, move it. We can skip
2012 objects that are registers, since they are handled specially. */
2014 for (i = 0; i < HASH_SIZE; i++)
2015 for (p = table[i]; p; p = next)
2017 next = p->next_same_hash;
2018 if (GET_CODE (p->exp) != REG && reg_mentioned_p (x, p->exp)
2019 && exp_equiv_p (p->exp, p->exp, 1, 0)
2020 && i != (hash = safe_hash (p->exp, p->mode) & HASH_MASK))
2022 if (p->next_same_hash)
2023 p->next_same_hash->prev_same_hash = p->prev_same_hash;
2025 if (p->prev_same_hash)
2026 p->prev_same_hash->next_same_hash = p->next_same_hash;
2027 else
2028 table[i] = p->next_same_hash;
2030 p->next_same_hash = table[hash];
2031 p->prev_same_hash = 0;
2032 if (table[hash])
2033 table[hash]->prev_same_hash = p;
2034 table[hash] = p;
2039 /* Remove from the hash table any expressions that are call-clobbered
2040 registers.  Also update their TICK values.  */
2042 static void
2043 invalidate_for_call (void)
2045 unsigned int regno, endregno;
2046 unsigned int i;
2047 unsigned hash;
2048 struct table_elt *p, *next;
2049 int in_table = 0;
2051 /* Go through all the hard registers. For each that is clobbered in
2052 a CALL_INSN, remove the register from quantity chains and update
2053 reg_tick if defined. Also see if any of these registers is currently
2054 in the table. */
2056 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2057 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
2059 delete_reg_equiv (regno);
2060 if (REG_TICK (regno) >= 0)
2062 REG_TICK (regno)++;
2063 SUBREG_TICKED (regno) = -1;
2066 in_table |= (TEST_HARD_REG_BIT (hard_regs_in_table, regno) != 0);
2069 /* In the case where we have no call-clobbered hard registers in the
2070 table, we are done. Otherwise, scan the table and remove any
2071 entry that overlaps a call-clobbered register. */
2073 if (in_table)
2074 for (hash = 0; hash < HASH_SIZE; hash++)
2075 for (p = table[hash]; p; p = next)
2077 next = p->next_same_hash;
2079 if (GET_CODE (p->exp) != REG
2080 || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
2081 continue;
2083 regno = REGNO (p->exp);
2084 endregno = regno + hard_regno_nregs[regno][GET_MODE (p->exp)];
2086 for (i = regno; i < endregno; i++)
2087 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
2089 remove_from_table (p, hash);
2090 break;
2095 /* Given an expression X of type CONST,
2096 and ELT which is its table entry (or 0 if it
2097 is not in the hash table),
2098 return an alternate expression for X as a register plus integer.
2099 If none can be found, return 0. */
2101 static rtx
2102 use_related_value (rtx x, struct table_elt *elt)
2104 struct table_elt *relt = 0;
2105 struct table_elt *p, *q;
2106 HOST_WIDE_INT offset;
2108 /* First, is there anything related known?
2109 If we have a table element, we can tell from that.
2110 Otherwise, we must look it up.  */
2112 if (elt != 0 && elt->related_value != 0)
2113 relt = elt;
2114 else if (elt == 0 && GET_CODE (x) == CONST)
2116 rtx subexp = get_related_value (x);
2117 if (subexp != 0)
2118 relt = lookup (subexp,
2119 safe_hash (subexp, GET_MODE (subexp)) & HASH_MASK,
2120 GET_MODE (subexp));
2123 if (relt == 0)
2124 return 0;
2126 /* Search all related table entries for one that has an
2127 equivalent register. */
2129 p = relt;
2130 while (1)
2132 /* This loop is strange in that it is executed in two different cases.
2133 The first is when X is already in the table. Then it is searching
2134 the RELATED_VALUE list of X's class (RELT). The second case is when
2135 X is not in the table. Then RELT points to a class for the related
2136 value.
2138 Ensure that, whatever case we are in, we ignore classes that have
2139 the same value as X. */
2141 if (rtx_equal_p (x, p->exp))
2142 q = 0;
2143 else
2144 for (q = p->first_same_value; q; q = q->next_same_value)
2145 if (GET_CODE (q->exp) == REG)
2146 break;
2148 if (q)
2149 break;
2151 p = p->related_value;
2153 /* We went all the way around, so there is nothing to be found.
2154 Alternatively, perhaps RELT was in the table for some other reason
2155 and it has no related values recorded. */
2156 if (p == relt || p == 0)
2157 break;
2160 if (q == 0)
2161 return 0;
2163 offset = (get_integer_term (x) - get_integer_term (p->exp));
2164 /* Note: OFFSET may be 0 if P->exp and X are related by commutativity.  */
2165 return plus_constant (q->exp, offset);
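/* Worked example (hypothetical RTL): X is
       (const (plus (symbol_ref "s") (const_int 8)))
   and the table records that
       (const (plus (symbol_ref "s") (const_int 4)))
   lives in (reg 100).  OFFSET is 8 - 4 = 4, so the result is
   (plus (reg 100) (const_int 4)).  */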
2168 /* Hash a string. Just add its bytes up. */
2169 static inline unsigned
2170 canon_hash_string (const char *ps)
2172 unsigned hash = 0;
2173 const unsigned char *p = (const unsigned char *) ps;
2175 if (p)
2176 while (*p)
2177 hash += *p++;
2179 return hash;
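/* For instance, with an ASCII execution character set
   canon_hash_string ("ab") is 'a' + 'b' == 97 + 98 == 195, while both
   canon_hash_string (NULL) and canon_hash_string ("") are 0.  */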
2182 /* Hash an rtx. We are careful to make sure the value is never negative.
2183 Equivalent registers hash identically.
2184 MODE is used in hashing for CONST_INTs only;
2185 otherwise the mode of X is used.
2187 Store 1 in do_not_record if any subexpression is volatile.
2189 Store 1 in hash_arg_in_memory if X contains a MEM rtx
2190 which does not have the RTX_UNCHANGING_P bit set.
2192 Note that cse_insn knows that the hash code of a MEM expression
2193 is just (int) MEM plus the hash code of the address. */
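/* For example, if (reg 100) and (reg 101) currently share a quantity,
   REG_QTY returns the same number for both, so the two registers hash
   into the same bucket and looking up either one finds their common
   equivalence class.  */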
2195 static unsigned
2196 canon_hash (rtx x, enum machine_mode mode)
2198 int i, j;
2199 unsigned hash = 0;
2200 enum rtx_code code;
2201 const char *fmt;
2203 /* repeat is used to turn tail-recursion into iteration. */
2204 repeat:
2205 if (x == 0)
2206 return hash;
2208 code = GET_CODE (x);
2209 switch (code)
2211 case REG:
2213 unsigned int regno = REGNO (x);
2214 bool record;
2216 /* On some machines, we can't record any non-fixed hard register,
2217 because extending its life will cause reload problems. We
2218 consider ap, fp, sp, gp to be fixed for this purpose.
2220 We also consider CCmode registers to be fixed for this purpose;
2221 failure to do so leads to failure to simplify 0<100 type of
2222 conditionals.
2224 On all machines, we can't record any global registers.
2225 Nor should we record any register that is in a small
2226 class, as defined by CLASS_LIKELY_SPILLED_P. */
2228 if (regno >= FIRST_PSEUDO_REGISTER)
2229 record = true;
2230 else if (x == frame_pointer_rtx
2231 || x == hard_frame_pointer_rtx
2232 || x == arg_pointer_rtx
2233 || x == stack_pointer_rtx
2234 || x == pic_offset_table_rtx)
2235 record = true;
2236 else if (global_regs[regno])
2237 record = false;
2238 else if (fixed_regs[regno])
2239 record = true;
2240 else if (GET_MODE_CLASS (GET_MODE (x)) == MODE_CC)
2241 record = true;
2242 else if (SMALL_REGISTER_CLASSES)
2243 record = false;
2244 else if (CLASS_LIKELY_SPILLED_P (REGNO_REG_CLASS (regno)))
2245 record = false;
2246 else
2247 record = true;
2249 if (!record)
2251 do_not_record = 1;
2252 return 0;
2255 hash += ((unsigned) REG << 7) + (unsigned) REG_QTY (regno);
2256 return hash;
2259 /* We handle SUBREG of a REG specially because the underlying
2260 reg changes its hash value with every value change; we don't
2261 want to have to forget unrelated subregs when one subreg changes. */
2262 case SUBREG:
2264 if (GET_CODE (SUBREG_REG (x)) == REG)
2266 hash += (((unsigned) SUBREG << 7)
2267 + REGNO (SUBREG_REG (x))
2268 + (SUBREG_BYTE (x) / UNITS_PER_WORD));
2269 return hash;
2271 break;
2274 case CONST_INT:
2276 unsigned HOST_WIDE_INT tem = INTVAL (x);
2277 hash += ((unsigned) CONST_INT << 7) + (unsigned) mode + tem;
2278 return hash;
2281 case CONST_DOUBLE:
2282 /* This is like the general case, except that it only counts
2283 the integers representing the constant. */
2284 hash += (unsigned) code + (unsigned) GET_MODE (x);
2285 if (GET_MODE (x) != VOIDmode)
2286 hash += real_hash (CONST_DOUBLE_REAL_VALUE (x));
2287 else
2288 hash += ((unsigned) CONST_DOUBLE_LOW (x)
2289 + (unsigned) CONST_DOUBLE_HIGH (x));
2290 return hash;
2292 case CONST_VECTOR:
2294 int units;
2295 rtx elt;
2297 units = CONST_VECTOR_NUNITS (x);
2299 for (i = 0; i < units; ++i)
2301 elt = CONST_VECTOR_ELT (x, i);
2302 hash += canon_hash (elt, GET_MODE (elt));
2305 return hash;
2308 /* Assume there is only one rtx object for any given label. */
2309 case LABEL_REF:
2310 hash += ((unsigned) LABEL_REF << 7) + (unsigned long) XEXP (x, 0);
2311 return hash;
2313 case SYMBOL_REF:
2314 hash += ((unsigned) SYMBOL_REF << 7) + (unsigned long) XSTR (x, 0);
2315 return hash;
2317 case MEM:
2318 /* We don't record if marked volatile or if BLKmode since we don't
2319 know the size of the move. */
2320 if (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode)
2322 do_not_record = 1;
2323 return 0;
2325 if (! RTX_UNCHANGING_P (x) || fixed_base_plus_p (XEXP (x, 0)))
2326 hash_arg_in_memory = 1;
2328 /* Now that we have already found this special case,
2329 might as well speed it up as much as possible. */
2330 hash += (unsigned) MEM;
2331 x = XEXP (x, 0);
2332 goto repeat;
2334 case USE:
2335 /* A USE that mentions non-volatile memory needs special
2336 handling since the MEM may be BLKmode which normally
2337 prevents an entry from being made. Pure calls are
2338 marked by a USE which mentions BLKmode memory. */
2339 if (GET_CODE (XEXP (x, 0)) == MEM
2340 && ! MEM_VOLATILE_P (XEXP (x, 0)))
2342 hash += (unsigned) USE;
2343 x = XEXP (x, 0);
2345 if (! RTX_UNCHANGING_P (x) || fixed_base_plus_p (XEXP (x, 0)))
2346 hash_arg_in_memory = 1;
2348 /* Now that we have already found this special case,
2349 might as well speed it up as much as possible. */
2350 hash += (unsigned) MEM;
2351 x = XEXP (x, 0);
2352 goto repeat;
2354 break;
2356 case PRE_DEC:
2357 case PRE_INC:
2358 case POST_DEC:
2359 case POST_INC:
2360 case PRE_MODIFY:
2361 case POST_MODIFY:
2362 case PC:
2363 case CC0:
2364 case CALL:
2365 case UNSPEC_VOLATILE:
2366 do_not_record = 1;
2367 return 0;
2369 case ASM_OPERANDS:
2370 if (MEM_VOLATILE_P (x))
2372 do_not_record = 1;
2373 return 0;
2375 else
2377 /* We don't want to take the filename and line into account. */
2378 hash += (unsigned) code + (unsigned) GET_MODE (x)
2379 + canon_hash_string (ASM_OPERANDS_TEMPLATE (x))
2380 + canon_hash_string (ASM_OPERANDS_OUTPUT_CONSTRAINT (x))
2381 + (unsigned) ASM_OPERANDS_OUTPUT_IDX (x);
2383 if (ASM_OPERANDS_INPUT_LENGTH (x))
2385 for (i = 1; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
2387 hash += (canon_hash (ASM_OPERANDS_INPUT (x, i),
2388 GET_MODE (ASM_OPERANDS_INPUT (x, i)))
2389 + canon_hash_string (ASM_OPERANDS_INPUT_CONSTRAINT
2390 (x, i)));
2393 hash += canon_hash_string (ASM_OPERANDS_INPUT_CONSTRAINT (x, 0));
2394 x = ASM_OPERANDS_INPUT (x, 0);
2395 mode = GET_MODE (x);
2396 goto repeat;
2399 return hash;
2401 break;
2403 default:
2404 break;
2407 i = GET_RTX_LENGTH (code) - 1;
2408 hash += (unsigned) code + (unsigned) GET_MODE (x);
2409 fmt = GET_RTX_FORMAT (code);
2410 for (; i >= 0; i--)
2412 if (fmt[i] == 'e')
2414 rtx tem = XEXP (x, i);
2416 /* If we are about to do the last recursive call
2417 needed at this level, change it into iteration.
2418 This function is called enough to be worth it. */
2419 if (i == 0)
2421 x = tem;
2422 goto repeat;
2424 hash += canon_hash (tem, 0);
2426 else if (fmt[i] == 'E')
2427 for (j = 0; j < XVECLEN (x, i); j++)
2428 hash += canon_hash (XVECEXP (x, i, j), 0);
2429 else if (fmt[i] == 's')
2430 hash += canon_hash_string (XSTR (x, i));
2431 else if (fmt[i] == 'i')
2433 unsigned tem = XINT (x, i);
2434 hash += tem;
2436 else if (fmt[i] == '0' || fmt[i] == 't')
2437 /* Unused. */
2439 else
2440 abort ();
2442 return hash;
2445 /* Like canon_hash but with no side effects. */
2447 static unsigned
2448 safe_hash (rtx x, enum machine_mode mode)
2450 int save_do_not_record = do_not_record;
2451 int save_hash_arg_in_memory = hash_arg_in_memory;
2452 unsigned hash = canon_hash (x, mode);
2453 hash_arg_in_memory = save_hash_arg_in_memory;
2454 do_not_record = save_do_not_record;
2455 return hash;
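/* E.g. hashing a volatile MEM would set do_not_record inside canon_hash;
   safe_hash restores the flag afterwards, so a hash computed purely for
   lookup (as in rehash_using_reg above) cannot perturb the caller's
   recording state.  */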
2458 /* Return 1 iff X and Y would canonicalize into the same thing,
2459 without actually constructing the canonicalization of either one.
2460 If VALIDATE is nonzero,
2461 we assume X is an expression being processed from the rtl
2462 and Y was found in the hash table. We check register refs
2463 in Y for being marked as valid.
2465 If EQUAL_VALUES is nonzero, we allow a register to match a constant value
2466 that is known to be in the register. Ordinarily, we don't allow them
2467 to match, because letting them match would cause unpredictable results
2468 in all the places that search a hash table chain for an equivalent
2469 for a given value. A possible equivalent that has different structure
2470 has its hash code computed from different data. Whether the hash code
2471 is the same as that of the given value is pure luck. */
2473 static int
2474 exp_equiv_p (rtx x, rtx y, int validate, int equal_values)
2476 int i, j;
2477 enum rtx_code code;
2478 const char *fmt;
2480 /* Note: it is incorrect to assume an expression is equivalent to itself
2481 if VALIDATE is nonzero. */
2482 if (x == y && !validate)
2483 return 1;
2484 if (x == 0 || y == 0)
2485 return x == y;
2487 code = GET_CODE (x);
2488 if (code != GET_CODE (y))
2490 if (!equal_values)
2491 return 0;
2493 /* If X is a constant and Y is a register or vice versa, they may be
2494 equivalent. We only have to validate if Y is a register. */
2495 if (CONSTANT_P (x) && GET_CODE (y) == REG
2496 && REGNO_QTY_VALID_P (REGNO (y)))
2498 int y_q = REG_QTY (REGNO (y));
2499 struct qty_table_elem *y_ent = &qty_table[y_q];
2501 if (GET_MODE (y) == y_ent->mode
2502 && rtx_equal_p (x, y_ent->const_rtx)
2503 && (! validate || REG_IN_TABLE (REGNO (y)) == REG_TICK (REGNO (y))))
2504 return 1;
2507 if (CONSTANT_P (y) && code == REG
2508 && REGNO_QTY_VALID_P (REGNO (x)))
2510 int x_q = REG_QTY (REGNO (x));
2511 struct qty_table_elem *x_ent = &qty_table[x_q];
2513 if (GET_MODE (x) == x_ent->mode
2514 && rtx_equal_p (y, x_ent->const_rtx))
2515 return 1;
2518 return 0;
2521 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
2522 if (GET_MODE (x) != GET_MODE (y))
2523 return 0;
2525 switch (code)
2527 case PC:
2528 case CC0:
2529 case CONST_INT:
2530 return x == y;
2532 case LABEL_REF:
2533 return XEXP (x, 0) == XEXP (y, 0);
2535 case SYMBOL_REF:
2536 return XSTR (x, 0) == XSTR (y, 0);
2538 case REG:
2540 unsigned int regno = REGNO (y);
2541 unsigned int endregno
2542 = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
2543 : hard_regno_nregs[regno][GET_MODE (y)]);
2544 unsigned int i;
2546 /* If the quantities are not the same, the expressions are not
2547 equivalent.  If they are the same and we are not to validate, they
2548 are equivalent. Otherwise, ensure all regs are up-to-date. */
2550 if (REG_QTY (REGNO (x)) != REG_QTY (regno))
2551 return 0;
2553 if (! validate)
2554 return 1;
2556 for (i = regno; i < endregno; i++)
2557 if (REG_IN_TABLE (i) != REG_TICK (i))
2558 return 0;
2560 return 1;
2563 /* For commutative operations, check both orders. */
2564 case PLUS:
2565 case MULT:
2566 case AND:
2567 case IOR:
2568 case XOR:
2569 case NE:
2570 case EQ:
2571 return ((exp_equiv_p (XEXP (x, 0), XEXP (y, 0), validate, equal_values)
2572 && exp_equiv_p (XEXP (x, 1), XEXP (y, 1),
2573 validate, equal_values))
2574 || (exp_equiv_p (XEXP (x, 0), XEXP (y, 1),
2575 validate, equal_values)
2576 && exp_equiv_p (XEXP (x, 1), XEXP (y, 0),
2577 validate, equal_values)));
2579 case ASM_OPERANDS:
2580 /* We don't use the generic code below because we want to
2581 disregard filename and line numbers. */
2583 /* A volatile asm isn't equivalent to any other. */
2584 if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
2585 return 0;
2587 if (GET_MODE (x) != GET_MODE (y)
2588 || strcmp (ASM_OPERANDS_TEMPLATE (x), ASM_OPERANDS_TEMPLATE (y))
2589 || strcmp (ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
2590 ASM_OPERANDS_OUTPUT_CONSTRAINT (y))
2591 || ASM_OPERANDS_OUTPUT_IDX (x) != ASM_OPERANDS_OUTPUT_IDX (y)
2592 || ASM_OPERANDS_INPUT_LENGTH (x) != ASM_OPERANDS_INPUT_LENGTH (y))
2593 return 0;
2595 if (ASM_OPERANDS_INPUT_LENGTH (x))
2597 for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
2598 if (! exp_equiv_p (ASM_OPERANDS_INPUT (x, i),
2599 ASM_OPERANDS_INPUT (y, i),
2600 validate, equal_values)
2601 || strcmp (ASM_OPERANDS_INPUT_CONSTRAINT (x, i),
2602 ASM_OPERANDS_INPUT_CONSTRAINT (y, i)))
2603 return 0;
2606 return 1;
2608 default:
2609 break;
2612 /* Compare the elements.  If any pair of corresponding elements
2613 fails to match, return 0 for the whole thing.  */
2615 fmt = GET_RTX_FORMAT (code);
2616 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2618 switch (fmt[i])
2620 case 'e':
2621 if (! exp_equiv_p (XEXP (x, i), XEXP (y, i), validate, equal_values))
2622 return 0;
2623 break;
2625 case 'E':
2626 if (XVECLEN (x, i) != XVECLEN (y, i))
2627 return 0;
2628 for (j = 0; j < XVECLEN (x, i); j++)
2629 if (! exp_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j),
2630 validate, equal_values))
2631 return 0;
2632 break;
2634 case 's':
2635 if (strcmp (XSTR (x, i), XSTR (y, i)))
2636 return 0;
2637 break;
2639 case 'i':
2640 if (XINT (x, i) != XINT (y, i))
2641 return 0;
2642 break;
2644 case 'w':
2645 if (XWINT (x, i) != XWINT (y, i))
2646 return 0;
2647 break;
2649 case '0':
2650 case 't':
2651 break;
2653 default:
2654 abort ();
2658 return 1;
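/* Illustrative examples: (plus:SI (reg 5) (reg 6)) and
   (plus:SI (reg 6) (reg 5)) compare equal through the commutative case
   above, while (mult:SI (reg 5) (reg 6)) and (mult:HI (reg 5) (reg 6))
   do not, since the mode check rejects them.  */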
2661 /* Return 1 if X has a value that can vary even between two
2662 executions of the program. 0 means X can be compared reliably
2663 against certain constants or near-constants. */
2665 static int
2666 cse_rtx_varies_p (rtx x, int from_alias)
2668 /* We need not check for X and the equivalence class being of the same
2669 mode because if X is equivalent to a constant in some mode, it
2670 doesn't vary in any mode. */
2672 if (GET_CODE (x) == REG
2673 && REGNO_QTY_VALID_P (REGNO (x)))
2675 int x_q = REG_QTY (REGNO (x));
2676 struct qty_table_elem *x_ent = &qty_table[x_q];
2678 if (GET_MODE (x) == x_ent->mode
2679 && x_ent->const_rtx != NULL_RTX)
2680 return 0;
2683 if (GET_CODE (x) == PLUS
2684 && GET_CODE (XEXP (x, 1)) == CONST_INT
2685 && GET_CODE (XEXP (x, 0)) == REG
2686 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
2688 int x0_q = REG_QTY (REGNO (XEXP (x, 0)));
2689 struct qty_table_elem *x0_ent = &qty_table[x0_q];
2691 if ((GET_MODE (XEXP (x, 0)) == x0_ent->mode)
2692 && x0_ent->const_rtx != NULL_RTX)
2693 return 0;
2696 /* This can happen as the result of virtual register instantiation, if
2697 the initial constant is too large to be a valid address. This gives
2698 us a three instruction sequence, load large offset into a register,
2699 load fp minus a constant into a register, then a MEM which is the
2700 sum of the two `constant' registers. */
2701 if (GET_CODE (x) == PLUS
2702 && GET_CODE (XEXP (x, 0)) == REG
2703 && GET_CODE (XEXP (x, 1)) == REG
2704 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0)))
2705 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
2707 int x0_q = REG_QTY (REGNO (XEXP (x, 0)));
2708 int x1_q = REG_QTY (REGNO (XEXP (x, 1)));
2709 struct qty_table_elem *x0_ent = &qty_table[x0_q];
2710 struct qty_table_elem *x1_ent = &qty_table[x1_q];
2712 if ((GET_MODE (XEXP (x, 0)) == x0_ent->mode)
2713 && x0_ent->const_rtx != NULL_RTX
2714 && (GET_MODE (XEXP (x, 1)) == x1_ent->mode)
2715 && x1_ent->const_rtx != NULL_RTX)
2716 return 0;
2719 return rtx_varies_p (x, from_alias);
2722 /* Canonicalize an expression:
2723 replace each register reference inside it
2724 with the "oldest" equivalent register.
2726 If INSN is nonzero and we are replacing a pseudo with a hard register
2727 or vice versa, validate_change is used to ensure that INSN remains valid
2728 after we make our substitution. The calls are made with IN_GROUP nonzero
2729 so apply_change_group must be called upon the outermost return from this
2730 function (unless INSN is zero). The result of apply_change_group can
2731 generally be discarded since the changes we are making are optional. */
2733 static rtx
2734 canon_reg (rtx x, rtx insn)
2736 int i;
2737 enum rtx_code code;
2738 const char *fmt;
2740 if (x == 0)
2741 return x;
2743 code = GET_CODE (x);
2744 switch (code)
2746 case PC:
2747 case CC0:
2748 case CONST:
2749 case CONST_INT:
2750 case CONST_DOUBLE:
2751 case CONST_VECTOR:
2752 case SYMBOL_REF:
2753 case LABEL_REF:
2754 case ADDR_VEC:
2755 case ADDR_DIFF_VEC:
2756 return x;
2758 case REG:
2760 int first;
2761 int q;
2762 struct qty_table_elem *ent;
2764 /* Never replace a hard reg, because hard regs can appear
2765 in more than one machine mode, and we must preserve the mode
2766 of each occurrence. Also, some hard regs appear in
2767 MEMs that are shared and mustn't be altered. Don't try to
2768 replace any reg that maps to a reg of class NO_REGS. */
2769 if (REGNO (x) < FIRST_PSEUDO_REGISTER
2770 || ! REGNO_QTY_VALID_P (REGNO (x)))
2771 return x;
2773 q = REG_QTY (REGNO (x));
2774 ent = &qty_table[q];
2775 first = ent->first_reg;
2776 return (first >= FIRST_PSEUDO_REGISTER ? regno_reg_rtx[first]
2777 : REGNO_REG_CLASS (first) == NO_REGS ? x
2778 : gen_rtx_REG (ent->mode, first));
2781 default:
2782 break;
2785 fmt = GET_RTX_FORMAT (code);
2786 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2788 int j;
2790 if (fmt[i] == 'e')
2792 rtx new = canon_reg (XEXP (x, i), insn);
2793 int insn_code;
2795 /* If replacing pseudo with hard reg or vice versa, ensure the
2796 insn remains valid. Likewise if the insn has MATCH_DUPs. */
2797 if (insn != 0 && new != 0
2798 && GET_CODE (new) == REG && GET_CODE (XEXP (x, i)) == REG
2799 && (((REGNO (new) < FIRST_PSEUDO_REGISTER)
2800 != (REGNO (XEXP (x, i)) < FIRST_PSEUDO_REGISTER))
2801 || (insn_code = recog_memoized (insn)) < 0
2802 || insn_data[insn_code].n_dups > 0))
2803 validate_change (insn, &XEXP (x, i), new, 1);
2804 else
2805 XEXP (x, i) = new;
2807 else if (fmt[i] == 'E')
2808 for (j = 0; j < XVECLEN (x, i); j++)
2809 XVECEXP (x, i, j) = canon_reg (XVECEXP (x, i, j), insn);
2812 return x;
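/* A sketch: if pseudos 100 and 105 share a quantity whose first_reg is
   100, canon_reg rewrites (reg 105) as (reg 100), giving every occurrence
   of the value one canonical name so that hashing and equality tests can
   see through register copies.  */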
2815 /* LOC is a location within INSN that is an operand address (the contents of
2816 a MEM). Find the best equivalent address to use that is valid for this
2817 insn.
2819 On most CISC machines, complicated address modes are costly, and rtx_cost
2820 is a good approximation for that cost. However, most RISC machines have
2821 only a few (usually only one) memory reference formats. If an address is
2822 valid at all, it is often just as cheap as any other address. Hence, for
2823 RISC machines, we use `address_cost' to compare the costs of various
2824 addresses. For two addresses of equal cost, choose the one with the
2825 highest `rtx_cost' value as that has the potential of eliminating the
2826 most insns. For equal costs, we choose the first in the equivalence
2827 class. Note that we ignore the fact that pseudo registers are cheaper than
2828 hard registers here because we would also prefer the pseudo registers. */
2830 static void
2831 find_best_addr (rtx insn, rtx *loc, enum machine_mode mode)
2833 struct table_elt *elt;
2834 rtx addr = *loc;
2835 struct table_elt *p;
2836 int found_better = 1;
2837 int save_do_not_record = do_not_record;
2838 int save_hash_arg_in_memory = hash_arg_in_memory;
2839 int addr_volatile;
2840 int regno;
2841 unsigned hash;
2843 /* Do not try to replace constant addresses or addresses of local and
2844 argument slots. These MEM expressions are made only once and inserted
2845 in many instructions, as well as being used to control symbol table
2846 output. It is not safe to clobber them.
2848 There are some uncommon cases where the address is already in a register
2849 for some reason, but we cannot take advantage of that because we have
2850 no easy way to unshare the MEM. In addition, looking up all stack
2851 addresses is costly. */
2852 if ((GET_CODE (addr) == PLUS
2853 && GET_CODE (XEXP (addr, 0)) == REG
2854 && GET_CODE (XEXP (addr, 1)) == CONST_INT
2855 && (regno = REGNO (XEXP (addr, 0)),
2856 regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM
2857 || regno == ARG_POINTER_REGNUM))
2858 || (GET_CODE (addr) == REG
2859 && (regno = REGNO (addr), regno == FRAME_POINTER_REGNUM
2860 || regno == HARD_FRAME_POINTER_REGNUM
2861 || regno == ARG_POINTER_REGNUM))
2862 || GET_CODE (addr) == ADDRESSOF
2863 || CONSTANT_ADDRESS_P (addr))
2864 return;
2866 /* If this address is not simply a register, try to fold it. This will
2867 sometimes simplify the expression. Many simplifications
2868 will not be valid, but some, usually applying the associative rule, will
2869 be valid and produce better code. */
2870 if (GET_CODE (addr) != REG)
2872 rtx folded = fold_rtx (copy_rtx (addr), NULL_RTX);
2873 int addr_folded_cost = address_cost (folded, mode);
2874 int addr_cost = address_cost (addr, mode);
2876 if ((addr_folded_cost < addr_cost
2877 || (addr_folded_cost == addr_cost
2878 /* ??? The rtx_cost comparison is left over from an older
2879 version of this code. It is probably no longer helpful. */
2880 && (rtx_cost (folded, MEM) > rtx_cost (addr, MEM)
2881 || approx_reg_cost (folded) < approx_reg_cost (addr))))
2882 && validate_change (insn, loc, folded, 0))
2883 addr = folded;
2886 /* If this address is not in the hash table, we can't look for equivalences
2887 of the whole address. Also, ignore if volatile. */
2889 do_not_record = 0;
2890 hash = HASH (addr, Pmode);
2891 addr_volatile = do_not_record;
2892 do_not_record = save_do_not_record;
2893 hash_arg_in_memory = save_hash_arg_in_memory;
2895 if (addr_volatile)
2896 return;
2898 elt = lookup (addr, hash, Pmode);
2900 if (elt)
2902 /* We need to find the best (under the criteria documented above) entry
2903 in the class that is valid. We use the `flag' field to indicate
2904 choices that were invalid and iterate until we can't find a better
2905 one that hasn't already been tried. */
2907 for (p = elt->first_same_value; p; p = p->next_same_value)
2908 p->flag = 0;
2910 while (found_better)
2912 int best_addr_cost = address_cost (*loc, mode);
2913 int best_rtx_cost = (elt->cost + 1) >> 1;
2914 int exp_cost;
2915 struct table_elt *best_elt = elt;
2917 found_better = 0;
2918 for (p = elt->first_same_value; p; p = p->next_same_value)
2919 if (! p->flag)
2921 if ((GET_CODE (p->exp) == REG
2922 || exp_equiv_p (p->exp, p->exp, 1, 0))
2923 && ((exp_cost = address_cost (p->exp, mode)) < best_addr_cost
2924 || (exp_cost == best_addr_cost
2925 && ((p->cost + 1) >> 1) > best_rtx_cost)))
2927 found_better = 1;
2928 best_addr_cost = exp_cost;
2929 best_rtx_cost = (p->cost + 1) >> 1;
2930 best_elt = p;
2934 if (found_better)
2936 if (validate_change (insn, loc,
2937 canon_reg (copy_rtx (best_elt->exp),
2938 NULL_RTX), 0))
2939 return;
2940 else
2941 best_elt->flag = 1;
2946 /* If the address is a binary operation with the first operand a register
2947 and the second a constant, do the same as above, but looking for
2948 equivalences of the register. Then try to simplify before checking for
2949 the best address to use. This catches a few cases: First is when we
2950 have REG+const and the register is equivalent to another REG+const.  We can
2951 the constants and eliminate one insn and one register. It may also be
2952 that a machine has a cheap REG+REG+const. Finally, this improves the
2953 code on the Alpha for unaligned byte stores. */
2955 if (flag_expensive_optimizations
2956 && ARITHMETIC_P (*loc)
2957 && GET_CODE (XEXP (*loc, 0)) == REG)
2959 rtx op1 = XEXP (*loc, 1);
2961 do_not_record = 0;
2962 hash = HASH (XEXP (*loc, 0), Pmode);
2963 do_not_record = save_do_not_record;
2964 hash_arg_in_memory = save_hash_arg_in_memory;
2966 elt = lookup (XEXP (*loc, 0), hash, Pmode);
2967 if (elt == 0)
2968 return;
2970 /* We need to find the best (under the criteria documented above) entry
2971 in the class that is valid. We use the `flag' field to indicate
2972 choices that were invalid and iterate until we can't find a better
2973 one that hasn't already been tried. */
2975 for (p = elt->first_same_value; p; p = p->next_same_value)
2976 p->flag = 0;
2978 while (found_better)
2980 int best_addr_cost = address_cost (*loc, mode);
2981 int best_rtx_cost = (COST (*loc) + 1) >> 1;
2982 struct table_elt *best_elt = elt;
2983 rtx best_rtx = *loc;
2984 int count;
2986 /* This is at worst case an O(n^2) algorithm, so limit our search
2987 to the first 32 elements on the list. This avoids trouble
2988 compiling code with very long basic blocks that can easily
2989 call simplify_gen_binary so many times that we run out of
2990 memory. */
2992 found_better = 0;
2993 for (p = elt->first_same_value, count = 0;
2994 p && count < 32;
2995 p = p->next_same_value, count++)
2996 if (! p->flag
2997 && (GET_CODE (p->exp) == REG
2998 || exp_equiv_p (p->exp, p->exp, 1, 0)))
3000 rtx new = simplify_gen_binary (GET_CODE (*loc), Pmode,
3001 p->exp, op1);
3002 int new_cost;
3003 new_cost = address_cost (new, mode);
3005 if (new_cost < best_addr_cost
3006 || (new_cost == best_addr_cost
3007 && (COST (new) + 1) >> 1 > best_rtx_cost))
3009 found_better = 1;
3010 best_addr_cost = new_cost;
3011 best_rtx_cost = (COST (new) + 1) >> 1;
3012 best_elt = p;
3013 best_rtx = new;
3017 if (found_better)
3019 if (validate_change (insn, loc,
3020 canon_reg (copy_rtx (best_rtx),
3021 NULL_RTX), 0))
3022 return;
3023 else
3024 best_elt->flag = 1;
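/* Worked example of the REG+const case above (hypothetical): for the
   address (plus (reg 100) (const_int 4)), if the table shows (reg 100)
   equivalent to (plus (reg 101) (const_int 8)), simplify_gen_binary
   yields (plus (reg 101) (const_int 12)); we install it if address_cost
   reports an improvement and validate_change accepts the substitution.  */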
3030 /* Given an operation (CODE, *PARG1, *PARG2), where code is a comparison
3031 operation (EQ, NE, GT, etc.), follow it back through the hash table and
3032 see what values are being compared.
3034 *PARG1 and *PARG2 are updated to contain the rtx representing the values
3035 actually being compared. For example, if *PARG1 was (cc0) and *PARG2
3036 was (const_int 0), *PARG1 and *PARG2 will be set to the objects that were
3037 compared to produce cc0.
3039 The return value is the comparison operator: either the code of the
3040 comparison found, or the code corresponding to its inverse.  */
3042 static enum rtx_code
3043 find_comparison_args (enum rtx_code code, rtx *parg1, rtx *parg2,
3044 enum machine_mode *pmode1, enum machine_mode *pmode2)
3046 rtx arg1, arg2;
3048 arg1 = *parg1, arg2 = *parg2;
3050 /* If ARG2 is const0_rtx, see what ARG1 is equivalent to. */
3052 while (arg2 == CONST0_RTX (GET_MODE (arg1)))
3054 /* Set nonzero when we find something of interest. */
3055 rtx x = 0;
3056 int reverse_code = 0;
3057 struct table_elt *p = 0;
3059 /* If arg1 is a COMPARE, extract the comparison arguments from it.
3060 On machines with CC0, this is the only case that can occur, since
3061 fold_rtx will return the COMPARE or item being compared with zero
3062 when given CC0. */
3064 if (GET_CODE (arg1) == COMPARE && arg2 == const0_rtx)
3065 x = arg1;
3067 /* If ARG1 is a comparison operator and CODE is testing for
3068 STORE_FLAG_VALUE, get the inner arguments. */
3070 else if (COMPARISON_P (arg1))
3072 #ifdef FLOAT_STORE_FLAG_VALUE
3073 REAL_VALUE_TYPE fsfv;
3074 #endif
3076 if (code == NE
3077 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
3078 && code == LT && STORE_FLAG_VALUE == -1)
3079 #ifdef FLOAT_STORE_FLAG_VALUE
3080 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_FLOAT
3081 && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
3082 REAL_VALUE_NEGATIVE (fsfv)))
3083 #endif
3085 x = arg1;
3086 else if (code == EQ
3087 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
3088 && code == GE && STORE_FLAG_VALUE == -1)
3089 #ifdef FLOAT_STORE_FLAG_VALUE
3090 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_FLOAT
3091 && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
3092 REAL_VALUE_NEGATIVE (fsfv)))
3093 #endif
3095 x = arg1, reverse_code = 1;
3098 /* ??? We could also check for
3100 (ne (and (eq (...) (const_int 1))) (const_int 0))
3102 and related forms, but let's wait until we see them occurring. */
3104 if (x == 0)
3105 /* Look up ARG1 in the hash table and see if it has an equivalence
3106 that lets us see what is being compared. */
3107 p = lookup (arg1, safe_hash (arg1, GET_MODE (arg1)) & HASH_MASK,
3108 GET_MODE (arg1));
3109 if (p)
3111 p = p->first_same_value;
3113 /* If what we compare is already known to be constant, that is as
3114 good as it gets.
3115 We need to break the loop in this case, because otherwise we
3116 can have an infinite loop when looking at a reg that is known
3117 to be a constant which is the same as a comparison of a reg
3118 against zero which appears later in the insn stream, which in
3119 turn is constant and the same as the comparison of the first reg
3120 against zero... */
3121 if (p->is_const)
3122 break;
3125 for (; p; p = p->next_same_value)
3127 enum machine_mode inner_mode = GET_MODE (p->exp);
3128 #ifdef FLOAT_STORE_FLAG_VALUE
3129 REAL_VALUE_TYPE fsfv;
3130 #endif
3132 /* If the entry isn't valid, skip it. */
3133 if (! exp_equiv_p (p->exp, p->exp, 1, 0))
3134 continue;
3136 if (GET_CODE (p->exp) == COMPARE
3137 /* Another possibility is that this machine has a compare insn
3138 that includes the comparison code. In that case, ARG1 would
3139 be equivalent to a comparison operation that would set ARG1 to
3140 either STORE_FLAG_VALUE or zero. If this is an NE operation,
3141 ORIG_CODE is the actual comparison being done; if it is an EQ,
3142 we must reverse ORIG_CODE.  On machines with a negative value
3143 for STORE_FLAG_VALUE, also look at LT and GE operations. */
3144 || ((code == NE
3145 || (code == LT
3146 && GET_MODE_CLASS (inner_mode) == MODE_INT
3147 && (GET_MODE_BITSIZE (inner_mode)
3148 <= HOST_BITS_PER_WIDE_INT)
3149 && (STORE_FLAG_VALUE
3150 & ((HOST_WIDE_INT) 1
3151 << (GET_MODE_BITSIZE (inner_mode) - 1))))
3152 #ifdef FLOAT_STORE_FLAG_VALUE
3153 || (code == LT
3154 && GET_MODE_CLASS (inner_mode) == MODE_FLOAT
3155 && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
3156 REAL_VALUE_NEGATIVE (fsfv)))
3157 #endif
3159 && COMPARISON_P (p->exp)))
3161 x = p->exp;
3162 break;
3164 else if ((code == EQ
3165 || (code == GE
3166 && GET_MODE_CLASS (inner_mode) == MODE_INT
3167 && (GET_MODE_BITSIZE (inner_mode)
3168 <= HOST_BITS_PER_WIDE_INT)
3169 && (STORE_FLAG_VALUE
3170 & ((HOST_WIDE_INT) 1
3171 << (GET_MODE_BITSIZE (inner_mode) - 1))))
3172 #ifdef FLOAT_STORE_FLAG_VALUE
3173 || (code == GE
3174 && GET_MODE_CLASS (inner_mode) == MODE_FLOAT
3175 && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
3176 REAL_VALUE_NEGATIVE (fsfv)))
3177 #endif
3179 && COMPARISON_P (p->exp))
3181 reverse_code = 1;
3182 x = p->exp;
3183 break;
3186 /* If this is a non-trapping address, e.g. fp + constant, the
3187 equivalent is a better operand since it may let us predict
3188 the value of the comparison. */
3189 else if (!rtx_addr_can_trap_p (p->exp))
3191 arg1 = p->exp;
3192 continue;
3196 /* If we didn't find a useful equivalence for ARG1, we are done.
3197 Otherwise, set up for the next iteration. */
3198 if (x == 0)
3199 break;
3201 /* If we need to reverse the comparison, make sure that it is
3202 possible -- we can't necessarily infer the value of GE from LT
3203 with floating-point operands. */
3204 if (reverse_code)
3206 enum rtx_code reversed = reversed_comparison_code (x, NULL_RTX);
3207 if (reversed == UNKNOWN)
3208 break;
3209 else
3210 code = reversed;
3212 else if (COMPARISON_P (x))
3213 code = GET_CODE (x);
3214 arg1 = XEXP (x, 0), arg2 = XEXP (x, 1);
3217 /* Return our results. Return the modes from before fold_rtx
3218 because fold_rtx might produce const_int, and then it's too late. */
3219 *pmode1 = GET_MODE (arg1), *pmode2 = GET_MODE (arg2);
3220 *parg1 = fold_rtx (arg1, 0), *parg2 = fold_rtx (arg2, 0);
3222 return code;
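/* Illustration (hypothetical): with CODE == EQ, *PARG1 == (reg 100) and
   *PARG2 == (const_int 0), where (reg 100) is known equivalent to
   (lt:SI (reg 101) (reg 102)), the loop above finds the LT, sets
   REVERSE_CODE, and we return GE with *PARG1 and *PARG2 updated to
   (reg 101) and (reg 102).  */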
3225 /* If X is a nontrivial arithmetic operation on an argument
3226 for which a constant value can be determined, return
3227 the result of operating on that value, as a constant.
3228 Otherwise, return X, possibly with one or more operands
3229 modified by recursive calls to this function.
3231 If X is a register whose contents are known, we do NOT
3232 return those contents here. equiv_constant is called to
3233 perform that task.
3235 INSN is the insn that we may be modifying. If it is 0, make a copy
3236 of X before modifying it. */
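/* For instance (a sketch): folding (plus:SI (reg 100) (const_int 4))
   when (reg 100) is known equivalent to (const_int 8) does not return
   the register's contents directly; instead the operand walk below picks
   up (const_int 8) through equiv_constant, and the arithmetic
   simplification that follows can then produce (const_int 12).  */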
3238 static rtx
3239 fold_rtx (rtx x, rtx insn)
3241 enum rtx_code code;
3242 enum machine_mode mode;
3243 const char *fmt;
3244 int i;
3245 rtx new = 0;
3246 int copied = 0;
3247 int must_swap = 0;
3249 /* Folded equivalents of first two operands of X. */
3250 rtx folded_arg0;
3251 rtx folded_arg1;
3253 /* Constant equivalents of first three operands of X;
3254 0 when no such equivalent is known. */
3255 rtx const_arg0;
3256 rtx const_arg1;
3257 rtx const_arg2;
3259 /* The mode of the first operand of X. We need this for sign and zero
3260 extends. */
3261 enum machine_mode mode_arg0;
3263 if (x == 0)
3264 return x;
3266 mode = GET_MODE (x);
3267 code = GET_CODE (x);
3268 switch (code)
3270 case CONST:
3271 case CONST_INT:
3272 case CONST_DOUBLE:
3273 case CONST_VECTOR:
3274 case SYMBOL_REF:
3275 case LABEL_REF:
3276 case REG:
3277 /* No use simplifying an EXPR_LIST
3278 since they are used only for lists of args
3279 in a function call's REG_EQUAL note. */
3280 case EXPR_LIST:
3281 /* Changing anything inside an ADDRESSOF is incorrect; we don't
3282 want to (e.g.,) make (addressof (const_int 0)) just because
3283 the location is known to be zero. */
3284 case ADDRESSOF:
3285 return x;
3287 #ifdef HAVE_cc0
3288 case CC0:
3289 return prev_insn_cc0;
3290 #endif
3292 case PC:
3293 /* If the next insn is a CODE_LABEL followed by a jump table,
3294 PC's value is a LABEL_REF pointing to that label. That
3295 lets us fold switch statements on the VAX. */
3297 rtx next;
3298 if (insn && tablejump_p (insn, &next, NULL))
3299 return gen_rtx_LABEL_REF (Pmode, next);
3301 break;
3303 case SUBREG:
3304 /* See if we previously assigned a constant value to this SUBREG. */
3305 if ((new = lookup_as_function (x, CONST_INT)) != 0
3306 || (new = lookup_as_function (x, CONST_DOUBLE)) != 0)
3307 return new;
3309 /* If this is a paradoxical SUBREG, we have no idea what value the
3310 extra bits would have. However, if the operand is equivalent
3311 to a SUBREG whose inner operand has the same mode as ours, and all the
3312 modes are within a word, we can just use the inner operand
3313 because these SUBREGs just say how to treat the register.
3315 Similarly if we find an integer constant. */
3317 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3319 enum machine_mode imode = GET_MODE (SUBREG_REG (x));
3320 struct table_elt *elt;
3322 if (GET_MODE_SIZE (mode) <= UNITS_PER_WORD
3323 && GET_MODE_SIZE (imode) <= UNITS_PER_WORD
3324 && (elt = lookup (SUBREG_REG (x), HASH (SUBREG_REG (x), imode),
3325 imode)) != 0)
3326 for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
3328 if (CONSTANT_P (elt->exp)
3329 && GET_MODE (elt->exp) == VOIDmode)
3330 return elt->exp;
3332 if (GET_CODE (elt->exp) == SUBREG
3333 && GET_MODE (SUBREG_REG (elt->exp)) == mode
3334 && exp_equiv_p (elt->exp, elt->exp, 1, 0))
3335 return copy_rtx (SUBREG_REG (elt->exp));
3338 return x;
3341 /* Fold SUBREG_REG. If it changed, see if we can simplify the SUBREG.
3342 We might be able to if the SUBREG is extracting a single word in an
3343 integral mode or extracting the low part. */
3345 folded_arg0 = fold_rtx (SUBREG_REG (x), insn);
3346 const_arg0 = equiv_constant (folded_arg0);
3347 if (const_arg0)
3348 folded_arg0 = const_arg0;
3350 if (folded_arg0 != SUBREG_REG (x))
3352 new = simplify_subreg (mode, folded_arg0,
3353 GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
3354 if (new)
3355 return new;
3358 if (GET_CODE (folded_arg0) == REG
3359 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (folded_arg0)))
3361 struct table_elt *elt;
3363 /* We can use HASH here since we know that canon_hash won't be
3364 called. */
3365 elt = lookup (folded_arg0,
3366 HASH (folded_arg0, GET_MODE (folded_arg0)),
3367 GET_MODE (folded_arg0));
3369 if (elt)
3370 elt = elt->first_same_value;
3372 if (subreg_lowpart_p (x))
3373 /* If this is a narrowing SUBREG and our operand is a REG, see
3374 if we can find an equivalence for REG that is an arithmetic
3375 operation in a wider mode where both operands are paradoxical
3376 SUBREGs from objects of our result mode. In that case, we
3377 couldn't report an equivalent value for that operation, since we
3378 don't know what the extra bits will be. But we can find an
3379 equivalence for this SUBREG by folding that operation in the
3380 narrow mode. This allows us to fold arithmetic in narrow modes
3381 when the machine only supports word-sized arithmetic.
3383 Also look for a case where we have a SUBREG whose operand
3384 is the same as our result. If both modes are smaller
3385 than a word, we are simply interpreting a register in
3386 different modes and we can use the inner value. */
3388 for (; elt; elt = elt->next_same_value)
3390 enum rtx_code eltcode = GET_CODE (elt->exp);
3392 /* Just check for unary and binary operations. */
3393 if (UNARY_P (elt->exp)
3394 && eltcode != SIGN_EXTEND
3395 && eltcode != ZERO_EXTEND
3396 && GET_CODE (XEXP (elt->exp, 0)) == SUBREG
3397 && GET_MODE (SUBREG_REG (XEXP (elt->exp, 0))) == mode
3398 && (GET_MODE_CLASS (mode)
3399 == GET_MODE_CLASS (GET_MODE (XEXP (elt->exp, 0)))))
3401 rtx op0 = SUBREG_REG (XEXP (elt->exp, 0));
3403 if (GET_CODE (op0) != REG && ! CONSTANT_P (op0))
3404 op0 = fold_rtx (op0, NULL_RTX);
3406 op0 = equiv_constant (op0);
3407 if (op0)
3408 new = simplify_unary_operation (GET_CODE (elt->exp), mode,
3409 op0, mode);
3411 else if (ARITHMETIC_P (elt->exp)
3412 && eltcode != DIV && eltcode != MOD
3413 && eltcode != UDIV && eltcode != UMOD
3414 && eltcode != ASHIFTRT && eltcode != LSHIFTRT
3415 && eltcode != ROTATE && eltcode != ROTATERT
3416 && ((GET_CODE (XEXP (elt->exp, 0)) == SUBREG
3417 && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 0)))
3418 == mode))
3419 || CONSTANT_P (XEXP (elt->exp, 0)))
3420 && ((GET_CODE (XEXP (elt->exp, 1)) == SUBREG
3421 && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 1)))
3422 == mode))
3423 || CONSTANT_P (XEXP (elt->exp, 1))))
3425 rtx op0 = gen_lowpart_common (mode, XEXP (elt->exp, 0));
3426 rtx op1 = gen_lowpart_common (mode, XEXP (elt->exp, 1));
3428 if (op0 && GET_CODE (op0) != REG && ! CONSTANT_P (op0))
3429 op0 = fold_rtx (op0, NULL_RTX);
3431 if (op0)
3432 op0 = equiv_constant (op0);
3434 if (op1 && GET_CODE (op1) != REG && ! CONSTANT_P (op1))
3435 op1 = fold_rtx (op1, NULL_RTX);
3437 if (op1)
3438 op1 = equiv_constant (op1);
3440 /* If we are looking for the low SImode part of
3441 (ashift:DI c (const_int 32)), it doesn't work
3442 to compute that in SImode, because a 32-bit shift
3443 in SImode is unpredictable. We know the value is 0. */
3444 if (op0 && op1
3445 && GET_CODE (elt->exp) == ASHIFT
3446 && GET_CODE (op1) == CONST_INT
3447 && INTVAL (op1) >= GET_MODE_BITSIZE (mode))
3449 if (INTVAL (op1)
3450 < GET_MODE_BITSIZE (GET_MODE (elt->exp)))
3451 /* If the count fits in the inner mode's width,
3452 but exceeds the outer mode's width,
3453 the value will get truncated to 0
3454 by the subreg. */
3455 new = CONST0_RTX (mode);
3456 else
3457 /* If the count exceeds even the inner mode's width,
3458 don't fold this expression. */
3459 new = 0;
3461 else if (op0 && op1)
3462 new = simplify_binary_operation (GET_CODE (elt->exp), mode, op0, op1);
3465 else if (GET_CODE (elt->exp) == SUBREG
3466 && GET_MODE (SUBREG_REG (elt->exp)) == mode
3467 && (GET_MODE_SIZE (GET_MODE (folded_arg0))
3468 <= UNITS_PER_WORD)
3469 && exp_equiv_p (elt->exp, elt->exp, 1, 0))
3470 new = copy_rtx (SUBREG_REG (elt->exp));
3472 if (new)
3473 return new;
3475 else
3476 /* A SUBREG resulting from a zero extension may fold to zero if
3477 it extracts higher bits than the ZERO_EXTEND's source bits.
3478 FIXME: if combine tried to, er, combine these instructions,
3479 this transformation may be moved to simplify_subreg. */
3480 for (; elt; elt = elt->next_same_value)
3482 if (GET_CODE (elt->exp) == ZERO_EXTEND
3483 && subreg_lsb (x)
3484 >= GET_MODE_BITSIZE (GET_MODE (XEXP (elt->exp, 0))))
3485 return CONST0_RTX (mode);
3489 return x;
3491 case NOT:
3492 case NEG:
3493 /* If we have (NOT Y), see if Y is known to be (NOT Z).
3494 If so, (NOT Y) simplifies to Z. Similarly for NEG. */
3495 new = lookup_as_function (XEXP (x, 0), code);
3496 if (new)
3497 return fold_rtx (copy_rtx (XEXP (new, 0)), insn);
3498 break;
3500 case MEM:
3501 /* If we are not actually processing an insn, don't try to find the
3502 best address. Not only don't we care, but we could modify the
3503 MEM in an invalid way since we have no insn to validate against. */
3504 if (insn != 0)
3505 find_best_addr (insn, &XEXP (x, 0), GET_MODE (x));
3508 /* Even if we don't fold in the insn itself,
3509 we can safely do so here, in hopes of getting a constant. */
3510 rtx addr = fold_rtx (XEXP (x, 0), NULL_RTX);
3511 rtx base = 0;
3512 HOST_WIDE_INT offset = 0;
3514 if (GET_CODE (addr) == REG
3515 && REGNO_QTY_VALID_P (REGNO (addr)))
3517 int addr_q = REG_QTY (REGNO (addr));
3518 struct qty_table_elem *addr_ent = &qty_table[addr_q];
3520 if (GET_MODE (addr) == addr_ent->mode
3521 && addr_ent->const_rtx != NULL_RTX)
3522 addr = addr_ent->const_rtx;
3525 /* If address is constant, split it into a base and integer offset. */
3526 if (GET_CODE (addr) == SYMBOL_REF || GET_CODE (addr) == LABEL_REF)
3527 base = addr;
3528 else if (GET_CODE (addr) == CONST && GET_CODE (XEXP (addr, 0)) == PLUS
3529 && GET_CODE (XEXP (XEXP (addr, 0), 1)) == CONST_INT)
3531 base = XEXP (XEXP (addr, 0), 0);
3532 offset = INTVAL (XEXP (XEXP (addr, 0), 1));
3534 else if (GET_CODE (addr) == LO_SUM
3535 && GET_CODE (XEXP (addr, 1)) == SYMBOL_REF)
3536 base = XEXP (addr, 1);
3537 else if (GET_CODE (addr) == ADDRESSOF)
3538 return change_address (x, VOIDmode, addr);
3540 /* If this is a constant pool reference, we can fold it into its
3541 constant to allow better value tracking. */
3542 if (base && GET_CODE (base) == SYMBOL_REF
3543 && CONSTANT_POOL_ADDRESS_P (base))
3545 rtx constant = get_pool_constant (base);
3546 enum machine_mode const_mode = get_pool_mode (base);
3547 rtx new;
3549 if (CONSTANT_P (constant) && GET_CODE (constant) != CONST_INT)
3551 constant_pool_entries_cost = COST (constant);
3552 constant_pool_entries_regcost = approx_reg_cost (constant);
3555 /* If we are loading the full constant, we have an equivalence. */
3556 if (offset == 0 && mode == const_mode)
3557 return constant;
3559 /* If this actually isn't a constant (weird!), we can't do
3560 anything. Otherwise, handle the two most common cases:
3561 extracting a word from a multi-word constant, and extracting
3562 the low-order bits. Other cases don't seem common enough to
3563 worry about. */
3564 if (! CONSTANT_P (constant))
3565 return x;
3567 if (GET_MODE_CLASS (mode) == MODE_INT
3568 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
3569 && offset % UNITS_PER_WORD == 0
3570 && (new = operand_subword (constant,
3571 offset / UNITS_PER_WORD,
3572 0, const_mode)) != 0)
3573 return new;
3575 if (((BYTES_BIG_ENDIAN
3576 && offset == GET_MODE_SIZE (GET_MODE (constant)) - 1)
3577 || (! BYTES_BIG_ENDIAN && offset == 0))
3578 && (new = gen_lowpart (mode, constant)) != 0)
3579 return new;
3582 /* If this is a reference to a label at a known position in a jump
3583 table, we also know its value. */
3584 if (base && GET_CODE (base) == LABEL_REF)
3586 rtx label = XEXP (base, 0);
3587 rtx table_insn = NEXT_INSN (label);
3589 if (table_insn && GET_CODE (table_insn) == JUMP_INSN
3590 && GET_CODE (PATTERN (table_insn)) == ADDR_VEC)
3592 rtx table = PATTERN (table_insn);
3594 if (offset >= 0
3595 && (offset / GET_MODE_SIZE (GET_MODE (table))
3596 < XVECLEN (table, 0)))
3597 return XVECEXP (table, 0,
3598 offset / GET_MODE_SIZE (GET_MODE (table)));
3600 if (table_insn && GET_CODE (table_insn) == JUMP_INSN
3601 && GET_CODE (PATTERN (table_insn)) == ADDR_DIFF_VEC)
3603 rtx table = PATTERN (table_insn);
3605 if (offset >= 0
3606 && (offset / GET_MODE_SIZE (GET_MODE (table))
3607 < XVECLEN (table, 1)))
3609 offset /= GET_MODE_SIZE (GET_MODE (table));
3610 new = gen_rtx_MINUS (Pmode, XVECEXP (table, 1, offset),
3611 XEXP (table, 0));
3613 if (GET_MODE (table) != Pmode)
3614 new = gen_rtx_TRUNCATE (GET_MODE (table), new);
3616 /* Indicate this is a constant. This isn't a
3617 valid form of CONST, but it will only be used
3618 to fold the next insns and then discarded, so
3619 it should be safe.
3621 Note this expression must be explicitly discarded,
3622 by cse_insn, else it may end up in a REG_EQUAL note
3623 and "escape" to cause problems elsewhere. */
3624 return gen_rtx_CONST (GET_MODE (new), new);
3629 return x;
3632 #ifdef NO_FUNCTION_CSE
3633 case CALL:
3634 if (CONSTANT_P (XEXP (XEXP (x, 0), 0)))
3635 return x;
3636 break;
3637 #endif
3639 case ASM_OPERANDS:
3640 for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
3641 validate_change (insn, &ASM_OPERANDS_INPUT (x, i),
3642 fold_rtx (ASM_OPERANDS_INPUT (x, i), insn), 0);
3643 break;
3645 default:
3646 break;
3649 const_arg0 = 0;
3650 const_arg1 = 0;
3651 const_arg2 = 0;
3652 mode_arg0 = VOIDmode;
3654 /* Try folding our operands.
3655 Then see which ones have constant values known. */
3657 fmt = GET_RTX_FORMAT (code);
3658 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3659 if (fmt[i] == 'e')
3661 rtx arg = XEXP (x, i);
3662 rtx folded_arg = arg, const_arg = 0;
3663 enum machine_mode mode_arg = GET_MODE (arg);
3664 rtx cheap_arg, expensive_arg;
3665 rtx replacements[2];
3666 int j;
3667 int old_cost = COST_IN (XEXP (x, i), code);
3669 /* Most arguments are cheap, so handle them specially. */
3670 switch (GET_CODE (arg))
3672 case REG:
3673 /* This is the same as calling equiv_constant; it is duplicated
3674 here for speed. */
3675 if (REGNO_QTY_VALID_P (REGNO (arg)))
3677 int arg_q = REG_QTY (REGNO (arg));
3678 struct qty_table_elem *arg_ent = &qty_table[arg_q];
3680 if (arg_ent->const_rtx != NULL_RTX
3681 && GET_CODE (arg_ent->const_rtx) != REG
3682 && GET_CODE (arg_ent->const_rtx) != PLUS)
3683 const_arg
3684 = gen_lowpart (GET_MODE (arg),
3685 arg_ent->const_rtx);
3687 break;
3689 case CONST:
3690 case CONST_INT:
3691 case SYMBOL_REF:
3692 case LABEL_REF:
3693 case CONST_DOUBLE:
3694 case CONST_VECTOR:
3695 const_arg = arg;
3696 break;
3698 #ifdef HAVE_cc0
3699 case CC0:
3700 folded_arg = prev_insn_cc0;
3701 mode_arg = prev_insn_cc0_mode;
3702 const_arg = equiv_constant (folded_arg);
3703 break;
3704 #endif
3706 default:
3707 folded_arg = fold_rtx (arg, insn);
3708 const_arg = equiv_constant (folded_arg);
3711 /* For the first three operands, see if the operand
3712 is constant or equivalent to a constant. */
3713 switch (i)
3715 case 0:
3716 folded_arg0 = folded_arg;
3717 const_arg0 = const_arg;
3718 mode_arg0 = mode_arg;
3719 break;
3720 case 1:
3721 folded_arg1 = folded_arg;
3722 const_arg1 = const_arg;
3723 break;
3724 case 2:
3725 const_arg2 = const_arg;
3726 break;
3729 /* Pick the least expensive of the folded argument and an
3730 equivalent constant argument. */
3731 if (const_arg == 0 || const_arg == folded_arg
3732 || COST_IN (const_arg, code) > COST_IN (folded_arg, code))
3733 cheap_arg = folded_arg, expensive_arg = const_arg;
3734 else
3735 cheap_arg = const_arg, expensive_arg = folded_arg;
3737 /* Try to replace the operand with the cheapest of the two
3738 possibilities. If it doesn't work and this is either of the first
3739 two operands of a commutative operation, try swapping them.
3740 If THAT fails, try the more expensive, provided it is cheaper
3741 than what is already there. */
3743 if (cheap_arg == XEXP (x, i))
3744 continue;
3746 if (insn == 0 && ! copied)
3748 x = copy_rtx (x);
3749 copied = 1;
3752 /* Order the replacements from cheapest to most expensive. */
3753 replacements[0] = cheap_arg;
3754 replacements[1] = expensive_arg;
3756 for (j = 0; j < 2 && replacements[j]; j++)
3758 int new_cost = COST_IN (replacements[j], code);
3760 /* Stop if what existed before was cheaper. Prefer constants
3761 in the case of a tie. */
3762 if (new_cost > old_cost
3763 || (new_cost == old_cost && CONSTANT_P (XEXP (x, i))))
3764 break;
3766 if (validate_change (insn, &XEXP (x, i), replacements[j], 0))
3767 break;
3769 if (GET_RTX_CLASS (code) == RTX_COMM_COMPARE
3770 || GET_RTX_CLASS (code) == RTX_COMM_ARITH)
3772 validate_change (insn, &XEXP (x, i), XEXP (x, 1 - i), 1);
3773 validate_change (insn, &XEXP (x, 1 - i), replacements[j], 1);
3775 if (apply_change_group ())
3777 /* The swapped form was accepted; swap the operands back so this
3778 loop can continue, and set must_swap to redo the swap below. */
3779 rtx tem;
3781 tem = XEXP (x, 0); XEXP (x, 0) = XEXP (x, 1);
3782 XEXP (x, 1) = tem;
3783 must_swap = 1;
3784 break;
3790 else
3792 if (fmt[i] == 'E')
3793 /* Don't try to fold inside of a vector of expressions.
3794 Doing nothing is harmless. */
3798 /* If a commutative operation, place a constant integer as the second
3799 operand unless the first operand is also a constant integer. Otherwise,
3800 place any constant second unless the first operand is also a constant. */
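/* E.g., (plus (const_int 4) (reg R)) is rewritten here as
   (plus (reg R) (const_int 4)), the canonical commutative order.  */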
3802 if (COMMUTATIVE_P (x))
3804 if (must_swap
3805 || swap_commutative_operands_p (const_arg0 ? const_arg0
3806 : XEXP (x, 0),
3807 const_arg1 ? const_arg1
3808 : XEXP (x, 1)))
3810 rtx tem = XEXP (x, 0);
3812 if (insn == 0 && ! copied)
3814 x = copy_rtx (x);
3815 copied = 1;
3818 validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
3819 validate_change (insn, &XEXP (x, 1), tem, 1);
3820 if (apply_change_group ())
3822 tem = const_arg0, const_arg0 = const_arg1, const_arg1 = tem;
3823 tem = folded_arg0, folded_arg0 = folded_arg1, folded_arg1 = tem;
3828 /* If X is an arithmetic operation, see if we can simplify it. */
3830 switch (GET_RTX_CLASS (code))
3832 case RTX_UNARY:
3834 int is_const = 0;
3836 /* We can't simplify extension ops unless we know the
3837 original mode. */
3838 if ((code == ZERO_EXTEND || code == SIGN_EXTEND)
3839 && mode_arg0 == VOIDmode)
3840 break;
3842 /* If we had a CONST, strip it off and put it back later if we
3843 fold. */
3844 if (const_arg0 != 0 && GET_CODE (const_arg0) == CONST)
3845 is_const = 1, const_arg0 = XEXP (const_arg0, 0);
3847 new = simplify_unary_operation (code, mode,
3848 const_arg0 ? const_arg0 : folded_arg0,
3849 mode_arg0);
3850 if (new != 0 && is_const)
3851 new = gen_rtx_CONST (mode, new);
3853 break;
3855 case RTX_COMPARE:
3856 case RTX_COMM_COMPARE:
3857 /* See what items are actually being compared and set FOLDED_ARG[01]
3858 to those values and CODE to the actual comparison code. If any are
3859 constant, set CONST_ARG0 and CONST_ARG1 appropriately. We needn't
3860 do anything if both operands are already known to be constant. */
3862 if (const_arg0 == 0 || const_arg1 == 0)
3864 struct table_elt *p0, *p1;
3865 rtx true_rtx = const_true_rtx, false_rtx = const0_rtx;
3866 enum machine_mode mode_arg1;
3868 #ifdef FLOAT_STORE_FLAG_VALUE
3869 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
3871 true_rtx = (CONST_DOUBLE_FROM_REAL_VALUE
3872 (FLOAT_STORE_FLAG_VALUE (mode), mode));
3873 false_rtx = CONST0_RTX (mode);
3875 #endif
3877 code = find_comparison_args (code, &folded_arg0, &folded_arg1,
3878 &mode_arg0, &mode_arg1);
3879 const_arg0 = equiv_constant (folded_arg0);
3880 const_arg1 = equiv_constant (folded_arg1);
3882 /* If the mode is VOIDmode or a MODE_CC mode, we don't know
3883 what kinds of things are being compared, so we can't do
3884 anything with this comparison. */
3886 if (mode_arg0 == VOIDmode || GET_MODE_CLASS (mode_arg0) == MODE_CC)
3887 break;
3889 /* If we do not now have two constants being compared, see
3890 if we can nevertheless deduce some things about the
3891 comparison. */
3892 if (const_arg0 == 0 || const_arg1 == 0)
3894 /* Some addresses are known to be nonzero. We don't know
3895 their sign, but equality comparisons are known. */
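/* For instance, when FOLDED_ARG0 is the address of an object, such
   as a non-weak (symbol_ref ...), (eq ... (const_int 0)) folds to
   false and (ne ... (const_int 0)) folds to true.  */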
3896 if (const_arg1 == const0_rtx
3897 && nonzero_address_p (folded_arg0))
3899 if (code == EQ)
3900 return false_rtx;
3901 else if (code == NE)
3902 return true_rtx;
3905 /* See if the two operands are the same. */
3907 if (folded_arg0 == folded_arg1
3908 || (GET_CODE (folded_arg0) == REG
3909 && GET_CODE (folded_arg1) == REG
3910 && (REG_QTY (REGNO (folded_arg0))
3911 == REG_QTY (REGNO (folded_arg1))))
3912 || ((p0 = lookup (folded_arg0,
3913 (safe_hash (folded_arg0, mode_arg0)
3914 & HASH_MASK), mode_arg0))
3915 && (p1 = lookup (folded_arg1,
3916 (safe_hash (folded_arg1, mode_arg0)
3917 & HASH_MASK), mode_arg0))
3918 && p0->first_same_value == p1->first_same_value))
3920 /* Sadly two equal NaNs are not equivalent. */
3921 if (!HONOR_NANS (mode_arg0))
3922 return ((code == EQ || code == LE || code == GE
3923 || code == LEU || code == GEU || code == UNEQ
3924 || code == UNLE || code == UNGE
3925 || code == ORDERED)
3926 ? true_rtx : false_rtx);
3927 /* Take care for the FP compares we can resolve. */
3928 if (code == UNEQ || code == UNLE || code == UNGE)
3929 return true_rtx;
3930 if (code == LTGT || code == LT || code == GT)
3931 return false_rtx;
3934 /* If FOLDED_ARG0 is a register, see if the comparison we are
3935 doing now is either the same as we did before or the reverse
3936 (we only check the reverse if not floating-point). */
3937 else if (GET_CODE (folded_arg0) == REG)
3939 int qty = REG_QTY (REGNO (folded_arg0));
3941 if (REGNO_QTY_VALID_P (REGNO (folded_arg0)))
3943 struct qty_table_elem *ent = &qty_table[qty];
3945 if ((comparison_dominates_p (ent->comparison_code, code)
3946 || (! FLOAT_MODE_P (mode_arg0)
3947 && comparison_dominates_p (ent->comparison_code,
3948 reverse_condition (code))))
3949 && (rtx_equal_p (ent->comparison_const, folded_arg1)
3950 || (const_arg1
3951 && rtx_equal_p (ent->comparison_const,
3952 const_arg1))
3953 || (GET_CODE (folded_arg1) == REG
3954 && (REG_QTY (REGNO (folded_arg1)) == ent->comparison_qty))))
3955 return (comparison_dominates_p (ent->comparison_code, code)
3956 ? true_rtx : false_rtx);
3962 /* If we are comparing against zero, see if the first operand is
3963 equivalent to an IOR with a constant. If so, we may be able to
3964 determine the result of this comparison. */
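/* If FOLDED_ARG0 is known to be (ior X (const_int C)) with C
   nonzero, some bit of it is set, so it cannot equal zero; and
   when C has the sign bit set, the value is known negative, which
   settles the signed comparisons handled below.  */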
3966 if (const_arg1 == const0_rtx)
3968 rtx y = lookup_as_function (folded_arg0, IOR);
3969 rtx inner_const;
3971 if (y != 0
3972 && (inner_const = equiv_constant (XEXP (y, 1))) != 0
3973 && GET_CODE (inner_const) == CONST_INT
3974 && INTVAL (inner_const) != 0)
3976 int sign_bitnum = GET_MODE_BITSIZE (mode_arg0) - 1;
3977 int has_sign = (HOST_BITS_PER_WIDE_INT >= sign_bitnum
3978 && (INTVAL (inner_const)
3979 & ((HOST_WIDE_INT) 1 << sign_bitnum)));
3980 rtx true_rtx = const_true_rtx, false_rtx = const0_rtx;
3982 #ifdef FLOAT_STORE_FLAG_VALUE
3983 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
3985 true_rtx = (CONST_DOUBLE_FROM_REAL_VALUE
3986 (FLOAT_STORE_FLAG_VALUE (mode), mode));
3987 false_rtx = CONST0_RTX (mode);
3989 #endif
3991 switch (code)
3993 case EQ:
3994 return false_rtx;
3995 case NE:
3996 return true_rtx;
3997 case LT: case LE:
3998 if (has_sign)
3999 return true_rtx;
4000 break;
4001 case GT: case GE:
4002 if (has_sign)
4003 return false_rtx;
4004 break;
4005 default:
4006 break;
4011 new = simplify_relational_operation (code,
4012 (mode_arg0 != VOIDmode
4013 ? mode_arg0
4014 : (GET_MODE (const_arg0
4015 ? const_arg0
4016 : folded_arg0)
4017 != VOIDmode)
4018 ? GET_MODE (const_arg0
4019 ? const_arg0
4020 : folded_arg0)
4021 : GET_MODE (const_arg1
4022 ? const_arg1
4023 : folded_arg1)),
4024 const_arg0 ? const_arg0 : folded_arg0,
4025 const_arg1 ? const_arg1 : folded_arg1);
4026 #ifdef FLOAT_STORE_FLAG_VALUE
4027 if (new != 0 && GET_MODE_CLASS (mode) == MODE_FLOAT)
4029 if (new == const0_rtx)
4030 new = CONST0_RTX (mode);
4031 else
4032 new = (CONST_DOUBLE_FROM_REAL_VALUE
4033 (FLOAT_STORE_FLAG_VALUE (mode), mode));
4035 #endif
4036 break;
4038 case RTX_BIN_ARITH:
4039 case RTX_COMM_ARITH:
4040 switch (code)
4042 case PLUS:
4043 /* If the second operand is a LABEL_REF, see if the first is a MINUS
4044 with that LABEL_REF as its second operand. If so, the result is
4045 the first operand of that MINUS. This handles switches with an
4046 ADDR_DIFF_VEC table. */
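/* That is, (plus (minus Z (label_ref L)) (label_ref L)) folds
   straight back to Z.  */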
4047 if (const_arg1 && GET_CODE (const_arg1) == LABEL_REF)
4049 rtx y
4050 = GET_CODE (folded_arg0) == MINUS ? folded_arg0
4051 : lookup_as_function (folded_arg0, MINUS);
4053 if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
4054 && XEXP (XEXP (y, 1), 0) == XEXP (const_arg1, 0))
4055 return XEXP (y, 0);
4057 /* Now try for a CONST of a MINUS like the above. */
4058 if ((y = (GET_CODE (folded_arg0) == CONST ? folded_arg0
4059 : lookup_as_function (folded_arg0, CONST))) != 0
4060 && GET_CODE (XEXP (y, 0)) == MINUS
4061 && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
4062 && XEXP (XEXP (XEXP (y, 0), 1), 0) == XEXP (const_arg1, 0))
4063 return XEXP (XEXP (y, 0), 0);
4066 /* Likewise if the operands are in the other order. */
4067 if (const_arg0 && GET_CODE (const_arg0) == LABEL_REF)
4069 rtx y
4070 = GET_CODE (folded_arg1) == MINUS ? folded_arg1
4071 : lookup_as_function (folded_arg1, MINUS);
4073 if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
4074 && XEXP (XEXP (y, 1), 0) == XEXP (const_arg0, 0))
4075 return XEXP (y, 0);
4077 /* Now try for a CONST of a MINUS like the above. */
4078 if ((y = (GET_CODE (folded_arg1) == CONST ? folded_arg1
4079 : lookup_as_function (folded_arg1, CONST))) != 0
4080 && GET_CODE (XEXP (y, 0)) == MINUS
4081 && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
4082 && XEXP (XEXP (XEXP (y, 0), 1), 0) == XEXP (const_arg0, 0))
4083 return XEXP (XEXP (y, 0), 0);
4086 /* If second operand is a register equivalent to a negative
4087 CONST_INT, see if we can find a register equivalent to the
4088 positive constant. Make a MINUS if so. Don't do this for
4089 a non-negative constant since we might then alternate between
4090 choosing positive and negative constants. Having the positive
4091 constant previously-used is the more common case. Be sure
4092 the resulting constant is non-negative; if const_arg1 were
4093 the smallest negative number this would overflow: depending
4094 on the mode, this would either just be the same value (and
4095 hence not save anything) or be incorrect. */
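/* So for (plus X R2) where R2 is known to hold (const_int -4),
   if some register R3 is known to hold (const_int 4), this
   produces (minus X R3) instead.  */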
4096 if (const_arg1 != 0 && GET_CODE (const_arg1) == CONST_INT
4097 && INTVAL (const_arg1) < 0
4098 /* This used to test
4100 -INTVAL (const_arg1) >= 0
4102 But the Sun V5.0 compilers mis-compiled that test. So
4103 instead we test for the problematic value in a more direct
4104 manner and hope the Sun compilers get it correct. */
4105 && INTVAL (const_arg1) !=
4106 ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1))
4107 && GET_CODE (folded_arg1) == REG)
4109 rtx new_const = GEN_INT (-INTVAL (const_arg1));
4110 struct table_elt *p
4111 = lookup (new_const, safe_hash (new_const, mode) & HASH_MASK,
4112 mode);
4114 if (p)
4115 for (p = p->first_same_value; p; p = p->next_same_value)
4116 if (GET_CODE (p->exp) == REG)
4117 return simplify_gen_binary (MINUS, mode, folded_arg0,
4118 canon_reg (p->exp, NULL_RTX));
4120 goto from_plus;
4122 case MINUS:
4123 /* If we have (MINUS Y C), see if Y is known to be (PLUS Z C2).
4124 If so, produce (PLUS Z C2-C). */
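/* E.g., (minus Y (const_int 3)) with Y known to be
   (plus Z (const_int 7)) folds to (plus Z (const_int 4)).  */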
4125 if (const_arg1 != 0 && GET_CODE (const_arg1) == CONST_INT)
4127 rtx y = lookup_as_function (XEXP (x, 0), PLUS);
4128 if (y && GET_CODE (XEXP (y, 1)) == CONST_INT)
4129 return fold_rtx (plus_constant (copy_rtx (y),
4130 -INTVAL (const_arg1)),
4131 NULL_RTX);
4134 /* Fall through. */
4136 from_plus:
4137 case SMIN: case SMAX: case UMIN: case UMAX:
4138 case IOR: case AND: case XOR:
4139 case MULT:
4140 case ASHIFT: case LSHIFTRT: case ASHIFTRT:
4141 /* If we have (<op> <reg> <const_int>) for an associative OP and REG
4142 is known to be of similar form, we may be able to replace the
4143 operation with a combined operation. This may eliminate the
4144 intermediate operation if every use is simplified in this way.
4145 Note that the similar optimization done by combine.c only works
4146 if the intermediate operation's result has only one reference. */
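/* Typical cases: (ashift (ashift X (const_int 2)) (const_int 3))
   becomes (ashift X (const_int 5)), since shift counts compose by
   addition, and nested ANDs compose by ANDing their masks.  */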
4148 if (GET_CODE (folded_arg0) == REG
4149 && const_arg1 && GET_CODE (const_arg1) == CONST_INT)
4151 int is_shift
4152 = (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT);
4153 rtx y = lookup_as_function (folded_arg0, code);
4154 rtx inner_const;
4155 enum rtx_code associate_code;
4156 rtx new_const;
4158 if (y == 0
4159 || 0 == (inner_const
4160 = equiv_constant (fold_rtx (XEXP (y, 1), 0)))
4161 || GET_CODE (inner_const) != CONST_INT
4162 /* If we have compiled a statement like
4163 "if (x == (x & mask1))", and now are looking at
4164 "x & mask2", we will have a case where the first operand
4165 of Y is the same as our first operand. Unless we detect
4166 this case, an infinite loop will result. */
4167 || XEXP (y, 0) == folded_arg0)
4168 break;
4170 /* Don't associate these operations if they are a PLUS with the
4171 same constant and it is a power of two. These might be doable
4172 with a pre- or post-increment. Similarly for two subtracts of
4173 identical powers of two with post-decrement. */
4175 if (code == PLUS && const_arg1 == inner_const
4176 && ((HAVE_PRE_INCREMENT
4177 && exact_log2 (INTVAL (const_arg1)) >= 0)
4178 || (HAVE_POST_INCREMENT
4179 && exact_log2 (INTVAL (const_arg1)) >= 0)
4180 || (HAVE_PRE_DECREMENT
4181 && exact_log2 (- INTVAL (const_arg1)) >= 0)
4182 || (HAVE_POST_DECREMENT
4183 && exact_log2 (- INTVAL (const_arg1)) >= 0)))
4184 break;
4186 /* Compute the code used to compose the constants. For example,
4187 A-C1-C2 is A-(C1 + C2), so if CODE == MINUS, we want PLUS. */
4189 associate_code = (is_shift || code == MINUS ? PLUS : code);
4191 new_const = simplify_binary_operation (associate_code, mode,
4192 const_arg1, inner_const);
4194 if (new_const == 0)
4195 break;
4197 /* If we are associating shift operations, don't let this
4198 produce a shift of the size of the object or larger.
4199 This could occur when we follow a sign-extend by a right
4200 shift on a machine that does a sign-extend as a pair
4201 of shifts. */
4203 if (is_shift && GET_CODE (new_const) == CONST_INT
4204 && INTVAL (new_const) >= GET_MODE_BITSIZE (mode))
4206 /* As an exception, we can turn an ASHIFTRT of this
4207 form into a shift of the number of bits - 1. */
4208 if (code == ASHIFTRT)
4209 new_const = GEN_INT (GET_MODE_BITSIZE (mode) - 1);
4210 else
4211 break;
4214 y = copy_rtx (XEXP (y, 0));
4216 /* If Y contains our first operand (the most common way this
4217 can happen is if Y is a MEM), we would go into an infinite
4218 loop if we tried to fold it. So don't in that case. */
4220 if (! reg_mentioned_p (folded_arg0, y))
4221 y = fold_rtx (y, insn);
4223 return simplify_gen_binary (code, mode, y, new_const);
4225 break;
4227 case DIV: case UDIV:
4228 /* ??? The associative optimization performed immediately above is
4229 also possible for DIV and UDIV using associate_code of MULT.
4230 However, we would need extra code to verify that the
4231 multiplication does not overflow, that is, there is no overflow
4232 in the calculation of new_const. */
4233 break;
4235 default:
4236 break;
4239 new = simplify_binary_operation (code, mode,
4240 const_arg0 ? const_arg0 : folded_arg0,
4241 const_arg1 ? const_arg1 : folded_arg1);
4242 break;
4244 case RTX_OBJ:
4245 /* (lo_sum (high X) X) is simply X. */
4246 if (code == LO_SUM && const_arg0 != 0
4247 && GET_CODE (const_arg0) == HIGH
4248 && rtx_equal_p (XEXP (const_arg0, 0), const_arg1))
4249 return const_arg1;
4250 break;
4252 case RTX_TERNARY:
4253 case RTX_BITFIELD_OPS:
4254 new = simplify_ternary_operation (code, mode, mode_arg0,
4255 const_arg0 ? const_arg0 : folded_arg0,
4256 const_arg1 ? const_arg1 : folded_arg1,
4257 const_arg2 ? const_arg2 : XEXP (x, 2));
4258 break;
4260 case RTX_EXTRA:
4261 /* Eliminate CONSTANT_P_RTX if it's constant. */
4262 if (code == CONSTANT_P_RTX)
4264 if (const_arg0)
4265 return const1_rtx;
4266 if (optimize == 0 || !flag_gcse)
4267 return const0_rtx;
4269 break;
4271 default:
4272 break;
4275 return new ? new : x;
4278 /* Return a constant value currently equivalent to X.
4279 Return 0 if we don't know one. */
4281 static rtx
4282 equiv_constant (rtx x)
4284 if (GET_CODE (x) == REG
4285 && REGNO_QTY_VALID_P (REGNO (x)))
4287 int x_q = REG_QTY (REGNO (x));
4288 struct qty_table_elem *x_ent = &qty_table[x_q];
4290 if (x_ent->const_rtx)
4291 x = gen_lowpart (GET_MODE (x), x_ent->const_rtx);
4294 if (x == 0 || CONSTANT_P (x))
4295 return x;
4297 /* If X is a MEM, try to fold it outside the context of any insn to see if
4298 it might be equivalent to a constant. That handles the case where it
4299 is a constant-pool reference. Then try to look it up in the hash table
4300 in case it is something whose value we have seen before. */
4302 if (GET_CODE (x) == MEM)
4304 struct table_elt *elt;
4306 x = fold_rtx (x, NULL_RTX);
4307 if (CONSTANT_P (x))
4308 return x;
4310 elt = lookup (x, safe_hash (x, GET_MODE (x)) & HASH_MASK, GET_MODE (x));
4311 if (elt == 0)
4312 return 0;
4314 for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
4315 if (elt->is_const && CONSTANT_P (elt->exp))
4316 return elt->exp;
4319 return 0;
4322 /* Assuming that X is an rtx (e.g., MEM, REG or SUBREG) for a fixed-point
4323 number, return an rtx (MEM, SUBREG, or CONST_INT) that refers to the
4324 least-significant part of X.
4325 MODE specifies how big a part of X to return.
4327 If the requested operation cannot be done, 0 is returned.
4329 This is similar to gen_lowpart_general in emit-rtl.c. */
4331 rtx
4332 gen_lowpart_if_possible (enum machine_mode mode, rtx x)
4334 rtx result = gen_lowpart_common (mode, x);
4336 if (result)
4337 return result;
4338 else if (GET_CODE (x) == MEM)
4340 /* This is the only other case we handle. */
4341 int offset = 0;
4342 rtx new;
4344 if (WORDS_BIG_ENDIAN)
4345 offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
4346 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
4347 if (BYTES_BIG_ENDIAN)
4348 /* Adjust the address so that the address-after-the-data is
4349 unchanged. */
4350 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
4351 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
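/* E.g., taking the SImode low part of a DImode MEM on a fully
   big-endian target adjusts the address by +4, while on a
   little-endian target the offset remains 0.  */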
4353 new = adjust_address_nv (x, mode, offset);
4354 if (! memory_address_p (mode, XEXP (new, 0)))
4355 return 0;
4357 return new;
4359 else
4360 return 0;
4363 /* Given INSN, a jump insn, TAKEN indicates if we are following the "taken"
4364 branch. It will be zero if not.
4366 In certain cases, this can cause us to add an equivalence. For example,
4367 if we are following the taken case of
4368 if (i == 2)
4369 we can add the fact that `i' and `2' are now equivalent.
4371 In any case, we can record that this comparison was passed. If the same
4372 comparison is seen later, we will know its value. */
4374 static void
4375 record_jump_equiv (rtx insn, int taken)
4377 int cond_known_true;
4378 rtx op0, op1;
4379 rtx set;
4380 enum machine_mode mode, mode0, mode1;
4381 int reversed_nonequality = 0;
4382 enum rtx_code code;
4384 /* Ensure this is the right kind of insn. */
4385 if (! any_condjump_p (insn))
4386 return;
4387 set = pc_set (insn);
4389 /* See if this jump condition is known true or false. */
4390 if (taken)
4391 cond_known_true = (XEXP (SET_SRC (set), 2) == pc_rtx);
4392 else
4393 cond_known_true = (XEXP (SET_SRC (set), 1) == pc_rtx);
4395 /* Get the type of comparison being done and the operands being compared.
4396 If we had to reverse a non-equality condition, record that fact so we
4397 know that it isn't valid for floating-point. */
4398 code = GET_CODE (XEXP (SET_SRC (set), 0));
4399 op0 = fold_rtx (XEXP (XEXP (SET_SRC (set), 0), 0), insn);
4400 op1 = fold_rtx (XEXP (XEXP (SET_SRC (set), 0), 1), insn);
4402 code = find_comparison_args (code, &op0, &op1, &mode0, &mode1);
4403 if (! cond_known_true)
4405 code = reversed_comparison_code_parts (code, op0, op1, insn);
4407 /* Don't remember if we can't find the inverse. */
4408 if (code == UNKNOWN)
4409 return;
4412 /* The mode is the mode of the non-constant. */
4413 mode = mode0;
4414 if (mode1 != VOIDmode)
4415 mode = mode1;
4417 record_jump_cond (code, mode, op0, op1, reversed_nonequality);
4420 /* We know that comparison CODE applied to OP0 and OP1 in MODE is true.
4421 REVERSED_NONEQUALITY is nonzero if CODE had to be swapped.
4422 Make any useful entries we can with that information. Called from
4423 above function and called recursively. */
4425 static void
4426 record_jump_cond (enum rtx_code code, enum machine_mode mode, rtx op0,
4427 rtx op1, int reversed_nonequality)
4429 unsigned op0_hash, op1_hash;
4430 int op0_in_memory, op1_in_memory;
4431 struct table_elt *op0_elt, *op1_elt;
4433 /* If OP0 and OP1 are known equal, and either is a paradoxical SUBREG,
4434 we know that they are also equal in the smaller mode (this is also
4435 true for all smaller modes whether or not there is a SUBREG, but
4436 is not worth testing for with no SUBREG). */
4438 /* Note that GET_MODE (op0) may not equal MODE. */
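/* For instance, from (subreg:DI (reg:SI R) 0) == Y:DI we may
   record that R equals the SImode low part of Y.  */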
4439 if (code == EQ && GET_CODE (op0) == SUBREG
4440 && (GET_MODE_SIZE (GET_MODE (op0))
4441 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
4443 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
4444 rtx tem = gen_lowpart (inner_mode, op1);
4446 record_jump_cond (code, mode, SUBREG_REG (op0),
4447 tem ? tem : gen_rtx_SUBREG (inner_mode, op1, 0),
4448 reversed_nonequality);
4451 if (code == EQ && GET_CODE (op1) == SUBREG
4452 && (GET_MODE_SIZE (GET_MODE (op1))
4453 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
4455 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
4456 rtx tem = gen_lowpart (inner_mode, op0);
4458 record_jump_cond (code, mode, SUBREG_REG (op1),
4459 tem ? tem : gen_rtx_SUBREG (inner_mode, op0, 0),
4460 reversed_nonequality);
4463 /* Similarly, if this is an NE comparison, and either is a SUBREG
4464 making a smaller mode, we know the whole thing is also NE. */
4466 /* Note that GET_MODE (op0) may not equal MODE;
4467 if we test MODE instead, we can get an infinite recursion
4468 alternating between two modes each wider than MODE. */
4470 if (code == NE && GET_CODE (op0) == SUBREG
4471 && subreg_lowpart_p (op0)
4472 && (GET_MODE_SIZE (GET_MODE (op0))
4473 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
4475 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
4476 rtx tem = gen_lowpart (inner_mode, op1);
4478 record_jump_cond (code, mode, SUBREG_REG (op0),
4479 tem ? tem : gen_rtx_SUBREG (inner_mode, op1, 0),
4480 reversed_nonequality);
4483 if (code == NE && GET_CODE (op1) == SUBREG
4484 && subreg_lowpart_p (op1)
4485 && (GET_MODE_SIZE (GET_MODE (op1))
4486 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
4488 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
4489 rtx tem = gen_lowpart (inner_mode, op0);
4491 record_jump_cond (code, mode, SUBREG_REG (op1),
4492 tem ? tem : gen_rtx_SUBREG (inner_mode, op0, 0),
4493 reversed_nonequality);
4496 /* Hash both operands. */
4498 do_not_record = 0;
4499 hash_arg_in_memory = 0;
4500 op0_hash = HASH (op0, mode);
4501 op0_in_memory = hash_arg_in_memory;
4503 if (do_not_record)
4504 return;
4506 do_not_record = 0;
4507 hash_arg_in_memory = 0;
4508 op1_hash = HASH (op1, mode);
4509 op1_in_memory = hash_arg_in_memory;
4511 if (do_not_record)
4512 return;
4514 /* Look up both operands. */
4515 op0_elt = lookup (op0, op0_hash, mode);
4516 op1_elt = lookup (op1, op1_hash, mode);
4518 /* If both operands are already equivalent or if they are not in the
4519 table but are identical, do nothing. */
4520 if ((op0_elt != 0 && op1_elt != 0
4521 && op0_elt->first_same_value == op1_elt->first_same_value)
4522 || op0 == op1 || rtx_equal_p (op0, op1))
4523 return;
4525 /* If we aren't setting two things equal, all we can do is save this
4526 comparison. Similarly if this is floating-point. In the latter
4527 case, OP1 might be zero and both -0.0 and 0.0 are equal to it.
4528 If we record the equality, we might inadvertently delete code
4529 whose intent was to change -0 to +0. */
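/* E.g., after the taken branch of "if (x == 0.0)" we must not
   rewrite uses of x as 0.0, since x may well be -0.0.  */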
4531 if (code != EQ || FLOAT_MODE_P (GET_MODE (op0)))
4533 struct qty_table_elem *ent;
4534 int qty;
4536 /* If we reversed a floating-point comparison, if OP0 is not a
4537 register, or if OP1 is neither a register nor a constant, we can't
4538 do anything. */
4540 if (GET_CODE (op1) != REG)
4541 op1 = equiv_constant (op1);
4543 if ((reversed_nonequality && FLOAT_MODE_P (mode))
4544 || GET_CODE (op0) != REG || op1 == 0)
4545 return;
4547 /* Put OP0 in the hash table if it isn't already. This gives it a
4548 new quantity number. */
4549 if (op0_elt == 0)
4551 if (insert_regs (op0, NULL, 0))
4553 rehash_using_reg (op0);
4554 op0_hash = HASH (op0, mode);
4556 /* If OP0 is contained in OP1, this changes its hash code
4557 as well. Faster to rehash than to check, except
4558 for the simple case of a constant. */
4559 if (! CONSTANT_P (op1))
4560 op1_hash = HASH (op1, mode);
4563 op0_elt = insert (op0, NULL, op0_hash, mode);
4564 op0_elt->in_memory = op0_in_memory;
4567 qty = REG_QTY (REGNO (op0));
4568 ent = &qty_table[qty];
4570 ent->comparison_code = code;
4571 if (GET_CODE (op1) == REG)
4573 /* Look it up again--in case op0 and op1 are the same. */
4574 op1_elt = lookup (op1, op1_hash, mode);
4576 /* Put OP1 in the hash table so it gets a new quantity number. */
4577 if (op1_elt == 0)
4579 if (insert_regs (op1, NULL, 0))
4581 rehash_using_reg (op1);
4582 op1_hash = HASH (op1, mode);
4585 op1_elt = insert (op1, NULL, op1_hash, mode);
4586 op1_elt->in_memory = op1_in_memory;
4589 ent->comparison_const = NULL_RTX;
4590 ent->comparison_qty = REG_QTY (REGNO (op1));
4592 else
4594 ent->comparison_const = op1;
4595 ent->comparison_qty = -1;
4598 return;
4601 /* If either side is still missing an equivalence, make it now,
4602 then merge the equivalences. */
4604 if (op0_elt == 0)
4606 if (insert_regs (op0, NULL, 0))
4608 rehash_using_reg (op0);
4609 op0_hash = HASH (op0, mode);
4612 op0_elt = insert (op0, NULL, op0_hash, mode);
4613 op0_elt->in_memory = op0_in_memory;
4616 if (op1_elt == 0)
4618 if (insert_regs (op1, NULL, 0))
4620 rehash_using_reg (op1);
4621 op1_hash = HASH (op1, mode);
4624 op1_elt = insert (op1, NULL, op1_hash, mode);
4625 op1_elt->in_memory = op1_in_memory;
4628 merge_equiv_classes (op0_elt, op1_elt);
4629 last_jump_equiv_class = op0_elt;
4632 /* CSE processing for one instruction.
4633 First simplify sources and addresses of all assignments
4634 in the instruction, using previously-computed equivalent values.
4635 Then install the new sources and destinations in the table
4636 of available values.
4638 If LIBCALL_INSN is nonzero, don't record any equivalence made in
4639 the insn. It means that INSN is inside a libcall block. In this
4640 case LIBCALL_INSN is the corresponding insn with REG_LIBCALL. */
4642 /* Data on one SET contained in the instruction. */
4644 struct set
4646 /* The SET rtx itself. */
4647 rtx rtl;
4648 /* The SET_SRC of the rtx (the original value, if it is changing). */
4649 rtx src;
4650 /* The hash-table element for the SET_SRC of the SET. */
4651 struct table_elt *src_elt;
4652 /* Hash value for the SET_SRC. */
4653 unsigned src_hash;
4654 /* Hash value for the SET_DEST. */
4655 unsigned dest_hash;
4656 /* The SET_DEST, with SUBREG, etc., stripped. */
4657 rtx inner_dest;
4658 /* Nonzero if the SET_SRC is in memory. */
4659 char src_in_memory;
4660 /* Nonzero if the SET_SRC contains something
4661 whose value cannot be predicted and understood. */
4662 char src_volatile;
4663 /* Original machine mode, in case it becomes a CONST_INT.
4664 The size of this field should match the size of the mode
4665 field of struct rtx_def (see rtl.h). */
4666 ENUM_BITFIELD(machine_mode) mode : 8;
4667 /* A constant equivalent for SET_SRC, if any. */
4668 rtx src_const;
4669 /* Original SET_SRC value used for libcall notes. */
4670 rtx orig_src;
4671 /* Hash value of constant equivalent for SET_SRC. */
4672 unsigned src_const_hash;
4673 /* Table entry for constant equivalent for SET_SRC, if any. */
4674 struct table_elt *src_const_elt;
4675 };
4677 static void
4678 cse_insn (rtx insn, rtx libcall_insn)
4680 rtx x = PATTERN (insn);
4681 int i;
4682 rtx tem;
4683 int n_sets = 0;
4685 #ifdef HAVE_cc0
4686 /* Records what this insn does to set CC0. */
4687 rtx this_insn_cc0 = 0;
4688 enum machine_mode this_insn_cc0_mode = VOIDmode;
4689 #endif
4691 rtx src_eqv = 0;
4692 struct table_elt *src_eqv_elt = 0;
4693 int src_eqv_volatile = 0;
4694 int src_eqv_in_memory = 0;
4695 unsigned src_eqv_hash = 0;
4697 struct set *sets = (struct set *) 0;
4699 this_insn = insn;
4701 /* Find all the SETs and CLOBBERs in this instruction.
4702 Record all the SETs in the array `set' and count them.
4703 Also determine whether there is a CLOBBER that invalidates
4704 all memory references, or all references at varying addresses. */
4706 if (GET_CODE (insn) == CALL_INSN)
4708 for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
4710 if (GET_CODE (XEXP (tem, 0)) == CLOBBER)
4711 invalidate (SET_DEST (XEXP (tem, 0)), VOIDmode);
4712 XEXP (tem, 0) = canon_reg (XEXP (tem, 0), insn);
4716 if (GET_CODE (x) == SET)
4718 sets = alloca (sizeof (struct set));
4719 sets[0].rtl = x;
4721 /* Ignore SETs that are unconditional jumps.
4722 They never need cse processing, so this does not hurt.
4723 The reason is not efficiency but rather
4724 so that we can test at the end for instructions
4725 that have been simplified to unconditional jumps
4726 and not be misled by unchanged instructions
4727 that were unconditional jumps to begin with. */
4728 if (SET_DEST (x) == pc_rtx
4729 && GET_CODE (SET_SRC (x)) == LABEL_REF)
4732 /* Don't count call-insns, (set (reg 0) (call ...)), as a set.
4733 The hard function value register is used only once, to copy to
4734 someplace else, so it isn't worth cse'ing (and on 80386 is unsafe)!
4735 Ensure we invalidate the destination register. On the 80386 no
4736 other code would invalidate it since it is a fixed_reg.
4737 We need not check the return of apply_change_group; see canon_reg. */
4739 else if (GET_CODE (SET_SRC (x)) == CALL)
4741 canon_reg (SET_SRC (x), insn);
4742 apply_change_group ();
4743 fold_rtx (SET_SRC (x), insn);
4744 invalidate (SET_DEST (x), VOIDmode);
4746 else
4747 n_sets = 1;
4749 else if (GET_CODE (x) == PARALLEL)
4751 int lim = XVECLEN (x, 0);
4753 sets = alloca (lim * sizeof (struct set));
4755 /* Find all regs explicitly clobbered in this insn,
4756 and ensure they are not replaced with any other regs
4757 elsewhere in this insn.
4758 When a reg that is clobbered is also used for input,
4759 we should presume that that is for a reason,
4760 and we should not substitute some other register
4761 which is not supposed to be clobbered.
4762 Therefore, this loop cannot be merged into the one below
4763 because a CALL may precede a CLOBBER and refer to the
4764 value clobbered. We must not let a canonicalization do
4765 anything in that case. */
4766 for (i = 0; i < lim; i++)
4768 rtx y = XVECEXP (x, 0, i);
4769 if (GET_CODE (y) == CLOBBER)
4771 rtx clobbered = XEXP (y, 0);
4773 if (GET_CODE (clobbered) == REG
4774 || GET_CODE (clobbered) == SUBREG)
4775 invalidate (clobbered, VOIDmode);
4776 else if (GET_CODE (clobbered) == STRICT_LOW_PART
4777 || GET_CODE (clobbered) == ZERO_EXTRACT)
4778 invalidate (XEXP (clobbered, 0), GET_MODE (clobbered));
4782 for (i = 0; i < lim; i++)
4784 rtx y = XVECEXP (x, 0, i);
4785 if (GET_CODE (y) == SET)
4787 /* As above, we ignore unconditional jumps and call-insns and
4788 ignore the result of apply_change_group. */
4789 if (GET_CODE (SET_SRC (y)) == CALL)
4791 canon_reg (SET_SRC (y), insn);
4792 apply_change_group ();
4793 fold_rtx (SET_SRC (y), insn);
4794 invalidate (SET_DEST (y), VOIDmode);
4796 else if (SET_DEST (y) == pc_rtx
4797 && GET_CODE (SET_SRC (y)) == LABEL_REF)
4799 else
4800 sets[n_sets++].rtl = y;
4802 else if (GET_CODE (y) == CLOBBER)
4804 /* If we clobber memory, canon the address.
4805 This does nothing when a register is clobbered
4806 because we have already invalidated the reg. */
4807 if (GET_CODE (XEXP (y, 0)) == MEM)
4808 canon_reg (XEXP (y, 0), NULL_RTX);
4810 else if (GET_CODE (y) == USE
4811 && ! (GET_CODE (XEXP (y, 0)) == REG
4812 && REGNO (XEXP (y, 0)) < FIRST_PSEUDO_REGISTER))
4813 canon_reg (y, NULL_RTX);
4814 else if (GET_CODE (y) == CALL)
4816 /* The result of apply_change_group can be ignored; see
4817 canon_reg. */
4818 canon_reg (y, insn);
4819 apply_change_group ();
4820 fold_rtx (y, insn);
4824 else if (GET_CODE (x) == CLOBBER)
4826 if (GET_CODE (XEXP (x, 0)) == MEM)
4827 canon_reg (XEXP (x, 0), NULL_RTX);
4830 /* Canonicalize a USE of a pseudo register or memory location. */
4831 else if (GET_CODE (x) == USE
4832 && ! (GET_CODE (XEXP (x, 0)) == REG
4833 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER))
4834 canon_reg (XEXP (x, 0), NULL_RTX);
4835 else if (GET_CODE (x) == CALL)
4837 /* The result of apply_change_group can be ignored; see canon_reg. */
4838 canon_reg (x, insn);
4839 apply_change_group ();
4840 fold_rtx (x, insn);
4843 /* Store the equivalent value in SRC_EQV, if different, or if the DEST
4844 is a STRICT_LOW_PART. The latter condition is necessary because SRC_EQV
4845 is handled specially for this case, and if it isn't set, then there will
4846 be no equivalence for the destination. */
4847 if (n_sets == 1 && REG_NOTES (insn) != 0
4848 && (tem = find_reg_note (insn, REG_EQUAL, NULL_RTX)) != 0
4849 && (! rtx_equal_p (XEXP (tem, 0), SET_SRC (sets[0].rtl))
4850 || GET_CODE (SET_DEST (sets[0].rtl)) == STRICT_LOW_PART))
4852 src_eqv = fold_rtx (canon_reg (XEXP (tem, 0), NULL_RTX), insn);
4853 XEXP (tem, 0) = src_eqv;
4856 /* Canonicalize sources and addresses of destinations.
4857 We do this in a separate pass to avoid problems when a MATCH_DUP is
4858 present in the insn pattern. In that case, we want to ensure that
4859 we don't break the duplicate nature of the pattern. So we will replace
4860 both operands at the same time. Otherwise, we would fail to find an
4861 equivalent substitution in the loop calling validate_change below.
4863 We used to suppress canonicalization of DEST if it appears in SRC,
4864 but we don't do this any more. */
4866 for (i = 0; i < n_sets; i++)
4868 rtx dest = SET_DEST (sets[i].rtl);
4869 rtx src = SET_SRC (sets[i].rtl);
4870 rtx new = canon_reg (src, insn);
4871 int insn_code;
4873 sets[i].orig_src = src;
4874 if ((GET_CODE (new) == REG && GET_CODE (src) == REG
4875 && ((REGNO (new) < FIRST_PSEUDO_REGISTER)
4876 != (REGNO (src) < FIRST_PSEUDO_REGISTER)))
4877 || (insn_code = recog_memoized (insn)) < 0
4878 || insn_data[insn_code].n_dups > 0)
4879 validate_change (insn, &SET_SRC (sets[i].rtl), new, 1);
4880 else
4881 SET_SRC (sets[i].rtl) = new;
4883 if (GET_CODE (dest) == ZERO_EXTRACT || GET_CODE (dest) == SIGN_EXTRACT)
4885 validate_change (insn, &XEXP (dest, 1),
4886 canon_reg (XEXP (dest, 1), insn), 1);
4887 validate_change (insn, &XEXP (dest, 2),
4888 canon_reg (XEXP (dest, 2), insn), 1);
4891 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
4892 || GET_CODE (dest) == ZERO_EXTRACT
4893 || GET_CODE (dest) == SIGN_EXTRACT)
4894 dest = XEXP (dest, 0);
4896 if (GET_CODE (dest) == MEM)
4897 canon_reg (dest, insn);
4900 /* Now that we have done all the replacements, we can apply the change
4901 group and see if they all work. Note that this will cause some
4902 canonicalizations that would have worked individually not to be applied
4903 because some other canonicalization didn't work, but this should not
4904 occur often.
4906 The result of apply_change_group can be ignored; see canon_reg. */
4908 apply_change_group ();
4910 /* Set sets[i].src_elt to the class each source belongs to.
4911 Detect assignments from or to volatile things
4912 and set sets[i] to zero so they will be ignored
4913 in the rest of this function.
4915 Nothing in this loop changes the hash table or the register chains. */
4917 for (i = 0; i < n_sets; i++)
4919 rtx src, dest;
4920 rtx src_folded;
4921 struct table_elt *elt = 0, *p;
4922 enum machine_mode mode;
4923 rtx src_eqv_here;
4924 rtx src_const = 0;
4925 rtx src_related = 0;
4926 struct table_elt *src_const_elt = 0;
4927 int src_cost = MAX_COST;
4928 int src_eqv_cost = MAX_COST;
4929 int src_folded_cost = MAX_COST;
4930 int src_related_cost = MAX_COST;
4931 int src_elt_cost = MAX_COST;
4932 int src_regcost = MAX_COST;
4933 int src_eqv_regcost = MAX_COST;
4934 int src_folded_regcost = MAX_COST;
4935 int src_related_regcost = MAX_COST;
4936 int src_elt_regcost = MAX_COST;
4937 /* Set nonzero if we need to call force_const_mem on the
4938 contents of src_folded before using it. */
4939 int src_folded_force_flag = 0;
4941 dest = SET_DEST (sets[i].rtl);
4942 src = SET_SRC (sets[i].rtl);
4944 /* If SRC is a constant that has no machine mode,
4945 hash it with the destination's machine mode.
4946 This way we can keep different modes separate. */
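/* E.g., a (const_int 5) stored into an SImode register hashes as
   SImode, distinct from the same constant stored in DImode.  */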
4948 mode = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
4949 sets[i].mode = mode;
4951 if (src_eqv)
4953 enum machine_mode eqvmode = mode;
4954 if (GET_CODE (dest) == STRICT_LOW_PART)
4955 eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
4956 do_not_record = 0;
4957 hash_arg_in_memory = 0;
4958 src_eqv_hash = HASH (src_eqv, eqvmode);
4960 /* Find the equivalence class for the equivalent expression. */
4962 if (!do_not_record)
4963 src_eqv_elt = lookup (src_eqv, src_eqv_hash, eqvmode);
4965 src_eqv_volatile = do_not_record;
4966 src_eqv_in_memory = hash_arg_in_memory;
4969 /* If this is a STRICT_LOW_PART assignment, src_eqv corresponds to the
4970 value of the INNER register, not the destination. So it is not
4971 a valid substitution for the source. But save it for later. */
4972 if (GET_CODE (dest) == STRICT_LOW_PART)
4973 src_eqv_here = 0;
4974 else
4975 src_eqv_here = src_eqv;
4977 /* Simplify any foldable subexpressions in SRC. Then get the fully-
4978 simplified result, which may not necessarily be valid. */
4979 src_folded = fold_rtx (src, insn);
4981 #if 0
4982 /* ??? This caused bad code to be generated for the m68k port with -O2.
4983 Suppose src is (CONST_INT -1), and that after truncation src_folded
4984 is (CONST_INT 3). Suppose src_folded is then used for src_const.
4985 At the end we will add src and src_const to the same equivalence
4986 class. We now have 3 and -1 on the same equivalence class. This
4987 causes later instructions to be mis-optimized. */
4988 /* If storing a constant in a bitfield, pre-truncate the constant
4989 so we will be able to record it later. */
4990 if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
4991 || GET_CODE (SET_DEST (sets[i].rtl)) == SIGN_EXTRACT)
4993 rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
4995 if (GET_CODE (src) == CONST_INT
4996 && GET_CODE (width) == CONST_INT
4997 && INTVAL (width) < HOST_BITS_PER_WIDE_INT
4998 && (INTVAL (src) & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
4999 src_folded
5000 = GEN_INT (INTVAL (src) & (((HOST_WIDE_INT) 1
5001 << INTVAL (width)) - 1));
5003 #endif
5005 /* Compute SRC's hash code, and also notice if it
5006 should not be recorded at all. In that case,
5007 prevent any further processing of this assignment. */
5008 do_not_record = 0;
5009 hash_arg_in_memory = 0;
5011 sets[i].src = src;
5012 sets[i].src_hash = HASH (src, mode);
5013 sets[i].src_volatile = do_not_record;
5014 sets[i].src_in_memory = hash_arg_in_memory;
5016 /* If SRC is a MEM, there is a REG_EQUIV note for SRC, and DEST is
5017 a pseudo, do not record SRC. Using SRC as a replacement for
5018 anything else will be incorrect in that situation. Note that
5019 this usually occurs only for stack slots, in which case all the
5020 RTL would be referring to SRC, so we don't lose any optimization
5021 opportunities by not having SRC in the hash table. */
5023 if (GET_CODE (src) == MEM
5024 && find_reg_note (insn, REG_EQUIV, NULL_RTX) != 0
5025 && GET_CODE (dest) == REG
5026 && REGNO (dest) >= FIRST_PSEUDO_REGISTER)
5027 sets[i].src_volatile = 1;
5029 #if 0
5030 /* It is no longer clear why we used to do this, but it doesn't
5031 appear to still be needed. So let's try without it since this
5032 code hurts cse'ing widened ops. */
5033 /* If source is a paradoxical subreg (such as QI treated as an SI),
5034 treat it as volatile. It may do the work of an SI in one context
5035 where the extra bits are not being used, but cannot replace an SI
5036 in general. */
5037 if (GET_CODE (src) == SUBREG
5038 && (GET_MODE_SIZE (GET_MODE (src))
5039 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))))
5040 sets[i].src_volatile = 1;
5041 #endif
5043 /* Locate all possible equivalent forms for SRC. Try to replace
5044 SRC in the insn with each cheaper equivalent.
5046 We have the following types of equivalents: SRC itself, a folded
5047 version, a value given in a REG_EQUAL note, or a value related
5048 to a constant.
5050 Each of these equivalents may be part of an additional class
5051 of equivalents (if more than one is in the table, they must be in
5052 the same class; we check for this).
5054 If the source is volatile, we don't do any table lookups.
5056 We note any constant equivalent for possible later use in a
5057 REG_NOTE. */
5059 if (!sets[i].src_volatile)
5060 elt = lookup (src, sets[i].src_hash, mode);
5062 sets[i].src_elt = elt;
5064 if (elt && src_eqv_here && src_eqv_elt)
5066 if (elt->first_same_value != src_eqv_elt->first_same_value)
5068 /* The REG_EQUAL is indicating that two formerly distinct
5069 classes are now equivalent. So merge them. */
5070 merge_equiv_classes (elt, src_eqv_elt);
5071 src_eqv_hash = HASH (src_eqv, elt->mode);
5072 src_eqv_elt = lookup (src_eqv, src_eqv_hash, elt->mode);
5075 src_eqv_here = 0;
5078 else if (src_eqv_elt)
5079 elt = src_eqv_elt;
5081 /* Try to find a constant somewhere and record it in `src_const'.
5082 Record its table element, if any, in `src_const_elt'. Look in
5083 any known equivalences first. (If the constant is not in the
5084 table, also set `sets[i].src_const_hash'). */
5085 if (elt)
5086 for (p = elt->first_same_value; p; p = p->next_same_value)
5087 if (p->is_const)
5089 src_const = p->exp;
5090 src_const_elt = elt;
5091 break;
5094 if (src_const == 0
5095 && (CONSTANT_P (src_folded)
5096 /* Consider (minus (label_ref L1) (label_ref L2)) as
5097 "constant" here so we will record it. This allows us
5098 to fold switch statements when an ADDR_DIFF_VEC is used. */
5099 || (GET_CODE (src_folded) == MINUS
5100 && GET_CODE (XEXP (src_folded, 0)) == LABEL_REF
5101 && GET_CODE (XEXP (src_folded, 1)) == LABEL_REF)))
5102 src_const = src_folded, src_const_elt = elt;
5103 else if (src_const == 0 && src_eqv_here && CONSTANT_P (src_eqv_here))
5104 src_const = src_eqv_here, src_const_elt = src_eqv_elt;
5106 /* If we don't know if the constant is in the table, get its
5107 hash code and look it up. */
5108 if (src_const && src_const_elt == 0)
5110 sets[i].src_const_hash = HASH (src_const, mode);
5111 src_const_elt = lookup (src_const, sets[i].src_const_hash, mode);
5114 sets[i].src_const = src_const;
5115 sets[i].src_const_elt = src_const_elt;
5117 /* If the constant and our source are both in the table, mark them as
5118 equivalent. Otherwise, if a constant is in the table but the source
5119 isn't, set ELT to it. */
5120 if (src_const_elt && elt
5121 && src_const_elt->first_same_value != elt->first_same_value)
5122 merge_equiv_classes (elt, src_const_elt);
5123 else if (src_const_elt && elt == 0)
5124 elt = src_const_elt;
5126 /* See if there is a register linearly related to a constant
5127 equivalent of SRC. */
5128 if (src_const
5129 && (GET_CODE (src_const) == CONST
5130 || (src_const_elt && src_const_elt->related_value != 0)))
5132 src_related = use_related_value (src_const, src_const_elt);
5133 if (src_related)
5135 struct table_elt *src_related_elt
5136 = lookup (src_related, HASH (src_related, mode), mode);
5137 if (src_related_elt && elt)
5139 if (elt->first_same_value
5140 != src_related_elt->first_same_value)
5141 /* This can occur when we previously saw a CONST
5142 involving a SYMBOL_REF and then see the SYMBOL_REF
5143 twice. Merge the involved classes. */
5144 merge_equiv_classes (elt, src_related_elt);
5146 src_related = 0;
5147 src_related_elt = 0;
5149 else if (src_related_elt && elt == 0)
5150 elt = src_related_elt;
5154 /* See if we have a CONST_INT that is already in a register in a
5155 wider mode. */
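/* E.g., if we need (const_int 1) in QImode and some SImode
   register is already known to hold 1, the low part of that
   register can serve instead.  */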
5157 if (src_const && src_related == 0 && GET_CODE (src_const) == CONST_INT
5158 && GET_MODE_CLASS (mode) == MODE_INT
5159 && GET_MODE_BITSIZE (mode) < BITS_PER_WORD)
5161 enum machine_mode wider_mode;
5163 for (wider_mode = GET_MODE_WIDER_MODE (mode);
5164 GET_MODE_BITSIZE (wider_mode) <= BITS_PER_WORD
5165 && src_related == 0;
5166 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
5168 struct table_elt *const_elt
5169 = lookup (src_const, HASH (src_const, wider_mode), wider_mode);
5171 if (const_elt == 0)
5172 continue;
5174 for (const_elt = const_elt->first_same_value;
5175 const_elt; const_elt = const_elt->next_same_value)
5176 if (GET_CODE (const_elt->exp) == REG)
5178 src_related = gen_lowpart (mode,
5179 const_elt->exp);
5180 break;
5185 /* Another possibility is that we have an AND with a constant in
5186 a mode narrower than a word. If so, it might have been generated
5187 as part of an "if" which would narrow the AND. If we already
5188 have done the AND in a wider mode, we can use a SUBREG of that
5189 value. */
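/* E.g., given (and:QI X (const_int 15)), if the corresponding
   SImode AND with the same mask is already in a register, the
   QImode low part of that register is the value we want.  */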
5191 if (flag_expensive_optimizations && ! src_related
5192 && GET_CODE (src) == AND && GET_CODE (XEXP (src, 1)) == CONST_INT
5193 && GET_MODE_SIZE (mode) < UNITS_PER_WORD)
5195 enum machine_mode tmode;
5196 rtx new_and = gen_rtx_AND (VOIDmode, NULL_RTX, XEXP (src, 1));
5198 for (tmode = GET_MODE_WIDER_MODE (mode);
5199 GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
5200 tmode = GET_MODE_WIDER_MODE (tmode))
5202 rtx inner = gen_lowpart (tmode, XEXP (src, 0));
5203 struct table_elt *larger_elt;
5205 if (inner)
5207 PUT_MODE (new_and, tmode);
5208 XEXP (new_and, 0) = inner;
5209 larger_elt = lookup (new_and, HASH (new_and, tmode), tmode);
5210 if (larger_elt == 0)
5211 continue;
5213 for (larger_elt = larger_elt->first_same_value;
5214 larger_elt; larger_elt = larger_elt->next_same_value)
5215 if (GET_CODE (larger_elt->exp) == REG)
5217 src_related
5218 = gen_lowpart (mode, larger_elt->exp);
5219 break;
5222 if (src_related)
5223 break;
5228 #ifdef LOAD_EXTEND_OP
5229 /* See if a MEM has already been loaded with a widening operation;
5230 if it has, we can use a subreg of that. Many CISC machines
5231 also have such operations, but this is only likely to be
5232 beneficial on these machines. */
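/* E.g., if byte loads zero-extend on this machine and
   (zero_extend:SI (mem:QI ADDR)) is already in a register, the
   QImode low part of that register can stand in for (mem:QI ADDR).  */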
5234 if (flag_expensive_optimizations && src_related == 0
5235 && (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
5236 && GET_MODE_CLASS (mode) == MODE_INT
5237 && GET_CODE (src) == MEM && ! do_not_record
5238 && LOAD_EXTEND_OP (mode) != NIL)
5240 enum machine_mode tmode;
5242 /* Set what we are trying to extend and the operation it might
5243 have been extended with. */
5244 PUT_CODE (memory_extend_rtx, LOAD_EXTEND_OP (mode));
5245 XEXP (memory_extend_rtx, 0) = src;
5247 for (tmode = GET_MODE_WIDER_MODE (mode);
5248 GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
5249 tmode = GET_MODE_WIDER_MODE (tmode))
5251 struct table_elt *larger_elt;
5253 PUT_MODE (memory_extend_rtx, tmode);
5254 larger_elt = lookup (memory_extend_rtx,
5255 HASH (memory_extend_rtx, tmode), tmode);
5256 if (larger_elt == 0)
5257 continue;
5259 for (larger_elt = larger_elt->first_same_value;
5260 larger_elt; larger_elt = larger_elt->next_same_value)
5261 if (GET_CODE (larger_elt->exp) == REG)
5263 src_related = gen_lowpart (mode,
5264 larger_elt->exp);
5265 break;
5268 if (src_related)
5269 break;
5272 #endif /* LOAD_EXTEND_OP */
5274 if (src == src_folded)
5275 src_folded = 0;
5277 /* At this point, ELT, if nonzero, points to a class of expressions
5278 equivalent to the source of this SET; SRC, SRC_EQV, SRC_FOLDED,
5279 and SRC_RELATED, if nonzero, each contain additional equivalent
5280 expressions. Prune these latter expressions by deleting expressions
5281 already in the equivalence class.
5283 Check for an equivalent identical to the destination. If found,
5284 this is the preferred equivalent since it will likely lead to
5285 elimination of the insn. Indicate this by placing it in
5286 `src_related'. */
5288 if (elt)
5289 elt = elt->first_same_value;
5290 for (p = elt; p; p = p->next_same_value)
5292 enum rtx_code code = GET_CODE (p->exp);
5294 /* If the expression is not valid, ignore it. Then we do not
5295 have to check for validity below. In most cases, we can use
5296 `rtx_equal_p', since canonicalization has already been done. */
5297 if (code != REG && ! exp_equiv_p (p->exp, p->exp, 1, 0))
5298 continue;
5300 /* Also skip paradoxical subregs, unless that's what we're
5301 looking for. */
5302 if (code == SUBREG
5303 && (GET_MODE_SIZE (GET_MODE (p->exp))
5304 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))
5305 && ! (src != 0
5306 && GET_CODE (src) == SUBREG
5307 && GET_MODE (src) == GET_MODE (p->exp)
5308 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
5309 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))))
5310 continue;
5312 if (src && GET_CODE (src) == code && rtx_equal_p (src, p->exp))
5313 src = 0;
5314 else if (src_folded && GET_CODE (src_folded) == code
5315 && rtx_equal_p (src_folded, p->exp))
5316 src_folded = 0;
5317 else if (src_eqv_here && GET_CODE (src_eqv_here) == code
5318 && rtx_equal_p (src_eqv_here, p->exp))
5319 src_eqv_here = 0;
5320 else if (src_related && GET_CODE (src_related) == code
5321 && rtx_equal_p (src_related, p->exp))
5322 src_related = 0;
5324 /* If this is the same as the destination of the insn, we want
5325 to prefer it. Copy it to src_related. The code below will
5326 then give it a negative cost. */
5327 if (GET_CODE (dest) == code && rtx_equal_p (p->exp, dest))
5328 src_related = dest;
5331 /* Find the cheapest valid equivalent, trying all the available
5332 possibilities. Prefer items not in the hash table to ones
5333 that are when they are equal cost. Note that we can never
5334 worsen an insn as the current contents will also succeed.
5335 If we find an equivalent identical to the destination, use it as best,
5336 since this insn will probably be eliminated in that case. */
5337 if (src)
5339 if (rtx_equal_p (src, dest))
5340 src_cost = src_regcost = -1;
5341 else
5343 src_cost = COST (src);
5344 src_regcost = approx_reg_cost (src);
5348 if (src_eqv_here)
5350 if (rtx_equal_p (src_eqv_here, dest))
5351 src_eqv_cost = src_eqv_regcost = -1;
5352 else
5354 src_eqv_cost = COST (src_eqv_here);
5355 src_eqv_regcost = approx_reg_cost (src_eqv_here);
5359 if (src_folded)
5361 if (rtx_equal_p (src_folded, dest))
5362 src_folded_cost = src_folded_regcost = -1;
5363 else
5365 src_folded_cost = COST (src_folded);
5366 src_folded_regcost = approx_reg_cost (src_folded);
5370 if (src_related)
5372 if (rtx_equal_p (src_related, dest))
5373 src_related_cost = src_related_regcost = -1;
5374 else
5376 src_related_cost = COST (src_related);
5377 src_related_regcost = approx_reg_cost (src_related);
5381 /* If this was an indirect jump insn, a known label will really be
5382 cheaper even though it looks more expensive. */
5383 if (dest == pc_rtx && src_const && GET_CODE (src_const) == LABEL_REF)
5384 src_folded = src_const, src_folded_cost = src_folded_regcost = -1;
5386 /* Terminate loop when replacement made. This must terminate since
5387 the current contents will be tested and will always be valid. */
5388 while (1)
5390 rtx trial;
5392 /* Skip invalid entries. */
5393 while (elt && GET_CODE (elt->exp) != REG
5394 && ! exp_equiv_p (elt->exp, elt->exp, 1, 0))
5395 elt = elt->next_same_value;
5397 /* A paradoxical subreg would be bad here: it'll be the right
5398 size, but later may be adjusted so that the upper bits aren't
5399 what we want. So reject it. */
5400 if (elt != 0
5401 && GET_CODE (elt->exp) == SUBREG
5402 && (GET_MODE_SIZE (GET_MODE (elt->exp))
5403 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))
5404 /* It is okay, though, if the rtx we're trying to match
5405 will ignore any of the bits we can't predict. */
5406 && ! (src != 0
5407 && GET_CODE (src) == SUBREG
5408 && GET_MODE (src) == GET_MODE (elt->exp)
5409 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
5410 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))))
5412 elt = elt->next_same_value;
5413 continue;
5416 if (elt)
5418 src_elt_cost = elt->cost;
5419 src_elt_regcost = elt->regcost;
5422 /* Find cheapest and skip it for the next time. For items
5423 of equal cost, use this order:
5424 src_folded, src, src_eqv, src_related and hash table entry. */
5425 if (src_folded
5426 && preferable (src_folded_cost, src_folded_regcost,
5427 src_cost, src_regcost) <= 0
5428 && preferable (src_folded_cost, src_folded_regcost,
5429 src_eqv_cost, src_eqv_regcost) <= 0
5430 && preferable (src_folded_cost, src_folded_regcost,
5431 src_related_cost, src_related_regcost) <= 0
5432 && preferable (src_folded_cost, src_folded_regcost,
5433 src_elt_cost, src_elt_regcost) <= 0)
5435 trial = src_folded, src_folded_cost = MAX_COST;
5436 if (src_folded_force_flag)
5438 rtx forced = force_const_mem (mode, trial);
5439 if (forced)
5440 trial = forced;
5443 else if (src
5444 && preferable (src_cost, src_regcost,
5445 src_eqv_cost, src_eqv_regcost) <= 0
5446 && preferable (src_cost, src_regcost,
5447 src_related_cost, src_related_regcost) <= 0
5448 && preferable (src_cost, src_regcost,
5449 src_elt_cost, src_elt_regcost) <= 0)
5450 trial = src, src_cost = MAX_COST;
5451 else if (src_eqv_here
5452 && preferable (src_eqv_cost, src_eqv_regcost,
5453 src_related_cost, src_related_regcost) <= 0
5454 && preferable (src_eqv_cost, src_eqv_regcost,
5455 src_elt_cost, src_elt_regcost) <= 0)
5456 trial = copy_rtx (src_eqv_here), src_eqv_cost = MAX_COST;
5457 else if (src_related
5458 && preferable (src_related_cost, src_related_regcost,
5459 src_elt_cost, src_elt_regcost) <= 0)
5460 trial = copy_rtx (src_related), src_related_cost = MAX_COST;
5461 else
5463 trial = copy_rtx (elt->exp);
5464 elt = elt->next_same_value;
5465 src_elt_cost = MAX_COST;
5468 /* We don't normally have an insn matching (set (pc) (pc)), so
5469 check for this separately here. We will delete such an
5470 insn below.
5472 For other cases such as a table jump or conditional jump
5473 where we know the ultimate target, go ahead and replace the
5474 operand. While that may not make a valid insn, we will
5475 reemit the jump below (and also insert any necessary
5476 barriers). */
5477 if (n_sets == 1 && dest == pc_rtx
5478 && (trial == pc_rtx
5479 || (GET_CODE (trial) == LABEL_REF
5480 && ! condjump_p (insn))))
5482 SET_SRC (sets[i].rtl) = trial;
5483 cse_jumps_altered = 1;
5484 break;
5487 /* Look for a substitution that makes a valid insn. */
5488 else if (validate_change (insn, &SET_SRC (sets[i].rtl), trial, 0))
5490 rtx new = canon_reg (SET_SRC (sets[i].rtl), insn);
5492 /* If we just made a substitution inside a libcall, then we
5493 need to make the same substitution in any notes attached
5494 to the RETVAL insn. */
5495 if (libcall_insn
5496 && (GET_CODE (sets[i].orig_src) == REG
5497 || GET_CODE (sets[i].orig_src) == SUBREG
5498 || GET_CODE (sets[i].orig_src) == MEM))
5499 simplify_replace_rtx (REG_NOTES (libcall_insn),
5500 sets[i].orig_src, copy_rtx (new));
5502 /* The result of apply_change_group can be ignored; see
5503 canon_reg. */
5505 validate_change (insn, &SET_SRC (sets[i].rtl), new, 1);
5506 apply_change_group ();
5507 break;
5510 /* If we previously found constant pool entries for
5511 constants and this is a constant, try making a
5512 pool entry. Put it in src_folded unless we already have done
5513 this since that is where it likely came from. */
5515 else if (constant_pool_entries_cost
5516 && CONSTANT_P (trial)
5517 /* Reject cases that will abort in decode_rtx_const.
5518 On the alpha when simplifying a switch, we get
5519 (const (truncate (minus (label_ref) (label_ref)))). */
5520 && ! (GET_CODE (trial) == CONST
5521 && GET_CODE (XEXP (trial, 0)) == TRUNCATE)
5522 /* Likewise on IA-64, except without the truncate. */
5523 && ! (GET_CODE (trial) == CONST
5524 && GET_CODE (XEXP (trial, 0)) == MINUS
5525 && GET_CODE (XEXP (XEXP (trial, 0), 0)) == LABEL_REF
5526 && GET_CODE (XEXP (XEXP (trial, 0), 1)) == LABEL_REF)
5527 && (src_folded == 0
5528 || (GET_CODE (src_folded) != MEM
5529 && ! src_folded_force_flag))
5530 && GET_MODE_CLASS (mode) != MODE_CC
5531 && mode != VOIDmode)
5533 src_folded_force_flag = 1;
5534 src_folded = trial;
5535 src_folded_cost = constant_pool_entries_cost;
5536 src_folded_regcost = constant_pool_entries_regcost;
5540 src = SET_SRC (sets[i].rtl);
5542 /* In general, it is good to have a SET with SET_SRC == SET_DEST.
5543 However, there is an important exception: If both are registers
5544 that are not the head of their equivalence class, replace SET_SRC
5545 with the head of the class. If we do not do this, we will have
5546 both registers live over a portion of the basic block. This way,
5547 their lifetimes will likely abut instead of overlapping. */
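/* Illustrative example (pseudo numbers hypothetical): if pseudos 100
   and 101 are in one equivalence class headed by reg 100, a copy that
   folded to (set (reg 101) (reg 101)) is rewritten below as
   (set (reg 101) (reg 100)), so reg 101 need not stay live merely to
   carry a value that reg 100 already holds. */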
5548 if (GET_CODE (dest) == REG
5549 && REGNO_QTY_VALID_P (REGNO (dest)))
5551 int dest_q = REG_QTY (REGNO (dest));
5552 struct qty_table_elem *dest_ent = &qty_table[dest_q];
5554 if (dest_ent->mode == GET_MODE (dest)
5555 && dest_ent->first_reg != REGNO (dest)
5556 && GET_CODE (src) == REG && REGNO (src) == REGNO (dest)
5557 /* Don't do this if the original insn had a hard reg as
5558 SET_SRC or SET_DEST. */
5559 && (GET_CODE (sets[i].src) != REG
5560 || REGNO (sets[i].src) >= FIRST_PSEUDO_REGISTER)
5561 && (GET_CODE (dest) != REG || REGNO (dest) >= FIRST_PSEUDO_REGISTER))
5562 /* We can't call canon_reg here because it won't do anything if
5563 SRC is a hard register. */
5565 int src_q = REG_QTY (REGNO (src));
5566 struct qty_table_elem *src_ent = &qty_table[src_q];
5567 int first = src_ent->first_reg;
5568 rtx new_src
5569 = (first >= FIRST_PSEUDO_REGISTER
5570 ? regno_reg_rtx[first] : gen_rtx_REG (GET_MODE (src), first));
5572 /* We must use validate-change even for this, because this
5573 might be a special no-op instruction, suitable only to
5574 tag notes onto. */
5575 if (validate_change (insn, &SET_SRC (sets[i].rtl), new_src, 0))
5577 src = new_src;
5578 /* If we had a constant that is cheaper than what we are now
5579 setting SRC to, use that constant. We ignored it when we
5580 thought we could make this into a no-op. */
5581 if (src_const && COST (src_const) < COST (src)
5582 && validate_change (insn, &SET_SRC (sets[i].rtl),
5583 src_const, 0))
5584 src = src_const;
5589 /* If we made a change, recompute SRC values. */
5590 if (src != sets[i].src)
5592 cse_altered = 1;
5593 do_not_record = 0;
5594 hash_arg_in_memory = 0;
5595 sets[i].src = src;
5596 sets[i].src_hash = HASH (src, mode);
5597 sets[i].src_volatile = do_not_record;
5598 sets[i].src_in_memory = hash_arg_in_memory;
5599 sets[i].src_elt = lookup (src, sets[i].src_hash, mode);
5602 /* If this is a single SET, we are setting a register, and we have an
5603 equivalent constant, we want to add a REG_NOTE. We don't want
5604 to write a REG_EQUAL note for a constant pseudo since verifying that
5605 that pseudo hasn't been eliminated is a pain. Such a note also
5606 won't help anything.
5608 Avoid a REG_EQUAL note for (CONST (MINUS (LABEL_REF) (LABEL_REF)))
5609 which can be created for a reference to a compile time computable
5610 entry in a jump table. */
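/* Illustrative example (pseudo numbers hypothetical): given
   (set (reg:SI 100) (plus:SI (reg:SI 101) (const_int 4)))
   where reg 101 is known to hold (const_int 16), src_const is
   (const_int 20) and we attach (REG_EQUAL (const_int 20)) so that
   later passes can see the constant value of reg 100. */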
5612 if (n_sets == 1 && src_const && GET_CODE (dest) == REG
5613 && GET_CODE (src_const) != REG
5614 && ! (GET_CODE (src_const) == CONST
5615 && GET_CODE (XEXP (src_const, 0)) == MINUS
5616 && GET_CODE (XEXP (XEXP (src_const, 0), 0)) == LABEL_REF
5617 && GET_CODE (XEXP (XEXP (src_const, 0), 1)) == LABEL_REF))
5619 /* We only want a REG_EQUAL note if src_const != src. */
5620 if (! rtx_equal_p (src, src_const))
5622 /* Make sure that the rtx is not shared. */
5623 src_const = copy_rtx (src_const);
5625 /* Record the actual constant value in a REG_EQUAL note,
5626 making a new one if one does not already exist. */
5627 set_unique_reg_note (insn, REG_EQUAL, src_const);
5631 /* Now deal with the destination. */
5632 do_not_record = 0;
5634 /* Look within any SIGN_EXTRACT or ZERO_EXTRACT
5635 to the MEM or REG within it. */
5636 while (GET_CODE (dest) == SIGN_EXTRACT
5637 || GET_CODE (dest) == ZERO_EXTRACT
5638 || GET_CODE (dest) == SUBREG
5639 || GET_CODE (dest) == STRICT_LOW_PART)
5640 dest = XEXP (dest, 0);
5642 sets[i].inner_dest = dest;
5644 if (GET_CODE (dest) == MEM)
5646 #ifdef PUSH_ROUNDING
5647 /* Stack pushes invalidate the stack pointer. */
5648 rtx addr = XEXP (dest, 0);
5649 if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC
5650 && XEXP (addr, 0) == stack_pointer_rtx)
5651 invalidate (stack_pointer_rtx, Pmode);
5652 #endif
5653 dest = fold_rtx (dest, insn);
5656 /* Compute the hash code of the destination now,
5657 before the effects of this instruction are recorded,
5658 since the register values used in the address computation
5659 are those before this instruction. */
5660 sets[i].dest_hash = HASH (dest, mode);
5662 /* Don't enter a bit-field in the hash table
5663 because the value in it after the store
5664 may not equal what was stored, due to truncation. */
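/* Illustrative example (width hypothetical): storing (const_int 257)
   into an 8-bit field leaves 1 in it, so recording the destination
   as equal to 257 would be wrong. The mask test below catches this:
   257 & ((HOST_WIDE_INT) (-1) << 8) is nonzero, so no value is
   recorded and the destination is invalidated instead. */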
5666 if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
5667 || GET_CODE (SET_DEST (sets[i].rtl)) == SIGN_EXTRACT)
5669 rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
5671 if (src_const != 0 && GET_CODE (src_const) == CONST_INT
5672 && GET_CODE (width) == CONST_INT
5673 && INTVAL (width) < HOST_BITS_PER_WIDE_INT
5674 && ! (INTVAL (src_const)
5675 & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
5676 /* Exception: if the value is constant,
5677 and it won't be truncated, record it. */
5678 ;
5679 else
5681 /* This is chosen so that the destination will be invalidated
5682 but no new value will be recorded.
5683 We must invalidate because sometimes constant
5684 values can be recorded for bitfields. */
5685 sets[i].src_elt = 0;
5686 sets[i].src_volatile = 1;
5687 src_eqv = 0;
5688 src_eqv_elt = 0;
5692 /* If only one set in a JUMP_INSN and it is now a no-op, we can delete
5693 the insn. */
5694 else if (n_sets == 1 && dest == pc_rtx && src == pc_rtx)
5696 /* One less use of the label this insn used to jump to. */
5697 delete_insn (insn);
5698 cse_jumps_altered = 1;
5699 /* No more processing for this set. */
5700 sets[i].rtl = 0;
5703 /* If this SET is now setting PC to a label, we know it used to
5704 be a conditional or computed branch. */
5705 else if (dest == pc_rtx && GET_CODE (src) == LABEL_REF)
5707 /* Now emit a BARRIER after the unconditional jump. */
5708 if (NEXT_INSN (insn) == 0
5709 || GET_CODE (NEXT_INSN (insn)) != BARRIER)
5710 emit_barrier_after (insn);
5712 /* We reemit the jump in as many cases as possible just in
5713 case the form of an unconditional jump is significantly
5714 different from that of a computed or conditional jump.
5716 If this insn has multiple sets, then reemitting the
5717 jump is nontrivial. So instead we just force rerecognition
5718 and hope for the best. */
5719 if (n_sets == 1)
5721 rtx new, note;
5723 new = emit_jump_insn_after (gen_jump (XEXP (src, 0)), insn);
5724 JUMP_LABEL (new) = XEXP (src, 0);
5725 LABEL_NUSES (XEXP (src, 0))++;
5727 /* Make sure to copy over REG_NON_LOCAL_GOTO. */
5728 note = find_reg_note (insn, REG_NON_LOCAL_GOTO, 0);
5729 if (note)
5731 XEXP (note, 1) = NULL_RTX;
5732 REG_NOTES (new) = note;
5735 delete_insn (insn);
5736 insn = new;
5738 /* Now emit a BARRIER after the unconditional jump. */
5739 if (NEXT_INSN (insn) == 0
5740 || GET_CODE (NEXT_INSN (insn)) != BARRIER)
5741 emit_barrier_after (insn);
5743 else
5744 INSN_CODE (insn) = -1;
5746 never_reached_warning (insn, NULL);
5748 /* Do not bother deleting any unreachable code,
5749 let jump/flow do that. */
5751 cse_jumps_altered = 1;
5752 sets[i].rtl = 0;
5755 /* If destination is volatile, invalidate it and then do no further
5756 processing for this assignment. */
5758 else if (do_not_record)
5760 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG)
5761 invalidate (dest, VOIDmode);
5762 else if (GET_CODE (dest) == MEM)
5764 /* Outgoing arguments for a libcall don't
5765 affect any recorded expressions. */
5766 if (! libcall_insn || insn == libcall_insn)
5767 invalidate (dest, VOIDmode);
5769 else if (GET_CODE (dest) == STRICT_LOW_PART
5770 || GET_CODE (dest) == ZERO_EXTRACT)
5771 invalidate (XEXP (dest, 0), GET_MODE (dest));
5772 sets[i].rtl = 0;
5775 if (sets[i].rtl != 0 && dest != SET_DEST (sets[i].rtl))
5776 sets[i].dest_hash = HASH (SET_DEST (sets[i].rtl), mode);
5778 #ifdef HAVE_cc0
5779 /* If setting CC0, record what it was set to, or a constant, if it
5780 is equivalent to a constant. If it is being set to a floating-point
5781 value, make a COMPARE with the appropriate constant of 0. If we
5782 don't do this, later code can interpret this as a test against
5783 const0_rtx, which can cause problems if we try to put it into an
5784 insn as a floating-point operand. */
5785 if (dest == cc0_rtx)
5787 this_insn_cc0 = src_const && mode != VOIDmode ? src_const : src;
5788 this_insn_cc0_mode = mode;
5789 if (FLOAT_MODE_P (mode))
5790 this_insn_cc0 = gen_rtx_COMPARE (VOIDmode, this_insn_cc0,
5791 CONST0_RTX (mode));
5793 #endif
5796 /* Now enter all non-volatile source expressions in the hash table
5797 if they are not already present.
5798 Record their equivalence classes in src_elt.
5799 This way we can insert the corresponding destinations into
5800 the same classes even if the actual sources are no longer in them
5801 (having been invalidated). */
5803 if (src_eqv && src_eqv_elt == 0 && sets[0].rtl != 0 && ! src_eqv_volatile
5804 && ! rtx_equal_p (src_eqv, SET_DEST (sets[0].rtl)))
5806 struct table_elt *elt;
5807 struct table_elt *classp = sets[0].src_elt;
5808 rtx dest = SET_DEST (sets[0].rtl);
5809 enum machine_mode eqvmode = GET_MODE (dest);
5811 if (GET_CODE (dest) == STRICT_LOW_PART)
5813 eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
5814 classp = 0;
5816 if (insert_regs (src_eqv, classp, 0))
5818 rehash_using_reg (src_eqv);
5819 src_eqv_hash = HASH (src_eqv, eqvmode);
5821 elt = insert (src_eqv, classp, src_eqv_hash, eqvmode);
5822 elt->in_memory = src_eqv_in_memory;
5823 src_eqv_elt = elt;
5825 /* Check to see if src_eqv_elt is the same as a set source which
5826 does not yet have an elt, and if so set the elt of the set source
5827 to src_eqv_elt. */
5828 for (i = 0; i < n_sets; i++)
5829 if (sets[i].rtl && sets[i].src_elt == 0
5830 && rtx_equal_p (SET_SRC (sets[i].rtl), src_eqv))
5831 sets[i].src_elt = src_eqv_elt;
5834 for (i = 0; i < n_sets; i++)
5835 if (sets[i].rtl && ! sets[i].src_volatile
5836 && ! rtx_equal_p (SET_SRC (sets[i].rtl), SET_DEST (sets[i].rtl)))
5838 if (GET_CODE (SET_DEST (sets[i].rtl)) == STRICT_LOW_PART)
5840 /* REG_EQUAL in setting a STRICT_LOW_PART
5841 gives an equivalent for the entire destination register,
5842 not just for the subreg being stored in now.
5843 This is a more interesting equivalence, so we arrange later
5844 to treat the entire reg as the destination. */
5845 sets[i].src_elt = src_eqv_elt;
5846 sets[i].src_hash = src_eqv_hash;
5848 else
5850 /* Insert source and constant equivalent into hash table, if not
5851 already present. */
5852 struct table_elt *classp = src_eqv_elt;
5853 rtx src = sets[i].src;
5854 rtx dest = SET_DEST (sets[i].rtl);
5855 enum machine_mode mode
5856 = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
5858 /* It's possible that we have a source value known to be
5859 constant but don't have a REG_EQUAL note on the insn.
5860 Lack of a note will mean src_eqv_elt will be NULL. This
5861 can happen where we've generated a SUBREG to access a
5862 CONST_INT that is already in a register in a wider mode.
5863 Ensure that the source expression is put in the proper
5864 constant class. */
5865 if (!classp)
5866 classp = sets[i].src_const_elt;
5868 if (sets[i].src_elt == 0)
5870 /* Don't put a hard register source into the table if this is
5871 the last insn of a libcall. In this case, we only need
5872 to put src_eqv_elt in src_elt. */
5873 if (! find_reg_note (insn, REG_RETVAL, NULL_RTX))
5875 struct table_elt *elt;
5877 /* Note that these insert_regs calls cannot remove
5878 any of the src_elt's, because they would have failed to
5879 match if not still valid. */
5880 if (insert_regs (src, classp, 0))
5882 rehash_using_reg (src);
5883 sets[i].src_hash = HASH (src, mode);
5885 elt = insert (src, classp, sets[i].src_hash, mode);
5886 elt->in_memory = sets[i].src_in_memory;
5887 sets[i].src_elt = classp = elt;
5889 else
5890 sets[i].src_elt = classp;
5892 if (sets[i].src_const && sets[i].src_const_elt == 0
5893 && src != sets[i].src_const
5894 && ! rtx_equal_p (sets[i].src_const, src))
5895 sets[i].src_elt = insert (sets[i].src_const, classp,
5896 sets[i].src_const_hash, mode);
5899 else if (sets[i].src_elt == 0)
5900 /* If we did not insert the source into the hash table (e.g., it was
5901 volatile), note the equivalence class for the REG_EQUAL value, if any,
5902 so that the destination goes into that class. */
5903 sets[i].src_elt = src_eqv_elt;
5905 invalidate_from_clobbers (x);
5907 /* Some registers are invalidated by subroutine calls. Memory is
5908 invalidated by non-constant calls. */
5910 if (GET_CODE (insn) == CALL_INSN)
5912 if (! CONST_OR_PURE_CALL_P (insn))
5913 invalidate_memory ();
5914 invalidate_for_call ();
5917 /* Now invalidate everything set by this instruction.
5918 If a SUBREG or other funny destination is being set,
5919 sets[i].rtl is still nonzero, so here we invalidate the reg
5920 a part of which is being set. */
5922 for (i = 0; i < n_sets; i++)
5923 if (sets[i].rtl)
5925 /* We can't use the inner dest, because the mode associated with
5926 a ZERO_EXTRACT is significant. */
5927 rtx dest = SET_DEST (sets[i].rtl);
5929 /* Needed for registers to remove the register from its
5930 previous quantity's chain.
5931 Needed for memory if this is a nonvarying address, unless
5932 we have just done an invalidate_memory that covers even those. */
5933 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG)
5934 invalidate (dest, VOIDmode);
5935 else if (GET_CODE (dest) == MEM)
5937 /* Outgoing arguments for a libcall don't
5938 affect any recorded expressions. */
5939 if (! libcall_insn || insn == libcall_insn)
5940 invalidate (dest, VOIDmode);
5942 else if (GET_CODE (dest) == STRICT_LOW_PART
5943 || GET_CODE (dest) == ZERO_EXTRACT)
5944 invalidate (XEXP (dest, 0), GET_MODE (dest));
5947 /* A volatile ASM invalidates everything. */
5948 if (GET_CODE (insn) == INSN
5949 && GET_CODE (PATTERN (insn)) == ASM_OPERANDS
5950 && MEM_VOLATILE_P (PATTERN (insn)))
5951 flush_hash_table ();
5953 /* Make sure registers mentioned in destinations
5954 are safe for use in an expression to be inserted.
5955 This removes from the hash table
5956 any invalid entry that refers to one of these registers.
5958 We don't care about the return value from mention_regs because
5959 we are going to hash the SET_DEST values unconditionally. */
5961 for (i = 0; i < n_sets; i++)
5963 if (sets[i].rtl)
5965 rtx x = SET_DEST (sets[i].rtl);
5967 if (GET_CODE (x) != REG)
5968 mention_regs (x);
5969 else
5971 /* We used to rely on all references to a register becoming
5972 inaccessible when a register changes to a new quantity,
5973 since that changes the hash code. However, that is not
5974 safe, since after HASH_SIZE new quantities we get a
5975 hash 'collision' of a register with its own invalid
5976 entries. And since SUBREGs have been changed so that they no
5977 longer vary their hash code with the register's hash code,
5978 that scheme wouldn't work at all any longer. So we have to check
5979 for any invalid references lying around now.
5980 This code is similar to the REG case in mention_regs,
5981 but it knows that reg_tick has been incremented, and
5982 it leaves reg_in_table as -1. */
5983 unsigned int regno = REGNO (x);
5984 unsigned int endregno
5985 = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
5986 : hard_regno_nregs[regno][GET_MODE (x)]);
5987 unsigned int i;
5989 for (i = regno; i < endregno; i++)
5991 if (REG_IN_TABLE (i) >= 0)
5993 remove_invalid_refs (i);
5994 REG_IN_TABLE (i) = -1;
6001 /* We may have just removed some of the src_elt's from the hash table.
6002 So replace each one with the current head of the same class. */
6004 for (i = 0; i < n_sets; i++)
6005 if (sets[i].rtl)
6007 if (sets[i].src_elt && sets[i].src_elt->first_same_value == 0)
6008 /* If elt was removed, find current head of same class,
6009 or 0 if nothing remains of that class. */
6011 struct table_elt *elt = sets[i].src_elt;
6013 while (elt && elt->prev_same_value)
6014 elt = elt->prev_same_value;
6016 while (elt && elt->first_same_value == 0)
6017 elt = elt->next_same_value;
6018 sets[i].src_elt = elt ? elt->first_same_value : 0;
6022 /* Now insert the destinations into their equivalence classes. */
6024 for (i = 0; i < n_sets; i++)
6025 if (sets[i].rtl)
6027 rtx dest = SET_DEST (sets[i].rtl);
6028 rtx inner_dest = sets[i].inner_dest;
6029 struct table_elt *elt;
6031 /* Don't record value if we are not supposed to risk allocating
6032 floating-point values in registers that might be wider than
6033 memory. */
6034 if ((flag_float_store
6035 && GET_CODE (dest) == MEM
6036 && FLOAT_MODE_P (GET_MODE (dest)))
6037 /* Don't record BLKmode values, because we don't know the
6038 size of it, and can't be sure that other BLKmode values
6039 have the same or smaller size. */
6040 || GET_MODE (dest) == BLKmode
6041 /* Don't record values of destinations set inside a libcall block
6042 since we might delete the libcall. Things should have been set
6043 up so we won't want to reuse such a value, but we play it safe
6044 here. */
6045 || libcall_insn
6046 /* If we didn't put a REG_EQUAL value or a source into the hash
6047 table, there is no point in recording DEST. */
6048 || sets[i].src_elt == 0
6049 /* If DEST is a paradoxical SUBREG and SRC is a ZERO_EXTEND
6050 or SIGN_EXTEND, don't record DEST since it can cause
6051 some tracking to be wrong.
6053 ??? Think about this more later. */
6054 || (GET_CODE (dest) == SUBREG
6055 && (GET_MODE_SIZE (GET_MODE (dest))
6056 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
6057 && (GET_CODE (sets[i].src) == SIGN_EXTEND
6058 || GET_CODE (sets[i].src) == ZERO_EXTEND)))
6059 continue;
6061 /* STRICT_LOW_PART isn't part of the value BEING set,
6062 and neither is the SUBREG inside it.
6063 Note that in this case SETS[I].SRC_ELT is really SRC_EQV_ELT. */
6064 if (GET_CODE (dest) == STRICT_LOW_PART)
6065 dest = SUBREG_REG (XEXP (dest, 0));
6067 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG)
6068 /* Registers must also be inserted into chains for quantities. */
6069 if (insert_regs (dest, sets[i].src_elt, 1))
6071 /* If `insert_regs' changes something, the hash code must be
6072 recalculated. */
6073 rehash_using_reg (dest);
6074 sets[i].dest_hash = HASH (dest, GET_MODE (dest));
6077 if (GET_CODE (inner_dest) == MEM
6078 && GET_CODE (XEXP (inner_dest, 0)) == ADDRESSOF)
6079 /* Given (SET (MEM (ADDRESSOF (X))) Y) we don't want to say
6080 that (MEM (ADDRESSOF (X))) is equivalent to Y.
6081 Consider the case in which the address of the MEM is
6082 passed to a function, which alters the MEM. Then, if we
6083 later use Y instead of the MEM we'll miss the update. */
6084 elt = insert (dest, 0, sets[i].dest_hash, GET_MODE (dest));
6085 else
6086 elt = insert (dest, sets[i].src_elt,
6087 sets[i].dest_hash, GET_MODE (dest));
6089 elt->in_memory = (GET_CODE (sets[i].inner_dest) == MEM
6090 && (! RTX_UNCHANGING_P (sets[i].inner_dest)
6091 || fixed_base_plus_p (XEXP (sets[i].inner_dest,
6092 0))));
6094 /* If we have (set (subreg:m1 (reg:m2 foo) 0) (bar:m1)), M1 is no
6095 narrower than M2, and both M1 and M2 are the same number of words,
6096 we are also doing (set (reg:m2 foo) (subreg:m2 (bar:m1) 0)) so
6097 make that equivalence as well.
6099 However, BAR may have equivalences for which gen_lowpart
6100 will produce a simpler value than gen_lowpart applied to
6101 BAR (e.g., if BAR was ZERO_EXTENDed from M2), so we will scan all
6102 BAR's equivalences. If we don't get a simplified form, make
6103 the SUBREG. It will not be used in an equivalence, but will
6104 cause two similar assignments to be detected.
6106 Note the loop below will find SUBREG_REG (DEST) since we have
6107 already entered SRC and DEST of the SET in the table. */
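/* Illustrative example (modes and pseudos hypothetical, assuming SFmode
   and SImode are the same size): (set (subreg:SF (reg:SI 100) 0)
   (reg:SF 101)) overwrites all of reg 100, so (reg:SI 100) is also
   equivalent to (subreg:SI (reg:SF 101) 0); the loop below records
   such subregs of everything in SRC's equivalence class. */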
6109 if (GET_CODE (dest) == SUBREG
6110 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) - 1)
6111 / UNITS_PER_WORD)
6112 == (GET_MODE_SIZE (GET_MODE (dest)) - 1) / UNITS_PER_WORD)
6113 && (GET_MODE_SIZE (GET_MODE (dest))
6114 >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
6115 && sets[i].src_elt != 0)
6117 enum machine_mode new_mode = GET_MODE (SUBREG_REG (dest));
6118 struct table_elt *elt, *classp = 0;
6120 for (elt = sets[i].src_elt->first_same_value; elt;
6121 elt = elt->next_same_value)
6123 rtx new_src = 0;
6124 unsigned src_hash;
6125 struct table_elt *src_elt;
6126 int byte = 0;
6128 /* Ignore invalid entries. */
6129 if (GET_CODE (elt->exp) != REG
6130 && ! exp_equiv_p (elt->exp, elt->exp, 1, 0))
6131 continue;
6133 /* We may have already been playing subreg games. If the
6134 mode is already correct for the destination, use it. */
6135 if (GET_MODE (elt->exp) == new_mode)
6136 new_src = elt->exp;
6137 else
6139 /* Calculate big endian correction for the SUBREG_BYTE.
6140 We have already checked that M1 (GET_MODE (dest))
6141 is not narrower than M2 (new_mode). */
6142 if (BYTES_BIG_ENDIAN)
6143 byte = (GET_MODE_SIZE (GET_MODE (dest))
6144 - GET_MODE_SIZE (new_mode));
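/* Worked example (sizes hypothetical): extracting a 4-byte
   new_mode part of an 8-byte destination mode on a big-endian
   target gives byte = 8 - 4 = 4, the offset of the
   low-order bytes. */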
6146 new_src = simplify_gen_subreg (new_mode, elt->exp,
6147 GET_MODE (dest), byte);
6150 /* The call to simplify_gen_subreg fails if the value
6151 is VOIDmode, yet we can't do any simplification, e.g.
6152 for EXPR_LISTs denoting function call results.
6153 It is invalid to construct a SUBREG with a VOIDmode
6154 SUBREG_REG, hence a zero new_src means we can't do
6155 this substitution. */
6156 if (! new_src)
6157 continue;
6159 src_hash = HASH (new_src, new_mode);
6160 src_elt = lookup (new_src, src_hash, new_mode);
6162 /* Put the new source in the hash table if it isn't
6163 there already. */
6164 if (src_elt == 0)
6166 if (insert_regs (new_src, classp, 0))
6168 rehash_using_reg (new_src);
6169 src_hash = HASH (new_src, new_mode);
6171 src_elt = insert (new_src, classp, src_hash, new_mode);
6172 src_elt->in_memory = elt->in_memory;
6174 else if (classp && classp != src_elt->first_same_value)
6175 /* Show that two things that we've seen before are
6176 actually the same. */
6177 merge_equiv_classes (src_elt, classp);
6179 classp = src_elt->first_same_value;
6180 /* Ignore invalid entries. */
6181 while (classp
6182 && GET_CODE (classp->exp) != REG
6183 && ! exp_equiv_p (classp->exp, classp->exp, 1, 0))
6184 classp = classp->next_same_value;
6189 /* Special handling for (set REG0 REG1) where REG0 is the
6190 "cheapest", cheaper than REG1. After cse, REG1 will probably not
6191 be used in the sequel, so (if easily done) change this insn to
6192 (set REG1 REG0) and replace REG1 with REG0 in the previous insn
6193 that computed their value. Then REG1 will become a dead store
6194 and won't cloud the situation for later optimizations.
6196 Do not make this change if REG1 is a hard register, because it will
6197 then be used in the sequel and we may be changing a two-operand insn
6198 into a three-operand insn.
6200 Also do not do this if we are operating on a copy of INSN.
6202 Also don't do this if INSN ends a libcall; this would cause an unrelated
6203 register to be set in the middle of a libcall, and we then get bad code
6204 if the libcall is deleted. */
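/* Illustrative example (pseudo numbers hypothetical): given

   (set (reg 101) (plus (reg 102) (const_int 1)))
   (set (reg 100) (reg 101))

   where reg 100 heads the equivalence class, we rewrite this as

   (set (reg 100) (plus (reg 102) (const_int 1)))
   (set (reg 101) (reg 100))

   so the copy into reg 101 becomes a dead store whenever reg 101 is
   not used afterward. */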
6206 if (n_sets == 1 && sets[0].rtl && GET_CODE (SET_DEST (sets[0].rtl)) == REG
6207 && NEXT_INSN (PREV_INSN (insn)) == insn
6208 && GET_CODE (SET_SRC (sets[0].rtl)) == REG
6209 && REGNO (SET_SRC (sets[0].rtl)) >= FIRST_PSEUDO_REGISTER
6210 && REGNO_QTY_VALID_P (REGNO (SET_SRC (sets[0].rtl))))
6212 int src_q = REG_QTY (REGNO (SET_SRC (sets[0].rtl)));
6213 struct qty_table_elem *src_ent = &qty_table[src_q];
6215 if ((src_ent->first_reg == REGNO (SET_DEST (sets[0].rtl)))
6216 && ! find_reg_note (insn, REG_RETVAL, NULL_RTX))
6218 rtx prev = insn;
6219 /* Scan for the previous nonnote insn, but stop at a basic
6220 block boundary. */
6223 prev = PREV_INSN (prev);
6225 while (prev && GET_CODE (prev) == NOTE
6226 && NOTE_LINE_NUMBER (prev) != NOTE_INSN_BASIC_BLOCK);
6228 /* Do not swap the registers around if the previous instruction
6229 attaches a REG_EQUIV note to REG1.
6231 ??? It's not entirely clear whether we can transfer a REG_EQUIV
6232 from the pseudo that originally shadowed an incoming argument
6233 to another register. Some uses of REG_EQUIV might rely on it
6234 being attached to REG1 rather than REG2.
6236 This section previously turned the REG_EQUIV into a REG_EQUAL
6237 note. We cannot do that because REG_EQUIV may provide an
6238 uninitialized stack slot when REG_PARM_STACK_SPACE is used. */
6240 if (prev != 0 && GET_CODE (prev) == INSN
6241 && GET_CODE (PATTERN (prev)) == SET
6242 && SET_DEST (PATTERN (prev)) == SET_SRC (sets[0].rtl)
6243 && ! find_reg_note (prev, REG_EQUIV, NULL_RTX))
6245 rtx dest = SET_DEST (sets[0].rtl);
6246 rtx src = SET_SRC (sets[0].rtl);
6247 rtx note;
6249 validate_change (prev, &SET_DEST (PATTERN (prev)), dest, 1);
6250 validate_change (insn, &SET_DEST (sets[0].rtl), src, 1);
6251 validate_change (insn, &SET_SRC (sets[0].rtl), dest, 1);
6252 apply_change_group ();
6254 /* If INSN has a REG_EQUAL note, and this note mentions
6255 REG0, then we must delete it, because the value in
6256 REG0 has changed. If the note's value is REG1, we must
6257 also delete it because that is now this insn's dest. */
6258 note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
6259 if (note != 0
6260 && (reg_mentioned_p (dest, XEXP (note, 0))
6261 || rtx_equal_p (src, XEXP (note, 0))))
6262 remove_note (insn, note);
6267 /* If this is a conditional jump insn, record any known equivalences due to
6268 the condition being tested. */
6270 last_jump_equiv_class = 0;
6271 if (GET_CODE (insn) == JUMP_INSN
6272 && n_sets == 1 && GET_CODE (x) == SET
6273 && GET_CODE (SET_SRC (x)) == IF_THEN_ELSE)
6274 record_jump_equiv (insn, 0);
6276 #ifdef HAVE_cc0
6277 /* If the previous insn set CC0 and this insn no longer references CC0,
6278 delete the previous insn. Here we use the fact that nothing expects CC0
6279 to be valid over an insn, which is true until the final pass. */
6280 if (prev_insn && GET_CODE (prev_insn) == INSN
6281 && (tem = single_set (prev_insn)) != 0
6282 && SET_DEST (tem) == cc0_rtx
6283 && ! reg_mentioned_p (cc0_rtx, x))
6284 delete_insn (prev_insn);
6286 prev_insn_cc0 = this_insn_cc0;
6287 prev_insn_cc0_mode = this_insn_cc0_mode;
6288 prev_insn = insn;
6289 #endif
6292 /* Remove from the hash table all expressions that reference memory. */
6294 static void
6295 invalidate_memory (void)
6297 int i;
6298 struct table_elt *p, *next;
6300 for (i = 0; i < HASH_SIZE; i++)
6301 for (p = table[i]; p; p = next)
6303 next = p->next_same_hash;
6304 if (p->in_memory)
6305 remove_from_table (p, i);
6309 /* If ADDR is an address that implicitly affects the stack pointer, return
6310 1 and update the register tables to show the effect. Else, return 0. */
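/* Illustrative example: an address such as (pre_dec (reg sp)), as in
   the stack push (mem (pre_dec (reg sp))), implicitly changes the
   stack pointer, so this function bumps its REG_TICK and returns 1. */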
6312 static int
6313 addr_affects_sp_p (rtx addr)
6315 if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC
6316 && GET_CODE (XEXP (addr, 0)) == REG
6317 && REGNO (XEXP (addr, 0)) == STACK_POINTER_REGNUM)
6319 if (REG_TICK (STACK_POINTER_REGNUM) >= 0)
6321 REG_TICK (STACK_POINTER_REGNUM)++;
6322 /* Is it possible to use a subreg of SP? */
6323 SUBREG_TICKED (STACK_POINTER_REGNUM) = -1;
6326 /* This should be *very* rare. */
6327 if (TEST_HARD_REG_BIT (hard_regs_in_table, STACK_POINTER_REGNUM))
6328 invalidate (stack_pointer_rtx, VOIDmode);
6330 return 1;
6333 return 0;
6336 /* Perform invalidation on the basis of everything about an insn
6337 except for invalidating the actual places that are SET in it.
6338 This includes the places CLOBBERed, and anything that might
6339 alias with something that is SET or CLOBBERed.
6341 X is the pattern of the insn. */
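/* Illustrative example (hard register number hypothetical): for a
   pattern like (parallel [(set ...) (clobber (reg:CC 17))]), the
   CLOBBER arm makes us drop any cached expression mentioning that
   register; the SET itself is invalidated elsewhere. */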
6343 static void
6344 invalidate_from_clobbers (rtx x)
6346 if (GET_CODE (x) == CLOBBER)
6348 rtx ref = XEXP (x, 0);
6349 if (ref)
6351 if (GET_CODE (ref) == REG || GET_CODE (ref) == SUBREG
6352 || GET_CODE (ref) == MEM)
6353 invalidate (ref, VOIDmode);
6354 else if (GET_CODE (ref) == STRICT_LOW_PART
6355 || GET_CODE (ref) == ZERO_EXTRACT)
6356 invalidate (XEXP (ref, 0), GET_MODE (ref));
6359 else if (GET_CODE (x) == PARALLEL)
6361 int i;
6362 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
6364 rtx y = XVECEXP (x, 0, i);
6365 if (GET_CODE (y) == CLOBBER)
6367 rtx ref = XEXP (y, 0);
6368 if (GET_CODE (ref) == REG || GET_CODE (ref) == SUBREG
6369 || GET_CODE (ref) == MEM)
6370 invalidate (ref, VOIDmode);
6371 else if (GET_CODE (ref) == STRICT_LOW_PART
6372 || GET_CODE (ref) == ZERO_EXTRACT)
6373 invalidate (XEXP (ref, 0), GET_MODE (ref));
6379 /* Process X, part of the REG_NOTES of an insn. Look at any REG_EQUAL notes
6380 and replace any registers in them with either an equivalent constant
6381 or the canonical form of the register. If we are inside an address,
6382 only do this if the address remains valid.
6384 OBJECT is 0 except when within a MEM in which case it is the MEM.
6386 Return the replacement for X. */
6388 static rtx
6389 cse_process_notes (rtx x, rtx object)
6391 enum rtx_code code = GET_CODE (x);
6392 const char *fmt = GET_RTX_FORMAT (code);
6393 int i;
6395 switch (code)
6397 case CONST_INT:
6398 case CONST:
6399 case SYMBOL_REF:
6400 case LABEL_REF:
6401 case CONST_DOUBLE:
6402 case CONST_VECTOR:
6403 case PC:
6404 case CC0:
6405 case LO_SUM:
6406 return x;
6408 case MEM:
6409 validate_change (x, &XEXP (x, 0),
6410 cse_process_notes (XEXP (x, 0), x), 0);
6411 return x;
6413 case EXPR_LIST:
6414 case INSN_LIST:
6415 if (REG_NOTE_KIND (x) == REG_EQUAL)
6416 XEXP (x, 0) = cse_process_notes (XEXP (x, 0), NULL_RTX);
6417 if (XEXP (x, 1))
6418 XEXP (x, 1) = cse_process_notes (XEXP (x, 1), NULL_RTX);
6419 return x;
6421 case SIGN_EXTEND:
6422 case ZERO_EXTEND:
6423 case SUBREG:
6425 rtx new = cse_process_notes (XEXP (x, 0), object);
6426 /* We don't substitute VOIDmode constants into these rtx,
6427 since they would impede folding. */
6428 if (GET_MODE (new) != VOIDmode)
6429 validate_change (object, &XEXP (x, 0), new, 0);
6430 return x;
6433 case REG:
6434 i = REG_QTY (REGNO (x));
6436 /* Return a constant or a constant register. */
6437 if (REGNO_QTY_VALID_P (REGNO (x)))
6439 struct qty_table_elem *ent = &qty_table[i];
6441 if (ent->const_rtx != NULL_RTX
6442 && (CONSTANT_P (ent->const_rtx)
6443 || GET_CODE (ent->const_rtx) == REG))
6445 rtx new = gen_lowpart (GET_MODE (x), ent->const_rtx);
6446 if (new)
6447 return new;
6451 /* Otherwise, canonicalize this register. */
6452 return canon_reg (x, NULL_RTX);
6454 default:
6455 break;
6458 for (i = 0; i < GET_RTX_LENGTH (code); i++)
6459 if (fmt[i] == 'e')
6460 validate_change (object, &XEXP (x, i),
6461 cse_process_notes (XEXP (x, i), object), 0);
6463 return x;
6466 /* Find common subexpressions between the end test of a loop and the beginning
6467 of the loop. LOOP_START is the CODE_LABEL at the start of a loop.
6469 Often we have a loop where an expression in the exit test is used
6470 in the body of the loop. For example "while (*p) *q++ = *p++;".
6471 Because of the way we duplicate the loop exit test in front of the loop,
6472 however, we don't detect that common subexpression. This will be caught
6473 when global cse is implemented, but this is quite a common case.
6475 This function handles the most common cases of these common expressions.
6476 It is called after we have processed the basic block ending with the
6477 NOTE_INSN_LOOP_END note that ends a loop, when the previous JUMP_INSN
6478 jumps to a label used only once. */
6480 static void
6481 cse_around_loop (rtx loop_start)
6483 rtx insn;
6484 int i;
6485 struct table_elt *p;
6487 /* If the jump at the end of the loop doesn't go to the start, we don't
6488 do anything. */
6489 for (insn = PREV_INSN (loop_start);
6490 insn && (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) >= 0);
6491 insn = PREV_INSN (insn))
6494 if (insn == 0
6495 || GET_CODE (insn) != NOTE
6496 || NOTE_LINE_NUMBER (insn) != NOTE_INSN_LOOP_BEG)
6497 return;
6499 /* If the last insn of the loop (the end test) was an NE comparison,
6500 we will interpret it as an EQ comparison, since we fell through
6501 the loop. Any equivalences resulting from that comparison are
6502 therefore not valid and must be invalidated. */
6503 if (last_jump_equiv_class)
6504 for (p = last_jump_equiv_class->first_same_value; p;
6505 p = p->next_same_value)
6507 if (GET_CODE (p->exp) == MEM || GET_CODE (p->exp) == REG
6508 || (GET_CODE (p->exp) == SUBREG
6509 && GET_CODE (SUBREG_REG (p->exp)) == REG))
6510 invalidate (p->exp, VOIDmode);
6511 else if (GET_CODE (p->exp) == STRICT_LOW_PART
6512 || GET_CODE (p->exp) == ZERO_EXTRACT)
6513 invalidate (XEXP (p->exp, 0), GET_MODE (p->exp));
6516 /* Process insns starting after LOOP_START until we hit a CALL_INSN or
6517 a CODE_LABEL (we could handle a CALL_INSN, but it isn't worth it).
6519 The only thing we do with SET_DEST is invalidate entries, so we
6520 can safely process each SET in order. It is slightly less efficient
6521 to do so, but we only want to handle the most common cases.
6523 The gen_move_insn call in cse_set_around_loop may create new pseudos.
6524 These pseudos won't have valid entries in any of the tables indexed
6525 by register number, such as reg_qty. We avoid out-of-range array
6526 accesses by not processing any instructions created after cse started. */
6528 for (insn = NEXT_INSN (loop_start);
6529 GET_CODE (insn) != CALL_INSN && GET_CODE (insn) != CODE_LABEL
6530 && INSN_UID (insn) < max_insn_uid
6531 && ! (GET_CODE (insn) == NOTE
6532 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END);
6533 insn = NEXT_INSN (insn))
6535 if (INSN_P (insn)
6536 && (GET_CODE (PATTERN (insn)) == SET
6537 || GET_CODE (PATTERN (insn)) == CLOBBER))
6538 cse_set_around_loop (PATTERN (insn), insn, loop_start);
6539 else if (INSN_P (insn) && GET_CODE (PATTERN (insn)) == PARALLEL)
6540 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
6541 if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET
6542 || GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == CLOBBER)
6543 cse_set_around_loop (XVECEXP (PATTERN (insn), 0, i), insn,
6544 loop_start);
6548 /* Process one SET of an insn that was skipped. We ignore CLOBBERs
6549 since they are done elsewhere. This function is called via note_stores. */
6551 static void
6552 invalidate_skipped_set (rtx dest, rtx set, void *data ATTRIBUTE_UNUSED)
6554 enum rtx_code code = GET_CODE (dest);
6556 if (code == MEM
6557 && ! addr_affects_sp_p (dest) /* If this is not a stack push ... */
6558 /* There are times when an address can appear varying and be a PLUS
6559 during this scan when it would be a fixed address were we to know
6560 the proper equivalences. So invalidate all memory if there is
6561 a BLKmode or nonscalar memory reference or a reference to a
6562 variable address. */
6563 && (MEM_IN_STRUCT_P (dest) || GET_MODE (dest) == BLKmode
6564 || cse_rtx_varies_p (XEXP (dest, 0), 0)))
6566 invalidate_memory ();
6567 return;
6570 if (GET_CODE (set) == CLOBBER
6571 || CC0_P (dest)
6572 || dest == pc_rtx)
6573 return;
6575 if (code == STRICT_LOW_PART || code == ZERO_EXTRACT)
6576 invalidate (XEXP (dest, 0), GET_MODE (dest));
6577 else if (code == REG || code == SUBREG || code == MEM)
6578 invalidate (dest, VOIDmode);
6581 /* Invalidate all insns from START up to the end of the function or the
6582 next label. This is called when we wish to CSE around a block that is
6583 conditionally executed. */
6585 static void
6586 invalidate_skipped_block (rtx start)
6588 rtx insn;
6590 for (insn = start; insn && GET_CODE (insn) != CODE_LABEL;
6591 insn = NEXT_INSN (insn))
6593 if (! INSN_P (insn))
6594 continue;
6596 if (GET_CODE (insn) == CALL_INSN)
6598 if (! CONST_OR_PURE_CALL_P (insn))
6599 invalidate_memory ();
6600 invalidate_for_call ();
6603 invalidate_from_clobbers (PATTERN (insn));
6604 note_stores (PATTERN (insn), invalidate_skipped_set, NULL);
6608 /* If modifying X will modify the value in *DATA (which is really an
6609 `rtx *'), indicate that fact by setting the pointed to value to
6610 NULL_RTX. */
6612 static void
6613 cse_check_loop_start (rtx x, rtx set ATTRIBUTE_UNUSED, void *data)
6615 rtx *cse_check_loop_start_value = (rtx *) data;
6617 if (*cse_check_loop_start_value == NULL_RTX
6618 || GET_CODE (x) == CC0 || GET_CODE (x) == PC)
6619 return;
6621 if ((GET_CODE (x) == MEM && GET_CODE (*cse_check_loop_start_value) == MEM)
6622 || reg_overlap_mentioned_p (x, *cse_check_loop_start_value))
6623 *cse_check_loop_start_value = NULL_RTX;
6626 /* X is a SET or CLOBBER contained in INSN that was found near the start of
6627 a loop that starts with the label at LOOP_START.
6629 If X is a SET, we see if its SET_SRC is currently in our hash table.
6630 If so, we see if it has a value equal to some register used only in the
6631 loop exit code (as marked by jump.c).
6633 If those two conditions are true, we search backwards from the start of
6634 the loop to see if that same value was loaded into a register that still
6635 retains its value at the start of the loop.
6637 If so, we insert an insn after the load to copy the destination of that
6638 load into the equivalent register and (try to) replace our SET_SRC with that
6639 register.
6641 In any event, we invalidate whatever this SET or CLOBBER modifies. */
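/* Sketch of the transformation, with hypothetical pseudos: if the
   loop exit test uses (reg 50) (marked REG_LOOP_TEST_P) and, before
   the loop, insn P does (set (reg 60) EXPR) where EXPR matches our
   SET_SRC and still holds at the loop head, we emit
   (set (reg 50) (reg 60)) after P and replace our SET_SRC with
   (reg 50), exposing the common subexpression. */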
6643 static void
6644 cse_set_around_loop (rtx x, rtx insn, rtx loop_start)
6646 struct table_elt *src_elt;
6648 /* If this is a SET, see if we can replace SET_SRC, but ignore SETs that
6649 are setting PC or CC0 or whose SET_SRC is already a register. */
6650 if (GET_CODE (x) == SET
6651 && GET_CODE (SET_DEST (x)) != PC && GET_CODE (SET_DEST (x)) != CC0
6652 && GET_CODE (SET_SRC (x)) != REG)
6654 src_elt = lookup (SET_SRC (x),
6655 HASH (SET_SRC (x), GET_MODE (SET_DEST (x))),
6656 GET_MODE (SET_DEST (x)));
6658 if (src_elt)
6659 for (src_elt = src_elt->first_same_value; src_elt;
6660 src_elt = src_elt->next_same_value)
6661 if (GET_CODE (src_elt->exp) == REG && REG_LOOP_TEST_P (src_elt->exp)
6662 && COST (src_elt->exp) < COST (SET_SRC (x)))
6664 rtx p, set;
6666 /* Look for an insn in front of LOOP_START that sets
6667 something in the desired mode to SET_SRC (x) before we hit
6668 a label or CALL_INSN. */
6670 for (p = prev_nonnote_insn (loop_start);
6671 p && GET_CODE (p) != CALL_INSN
6672 && GET_CODE (p) != CODE_LABEL;
6673 p = prev_nonnote_insn (p))
6674 if ((set = single_set (p)) != 0
6675 && GET_CODE (SET_DEST (set)) == REG
6676 && GET_MODE (SET_DEST (set)) == src_elt->mode
6677 && rtx_equal_p (SET_SRC (set), SET_SRC (x)))
6679 /* We now have to ensure that nothing between P
6680 and LOOP_START modified anything referenced in
6681 SET_SRC (x). We know that nothing within the loop
6682 can modify it, or we would have invalidated it in
6683 the hash table. */
6684 rtx q;
6685 rtx cse_check_loop_start_value = SET_SRC (x);
6686 for (q = p; q != loop_start; q = NEXT_INSN (q))
6687 if (INSN_P (q))
6688 note_stores (PATTERN (q),
6689 cse_check_loop_start,
6690 &cse_check_loop_start_value);
6692 /* If nothing was changed and we can replace our
6693 SET_SRC, add an insn after P to copy its destination
6694 to what we will be replacing SET_SRC with. */
6695 if (cse_check_loop_start_value
6696 && single_set (p)
6697 && !can_throw_internal (insn)
6698 && validate_change (insn, &SET_SRC (x),
6699 src_elt->exp, 0))
6701 /* If this creates new pseudos, this is unsafe,
6702 because the regno of new pseudo is unsuitable
6703 to index into reg_qty when cse_insn processes
6704 the new insn. Therefore, if a new pseudo was
6705 created, discard this optimization. */
6706 int nregs = max_reg_num ();
6707 rtx move
6708 = gen_move_insn (src_elt->exp, SET_DEST (set));
6709 if (nregs != max_reg_num ())
6711 if (! validate_change (insn, &SET_SRC (x),
6712 SET_SRC (set), 0))
6713 abort ();
6715 else
6717 if (CONSTANT_P (SET_SRC (set))
6718 && ! find_reg_equal_equiv_note (insn))
6719 set_unique_reg_note (insn, REG_EQUAL,
6720 SET_SRC (set));
6721 if (control_flow_insn_p (p))
6722 /* p can cause a control flow transfer so it
6723 is the last insn of a basic block. We can't
6724 therefore use emit_insn_after. */
6725 emit_insn_before (move, next_nonnote_insn (p));
6726 else
6727 emit_insn_after (move, p);
6730 break;
6735 /* Deal with the destination of X affecting the stack pointer. */
6736 addr_affects_sp_p (SET_DEST (x));
6738 /* See comment on similar code in cse_insn for explanation of these
6739 tests. */
6740 if (GET_CODE (SET_DEST (x)) == REG || GET_CODE (SET_DEST (x)) == SUBREG
6741 || GET_CODE (SET_DEST (x)) == MEM)
6742 invalidate (SET_DEST (x), VOIDmode);
6743 else if (GET_CODE (SET_DEST (x)) == STRICT_LOW_PART
6744 || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
6745 invalidate (XEXP (SET_DEST (x), 0), GET_MODE (SET_DEST (x)));
6748 /* Find the end of INSN's basic block and return its range,
6749 the total number of SETs in all the insns of the block, the last insn of the
6750 block, and the branch path.
6752 The branch path indicates which branches should be followed. If a nonzero
6753 path size is specified, the block should be rescanned and a different set
6754 of branches will be taken. The branch path is only used if
6755 FLAG_CSE_FOLLOW_JUMPS or FLAG_CSE_SKIP_BLOCKS is nonzero.
6757 DATA is a pointer to a struct cse_basic_block_data, defined below, that is
6758 used to describe the block. It is filled in with the information about
6759 the current block. The incoming structure's branch path, if any, is used
6760 to construct the output branch path. */
6762 void
6763 cse_end_of_basic_block (rtx insn, struct cse_basic_block_data *data,
6764 int follow_jumps, int after_loop, int skip_blocks)
6766 rtx p = insn, q;
6767 int nsets = 0;
6768 int low_cuid = INSN_CUID (insn), high_cuid = INSN_CUID (insn);
6769 rtx next = INSN_P (insn) ? insn : next_real_insn (insn);
6770 int path_size = data->path_size;
6771 int path_entry = 0;
6772 int i;
6774 /* Update the previous branch path, if any. If the last branch was
6775 previously TAKEN, mark it NOT_TAKEN. If it was previously NOT_TAKEN,
6776 shorten the path by one and look at the previous branch. We know that
6777 at least one branch must have been taken if PATH_SIZE is nonzero. */
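/* Worked example (statuses hypothetical): a previous path
   [TAKEN, TAKEN] becomes [TAKEN, NOT_TAKEN] for this rescan; a
   previous [TAKEN, NOT_TAKEN] is shortened and becomes [NOT_TAKEN].
   The paths are thus enumerated like a binary countdown. */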
6778 while (path_size > 0)
6780 if (data->path[path_size - 1].status != NOT_TAKEN)
6782 data->path[path_size - 1].status = NOT_TAKEN;
6783 break;
6785 else
6786 path_size--;
6789 /* If the first instruction is marked with QImode, that means we've
6790 already processed this block. Our caller will look at DATA->LAST
6791 to figure out where to go next. We want to return the next block
6792 in the instruction stream, not some branched-to block somewhere
6793 else. We accomplish this by pretending our caller forbade us to
6794 follow jumps or skip blocks. */
6795 if (GET_MODE (insn) == QImode)
6796 follow_jumps = skip_blocks = 0;
6798 /* Scan to end of this basic block. */
6799 while (p && GET_CODE (p) != CODE_LABEL)
6801 /* Don't cse out the end of a loop. This makes a difference
6802 only for the unusual loops that always execute at least once;
6803 all other loops have labels there so we will stop in any case.
6804 Cse'ing out the end of the loop is dangerous because it
6805 might cause an invariant expression inside the loop
6806 to be reused after the end of the loop. This would make it
6807 hard to move the expression out of the loop in loop.c,
6808 especially if it is one of several equivalent expressions
6809 and loop.c would like to eliminate it.
6811 If we are running after loop.c has finished, we can ignore
6812 the NOTE_INSN_LOOP_END. */
6814 if (! after_loop && GET_CODE (p) == NOTE
6815 && NOTE_LINE_NUMBER (p) == NOTE_INSN_LOOP_END)
6816 break;
6818 /* Don't cse over a call to setjmp; on some machines (eg VAX)
6819 the regs restored by the longjmp come from
6820 a later time than the setjmp. */
6821 if (PREV_INSN (p) && GET_CODE (PREV_INSN (p)) == CALL_INSN
6822 && find_reg_note (PREV_INSN (p), REG_SETJMP, NULL))
6823 break;
6825 /* A PARALLEL can have lots of SETs in it,
6826 especially if it is really an ASM_OPERANDS. */
6827 if (INSN_P (p) && GET_CODE (PATTERN (p)) == PARALLEL)
6828 nsets += XVECLEN (PATTERN (p), 0);
6829 else if (GET_CODE (p) != NOTE)
6830 nsets += 1;
6832 /* Ignore insns made by CSE; they cannot affect the boundaries of
6833 the basic block. */
6835 if (INSN_UID (p) <= max_uid && INSN_CUID (p) > high_cuid)
6836 high_cuid = INSN_CUID (p);
6837 if (INSN_UID (p) <= max_uid && INSN_CUID (p) < low_cuid)
6838 low_cuid = INSN_CUID (p);
6840 /* See if this insn is in our branch path. If it is and we are to
6841 take it, do so. */
6842 if (path_entry < path_size && data->path[path_entry].branch == p)
6844 if (data->path[path_entry].status != NOT_TAKEN)
6845 p = JUMP_LABEL (p);
6847 /* Point to next entry in path, if any. */
6848 path_entry++;
6851 /* If this is a conditional jump, we can follow it if -fcse-follow-jumps
6852 was specified, we haven't reached our maximum path length, there are
6853 insns following the target of the jump, this is the only use of the
6854 jump label, and the target label is preceded by a BARRIER.
6856 Alternatively, we can follow the jump if it branches around a
6857 block of code and there are no other branches into the block.
6858 In this case invalidate_skipped_block will be called to invalidate any
6859 registers set in the block when following the jump. */
6861 else if ((follow_jumps || skip_blocks) && path_size < PARAM_VALUE (PARAM_MAX_CSE_PATH_LENGTH) - 1
6862 && GET_CODE (p) == JUMP_INSN
6863 && GET_CODE (PATTERN (p)) == SET
6864 && GET_CODE (SET_SRC (PATTERN (p))) == IF_THEN_ELSE
6865 && JUMP_LABEL (p) != 0
6866 && LABEL_NUSES (JUMP_LABEL (p)) == 1
6867 && NEXT_INSN (JUMP_LABEL (p)) != 0)
6869 for (q = PREV_INSN (JUMP_LABEL (p)); q; q = PREV_INSN (q))
6870 if ((GET_CODE (q) != NOTE
6871 || NOTE_LINE_NUMBER (q) == NOTE_INSN_LOOP_END
6872 || (PREV_INSN (q) && GET_CODE (PREV_INSN (q)) == CALL_INSN
6873 && find_reg_note (PREV_INSN (q), REG_SETJMP, NULL)))
6874 && (GET_CODE (q) != CODE_LABEL || LABEL_NUSES (q) != 0))
6875 break;
6877 /* If we ran into a BARRIER, this code is an extension of the
6878 basic block when the branch is taken. */
6879 if (follow_jumps && q != 0 && GET_CODE (q) == BARRIER)
6881 /* Don't allow ourselves to keep walking around an
6882 always-executed loop. */
6883 if (next_real_insn (q) == next)
6885 p = NEXT_INSN (p);
6886 continue;
6889 /* Similarly, don't put a branch in our path more than once. */
6890 for (i = 0; i < path_entry; i++)
6891 if (data->path[i].branch == p)
6892 break;
6894 if (i != path_entry)
6895 break;
6897 data->path[path_entry].branch = p;
6898 data->path[path_entry++].status = TAKEN;
6900 /* This branch now ends our path. It was possible that we
6901 didn't see this branch the last time around (when the
6902 insn in front of the target was a JUMP_INSN that was
6903 turned into a no-op). */
6904 path_size = path_entry;
6906 p = JUMP_LABEL (p);
6907 /* Mark block so we won't scan it again later. */
6908 PUT_MODE (NEXT_INSN (p), QImode);
6910 /* Detect a branch around a block of code. */
6911 else if (skip_blocks && q != 0 && GET_CODE (q) != CODE_LABEL)
6913 rtx tmp;
6915 if (next_real_insn (q) == next)
6917 p = NEXT_INSN (p);
6918 continue;
6921 for (i = 0; i < path_entry; i++)
6922 if (data->path[i].branch == p)
6923 break;
6925 if (i != path_entry)
6926 break;
6928 /* This is no_labels_between_p (p, q) with an added check for
6929 reaching the end of a function (in case Q precedes P). */
6930 for (tmp = NEXT_INSN (p); tmp && tmp != q; tmp = NEXT_INSN (tmp))
6931 if (GET_CODE (tmp) == CODE_LABEL)
6932 break;
6934 if (tmp == q)
6936 data->path[path_entry].branch = p;
6937 data->path[path_entry++].status = AROUND;
6939 path_size = path_entry;
6941 p = JUMP_LABEL (p);
6942 /* Mark block so we won't scan it again later. */
6943 PUT_MODE (NEXT_INSN (p), QImode);
6947 p = NEXT_INSN (p);
6950 data->low_cuid = low_cuid;
6951 data->high_cuid = high_cuid;
6952 data->nsets = nsets;
6953 data->last = p;
6955 /* If none of the jumps in the path were taken, set our path length
6956 to zero so a rescan won't be done. */
6957 for (i = path_size - 1; i >= 0; i--)
6958 if (data->path[i].status != NOT_TAKEN)
6959 break;
6961 if (i == -1)
6962 data->path_size = 0;
6963 else
6964 data->path_size = path_size;
6966 /* End the current branch path. */
6967 data->path[path_size].branch = 0;
6970 /* Perform cse on the instructions of a function.
6971 F is the first instruction.
6972 NREGS is one plus the highest pseudo-reg number used in the instruction.
6974 AFTER_LOOP is 1 if this is the cse call done after loop optimization
6975 (only if -frerun-cse-after-loop).
6977 Returns 1 if jump_optimize should be redone due to simplifications
6978 in conditional jump instructions. */
6980 int
6981 cse_main (rtx f, int nregs, int after_loop, FILE *file)
6983 struct cse_basic_block_data val;
6984 rtx insn = f;
6985 int i;
6987 val.path = xmalloc (sizeof (struct branch_path)
6988 * PARAM_VALUE (PARAM_MAX_CSE_PATH_LENGTH));
6990 cse_jumps_altered = 0;
6991 recorded_label_ref = 0;
6992 constant_pool_entries_cost = 0;
6993 constant_pool_entries_regcost = 0;
6994 val.path_size = 0;
6995 gen_lowpart = gen_lowpart_if_possible;
6997 init_recog ();
6998 init_alias_analysis ();
7000 max_reg = nregs;
7002 max_insn_uid = get_max_uid ();
7004 reg_eqv_table = xmalloc (nregs * sizeof (struct reg_eqv_elem));
7006 #ifdef LOAD_EXTEND_OP
7008 /* Allocate scratch rtl here. cse_insn will fill in the memory reference
7009 and change the code and mode as appropriate. */
7010 memory_extend_rtx = gen_rtx_ZERO_EXTEND (VOIDmode, NULL_RTX);
7011 #endif
7013 /* Reset the counter indicating how many elements have been made
7014 thus far. */
7015 n_elements_made = 0;
7017 /* Find the largest uid. */
7019 max_uid = get_max_uid ();
7020 uid_cuid = xcalloc (max_uid + 1, sizeof (int));
7022 /* Compute the mapping from uids to cuids.
7023 CUIDs are numbers assigned to insns, like uids,
7024 except that cuids increase monotonically through the code.
7025 Don't assign cuids to line-number NOTEs, so that the distance in cuids
7026 between two insns is not affected by -g. */
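/* Illustrative numbering (uids hypothetical): for the sequence
   insn (uid 5), line-number note (uid 6), insn (uid 7), the cuids
   assigned are 1, 1, 2; the note reuses the preceding insn's cuid,
   so cuid distances match with or without -g. */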
7028 for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
7030 if (GET_CODE (insn) != NOTE
7031 || NOTE_LINE_NUMBER (insn) < 0)
7032 INSN_CUID (insn) = ++i;
7033 else
7034 /* Give a line number note the same cuid as preceding insn. */
7035 INSN_CUID (insn) = i;
7038 ggc_push_context ();
7040 /* Loop over basic blocks.
7041 Compute the maximum number of qty's needed for each basic block
7042 (which is 2 for each SET). */
7043 insn = f;
7044 while (insn)
7046 cse_altered = 0;
7047 cse_end_of_basic_block (insn, &val, flag_cse_follow_jumps, after_loop,
7048 flag_cse_skip_blocks);
7050 /* If this basic block was already processed or has no sets, skip it. */
7051 if (val.nsets == 0 || GET_MODE (insn) == QImode)
7053 PUT_MODE (insn, VOIDmode);
7054 insn = (val.last ? NEXT_INSN (val.last) : 0);
7055 val.path_size = 0;
7056 continue;
7059 cse_basic_block_start = val.low_cuid;
7060 cse_basic_block_end = val.high_cuid;
7061 max_qty = val.nsets * 2;
7063 if (file)
7064 fnotice (file, ";; Processing block from %d to %d, %d sets.\n",
7065 INSN_UID (insn), val.last ? INSN_UID (val.last) : 0,
7066 val.nsets);
7068 /* Make MAX_QTY bigger to give us room to optimize
7069 past the end of this basic block, if that should prove useful. */
7070 if (max_qty < 500)
7071 max_qty = 500;
7073 max_qty += max_reg;
7075 /* If this basic block is being extended by following certain jumps,
7076 (see `cse_end_of_basic_block'), we reprocess the code from the start.
7077 Otherwise, we start after this basic block. */
7078 if (val.path_size > 0)
7079 cse_basic_block (insn, val.last, val.path, 0);
7080 else
7082 int old_cse_jumps_altered = cse_jumps_altered;
7083 rtx temp;
7085 /* When cse changes a conditional jump to an unconditional
7086 jump, we want to reprocess the block, since it will give
7087 us a new branch path to investigate. */
7088 cse_jumps_altered = 0;
7089 temp = cse_basic_block (insn, val.last, val.path, ! after_loop);
7090 if (cse_jumps_altered == 0
7091 || (flag_cse_follow_jumps == 0 && flag_cse_skip_blocks == 0))
7092 insn = temp;
7094 cse_jumps_altered |= old_cse_jumps_altered;
7097 if (cse_altered)
7098 ggc_collect ();
7100 #ifdef USE_C_ALLOCA
7101 alloca (0);
7102 #endif
7105 ggc_pop_context ();
7107 if (max_elements_made < n_elements_made)
7108 max_elements_made = n_elements_made;
7110 /* Clean up. */
7111 end_alias_analysis ();
7112 free (uid_cuid);
7113 free (reg_eqv_table);
7114 free (val.path);
7115 gen_lowpart = gen_lowpart_general;
7117 return cse_jumps_altered || recorded_label_ref;
7120 /* Process a single basic block. FROM and TO are the limits of the basic
7121 block. NEXT_BRANCH points to the branch path when following jumps or
7122 a null path when not following jumps.
7124 AROUND_LOOP is nonzero if we are to try to cse around to the start of a
7125 loop. This is true when we are being called for the last time on a
7126 block and this CSE pass is before loop.c. */
7128 static rtx
7129 cse_basic_block (rtx from, rtx to, struct branch_path *next_branch,
7130 int around_loop)
7132 rtx insn;
7133 int to_usage = 0;
7134 rtx libcall_insn = NULL_RTX;
7135 int num_insns = 0;
7136 int no_conflict = 0;
7138 /* This array is undefined before max_reg, so only allocate
7139 the space actually needed and adjust the start. */
7141 qty_table = xmalloc ((max_qty - max_reg) * sizeof (struct qty_table_elem));
7142 qty_table -= max_reg;
7144 new_basic_block ();
7146 /* TO might be a label. If so, protect it from being deleted. */
7147 if (to != 0 && GET_CODE (to) == CODE_LABEL)
7148 ++LABEL_NUSES (to);
7150 for (insn = from; insn != to; insn = NEXT_INSN (insn))
7152 enum rtx_code code = GET_CODE (insn);
7154 /* If we have processed 1,000 insns, flush the hash table to
7155 avoid extreme quadratic behavior. We must not include NOTEs
7156 in the count since there may be more of them when generating
7157 debugging information. If we clear the table at different
7158 times, code generated with -g -O might be different than code
7159 generated with -O but not -g.
7161 ??? This is a real kludge and needs to be done some other way.
7162 Perhaps for 2.9. */
7163 if (code != NOTE && num_insns++ > 1000)
7165 flush_hash_table ();
7166 num_insns = 0;
7169 /* See if this is a branch that is part of the path. If so, and it is
7170 to be taken, do so. */
7171 if (next_branch->branch == insn)
7173 enum taken status = next_branch++->status;
7174 if (status != NOT_TAKEN)
7176 if (status == TAKEN)
7177 record_jump_equiv (insn, 1);
7178 else
7179 invalidate_skipped_block (NEXT_INSN (insn));
7181 /* Set the last insn as the jump insn; it doesn't affect cc0.
7182 Then follow this branch. */
7183 #ifdef HAVE_cc0
7184 prev_insn_cc0 = 0;
7185 prev_insn = insn;
7186 #endif
7187 insn = JUMP_LABEL (insn);
7188 continue;
7192 if (GET_MODE (insn) == QImode)
7193 PUT_MODE (insn, VOIDmode);
7195 if (GET_RTX_CLASS (code) == RTX_INSN)
7197 rtx p;
7199 /* Process notes first so we have all notes in canonical forms when
7200 looking for duplicate operations. */
7202 if (REG_NOTES (insn))
7203 REG_NOTES (insn) = cse_process_notes (REG_NOTES (insn), NULL_RTX);
7205 /* Track when we are inside a LIBCALL block. Inside such a block,
7206 we do not want to record destinations. The last insn of a
7207 LIBCALL block is not considered to be part of the block, since
7208 its destination is the result of the block and hence should be
7209 recorded. */
7211 if (REG_NOTES (insn) != 0)
7213 if ((p = find_reg_note (insn, REG_LIBCALL, NULL_RTX)))
7214 libcall_insn = XEXP (p, 0);
7215 else if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
7217 /* Keep libcall_insn for the last SET insn of a no-conflict
7218 block to prevent changing the destination. */
7219 if (! no_conflict)
7220 libcall_insn = 0;
7221 else
7222 no_conflict = -1;
7224 else if (find_reg_note (insn, REG_NO_CONFLICT, NULL_RTX))
7225 no_conflict = 1;
7228 cse_insn (insn, libcall_insn);
7230 if (no_conflict == -1)
7232 libcall_insn = 0;
7233 no_conflict = 0;
7236 /* If we haven't already found an insn where we added a LABEL_REF,
7237 check this one. */
7238 if (GET_CODE (insn) == INSN && ! recorded_label_ref
7239 && for_each_rtx (&PATTERN (insn), check_for_label_ref,
7240 (void *) insn))
7241 recorded_label_ref = 1;
7244 /* If INSN is now an unconditional jump, skip to the end of our
7245 basic block by pretending that we just did the last insn in the
7246 basic block. If we are jumping to the end of our block, show
7247 that we can have one usage of TO. */
7249 if (any_uncondjump_p (insn))
7251 if (to == 0)
7253 free (qty_table + max_reg);
7254 return 0;
7257 if (JUMP_LABEL (insn) == to)
7258 to_usage = 1;
7260 /* Maybe TO was deleted because the jump is unconditional.
7261 If so, there is nothing left in this basic block. */
7262 /* ??? Perhaps it would be smarter to set TO
7263 to whatever follows this insn,
7264 and pretend the basic block had always ended here. */
7265 if (INSN_DELETED_P (to))
7266 break;
7268 insn = PREV_INSN (to);
7271 /* See if it is ok to keep on going past the label
7272 which used to end our basic block. Remember that we incremented
7273 the count of that label, so we decrement it here. If we made
7274 a jump unconditional, TO_USAGE will be one; in that case, we don't
7275 want to count the use in that jump. */
7277 if (to != 0 && NEXT_INSN (insn) == to
7278 && GET_CODE (to) == CODE_LABEL && --LABEL_NUSES (to) == to_usage)
7280 struct cse_basic_block_data val;
7281 rtx prev;
7283 insn = NEXT_INSN (to);
7285 /* If TO was the last insn in the function, we are done. */
7286 if (insn == 0)
7288 free (qty_table + max_reg);
7289 return 0;
7292 /* If TO was preceded by a BARRIER we are done with this block
7293 because it has no continuation. */
7294 prev = prev_nonnote_insn (to);
7295 if (prev && GET_CODE (prev) == BARRIER)
7297 free (qty_table + max_reg);
7298 return insn;
7301 /* Find the end of the following block. Note that we won't be
7302 following branches in this case. */
7303 to_usage = 0;
7304 val.path_size = 0;
7305 val.path = xmalloc (sizeof (struct branch_path)
7306 * PARAM_VALUE (PARAM_MAX_CSE_PATH_LENGTH));
7307 cse_end_of_basic_block (insn, &val, 0, 0, 0);
7308 free (val.path);
7310 /* If the tables we allocated have enough space left
7311 to handle all the SETs in the next basic block,
7312 continue through it. Otherwise, return,
7313 and that block will be scanned individually. */
7314 if (val.nsets * 2 + next_qty > max_qty)
7315 break;
7317 cse_basic_block_start = val.low_cuid;
7318 cse_basic_block_end = val.high_cuid;
7319 to = val.last;
7321 /* Prevent TO from being deleted if it is a label. */
7322 if (to != 0 && GET_CODE (to) == CODE_LABEL)
7323 ++LABEL_NUSES (to);
7325 /* Back up so we process the first insn in the extension. */
7326 insn = PREV_INSN (insn);
7330 if (next_qty > max_qty)
7331 abort ();
7333 /* If we are running before loop.c, we stopped on a NOTE_INSN_LOOP_END, and
7334 the previous insn is the only insn that branches to the head of a loop,
7335 we can cse into the loop. Don't do this if we changed the jump
7336 structure of a loop unless we aren't going to be following jumps. */
7338 insn = prev_nonnote_insn (to);
7339 if ((cse_jumps_altered == 0
7340 || (flag_cse_follow_jumps == 0 && flag_cse_skip_blocks == 0))
7341 && around_loop && to != 0
7342 && GET_CODE (to) == NOTE && NOTE_LINE_NUMBER (to) == NOTE_INSN_LOOP_END
7343 && GET_CODE (insn) == JUMP_INSN
7344 && JUMP_LABEL (insn) != 0
7345 && LABEL_NUSES (JUMP_LABEL (insn)) == 1)
7346 cse_around_loop (JUMP_LABEL (insn));
7348 free (qty_table + max_reg);
7350 return to ? NEXT_INSN (to) : 0;
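
/* A note on the qty_table allocation above: it is the usual "biased
   pointer" idiom.  Only indices in [max_reg, max_qty) are ever valid,
   so we allocate just that window and shift the base, then undo the
   bias before freeing.  A minimal standalone sketch of the idiom
   (illustrative, not from this file):

     struct qty_table_elem *qt
       = xmalloc ((max_qty - max_reg) * sizeof *qt);
     qt -= max_reg;            qt[max_reg] is now the first element
     ...
     free (qt + max_reg);      restore the true base before freeing

   Indexing qt below max_reg, or freeing the biased pointer directly,
   would be undefined behavior.  */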
/* Called via for_each_rtx to see if an insn is using a LABEL_REF for which
   there isn't a REG_LABEL note.  Return one if so.  DATA is the insn.  */

static int
check_for_label_ref (rtx *rtl, void *data)
{
  rtx insn = (rtx) data;

  /* If this insn uses a LABEL_REF and there isn't a REG_LABEL note for it,
     we must rerun jump since it needs to place the note.  If this is a
     LABEL_REF for a CODE_LABEL that isn't in the insn chain, don't do this
     since no REG_LABEL will be added.  */
  return (GET_CODE (*rtl) == LABEL_REF
	  && ! LABEL_REF_NONLOCAL_P (*rtl)
	  && LABEL_P (XEXP (*rtl, 0))
	  && INSN_UID (XEXP (*rtl, 0)) != 0
	  && ! find_reg_note (insn, REG_LABEL, XEXP (*rtl, 0)));
}
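
/* Sketch of the for_each_rtx contract assumed by the callback above
   (hypothetical example, not from this file): the callback is handed a
   pointer to each subexpression in turn; a nonzero return stops the
   walk and becomes for_each_rtx's result, and -1 means "do not recurse
   into this subexpression".  For instance, counting MEMs in a pattern:

     static int
     count_mems (rtx *x, void *data)
     {
       if (*x && GET_CODE (*x) == MEM)
	 ++*(int *) data;
       return 0;
     }

     int nmems = 0;
     for_each_rtx (&PATTERN (insn), count_mems, &nmems);

   cse_basic_block uses the nonzero-return convention to stop as soon
   as one noteless LABEL_REF is found.  */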
/* Count the number of times registers are used (not set) in X.
   COUNTS is an array in which we accumulate the count, INCR is how much
   we count each register usage.  */

static void
count_reg_usage (rtx x, int *counts, int incr)
{
  enum rtx_code code;
  rtx note;
  const char *fmt;
  int i, j;

  if (x == 0)
    return;

  switch (code = GET_CODE (x))
    {
    case REG:
      counts[REGNO (x)] += incr;
      return;

    case PC:
    case CC0:
    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
      return;

    case CLOBBER:
      /* If we are clobbering a MEM, mark any registers inside the address
	 as being used.  */
      if (GET_CODE (XEXP (x, 0)) == MEM)
	count_reg_usage (XEXP (XEXP (x, 0), 0), counts, incr);
      return;

    case SET:
      /* Unless we are setting a REG, count everything in SET_DEST.  */
      if (GET_CODE (SET_DEST (x)) != REG)
	count_reg_usage (SET_DEST (x), counts, incr);
      count_reg_usage (SET_SRC (x), counts, incr);
      return;

    case CALL_INSN:
      count_reg_usage (CALL_INSN_FUNCTION_USAGE (x), counts, incr);
      /* Fall through.  */

    case INSN:
    case JUMP_INSN:
      count_reg_usage (PATTERN (x), counts, incr);

      /* Things used in a REG_EQUAL note aren't dead since loop may try to
	 use them.  */

      note = find_reg_equal_equiv_note (x);
      if (note)
	{
	  rtx eqv = XEXP (note, 0);

	  if (GET_CODE (eqv) == EXPR_LIST)
	    /* This REG_EQUAL note describes the result of a function call.
	       Process all the arguments.  */
	    do
	      {
		count_reg_usage (XEXP (eqv, 0), counts, incr);
		eqv = XEXP (eqv, 1);
	      }
	    while (eqv && GET_CODE (eqv) == EXPR_LIST);
	  else
	    count_reg_usage (eqv, counts, incr);
	}

      return;

    case EXPR_LIST:
      if (REG_NOTE_KIND (x) == REG_EQUAL
	  || (REG_NOTE_KIND (x) != REG_NONNEG && GET_CODE (XEXP (x, 0)) == USE)
	  /* FUNCTION_USAGE expression lists may include (CLOBBER (mem /u)),
	     involving registers in the address.  */
	  || GET_CODE (XEXP (x, 0)) == CLOBBER)
	count_reg_usage (XEXP (x, 0), counts, incr);

      count_reg_usage (XEXP (x, 1), counts, incr);
      return;

    case ASM_OPERANDS:
      /* Iterate over just the inputs, not the constraints as well.  */
      for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
	count_reg_usage (ASM_OPERANDS_INPUT (x, i), counts, incr);
      return;

    case INSN_LIST:
      abort ();

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	count_reg_usage (XEXP (x, i), counts, incr);
      else if (fmt[i] == 'E')
	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	  count_reg_usage (XVECEXP (x, i, j), counts, incr);
    }
}
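
/* Illustrative use of the counting scheme (hypothetical snippet, not
   from this file): counts are built once with INCR == 1 and kept in
   sync with INCR == -1 as insns are removed:

     int *counts = xcalloc (max_reg_num (), sizeof (int));
     for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
       if (INSN_P (insn))
	 count_reg_usage (insn, counts, 1);

   and, just before deleting a dead insn,

     count_reg_usage (insn, counts, -1);

   so that counts[REGNO (r)] == 0 continues to mean "pseudo R is never
   used".  delete_trivially_dead_insns below does exactly this, using
   next_real_insn to skip notes.  */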
/* Return true if set is live.  */
static bool
set_live_p (rtx set, rtx insn ATTRIBUTE_UNUSED, /* Only used with HAVE_cc0.  */
	    int *counts)
{
#ifdef HAVE_cc0
  rtx tem;
#endif

  if (set_noop_p (set))
    ;

#ifdef HAVE_cc0
  else if (GET_CODE (SET_DEST (set)) == CC0
	   && !side_effects_p (SET_SRC (set))
	   && ((tem = next_nonnote_insn (insn)) == 0
	       || !INSN_P (tem)
	       || !reg_referenced_p (cc0_rtx, PATTERN (tem))))
    return false;
#endif
  else if (GET_CODE (SET_DEST (set)) != REG
	   || REGNO (SET_DEST (set)) < FIRST_PSEUDO_REGISTER
	   || counts[REGNO (SET_DEST (set))] != 0
	   || side_effects_p (SET_SRC (set))
	   /* An ADDRESSOF expression can turn into a use of the
	      internal arg pointer, so always consider the
	      internal arg pointer live.  If it is truly dead,
	      flow will delete the initializing insn.  */
	   || (SET_DEST (set) == current_function_internal_arg_pointer))
    return true;
  return false;
}
/* Return true if insn is live.  */

static bool
insn_live_p (rtx insn, int *counts)
{
  int i;
  if (flag_non_call_exceptions && may_trap_p (PATTERN (insn)))
    return true;
  else if (GET_CODE (PATTERN (insn)) == SET)
    return set_live_p (PATTERN (insn), insn, counts);
  else if (GET_CODE (PATTERN (insn)) == PARALLEL)
    {
      for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
	{
	  rtx elt = XVECEXP (PATTERN (insn), 0, i);

	  if (GET_CODE (elt) == SET)
	    {
	      if (set_live_p (elt, insn, counts))
		return true;
	    }
	  else if (GET_CODE (elt) != CLOBBER && GET_CODE (elt) != USE)
	    return true;
	}
      return false;
    }
  else
    return true;
}
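
/* Concrete illustration (schematic RTL, not from this file): with
   counts[65] == 0, an insn whose pattern is

     (set (reg:SI 65) (plus:SI (reg:SI 60) (const_int 4)))

   is not live: it sets an otherwise unused pseudo (not a hard
   register) and its source has no side effects, so the two functions
   above report it as deletable.  A PARALLEL is live if any member SET
   is live, or if any member is neither a SET, a CLOBBER nor a USE.  */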
/* Return true if libcall is dead as a whole.  */

static bool
dead_libcall_p (rtx insn, int *counts)
{
  rtx note, set, new;

  /* See if there's a REG_EQUAL note on this insn and try to
     replace the source with the REG_EQUAL expression.

     We assume that insns with REG_RETVALs can only be reg->reg
     copies at this point.  */
  note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
  if (!note)
    return false;

  set = single_set (insn);
  if (!set)
    return false;

  new = simplify_rtx (XEXP (note, 0));
  if (!new)
    new = XEXP (note, 0);

  /* While changing insn, we must update the counts accordingly.  */
  count_reg_usage (insn, counts, -1);

  if (validate_change (insn, &SET_SRC (set), new, 0))
    {
      count_reg_usage (insn, counts, 1);
      remove_note (insn, find_reg_note (insn, REG_RETVAL, NULL_RTX));
      remove_note (insn, note);
      return true;
    }

  if (CONSTANT_P (new))
    {
      new = force_const_mem (GET_MODE (SET_DEST (set)), new);
      if (new && validate_change (insn, &SET_SRC (set), new, 0))
	{
	  count_reg_usage (insn, counts, 1);
	  remove_note (insn, find_reg_note (insn, REG_RETVAL, NULL_RTX));
	  remove_note (insn, note);
	  return true;
	}
    }

  count_reg_usage (insn, counts, 1);
  return false;
}
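
/* Schematic picture of the libcall blocks handled above (illustrative
   only; insn numbers, registers and the note notation are made up):

     insn 10: (set (reg 70) ...)        with a REG_LIBCALL note -> insn 14
     insn 12: (set (reg 71) ...)
     insn 14: (set (reg 73) (reg 70))   with a REG_RETVAL note -> insn 10
					and a REG_EQUAL note = (udiv ...)

   If the REG_EQUAL value, possibly simplified or forced into the
   constant pool, can validly replace the source of the final copy,
   both bracketing notes are removed and every insn of the block
   becomes individually dead, so the backward scan in
   delete_trivially_dead_insns can remove the whole block.  */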
/* Scan all the insns and delete any that are dead; i.e., they store a register
   that is never used or they copy a register to itself.

   This is used to remove insns made obviously dead by cse, loop or other
   optimizations.  It improves the heuristics in loop since it won't try to
   move dead invariants out of loops or make givs for dead quantities.  The
   remaining passes of the compilation are also sped up.  */

int
delete_trivially_dead_insns (rtx insns, int nreg)
{
  int *counts;
  rtx insn, prev;
  int in_libcall = 0, dead_libcall = 0;
  int ndead = 0, nlastdead, niterations = 0;

  timevar_push (TV_DELETE_TRIVIALLY_DEAD);
  /* First count the number of times each register is used.  */
  counts = xcalloc (nreg, sizeof (int));
  for (insn = next_real_insn (insns); insn; insn = next_real_insn (insn))
    count_reg_usage (insn, counts, 1);

  do
    {
      nlastdead = ndead;
      niterations++;
      /* Go from the last insn to the first and delete insns that only set
	 unused registers or copy a register to itself.  As we delete an
	 insn, remove usage counts for registers it uses.

	 The first jump optimization pass may leave a real insn as the last
	 insn in the function.  We must not skip that insn or we may end
	 up deleting code that is not really dead.  */
      insn = get_last_insn ();
      if (! INSN_P (insn))
	insn = prev_real_insn (insn);

      for (; insn; insn = prev)
	{
	  int live_insn = 0;

	  prev = prev_real_insn (insn);

	  /* Don't delete any insns that are part of a libcall block unless
	     we can delete the whole libcall block.

	     Flow or loop might get confused if we did that.  Remember
	     that we are scanning backwards.  */
	  if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
	    {
	      in_libcall = 1;
	      live_insn = 1;
	      dead_libcall = dead_libcall_p (insn, counts);
	    }
	  else if (in_libcall)
	    live_insn = ! dead_libcall;
	  else
	    live_insn = insn_live_p (insn, counts);

	  /* If this is a dead insn, delete it and show registers in it
	     aren't being used.  */

	  if (! live_insn)
	    {
	      count_reg_usage (insn, counts, -1);
	      delete_insn_and_edges (insn);
	      ndead++;
	    }

	  if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
	    {
	      in_libcall = 0;
	      dead_libcall = 0;
	    }
	}
    }
  while (ndead != nlastdead);

  if (dump_file && ndead)
    fprintf (dump_file, "Deleted %i trivially dead insns; %i iterations\n",
	     ndead, niterations);
  /* Clean up.  */
  free (counts);
  timevar_pop (TV_DELETE_TRIVIALLY_DEAD);
  return ndead;
}
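
/* Typical invocation (hypothetical sketch; the real call sites are in
   the pass drivers, not shown here):

     ndead = delete_trivially_dead_insns (get_insns (), max_reg_num ());

   NREG must be at least max_reg_num () so that COUNTS can be indexed
   by every register number appearing in the stream; the return value
   is the number of insns deleted.  The outer do-while reiterates
   because deleting one insn can drop the use count of another pseudo
   to zero, making its setter newly dead.  */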
/* This function is called via for_each_rtx.  The argument, NEWREG, is
   a condition code register with the desired mode.  If we are looking
   at the same register in a different mode, replace it with
   NEWREG.  */

static int
cse_change_cc_mode (rtx *loc, void *data)
{
  rtx newreg = (rtx) data;

  if (*loc
      && GET_CODE (*loc) == REG
      && REGNO (*loc) == REGNO (newreg)
      && GET_MODE (*loc) != GET_MODE (newreg))
    {
      *loc = newreg;
      return -1;
    }
  return 0;
}
/* Change the mode of any reference to the register REGNO (NEWREG) to
   GET_MODE (NEWREG), starting at START.  Stop before END.  Stop at
   any instruction which modifies NEWREG.  */

static void
cse_change_cc_mode_insns (rtx start, rtx end, rtx newreg)
{
  rtx insn;

  for (insn = start; insn != end; insn = NEXT_INSN (insn))
    {
      if (! INSN_P (insn))
	continue;

      if (reg_set_p (newreg, insn))
	return;

      for_each_rtx (&PATTERN (insn), cse_change_cc_mode, newreg);
      for_each_rtx (&REG_NOTES (insn), cse_change_cc_mode, newreg);
    }
}
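
/* Illustrative use (hypothetical names and target-specific modes, not
   from this file): to retype every reference to the flags register
   between a setter and its last user,

     rtx newreg = gen_rtx_REG (new_mode, REGNO (cc_reg));
     cse_change_cc_mode_insns (NEXT_INSN (set_insn),
			       NEXT_INSN (last_use), newreg);

   The walk stops early if some insn sets the register again, since any
   later references then see a fresh value in the old mode and must not
   be rewritten.  */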
/* BB is a basic block which finishes with CC_REG as a condition code
   register which is set to CC_SRC.  Look through the successors of BB
   to find blocks which have a single predecessor (i.e., this one),
   and look through those blocks for an assignment to CC_REG which is
   equivalent to CC_SRC.  CAN_CHANGE_MODE indicates whether we are
   permitted to change the mode of CC_SRC to a compatible mode.  This
   returns VOIDmode if no equivalent assignments were found.
   Otherwise it returns the mode which CC_SRC should wind up with.

   The main complexity in this function is handling the mode issues.
   We may have more than one duplicate which we can eliminate, and we
   try to find a mode which will work for multiple duplicates.  */

static enum machine_mode
cse_cc_succs (basic_block bb, rtx cc_reg, rtx cc_src, bool can_change_mode)
{
  bool found_equiv;
  enum machine_mode mode;
  unsigned int insn_count;
  edge e;
  rtx insns[2];
  enum machine_mode modes[2];
  rtx last_insns[2];
  unsigned int i;
  rtx newreg;

  /* We expect to have two successors.  Look at both before picking
     the final mode for the comparison.  If we have more successors
     (i.e., some sort of table jump, although that seems unlikely),
     then we require all beyond the first two to use the same
     mode.  */

  found_equiv = false;
  mode = GET_MODE (cc_src);
  insn_count = 0;
  for (e = bb->succ; e; e = e->succ_next)
    {
      rtx insn;
      rtx end;

      if (e->flags & EDGE_COMPLEX)
	continue;

      if (! e->dest->pred
	  || e->dest->pred->pred_next
	  || e->dest == EXIT_BLOCK_PTR)
	continue;

      end = NEXT_INSN (BB_END (e->dest));
      for (insn = BB_HEAD (e->dest); insn != end; insn = NEXT_INSN (insn))
	{
	  rtx set;

	  if (! INSN_P (insn))
	    continue;

	  /* If CC_SRC is modified, we have to stop looking for
	     something which uses it.  */
	  if (modified_in_p (cc_src, insn))
	    break;

	  /* Check whether INSN sets CC_REG to CC_SRC.  */
	  set = single_set (insn);
	  if (set
	      && GET_CODE (SET_DEST (set)) == REG
	      && REGNO (SET_DEST (set)) == REGNO (cc_reg))
	    {
	      bool found;
	      enum machine_mode set_mode;
	      enum machine_mode comp_mode;

	      found = false;
	      set_mode = GET_MODE (SET_SRC (set));
	      comp_mode = set_mode;
	      if (rtx_equal_p (cc_src, SET_SRC (set)))
		found = true;
	      else if (GET_CODE (cc_src) == COMPARE
		       && GET_CODE (SET_SRC (set)) == COMPARE
		       && mode != set_mode
		       && rtx_equal_p (XEXP (cc_src, 0),
				       XEXP (SET_SRC (set), 0))
		       && rtx_equal_p (XEXP (cc_src, 1),
				       XEXP (SET_SRC (set), 1)))
		{
		  comp_mode = (*targetm.cc_modes_compatible) (mode, set_mode);
		  if (comp_mode != VOIDmode
		      && (can_change_mode || comp_mode == mode))
		    found = true;
		}

	      if (found)
		{
		  found_equiv = true;
		  if (insn_count < ARRAY_SIZE (insns))
		    {
		      insns[insn_count] = insn;
		      modes[insn_count] = set_mode;
		      last_insns[insn_count] = end;
		      ++insn_count;

		      if (mode != comp_mode)
			{
			  if (! can_change_mode)
			    abort ();
			  mode = comp_mode;
			  PUT_MODE (cc_src, mode);
			}
		    }
		  else
		    {
		      if (set_mode != mode)
			{
			  /* We found a matching expression in the
			     wrong mode, but we don't have room to
			     store it in the array.  Punt.  This case
			     should be rare.  */
			  break;
			}
		      /* INSN sets CC_REG to a value equal to CC_SRC
			 with the right mode.  We can simply delete
			 it.  */
		      delete_insn (insn);
		    }

		  /* We found an instruction to delete.  Keep looking,
		     in the hopes of finding a three-way jump.  */
		  continue;
		}

	      /* We found an instruction which sets the condition
		 code, so don't look any farther.  */
	      break;
	    }

	  /* If INSN sets CC_REG in some other way, don't look any
	     farther.  */
	  if (reg_set_p (cc_reg, insn))
	    break;
	}

      /* If we fell off the bottom of the block, we can keep looking
	 through successors.  We pass CAN_CHANGE_MODE as false because
	 we aren't prepared to handle compatibility between the
	 further blocks and this block.  */
      if (insn == end)
	{
	  enum machine_mode submode;

	  submode = cse_cc_succs (e->dest, cc_reg, cc_src, false);
	  if (submode != VOIDmode)
	    {
	      if (submode != mode)
		abort ();
	      found_equiv = true;
	      can_change_mode = false;
	    }
	}
    }

  if (! found_equiv)
    return VOIDmode;

  /* Now INSN_COUNT is the number of instructions we found which set
     CC_REG to a value equivalent to CC_SRC.  The instructions are in
     INSNS.  The modes used by those instructions are in MODES.  */

  newreg = NULL_RTX;
  for (i = 0; i < insn_count; ++i)
    {
      if (modes[i] != mode)
	{
	  /* We need to change the mode of CC_REG in INSNS[i] and
	     subsequent instructions.  */
	  if (! newreg)
	    {
	      if (GET_MODE (cc_reg) == mode)
		newreg = cc_reg;
	      else
		newreg = gen_rtx_REG (mode, REGNO (cc_reg));
	    }
	  cse_change_cc_mode_insns (NEXT_INSN (insns[i]), last_insns[i],
				    newreg);
	}

      delete_insn (insns[i]);
    }

  return mode;
}
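
/* The transformation this enables, schematically (illustrative RTL,
   with made-up register numbers):

   bb1:   (set (reg:CC 17) (compare:CC (reg:SI 60) (reg:SI 61)))
	  (jump_insn ... conditional on (reg:CC 17))
   bb2:   (set (reg:CC 17) (compare:CC (reg:SI 60) (reg:SI 61)))
	  (jump_insn ... conditional on (reg:CC 17))

   With bb2 reached only from bb1 and the comparison operands unchanged
   in between, the second set is deleted.  If the two sets used
   different CC modes, targetm.cc_modes_compatible chooses a mode valid
   for both, and references in the surviving insns are rewritten via
   cse_change_cc_mode_insns.  */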
/* If we have a fixed condition code register (or two), walk through
   the instructions and try to eliminate duplicate assignments.  */

void
cse_condition_code_reg (void)
{
  unsigned int cc_regno_1;
  unsigned int cc_regno_2;
  rtx cc_reg_1;
  rtx cc_reg_2;
  basic_block bb;

  if (! (*targetm.fixed_condition_code_regs) (&cc_regno_1, &cc_regno_2))
    return;

  cc_reg_1 = gen_rtx_REG (CCmode, cc_regno_1);
  if (cc_regno_2 != INVALID_REGNUM)
    cc_reg_2 = gen_rtx_REG (CCmode, cc_regno_2);
  else
    cc_reg_2 = NULL_RTX;

  FOR_EACH_BB (bb)
    {
      rtx last_insn;
      rtx cc_reg;
      rtx insn;
      rtx cc_src_insn;
      rtx cc_src;
      enum machine_mode mode;
      enum machine_mode orig_mode;

      /* Look for blocks which end with a conditional jump based on a
	 condition code register.  Then look for the instruction which
	 sets the condition code register.  Then look through the
	 successor blocks for instructions which set the condition
	 code register to the same value.  There are other possible
	 uses of the condition code register, but these are by far the
	 most common and the ones which we are most likely to be able
	 to optimize.  */

      last_insn = BB_END (bb);
      if (GET_CODE (last_insn) != JUMP_INSN)
	continue;

      if (reg_referenced_p (cc_reg_1, PATTERN (last_insn)))
	cc_reg = cc_reg_1;
      else if (cc_reg_2 && reg_referenced_p (cc_reg_2, PATTERN (last_insn)))
	cc_reg = cc_reg_2;
      else
	continue;

      cc_src_insn = NULL_RTX;
      cc_src = NULL_RTX;
      for (insn = PREV_INSN (last_insn);
	   insn && insn != PREV_INSN (BB_HEAD (bb));
	   insn = PREV_INSN (insn))
	{
	  rtx set;

	  if (! INSN_P (insn))
	    continue;
	  set = single_set (insn);
	  if (set
	      && GET_CODE (SET_DEST (set)) == REG
	      && REGNO (SET_DEST (set)) == REGNO (cc_reg))
	    {
	      cc_src_insn = insn;
	      cc_src = SET_SRC (set);
	      break;
	    }
	  else if (reg_set_p (cc_reg, insn))
	    break;
	}

      if (! cc_src_insn)
	continue;

      if (modified_between_p (cc_src, cc_src_insn, NEXT_INSN (last_insn)))
	continue;

      /* Now CC_REG is a condition code register used for a
	 conditional jump at the end of the block, and CC_SRC, in
	 CC_SRC_INSN, is the value to which that condition code
	 register is set, and CC_SRC is still meaningful at the end of
	 the basic block.  */

      orig_mode = GET_MODE (cc_src);
      mode = cse_cc_succs (bb, cc_reg, cc_src, true);
      if (mode != VOIDmode)
	{
	  if (mode != GET_MODE (cc_src))
	    abort ();
	  if (mode != orig_mode)
	    {
	      rtx newreg = gen_rtx_REG (mode, REGNO (cc_reg));

	      /* Change the mode of CC_REG in CC_SRC_INSN to
		 GET_MODE (NEWREG).  */
	      for_each_rtx (&PATTERN (cc_src_insn), cse_change_cc_mode,
			    newreg);
	      for_each_rtx (&REG_NOTES (cc_src_insn), cse_change_cc_mode,
			    newreg);

	      /* Do the same in the following insns that use the
		 current value of CC_REG within BB.  */
	      cse_change_cc_mode_insns (NEXT_INSN (cc_src_insn),
					NEXT_INSN (last_insn),
					newreg);