re PR target/8343 ([m68k] [3.2 regression] m68k-elf/rtems ICE at instantiate_virtual_...
[official-gcc.git] / gcc / cse.c
blobc9ba3c0e5932ca3c822007e879703c467a185101
1 /* Common subexpression elimination for GNU compiler.
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998
3 1999, 2000, 2001, 2002 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
22 #include "config.h"
23 /* stdio.h must precede rtl.h for FFS. */
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
28 #include "rtl.h"
29 #include "tm_p.h"
30 #include "regs.h"
31 #include "hard-reg-set.h"
32 #include "basic-block.h"
33 #include "flags.h"
34 #include "real.h"
35 #include "insn-config.h"
36 #include "recog.h"
37 #include "function.h"
38 #include "expr.h"
39 #include "toplev.h"
40 #include "output.h"
41 #include "ggc.h"
42 #include "timevar.h"
44 /* The basic idea of common subexpression elimination is to go
45 through the code, keeping a record of expressions that would
46 have the same value at the current scan point, and replacing
47 expressions encountered with the cheapest equivalent expression.
49 It is too complicated to keep track of the different possibilities
50 when control paths merge in this code; so, at each label, we forget all
51 that is known and start fresh. This can be described as processing each
52 extended basic block separately. We have a separate pass to perform
53 global CSE.
55 Note CSE can turn a conditional or computed jump into a nop or
56 an unconditional jump. When this occurs we arrange to run the jump
57 optimizer after CSE to delete the unreachable code.
59 We use two data structures to record the equivalent expressions:
60 a hash table for most expressions, and a vector of "quantity
61 numbers" to record equivalent (pseudo) registers.
63 The use of the special data structure for registers is desirable
64 because it is faster. It is possible because registers references
65 contain a fairly small number, the register number, taken from
66 a contiguously allocated series, and two register references are
67 identical if they have the same number. General expressions
68 do not have any such thing, so the only way to retrieve the
69 information recorded on an expression other than a register
70 is to keep it in a hash table.
72 Registers and "quantity numbers":
74 At the start of each basic block, all of the (hardware and pseudo)
75 registers used in the function are given distinct quantity
76 numbers to indicate their contents. During scan, when the code
77 copies one register into another, we copy the quantity number.
78 When a register is loaded in any other way, we allocate a new
79 quantity number to describe the value generated by this operation.
80 `reg_qty' records what quantity a register is currently thought
81 of as containing.
83 All real quantity numbers are greater than or equal to `max_reg'.
84 If register N has not been assigned a quantity, reg_qty[N] will equal N.
86 Quantity numbers below `max_reg' do not exist and none of the `qty_table'
87 entries should be referenced with an index below `max_reg'.
89 We also maintain a bidirectional chain of registers for each
90 quantity number. The `qty_table` members `first_reg' and `last_reg',
91 and `reg_eqv_table' members `next' and `prev' hold these chains.
93 The first register in a chain is the one whose lifespan is least local.
94 Among equals, it is the one that was seen first.
95 We replace any equivalent register with that one.
97 If two registers have the same quantity number, it must be true that
98 REG expressions with qty_table `mode' must be in the hash table for both
99 registers and must be in the same class.
101 The converse is not true. Since hard registers may be referenced in
102 any mode, two REG expressions might be equivalent in the hash table
103 but not have the same quantity number if the quantity number of one
104 of the registers is not the same mode as those expressions.
106 Constants and quantity numbers
108 When a quantity has a known constant value, that value is stored
109 in the appropriate qty_table `const_rtx'. This is in addition to
110 putting the constant in the hash table as is usual for non-regs.
112 Whether a reg or a constant is preferred is determined by the configuration
113 macro CONST_COSTS and will often depend on the constant value. In any
114 event, expressions containing constants can be simplified, by fold_rtx.
116 When a quantity has a known nearly constant value (such as an address
117 of a stack slot), that value is stored in the appropriate qty_table
118 `const_rtx'.
120 Integer constants don't have a machine mode. However, cse
121 determines the intended machine mode from the destination
122 of the instruction that moves the constant. The machine mode
123 is recorded in the hash table along with the actual RTL
124 constant expression so that different modes are kept separate.
126 Other expressions:
128 To record known equivalences among expressions in general
129 we use a hash table called `table'. It has a fixed number of buckets
130 that contain chains of `struct table_elt' elements for expressions.
131 These chains connect the elements whose expressions have the same
132 hash codes.
134 Other chains through the same elements connect the elements which
135 currently have equivalent values.
137 Register references in an expression are canonicalized before hashing
138 the expression. This is done using `reg_qty' and qty_table `first_reg'.
139 The hash code of a register reference is computed using the quantity
140 number, not the register number.
142 When the value of an expression changes, it is necessary to remove from the
143 hash table not just that expression but all expressions whose values
144 could be different as a result.
146 1. If the value changing is in memory, except in special cases
147 ANYTHING referring to memory could be changed. That is because
148 nobody knows where a pointer does not point.
149 The function `invalidate_memory' removes what is necessary.
151 The special cases are when the address is constant or is
152 a constant plus a fixed register such as the frame pointer
153 or a static chain pointer. When such addresses are stored in,
154 we can tell exactly which other such addresses must be invalidated
155 due to overlap. `invalidate' does this.
156 All expressions that refer to non-constant
157 memory addresses are also invalidated. `invalidate_memory' does this.
159 2. If the value changing is a register, all expressions
160 containing references to that register, and only those,
161 must be removed.
163 Because searching the entire hash table for expressions that contain
164 a register is very slow, we try to figure out when it isn't necessary.
165 Precisely, this is necessary only when expressions have been
166 entered in the hash table using this register, and then the value has
167 changed, and then another expression wants to be added to refer to
168 the register's new value. This sequence of circumstances is rare
169 within any one basic block.
171 The vectors `reg_tick' and `reg_in_table' are used to detect this case.
172 reg_tick[i] is incremented whenever a value is stored in register i.
173 reg_in_table[i] holds -1 if no references to register i have been
174 entered in the table; otherwise, it contains the value reg_tick[i] had
175 when the references were entered. If we want to enter a reference
176 and reg_in_table[i] != reg_tick[i], we must scan and remove old references.
177 Until we want to enter a new entry, the mere fact that the two vectors
178 don't match makes the entries be ignored if anyone tries to match them.
180 Registers themselves are entered in the hash table as well as in
181 the equivalent-register chains. However, the vectors `reg_tick'
182 and `reg_in_table' do not apply to expressions which are simple
183 register references. These expressions are removed from the table
184 immediately when they become invalid, and this can be done even if
185 we do not immediately search for all the expressions that refer to
186 the register.
188 A CLOBBER rtx in an instruction invalidates its operand for further
189 reuse. A CLOBBER or SET rtx whose operand is a MEM:BLK
190 invalidates everything that resides in memory.
192 Related expressions:
194 Constant expressions that differ only by an additive integer
195 are called related. When a constant expression is put in
196 the table, the related expression with no constant term
197 is also entered. These are made to point at each other
198 so that it is possible to find out if there exists any
199 register equivalent to an expression related to a given expression. */
201 /* One plus largest register number used in this function. */
203 static int max_reg;
205 /* One plus largest instruction UID used in this function at time of
206 cse_main call. */
208 static int max_insn_uid;
210 /* Length of qty_table vector. We know in advance we will not need
211 a quantity number this big. */
213 static int max_qty;
215 /* Next quantity number to be allocated.
216 This is 1 + the largest number needed so far. */
218 static int next_qty;
220 /* Per-qty information tracking.
222 `first_reg' and `last_reg' track the head and tail of the
223 chain of registers which currently contain this quantity.
225 `mode' contains the machine mode of this quantity.
227 `const_rtx' holds the rtx of the constant value of this
228 quantity, if known. A summations of the frame/arg pointer
229 and a constant can also be entered here. When this holds
230 a known value, `const_insn' is the insn which stored the
231 constant value.
233 `comparison_{code,const,qty}' are used to track when a
234 comparison between a quantity and some constant or register has
235 been passed. In such a case, we know the results of the comparison
236 in case we see it again. These members record a comparison that
237 is known to be true. `comparison_code' holds the rtx code of such
238 a comparison, else it is set to UNKNOWN and the other two
239 comparison members are undefined. `comparison_const' holds
240 the constant being compared against, or zero if the comparison
241 is not against a constant. `comparison_qty' holds the quantity
242 being compared against when the result is known. If the comparison
243 is not with a register, `comparison_qty' is -1. */
245 struct qty_table_elem
247 rtx const_rtx;
248 rtx const_insn;
249 rtx comparison_const;
250 int comparison_qty;
251 unsigned int first_reg, last_reg;
252 enum machine_mode mode;
253 enum rtx_code comparison_code;
256 /* The table of all qtys, indexed by qty number. */
257 static struct qty_table_elem *qty_table;
259 #ifdef HAVE_cc0
260 /* For machines that have a CC0, we do not record its value in the hash
261 table since its use is guaranteed to be the insn immediately following
262 its definition and any other insn is presumed to invalidate it.
264 Instead, we store below the value last assigned to CC0. If it should
265 happen to be a constant, it is stored in preference to the actual
266 assigned value. In case it is a constant, we store the mode in which
267 the constant should be interpreted. */
269 static rtx prev_insn_cc0;
270 static enum machine_mode prev_insn_cc0_mode;
272 /* Previous actual insn. 0 if at first insn of basic block. */
274 static rtx prev_insn;
275 #endif
277 /* Insn being scanned. */
279 static rtx this_insn;
281 /* Index by register number, gives the number of the next (or
282 previous) register in the chain of registers sharing the same
283 value.
285 Or -1 if this register is at the end of the chain.
287 If reg_qty[N] == N, reg_eqv_table[N].next is undefined. */
289 /* Per-register equivalence chain. */
290 struct reg_eqv_elem
292 int next, prev;
295 /* The table of all register equivalence chains. */
296 static struct reg_eqv_elem *reg_eqv_table;
298 struct cse_reg_info
300 /* Next in hash chain. */
301 struct cse_reg_info *hash_next;
303 /* The next cse_reg_info structure in the free or used list. */
304 struct cse_reg_info *next;
306 /* Search key */
307 unsigned int regno;
309 /* The quantity number of the register's current contents. */
310 int reg_qty;
312 /* The number of times the register has been altered in the current
313 basic block. */
314 int reg_tick;
316 /* The REG_TICK value at which rtx's containing this register are
317 valid in the hash table. If this does not equal the current
318 reg_tick value, such expressions existing in the hash table are
319 invalid. */
320 int reg_in_table;
322 /* The SUBREG that was set when REG_TICK was last incremented. Set
323 to -1 if the last store was to the whole register, not a subreg. */
324 unsigned int subreg_ticked;
327 /* A free list of cse_reg_info entries. */
328 static struct cse_reg_info *cse_reg_info_free_list;
330 /* A used list of cse_reg_info entries. */
331 static struct cse_reg_info *cse_reg_info_used_list;
332 static struct cse_reg_info *cse_reg_info_used_list_end;
334 /* A mapping from registers to cse_reg_info data structures. */
335 #define REGHASH_SHIFT 7
336 #define REGHASH_SIZE (1 << REGHASH_SHIFT)
337 #define REGHASH_MASK (REGHASH_SIZE - 1)
338 static struct cse_reg_info *reg_hash[REGHASH_SIZE];
340 #define REGHASH_FN(REGNO) \
341 (((REGNO) ^ ((REGNO) >> REGHASH_SHIFT)) & REGHASH_MASK)
343 /* The last lookup we did into the cse_reg_info_tree. This allows us
344 to cache repeated lookups. */
345 static unsigned int cached_regno;
346 static struct cse_reg_info *cached_cse_reg_info;
348 /* A HARD_REG_SET containing all the hard registers for which there is
349 currently a REG expression in the hash table. Note the difference
350 from the above variables, which indicate if the REG is mentioned in some
351 expression in the table. */
353 static HARD_REG_SET hard_regs_in_table;
355 /* CUID of insn that starts the basic block currently being cse-processed. */
357 static int cse_basic_block_start;
359 /* CUID of insn that ends the basic block currently being cse-processed. */
361 static int cse_basic_block_end;
363 /* Vector mapping INSN_UIDs to cuids.
364 The cuids are like uids but increase monotonically always.
365 We use them to see whether a reg is used outside a given basic block. */
367 static int *uid_cuid;
369 /* Highest UID in UID_CUID. */
370 static int max_uid;
372 /* Get the cuid of an insn. */
374 #define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])
376 /* Nonzero if this pass has made changes, and therefore it's
377 worthwhile to run the garbage collector. */
379 static int cse_altered;
381 /* Nonzero if cse has altered conditional jump insns
382 in such a way that jump optimization should be redone. */
384 static int cse_jumps_altered;
386 /* Nonzero if we put a LABEL_REF into the hash table for an INSN without a
387 REG_LABEL, we have to rerun jump after CSE to put in the note. */
388 static int recorded_label_ref;
390 /* canon_hash stores 1 in do_not_record
391 if it notices a reference to CC0, PC, or some other volatile
392 subexpression. */
394 static int do_not_record;
396 #ifdef LOAD_EXTEND_OP
398 /* Scratch rtl used when looking for load-extended copy of a MEM. */
399 static rtx memory_extend_rtx;
400 #endif
402 /* canon_hash stores 1 in hash_arg_in_memory
403 if it notices a reference to memory within the expression being hashed. */
405 static int hash_arg_in_memory;
407 /* The hash table contains buckets which are chains of `struct table_elt's,
408 each recording one expression's information.
409 That expression is in the `exp' field.
411 The canon_exp field contains a canonical (from the point of view of
412 alias analysis) version of the `exp' field.
414 Those elements with the same hash code are chained in both directions
415 through the `next_same_hash' and `prev_same_hash' fields.
417 Each set of expressions with equivalent values
418 are on a two-way chain through the `next_same_value'
419 and `prev_same_value' fields, and all point with
420 the `first_same_value' field at the first element in
421 that chain. The chain is in order of increasing cost.
422 Each element's cost value is in its `cost' field.
424 The `in_memory' field is nonzero for elements that
425 involve any reference to memory. These elements are removed
426 whenever a write is done to an unidentified location in memory.
427 To be safe, we assume that a memory address is unidentified unless
428 the address is either a symbol constant or a constant plus
429 the frame pointer or argument pointer.
431 The `related_value' field is used to connect related expressions
432 (that differ by adding an integer).
433 The related expressions are chained in a circular fashion.
434 `related_value' is zero for expressions for which this
435 chain is not useful.
437 The `cost' field stores the cost of this element's expression.
438 The `regcost' field stores the value returned by approx_reg_cost for
439 this element's expression.
441 The `is_const' flag is set if the element is a constant (including
442 a fixed address).
444 The `flag' field is used as a temporary during some search routines.
446 The `mode' field is usually the same as GET_MODE (`exp'), but
447 if `exp' is a CONST_INT and has no machine mode then the `mode'
448 field is the mode it was being used as. Each constant is
449 recorded separately for each mode it is used with. */
451 struct table_elt
453 rtx exp;
454 rtx canon_exp;
455 struct table_elt *next_same_hash;
456 struct table_elt *prev_same_hash;
457 struct table_elt *next_same_value;
458 struct table_elt *prev_same_value;
459 struct table_elt *first_same_value;
460 struct table_elt *related_value;
461 int cost;
462 int regcost;
463 enum machine_mode mode;
464 char in_memory;
465 char is_const;
466 char flag;
469 /* We don't want a lot of buckets, because we rarely have very many
470 things stored in the hash table, and a lot of buckets slows
471 down a lot of loops that happen frequently. */
472 #define HASH_SHIFT 5
473 #define HASH_SIZE (1 << HASH_SHIFT)
474 #define HASH_MASK (HASH_SIZE - 1)
476 /* Compute hash code of X in mode M. Special-case case where X is a pseudo
477 register (hard registers may require `do_not_record' to be set). */
479 #define HASH(X, M) \
480 ((GET_CODE (X) == REG && REGNO (X) >= FIRST_PSEUDO_REGISTER \
481 ? (((unsigned) REG << 7) + (unsigned) REG_QTY (REGNO (X))) \
482 : canon_hash (X, M)) & HASH_MASK)
484 /* Determine whether register number N is considered a fixed register for the
485 purpose of approximating register costs.
486 It is desirable to replace other regs with fixed regs, to reduce need for
487 non-fixed hard regs.
488 A reg wins if it is either the frame pointer or designated as fixed. */
489 #define FIXED_REGNO_P(N) \
490 ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
491 || fixed_regs[N] || global_regs[N])
493 /* Compute cost of X, as stored in the `cost' field of a table_elt. Fixed
494 hard registers and pointers into the frame are the cheapest with a cost
495 of 0. Next come pseudos with a cost of one and other hard registers with
496 a cost of 2. Aside from these special cases, call `rtx_cost'. */
498 #define CHEAP_REGNO(N) \
499 ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
500 || (N) == STACK_POINTER_REGNUM || (N) == ARG_POINTER_REGNUM \
501 || ((N) >= FIRST_VIRTUAL_REGISTER && (N) <= LAST_VIRTUAL_REGISTER) \
502 || ((N) < FIRST_PSEUDO_REGISTER \
503 && FIXED_REGNO_P (N) && REGNO_REG_CLASS (N) != NO_REGS))
505 #define COST(X) (GET_CODE (X) == REG ? 0 : notreg_cost (X, SET))
506 #define COST_IN(X,OUTER) (GET_CODE (X) == REG ? 0 : notreg_cost (X, OUTER))
508 /* Get the info associated with register N. */
510 #define GET_CSE_REG_INFO(N) \
511 (((N) == cached_regno && cached_cse_reg_info) \
512 ? cached_cse_reg_info : get_cse_reg_info ((N)))
514 /* Get the number of times this register has been updated in this
515 basic block. */
517 #define REG_TICK(N) ((GET_CSE_REG_INFO (N))->reg_tick)
519 /* Get the point at which REG was recorded in the table. */
521 #define REG_IN_TABLE(N) ((GET_CSE_REG_INFO (N))->reg_in_table)
523 /* Get the SUBREG set at the last increment to REG_TICK (-1 if not a
524 SUBREG). */
526 #define SUBREG_TICKED(N) ((GET_CSE_REG_INFO (N))->subreg_ticked)
528 /* Get the quantity number for REG. */
530 #define REG_QTY(N) ((GET_CSE_REG_INFO (N))->reg_qty)
532 /* Determine if the quantity number for register X represents a valid index
533 into the qty_table. */
535 #define REGNO_QTY_VALID_P(N) (REG_QTY (N) != (int) (N))
537 static struct table_elt *table[HASH_SIZE];
539 /* Chain of `struct table_elt's made so far for this function
540 but currently removed from the table. */
542 static struct table_elt *free_element_chain;
544 /* Number of `struct table_elt' structures made so far for this function. */
546 static int n_elements_made;
548 /* Maximum value `n_elements_made' has had so far in this compilation
549 for functions previously processed. */
551 static int max_elements_made;
553 /* Surviving equivalence class when two equivalence classes are merged
554 by recording the effects of a jump in the last insn. Zero if the
555 last insn was not a conditional jump. */
557 static struct table_elt *last_jump_equiv_class;
559 /* Set to the cost of a constant pool reference if one was found for a
560 symbolic constant. If this was found, it means we should try to
561 convert constants into constant pool entries if they don't fit in
562 the insn. */
564 static int constant_pool_entries_cost;
566 /* Define maximum length of a branch path. */
568 #define PATHLENGTH 10
570 /* This data describes a block that will be processed by cse_basic_block. */
572 struct cse_basic_block_data
574 /* Lowest CUID value of insns in block. */
575 int low_cuid;
576 /* Highest CUID value of insns in block. */
577 int high_cuid;
578 /* Total number of SETs in block. */
579 int nsets;
580 /* Last insn in the block. */
581 rtx last;
582 /* Size of current branch path, if any. */
583 int path_size;
584 /* Current branch path, indicating which branches will be taken. */
585 struct branch_path
587 /* The branch insn. */
588 rtx branch;
589 /* Whether it should be taken or not. AROUND is the same as taken
590 except that it is used when the destination label is not preceded
591 by a BARRIER. */
592 enum taken {TAKEN, NOT_TAKEN, AROUND} status;
593 } path[PATHLENGTH];
596 static bool fixed_base_plus_p PARAMS ((rtx x));
597 static int notreg_cost PARAMS ((rtx, enum rtx_code));
598 static int approx_reg_cost_1 PARAMS ((rtx *, void *));
599 static int approx_reg_cost PARAMS ((rtx));
600 static int preferrable PARAMS ((int, int, int, int));
601 static void new_basic_block PARAMS ((void));
602 static void make_new_qty PARAMS ((unsigned int, enum machine_mode));
603 static void make_regs_eqv PARAMS ((unsigned int, unsigned int));
604 static void delete_reg_equiv PARAMS ((unsigned int));
605 static int mention_regs PARAMS ((rtx));
606 static int insert_regs PARAMS ((rtx, struct table_elt *, int));
607 static void remove_from_table PARAMS ((struct table_elt *, unsigned));
608 static struct table_elt *lookup PARAMS ((rtx, unsigned, enum machine_mode)),
609 *lookup_for_remove PARAMS ((rtx, unsigned, enum machine_mode));
610 static rtx lookup_as_function PARAMS ((rtx, enum rtx_code));
611 static struct table_elt *insert PARAMS ((rtx, struct table_elt *, unsigned,
612 enum machine_mode));
613 static void merge_equiv_classes PARAMS ((struct table_elt *,
614 struct table_elt *));
615 static void invalidate PARAMS ((rtx, enum machine_mode));
616 static int cse_rtx_varies_p PARAMS ((rtx, int));
617 static void remove_invalid_refs PARAMS ((unsigned int));
618 static void remove_invalid_subreg_refs PARAMS ((unsigned int, unsigned int,
619 enum machine_mode));
620 static void rehash_using_reg PARAMS ((rtx));
621 static void invalidate_memory PARAMS ((void));
622 static void invalidate_for_call PARAMS ((void));
623 static rtx use_related_value PARAMS ((rtx, struct table_elt *));
624 static unsigned canon_hash PARAMS ((rtx, enum machine_mode));
625 static unsigned canon_hash_string PARAMS ((const char *));
626 static unsigned safe_hash PARAMS ((rtx, enum machine_mode));
627 static int exp_equiv_p PARAMS ((rtx, rtx, int, int));
628 static rtx canon_reg PARAMS ((rtx, rtx));
629 static void find_best_addr PARAMS ((rtx, rtx *, enum machine_mode));
630 static enum rtx_code find_comparison_args PARAMS ((enum rtx_code, rtx *, rtx *,
631 enum machine_mode *,
632 enum machine_mode *));
633 static rtx fold_rtx PARAMS ((rtx, rtx));
634 static rtx equiv_constant PARAMS ((rtx));
635 static void record_jump_equiv PARAMS ((rtx, int));
636 static void record_jump_cond PARAMS ((enum rtx_code, enum machine_mode,
637 rtx, rtx, int));
638 static void cse_insn PARAMS ((rtx, rtx));
639 static int addr_affects_sp_p PARAMS ((rtx));
640 static void invalidate_from_clobbers PARAMS ((rtx));
641 static rtx cse_process_notes PARAMS ((rtx, rtx));
642 static void cse_around_loop PARAMS ((rtx));
643 static void invalidate_skipped_set PARAMS ((rtx, rtx, void *));
644 static void invalidate_skipped_block PARAMS ((rtx));
645 static void cse_check_loop_start PARAMS ((rtx, rtx, void *));
646 static void cse_set_around_loop PARAMS ((rtx, rtx, rtx));
647 static rtx cse_basic_block PARAMS ((rtx, rtx, struct branch_path *, int));
648 static void count_reg_usage PARAMS ((rtx, int *, rtx, int));
649 static int check_for_label_ref PARAMS ((rtx *, void *));
650 extern void dump_class PARAMS ((struct table_elt*));
651 static struct cse_reg_info * get_cse_reg_info PARAMS ((unsigned int));
652 static int check_dependence PARAMS ((rtx *, void *));
654 static void flush_hash_table PARAMS ((void));
655 static bool insn_live_p PARAMS ((rtx, int *));
656 static bool set_live_p PARAMS ((rtx, rtx, int *));
657 static bool dead_libcall_p PARAMS ((rtx, int *));
659 /* Nonzero if X has the form (PLUS frame-pointer integer). We check for
660 virtual regs here because the simplify_*_operation routines are called
661 by integrate.c, which is called before virtual register instantiation. */
663 static bool
664 fixed_base_plus_p (x)
665 rtx x;
667 switch (GET_CODE (x))
669 case REG:
670 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx)
671 return true;
672 if (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])
673 return true;
674 if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
675 && REGNO (x) <= LAST_VIRTUAL_REGISTER)
676 return true;
677 return false;
679 case PLUS:
680 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
681 return false;
682 return fixed_base_plus_p (XEXP (x, 0));
684 case ADDRESSOF:
685 return true;
687 default:
688 return false;
692 /* Dump the expressions in the equivalence class indicated by CLASSP.
693 This function is used only for debugging. */
694 void
695 dump_class (classp)
696 struct table_elt *classp;
698 struct table_elt *elt;
700 fprintf (stderr, "Equivalence chain for ");
701 print_rtl (stderr, classp->exp);
702 fprintf (stderr, ": \n");
704 for (elt = classp->first_same_value; elt; elt = elt->next_same_value)
706 print_rtl (stderr, elt->exp);
707 fprintf (stderr, "\n");
711 /* Subroutine of approx_reg_cost; called through for_each_rtx. */
713 static int
714 approx_reg_cost_1 (xp, data)
715 rtx *xp;
716 void *data;
718 rtx x = *xp;
719 int *cost_p = data;
721 if (x && GET_CODE (x) == REG)
723 unsigned int regno = REGNO (x);
725 if (! CHEAP_REGNO (regno))
727 if (regno < FIRST_PSEUDO_REGISTER)
729 if (SMALL_REGISTER_CLASSES)
730 return 1;
731 *cost_p += 2;
733 else
734 *cost_p += 1;
738 return 0;
741 /* Return an estimate of the cost of the registers used in an rtx.
742 This is mostly the number of different REG expressions in the rtx;
743 however for some exceptions like fixed registers we use a cost of
744 0. If any other hard register reference occurs, return MAX_COST. */
746 static int
747 approx_reg_cost (x)
748 rtx x;
750 int cost = 0;
752 if (for_each_rtx (&x, approx_reg_cost_1, (void *) &cost))
753 return MAX_COST;
755 return cost;
758 /* Return a negative value if an rtx A, whose costs are given by COST_A
759 and REGCOST_A, is more desirable than an rtx B.
760 Return a positive value if A is less desirable, or 0 if the two are
761 equally good. */
762 static int
763 preferrable (cost_a, regcost_a, cost_b, regcost_b)
764 int cost_a, regcost_a, cost_b, regcost_b;
766 /* First, get rid of cases involving expressions that are entirely
767 unwanted. */
768 if (cost_a != cost_b)
770 if (cost_a == MAX_COST)
771 return 1;
772 if (cost_b == MAX_COST)
773 return -1;
776 /* Avoid extending lifetimes of hardregs. */
777 if (regcost_a != regcost_b)
779 if (regcost_a == MAX_COST)
780 return 1;
781 if (regcost_b == MAX_COST)
782 return -1;
785 /* Normal operation costs take precedence. */
786 if (cost_a != cost_b)
787 return cost_a - cost_b;
788 /* Only if these are identical consider effects on register pressure. */
789 if (regcost_a != regcost_b)
790 return regcost_a - regcost_b;
791 return 0;
794 /* Internal function, to compute cost when X is not a register; called
795 from COST macro to keep it simple. */
797 static int
798 notreg_cost (x, outer)
799 rtx x;
800 enum rtx_code outer;
802 return ((GET_CODE (x) == SUBREG
803 && GET_CODE (SUBREG_REG (x)) == REG
804 && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
805 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (x))) == MODE_INT
806 && (GET_MODE_SIZE (GET_MODE (x))
807 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
808 && subreg_lowpart_p (x)
809 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (GET_MODE (x)),
810 GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))))
812 : rtx_cost (x, outer) * 2);
815 /* Return an estimate of the cost of computing rtx X.
816 One use is in cse, to decide which expression to keep in the hash table.
817 Another is in rtl generation, to pick the cheapest way to multiply.
818 Other uses like the latter are expected in the future. */
821 rtx_cost (x, outer_code)
822 rtx x;
823 enum rtx_code outer_code ATTRIBUTE_UNUSED;
825 int i, j;
826 enum rtx_code code;
827 const char *fmt;
828 int total;
830 if (x == 0)
831 return 0;
833 /* Compute the default costs of certain things.
834 Note that RTX_COSTS can override the defaults. */
836 code = GET_CODE (x);
837 switch (code)
839 case MULT:
840 total = COSTS_N_INSNS (5);
841 break;
842 case DIV:
843 case UDIV:
844 case MOD:
845 case UMOD:
846 total = COSTS_N_INSNS (7);
847 break;
848 case USE:
849 /* Used in loop.c and combine.c as a marker. */
850 total = 0;
851 break;
852 default:
853 total = COSTS_N_INSNS (1);
856 switch (code)
858 case REG:
859 return 0;
861 case SUBREG:
862 /* If we can't tie these modes, make this expensive. The larger
863 the mode, the more expensive it is. */
864 if (! MODES_TIEABLE_P (GET_MODE (x), GET_MODE (SUBREG_REG (x))))
865 return COSTS_N_INSNS (2
866 + GET_MODE_SIZE (GET_MODE (x)) / UNITS_PER_WORD);
867 break;
869 #ifdef RTX_COSTS
870 RTX_COSTS (x, code, outer_code);
871 #endif
872 #ifdef CONST_COSTS
873 CONST_COSTS (x, code, outer_code);
874 #endif
876 default:
877 #ifdef DEFAULT_RTX_COSTS
878 DEFAULT_RTX_COSTS (x, code, outer_code);
879 #endif
880 break;
883 /* Sum the costs of the sub-rtx's, plus cost of this operation,
884 which is already in total. */
886 fmt = GET_RTX_FORMAT (code);
887 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
888 if (fmt[i] == 'e')
889 total += rtx_cost (XEXP (x, i), code);
890 else if (fmt[i] == 'E')
891 for (j = 0; j < XVECLEN (x, i); j++)
892 total += rtx_cost (XVECEXP (x, i, j), code);
894 return total;
897 /* Return cost of address expression X.
898 Expect that X is properly formed address reference. */
901 address_cost (x, mode)
902 rtx x;
903 enum machine_mode mode;
905 /* The ADDRESS_COST macro does not deal with ADDRESSOF nodes. But,
906 during CSE, such nodes are present. Using an ADDRESSOF node which
907 refers to the address of a REG is a good thing because we can then
908 turn (MEM (ADDRESSSOF (REG))) into just plain REG. */
910 if (GET_CODE (x) == ADDRESSOF && REG_P (XEXP ((x), 0)))
911 return -1;
913 /* We may be asked for cost of various unusual addresses, such as operands
914 of push instruction. It is not worthwhile to complicate writing
915 of ADDRESS_COST macro by such cases. */
917 if (!memory_address_p (mode, x))
918 return 1000;
919 #ifdef ADDRESS_COST
920 return ADDRESS_COST (x);
921 #else
922 return rtx_cost (x, MEM);
923 #endif
927 static struct cse_reg_info *
928 get_cse_reg_info (regno)
929 unsigned int regno;
931 struct cse_reg_info **hash_head = &reg_hash[REGHASH_FN (regno)];
932 struct cse_reg_info *p;
934 for (p = *hash_head; p != NULL; p = p->hash_next)
935 if (p->regno == regno)
936 break;
938 if (p == NULL)
940 /* Get a new cse_reg_info structure. */
941 if (cse_reg_info_free_list)
943 p = cse_reg_info_free_list;
944 cse_reg_info_free_list = p->next;
946 else
947 p = (struct cse_reg_info *) xmalloc (sizeof (struct cse_reg_info));
949 /* Insert into hash table. */
950 p->hash_next = *hash_head;
951 *hash_head = p;
953 /* Initialize it. */
954 p->reg_tick = 1;
955 p->reg_in_table = -1;
956 p->subreg_ticked = -1;
957 p->reg_qty = regno;
958 p->regno = regno;
959 p->next = cse_reg_info_used_list;
960 cse_reg_info_used_list = p;
961 if (!cse_reg_info_used_list_end)
962 cse_reg_info_used_list_end = p;
965 /* Cache this lookup; we tend to be looking up information about the
966 same register several times in a row. */
967 cached_regno = regno;
968 cached_cse_reg_info = p;
970 return p;
973 /* Clear the hash table and initialize each register with its own quantity,
974 for a new basic block. */
976 static void
977 new_basic_block ()
979 int i;
981 next_qty = max_reg;
983 /* Clear out hash table state for this pass. */
985 memset ((char *) reg_hash, 0, sizeof reg_hash);
987 if (cse_reg_info_used_list)
989 cse_reg_info_used_list_end->next = cse_reg_info_free_list;
990 cse_reg_info_free_list = cse_reg_info_used_list;
991 cse_reg_info_used_list = cse_reg_info_used_list_end = 0;
993 cached_cse_reg_info = 0;
995 CLEAR_HARD_REG_SET (hard_regs_in_table);
997 /* The per-quantity values used to be initialized here, but it is
998 much faster to initialize each as it is made in `make_new_qty'. */
1000 for (i = 0; i < HASH_SIZE; i++)
1002 struct table_elt *first;
1004 first = table[i];
1005 if (first != NULL)
1007 struct table_elt *last = first;
1009 table[i] = NULL;
1011 while (last->next_same_hash != NULL)
1012 last = last->next_same_hash;
1014 /* Now relink this hash entire chain into
1015 the free element list. */
1017 last->next_same_hash = free_element_chain;
1018 free_element_chain = first;
1022 #ifdef HAVE_cc0
1023 prev_insn = 0;
1024 prev_insn_cc0 = 0;
1025 #endif
1028 /* Say that register REG contains a quantity in mode MODE not in any
1029 register before and initialize that quantity. */
1031 static void
1032 make_new_qty (reg, mode)
1033 unsigned int reg;
1034 enum machine_mode mode;
1036 int q;
1037 struct qty_table_elem *ent;
1038 struct reg_eqv_elem *eqv;
1040 if (next_qty >= max_qty)
1041 abort ();
1043 q = REG_QTY (reg) = next_qty++;
1044 ent = &qty_table[q];
1045 ent->first_reg = reg;
1046 ent->last_reg = reg;
1047 ent->mode = mode;
1048 ent->const_rtx = ent->const_insn = NULL_RTX;
1049 ent->comparison_code = UNKNOWN;
1051 eqv = &reg_eqv_table[reg];
1052 eqv->next = eqv->prev = -1;
1055 /* Make reg NEW equivalent to reg OLD.
1056 OLD is not changing; NEW is. */
1058 static void
1059 make_regs_eqv (new, old)
1060 unsigned int new, old;
1062 unsigned int lastr, firstr;
1063 int q = REG_QTY (old);
1064 struct qty_table_elem *ent;
1066 ent = &qty_table[q];
1068 /* Nothing should become eqv until it has a "non-invalid" qty number. */
1069 if (! REGNO_QTY_VALID_P (old))
1070 abort ();
1072 REG_QTY (new) = q;
1073 firstr = ent->first_reg;
1074 lastr = ent->last_reg;
1076 /* Prefer fixed hard registers to anything. Prefer pseudo regs to other
1077 hard regs. Among pseudos, if NEW will live longer than any other reg
1078 of the same qty, and that is beyond the current basic block,
1079 make it the new canonical replacement for this qty. */
1080 if (! (firstr < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (firstr))
1081 /* Certain fixed registers might be of the class NO_REGS. This means
1082 that not only can they not be allocated by the compiler, but
1083 they cannot be used in substitutions or canonicalizations
1084 either. */
1085 && (new >= FIRST_PSEUDO_REGISTER || REGNO_REG_CLASS (new) != NO_REGS)
1086 && ((new < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (new))
1087 || (new >= FIRST_PSEUDO_REGISTER
1088 && (firstr < FIRST_PSEUDO_REGISTER
1089 || ((uid_cuid[REGNO_LAST_UID (new)] > cse_basic_block_end
1090 || (uid_cuid[REGNO_FIRST_UID (new)]
1091 < cse_basic_block_start))
1092 && (uid_cuid[REGNO_LAST_UID (new)]
1093 > uid_cuid[REGNO_LAST_UID (firstr)]))))))
1095 reg_eqv_table[firstr].prev = new;
1096 reg_eqv_table[new].next = firstr;
1097 reg_eqv_table[new].prev = -1;
1098 ent->first_reg = new;
1100 else
1102 /* If NEW is a hard reg (known to be non-fixed), insert at end.
1103 Otherwise, insert before any non-fixed hard regs that are at the
1104 end. Registers of class NO_REGS cannot be used as an
1105 equivalent for anything. */
1106 while (lastr < FIRST_PSEUDO_REGISTER && reg_eqv_table[lastr].prev >= 0
1107 && (REGNO_REG_CLASS (lastr) == NO_REGS || ! FIXED_REGNO_P (lastr))
1108 && new >= FIRST_PSEUDO_REGISTER)
1109 lastr = reg_eqv_table[lastr].prev;
1110 reg_eqv_table[new].next = reg_eqv_table[lastr].next;
1111 if (reg_eqv_table[lastr].next >= 0)
1112 reg_eqv_table[reg_eqv_table[lastr].next].prev = new;
1113 else
1114 qty_table[q].last_reg = new;
1115 reg_eqv_table[lastr].next = new;
1116 reg_eqv_table[new].prev = lastr;
1120 /* Remove REG from its equivalence class. */
1122 static void
1123 delete_reg_equiv (reg)
1124 unsigned int reg;
1126 struct qty_table_elem *ent;
1127 int q = REG_QTY (reg);
1128 int p, n;
1130 /* If invalid, do nothing. */
1131 if (q == (int) reg)
1132 return;
1134 ent = &qty_table[q];
1136 p = reg_eqv_table[reg].prev;
1137 n = reg_eqv_table[reg].next;
1139 if (n != -1)
1140 reg_eqv_table[n].prev = p;
1141 else
1142 ent->last_reg = p;
1143 if (p != -1)
1144 reg_eqv_table[p].next = n;
1145 else
1146 ent->first_reg = n;
1148 REG_QTY (reg) = reg;
1151 /* Remove any invalid expressions from the hash table
1152 that refer to any of the registers contained in expression X.
1154 Make sure that newly inserted references to those registers
1155 as subexpressions will be considered valid.
1157 mention_regs is not called when a register itself
1158 is being stored in the table.
1160 Return 1 if we have done something that may have changed the hash code
1161 of X. */
1163 static int
1164 mention_regs (x)
1165 rtx x;
1167 enum rtx_code code;
1168 int i, j;
1169 const char *fmt;
1170 int changed = 0;
1172 if (x == 0)
1173 return 0;
1175 code = GET_CODE (x);
1176 if (code == REG)
1178 unsigned int regno = REGNO (x);
1179 unsigned int endregno
1180 = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
1181 : HARD_REGNO_NREGS (regno, GET_MODE (x)));
1182 unsigned int i;
1184 for (i = regno; i < endregno; i++)
1186 if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
1187 remove_invalid_refs (i);
1189 REG_IN_TABLE (i) = REG_TICK (i);
1190 SUBREG_TICKED (i) = -1;
1193 return 0;
1196 /* If this is a SUBREG, we don't want to discard other SUBREGs of the same
1197 pseudo if they don't use overlapping words. We handle only pseudos
1198 here for simplicity. */
1199 if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == REG
1200 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER)
1202 unsigned int i = REGNO (SUBREG_REG (x));
1204 if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
1206 /* If REG_IN_TABLE (i) differs from REG_TICK (i) by one, and
1207 the last store to this register really stored into this
1208 subreg, then remove the memory of this subreg.
1209 Otherwise, remove any memory of the entire register and
1210 all its subregs from the table. */
1211 if (REG_TICK (i) - REG_IN_TABLE (i) > 1
1212 || SUBREG_TICKED (i) != REGNO (SUBREG_REG (x)))
1213 remove_invalid_refs (i);
1214 else
1215 remove_invalid_subreg_refs (i, SUBREG_BYTE (x), GET_MODE (x));
1218 REG_IN_TABLE (i) = REG_TICK (i);
1219 SUBREG_TICKED (i) = REGNO (SUBREG_REG (x));
1220 return 0;
1223 /* If X is a comparison or a COMPARE and either operand is a register
1224 that does not have a quantity, give it one. This is so that a later
1225 call to record_jump_equiv won't cause X to be assigned a different
1226 hash code and not found in the table after that call.
1228 It is not necessary to do this here, since rehash_using_reg can
1229 fix up the table later, but doing this here eliminates the need to
1230 call that expensive function in the most common case where the only
1231 use of the register is in the comparison. */
1233 if (code == COMPARE || GET_RTX_CLASS (code) == '<')
1235 if (GET_CODE (XEXP (x, 0)) == REG
1236 && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
1237 if (insert_regs (XEXP (x, 0), NULL, 0))
1239 rehash_using_reg (XEXP (x, 0));
1240 changed = 1;
1243 if (GET_CODE (XEXP (x, 1)) == REG
1244 && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
1245 if (insert_regs (XEXP (x, 1), NULL, 0))
1247 rehash_using_reg (XEXP (x, 1));
1248 changed = 1;
1252 fmt = GET_RTX_FORMAT (code);
1253 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1254 if (fmt[i] == 'e')
1255 changed |= mention_regs (XEXP (x, i));
1256 else if (fmt[i] == 'E')
1257 for (j = 0; j < XVECLEN (x, i); j++)
1258 changed |= mention_regs (XVECEXP (x, i, j));
1260 return changed;
1263 /* Update the register quantities for inserting X into the hash table
1264 with a value equivalent to CLASSP.
1265 (If the class does not contain a REG, it is irrelevant.)
1266 If MODIFIED is nonzero, X is a destination; it is being modified.
1267 Note that delete_reg_equiv should be called on a register
1268 before insert_regs is done on that register with MODIFIED != 0.
1270 Nonzero value means that elements of reg_qty have changed
1271 so X's hash code may be different. */
1273 static int
1274 insert_regs (x, classp, modified)
1275 rtx x;
1276 struct table_elt *classp;
1277 int modified;
1279 if (GET_CODE (x) == REG)
1281 unsigned int regno = REGNO (x);
1282 int qty_valid;
1284 /* If REGNO is in the equivalence table already but is of the
1285 wrong mode for that equivalence, don't do anything here. */
1287 qty_valid = REGNO_QTY_VALID_P (regno);
1288 if (qty_valid)
1290 struct qty_table_elem *ent = &qty_table[REG_QTY (regno)];
1292 if (ent->mode != GET_MODE (x))
1293 return 0;
1296 if (modified || ! qty_valid)
1298 if (classp)
1299 for (classp = classp->first_same_value;
1300 classp != 0;
1301 classp = classp->next_same_value)
1302 if (GET_CODE (classp->exp) == REG
1303 && GET_MODE (classp->exp) == GET_MODE (x))
1305 make_regs_eqv (regno, REGNO (classp->exp));
1306 return 1;
1309 /* Mention_regs for a SUBREG checks if REG_TICK is exactly one larger
1310 than REG_IN_TABLE to find out if there was only a single preceding
1311 invalidation - for the SUBREG - or another one, which would be
1312 for the full register. However, if we find here that REG_TICK
1313 indicates that the register is invalid, it means that it has
1314 been invalidated in a separate operation. The SUBREG might be used
1315 now (then this is a recursive call), or we might use the full REG
1316 now and a SUBREG of it later. So bump up REG_TICK so that
1317 mention_regs will do the right thing. */
1318 if (! modified
1319 && REG_IN_TABLE (regno) >= 0
1320 && REG_TICK (regno) == REG_IN_TABLE (regno) + 1)
1321 REG_TICK (regno)++;
1322 make_new_qty (regno, GET_MODE (x));
1323 return 1;
1326 return 0;
1329 /* If X is a SUBREG, we will likely be inserting the inner register in the
1330 table. If that register doesn't have an assigned quantity number at
1331 this point but does later, the insertion that we will be doing now will
1332 not be accessible because its hash code will have changed. So assign
1333 a quantity number now. */
1335 else if (GET_CODE (x) == SUBREG && GET_CODE (SUBREG_REG (x)) == REG
1336 && ! REGNO_QTY_VALID_P (REGNO (SUBREG_REG (x))))
1338 insert_regs (SUBREG_REG (x), NULL, 0);
1339 mention_regs (x);
1340 return 1;
1342 else
1343 return mention_regs (x);
1346 /* Look in or update the hash table. */
1348 /* Remove table element ELT from use in the table.
1349 HASH is its hash code, made using the HASH macro.
1350 It's an argument because often that is known in advance
1351 and we save much time not recomputing it. */
1353 static void
1354 remove_from_table (elt, hash)
1355 struct table_elt *elt;
1356 unsigned hash;
1358 if (elt == 0)
1359 return;
1361 /* Mark this element as removed. See cse_insn. */
1362 elt->first_same_value = 0;
1364 /* Remove the table element from its equivalence class. */
1367 struct table_elt *prev = elt->prev_same_value;
1368 struct table_elt *next = elt->next_same_value;
1370 if (next)
1371 next->prev_same_value = prev;
1373 if (prev)
1374 prev->next_same_value = next;
1375 else
1377 struct table_elt *newfirst = next;
1378 while (next)
1380 next->first_same_value = newfirst;
1381 next = next->next_same_value;
1386 /* Remove the table element from its hash bucket. */
1389 struct table_elt *prev = elt->prev_same_hash;
1390 struct table_elt *next = elt->next_same_hash;
1392 if (next)
1393 next->prev_same_hash = prev;
1395 if (prev)
1396 prev->next_same_hash = next;
1397 else if (table[hash] == elt)
1398 table[hash] = next;
1399 else
1401 /* This entry is not in the proper hash bucket. This can happen
1402 when two classes were merged by `merge_equiv_classes'. Search
1403 for the hash bucket that it heads. This happens only very
1404 rarely, so the cost is acceptable. */
1405 for (hash = 0; hash < HASH_SIZE; hash++)
1406 if (table[hash] == elt)
1407 table[hash] = next;
1411 /* Remove the table element from its related-value circular chain. */
1413 if (elt->related_value != 0 && elt->related_value != elt)
1415 struct table_elt *p = elt->related_value;
1417 while (p->related_value != elt)
1418 p = p->related_value;
1419 p->related_value = elt->related_value;
1420 if (p->related_value == p)
1421 p->related_value = 0;
1424 /* Now add it to the free element chain. */
1425 elt->next_same_hash = free_element_chain;
1426 free_element_chain = elt;
1429 /* Look up X in the hash table and return its table element,
1430 or 0 if X is not in the table.
1432 MODE is the machine-mode of X, or if X is an integer constant
1433 with VOIDmode then MODE is the mode with which X will be used.
1435 Here we are satisfied to find an expression whose tree structure
1436 looks like X. */
1438 static struct table_elt *
1439 lookup (x, hash, mode)
1440 rtx x;
1441 unsigned hash;
1442 enum machine_mode mode;
1444 struct table_elt *p;
1446 for (p = table[hash]; p; p = p->next_same_hash)
1447 if (mode == p->mode && ((x == p->exp && GET_CODE (x) == REG)
1448 || exp_equiv_p (x, p->exp, GET_CODE (x) != REG, 0)))
1449 return p;
1451 return 0;
1454 /* Like `lookup' but don't care whether the table element uses invalid regs.
1455 Also ignore discrepancies in the machine mode of a register. */
1457 static struct table_elt *
1458 lookup_for_remove (x, hash, mode)
1459 rtx x;
1460 unsigned hash;
1461 enum machine_mode mode;
1463 struct table_elt *p;
1465 if (GET_CODE (x) == REG)
1467 unsigned int regno = REGNO (x);
1469 /* Don't check the machine mode when comparing registers;
1470 invalidating (REG:SI 0) also invalidates (REG:DF 0). */
1471 for (p = table[hash]; p; p = p->next_same_hash)
1472 if (GET_CODE (p->exp) == REG
1473 && REGNO (p->exp) == regno)
1474 return p;
1476 else
1478 for (p = table[hash]; p; p = p->next_same_hash)
1479 if (mode == p->mode && (x == p->exp || exp_equiv_p (x, p->exp, 0, 0)))
1480 return p;
1483 return 0;
1486 /* Look for an expression equivalent to X and with code CODE.
1487 If one is found, return that expression. */
1489 static rtx
1490 lookup_as_function (x, code)
1491 rtx x;
1492 enum rtx_code code;
1494 struct table_elt *p
1495 = lookup (x, safe_hash (x, VOIDmode) & HASH_MASK, GET_MODE (x));
1497 /* If we are looking for a CONST_INT, the mode doesn't really matter, as
1498 long as we are narrowing. So if we looked in vain for a mode narrower
1499 than word_mode before, look for word_mode now. */
1500 if (p == 0 && code == CONST_INT
1501 && GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (word_mode))
1503 x = copy_rtx (x);
1504 PUT_MODE (x, word_mode);
1505 p = lookup (x, safe_hash (x, VOIDmode) & HASH_MASK, word_mode);
1508 if (p == 0)
1509 return 0;
1511 for (p = p->first_same_value; p; p = p->next_same_value)
1512 if (GET_CODE (p->exp) == code
1513 /* Make sure this is a valid entry in the table. */
1514 && exp_equiv_p (p->exp, p->exp, 1, 0))
1515 return p->exp;
1517 return 0;
1520 /* Insert X in the hash table, assuming HASH is its hash code
1521 and CLASSP is an element of the class it should go in
1522 (or 0 if a new class should be made).
1523 It is inserted at the proper position to keep the class in
1524 the order cheapest first.
1526 MODE is the machine-mode of X, or if X is an integer constant
1527 with VOIDmode then MODE is the mode with which X will be used.
1529 For elements of equal cheapness, the most recent one
1530 goes in front, except that the first element in the list
1531 remains first unless a cheaper element is added. The order of
1532 pseudo-registers does not matter, as canon_reg will be called to
1533 find the cheapest when a register is retrieved from the table.
1535 The in_memory field in the hash table element is set to 0.
1536 The caller must set it nonzero if appropriate.
1538 You should call insert_regs (X, CLASSP, MODIFY) before calling here,
1539 and if insert_regs returns a nonzero value
1540 you must then recompute its hash code before calling here.
1542 If necessary, update table showing constant values of quantities. */
1544 #define CHEAPER(X, Y) \
1545 (preferrable ((X)->cost, (X)->regcost, (Y)->cost, (Y)->regcost) < 0)
1547 static struct table_elt *
1548 insert (x, classp, hash, mode)
1549 rtx x;
1550 struct table_elt *classp;
1551 unsigned hash;
1552 enum machine_mode mode;
1554 struct table_elt *elt;
1556 /* If X is a register and we haven't made a quantity for it,
1557 something is wrong. */
1558 if (GET_CODE (x) == REG && ! REGNO_QTY_VALID_P (REGNO (x)))
1559 abort ();
1561 /* If X is a hard register, show it is being put in the table. */
1562 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
1564 unsigned int regno = REGNO (x);
1565 unsigned int endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
1566 unsigned int i;
1568 for (i = regno; i < endregno; i++)
1569 SET_HARD_REG_BIT (hard_regs_in_table, i);
1572 /* Put an element for X into the right hash bucket. */
1574 elt = free_element_chain;
1575 if (elt)
1576 free_element_chain = elt->next_same_hash;
1577 else
1579 n_elements_made++;
1580 elt = (struct table_elt *) xmalloc (sizeof (struct table_elt));
1583 elt->exp = x;
1584 elt->canon_exp = NULL_RTX;
1585 elt->cost = COST (x);
1586 elt->regcost = approx_reg_cost (x);
1587 elt->next_same_value = 0;
1588 elt->prev_same_value = 0;
1589 elt->next_same_hash = table[hash];
1590 elt->prev_same_hash = 0;
1591 elt->related_value = 0;
1592 elt->in_memory = 0;
1593 elt->mode = mode;
1594 elt->is_const = (CONSTANT_P (x)
1595 /* GNU C++ takes advantage of this for `this'
1596 (and other const values). */
1597 || (GET_CODE (x) == REG
1598 && RTX_UNCHANGING_P (x)
1599 && REGNO (x) >= FIRST_PSEUDO_REGISTER)
1600 || fixed_base_plus_p (x));
1602 if (table[hash])
1603 table[hash]->prev_same_hash = elt;
1604 table[hash] = elt;
1606 /* Put it into the proper value-class. */
1607 if (classp)
1609 classp = classp->first_same_value;
1610 if (CHEAPER (elt, classp))
1611 /* Insert at the head of the class */
1613 struct table_elt *p;
1614 elt->next_same_value = classp;
1615 classp->prev_same_value = elt;
1616 elt->first_same_value = elt;
1618 for (p = classp; p; p = p->next_same_value)
1619 p->first_same_value = elt;
1621 else
1623 /* Insert not at head of the class. */
1624 /* Put it after the last element cheaper than X. */
1625 struct table_elt *p, *next;
1627 for (p = classp; (next = p->next_same_value) && CHEAPER (next, elt);
1628 p = next);
1630 /* Put it after P and before NEXT. */
1631 elt->next_same_value = next;
1632 if (next)
1633 next->prev_same_value = elt;
1635 elt->prev_same_value = p;
1636 p->next_same_value = elt;
1637 elt->first_same_value = classp;
1640 else
1641 elt->first_same_value = elt;
1643 /* If this is a constant being set equivalent to a register or a register
1644 being set equivalent to a constant, note the constant equivalence.
1646 If this is a constant, it cannot be equivalent to a different constant,
1647 and a constant is the only thing that can be cheaper than a register. So
1648 we know the register is the head of the class (before the constant was
1649 inserted).
1651 If this is a register that is not already known equivalent to a
1652 constant, we must check the entire class.
1654 If this is a register that is already known equivalent to an insn,
1655 update the qtys `const_insn' to show that `this_insn' is the latest
1656 insn making that quantity equivalent to the constant. */
1658 if (elt->is_const && classp && GET_CODE (classp->exp) == REG
1659 && GET_CODE (x) != REG)
1661 int exp_q = REG_QTY (REGNO (classp->exp));
1662 struct qty_table_elem *exp_ent = &qty_table[exp_q];
1664 exp_ent->const_rtx = gen_lowpart_if_possible (exp_ent->mode, x);
1665 exp_ent->const_insn = this_insn;
1668 else if (GET_CODE (x) == REG
1669 && classp
1670 && ! qty_table[REG_QTY (REGNO (x))].const_rtx
1671 && ! elt->is_const)
1673 struct table_elt *p;
1675 for (p = classp; p != 0; p = p->next_same_value)
1677 if (p->is_const && GET_CODE (p->exp) != REG)
1679 int x_q = REG_QTY (REGNO (x));
1680 struct qty_table_elem *x_ent = &qty_table[x_q];
1682 x_ent->const_rtx
1683 = gen_lowpart_if_possible (GET_MODE (x), p->exp);
1684 x_ent->const_insn = this_insn;
1685 break;
1690 else if (GET_CODE (x) == REG
1691 && qty_table[REG_QTY (REGNO (x))].const_rtx
1692 && GET_MODE (x) == qty_table[REG_QTY (REGNO (x))].mode)
1693 qty_table[REG_QTY (REGNO (x))].const_insn = this_insn;
1695 /* If this is a constant with symbolic value,
1696 and it has a term with an explicit integer value,
1697 link it up with related expressions. */
1698 if (GET_CODE (x) == CONST)
1700 rtx subexp = get_related_value (x);
1701 unsigned subhash;
1702 struct table_elt *subelt, *subelt_prev;
1704 if (subexp != 0)
1706 /* Get the integer-free subexpression in the hash table. */
1707 subhash = safe_hash (subexp, mode) & HASH_MASK;
1708 subelt = lookup (subexp, subhash, mode);
1709 if (subelt == 0)
1710 subelt = insert (subexp, NULL, subhash, mode);
1711 /* Initialize SUBELT's circular chain if it has none. */
1712 if (subelt->related_value == 0)
1713 subelt->related_value = subelt;
1714 /* Find the element in the circular chain that precedes SUBELT. */
1715 subelt_prev = subelt;
1716 while (subelt_prev->related_value != subelt)
1717 subelt_prev = subelt_prev->related_value;
1718 /* Put new ELT into SUBELT's circular chain just before SUBELT.
1719 This way the element that follows SUBELT is the oldest one. */
1720 elt->related_value = subelt_prev->related_value;
1721 subelt_prev->related_value = elt;
1725 return elt;
1728 /* Given two equivalence classes, CLASS1 and CLASS2, put all the entries from
1729 CLASS2 into CLASS1. This is done when we have reached an insn which makes
1730 the two classes equivalent.
1732 CLASS1 will be the surviving class; CLASS2 should not be used after this
1733 call.
1735 Any invalid entries in CLASS2 will not be copied. */
1737 static void
1738 merge_equiv_classes (class1, class2)
1739 struct table_elt *class1, *class2;
1741 struct table_elt *elt, *next, *new;
1743 /* Ensure we start with the head of the classes. */
1744 class1 = class1->first_same_value;
1745 class2 = class2->first_same_value;
1747 /* If they were already equal, forget it. */
1748 if (class1 == class2)
1749 return;
1751 for (elt = class2; elt; elt = next)
1753 unsigned int hash;
1754 rtx exp = elt->exp;
1755 enum machine_mode mode = elt->mode;
1757 next = elt->next_same_value;
1759 /* Remove old entry, make a new one in CLASS1's class.
1760 Don't do this for invalid entries as we cannot find their
1761 hash code (it also isn't necessary). */
1762 if (GET_CODE (exp) == REG || exp_equiv_p (exp, exp, 1, 0))
1764 hash_arg_in_memory = 0;
1765 hash = HASH (exp, mode);
1767 if (GET_CODE (exp) == REG)
1768 delete_reg_equiv (REGNO (exp));
1770 remove_from_table (elt, hash);
1772 if (insert_regs (exp, class1, 0))
1774 rehash_using_reg (exp);
1775 hash = HASH (exp, mode);
1777 new = insert (exp, class1, hash, mode);
1778 new->in_memory = hash_arg_in_memory;
1783 /* Flush the entire hash table. */
1785 static void
1786 flush_hash_table ()
1788 int i;
1789 struct table_elt *p;
1791 for (i = 0; i < HASH_SIZE; i++)
1792 for (p = table[i]; p; p = table[i])
1794 /* Note that invalidate can remove elements
1795 after P in the current hash chain. */
1796 if (GET_CODE (p->exp) == REG)
1797 invalidate (p->exp, p->mode);
1798 else
1799 remove_from_table (p, i);
1803 /* Function called for each rtx to check whether true dependence exist. */
1804 struct check_dependence_data
1806 enum machine_mode mode;
1807 rtx exp;
1810 static int
1811 check_dependence (x, data)
1812 rtx *x;
1813 void *data;
1815 struct check_dependence_data *d = (struct check_dependence_data *) data;
1816 if (*x && GET_CODE (*x) == MEM)
1817 return true_dependence (d->exp, d->mode, *x, cse_rtx_varies_p);
1818 else
1819 return 0;
1822 /* Remove from the hash table, or mark as invalid, all expressions whose
1823 values could be altered by storing in X. X is a register, a subreg, or
1824 a memory reference with nonvarying address (because, when a memory
1825 reference with a varying address is stored in, all memory references are
1826 removed by invalidate_memory so specific invalidation is superfluous).
1827 FULL_MODE, if not VOIDmode, indicates that this much should be
1828 invalidated instead of just the amount indicated by the mode of X. This
1829 is only used for bitfield stores into memory.
1831 A nonvarying address may be just a register or just a symbol reference,
1832 or it may be either of those plus a numeric offset. */
1834 static void
1835 invalidate (x, full_mode)
1836 rtx x;
1837 enum machine_mode full_mode;
1839 int i;
1840 struct table_elt *p;
1842 switch (GET_CODE (x))
1844 case REG:
1846 /* If X is a register, dependencies on its contents are recorded
1847 through the qty number mechanism. Just change the qty number of
1848 the register, mark it as invalid for expressions that refer to it,
1849 and remove it itself. */
1850 unsigned int regno = REGNO (x);
1851 unsigned int hash = HASH (x, GET_MODE (x));
1853 /* Remove REGNO from any quantity list it might be on and indicate
1854 that its value might have changed. If it is a pseudo, remove its
1855 entry from the hash table.
1857 For a hard register, we do the first two actions above for any
1858 additional hard registers corresponding to X. Then, if any of these
1859 registers are in the table, we must remove any REG entries that
1860 overlap these registers. */
1862 delete_reg_equiv (regno);
1863 REG_TICK (regno)++;
1864 SUBREG_TICKED (regno) = -1;
1866 if (regno >= FIRST_PSEUDO_REGISTER)
1868 /* Because a register can be referenced in more than one mode,
1869 we might have to remove more than one table entry. */
1870 struct table_elt *elt;
1872 while ((elt = lookup_for_remove (x, hash, GET_MODE (x))))
1873 remove_from_table (elt, hash);
1875 else
1877 HOST_WIDE_INT in_table
1878 = TEST_HARD_REG_BIT (hard_regs_in_table, regno);
1879 unsigned int endregno
1880 = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
1881 unsigned int tregno, tendregno, rn;
1882 struct table_elt *p, *next;
1884 CLEAR_HARD_REG_BIT (hard_regs_in_table, regno);
1886 for (rn = regno + 1; rn < endregno; rn++)
1888 in_table |= TEST_HARD_REG_BIT (hard_regs_in_table, rn);
1889 CLEAR_HARD_REG_BIT (hard_regs_in_table, rn);
1890 delete_reg_equiv (rn);
1891 REG_TICK (rn)++;
1892 SUBREG_TICKED (rn) = -1;
1895 if (in_table)
1896 for (hash = 0; hash < HASH_SIZE; hash++)
1897 for (p = table[hash]; p; p = next)
1899 next = p->next_same_hash;
1901 if (GET_CODE (p->exp) != REG
1902 || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
1903 continue;
1905 tregno = REGNO (p->exp);
1906 tendregno
1907 = tregno + HARD_REGNO_NREGS (tregno, GET_MODE (p->exp));
1908 if (tendregno > regno && tregno < endregno)
1909 remove_from_table (p, hash);
1913 return;
1915 case SUBREG:
1916 invalidate (SUBREG_REG (x), VOIDmode);
1917 return;
1919 case PARALLEL:
1920 for (i = XVECLEN (x, 0) - 1; i >= 0; --i)
1921 invalidate (XVECEXP (x, 0, i), VOIDmode);
1922 return;
1924 case EXPR_LIST:
1925 /* This is part of a disjoint return value; extract the location in
1926 question ignoring the offset. */
1927 invalidate (XEXP (x, 0), VOIDmode);
1928 return;
1930 case MEM:
1931 /* Calculate the canonical version of X here so that
1932 true_dependence doesn't generate new RTL for X on each call. */
1933 x = canon_rtx (x);
1935 /* Remove all hash table elements that refer to overlapping pieces of
1936 memory. */
1937 if (full_mode == VOIDmode)
1938 full_mode = GET_MODE (x);
1940 for (i = 0; i < HASH_SIZE; i++)
1942 struct table_elt *next;
1944 for (p = table[i]; p; p = next)
1946 next = p->next_same_hash;
1947 if (p->in_memory)
1949 struct check_dependence_data d;
1951 /* Just canonicalize the expression once;
1952 otherwise each time we call invalidate
1953 true_dependence will canonicalize the
1954 expression again. */
1955 if (!p->canon_exp)
1956 p->canon_exp = canon_rtx (p->exp);
1957 d.exp = x;
1958 d.mode = full_mode;
1959 if (for_each_rtx (&p->canon_exp, check_dependence, &d))
1960 remove_from_table (p, i);
1964 return;
1966 default:
1967 abort ();
1971 /* Remove all expressions that refer to register REGNO,
1972 since they are already invalid, and we are about to
1973 mark that register valid again and don't want the old
1974 expressions to reappear as valid. */
1976 static void
1977 remove_invalid_refs (regno)
1978 unsigned int regno;
1980 unsigned int i;
1981 struct table_elt *p, *next;
1983 for (i = 0; i < HASH_SIZE; i++)
1984 for (p = table[i]; p; p = next)
1986 next = p->next_same_hash;
1987 if (GET_CODE (p->exp) != REG
1988 && refers_to_regno_p (regno, regno + 1, p->exp, (rtx *) 0))
1989 remove_from_table (p, i);
1993 /* Likewise for a subreg with subreg_reg REGNO, subreg_byte OFFSET,
1994 and mode MODE. */
1995 static void
1996 remove_invalid_subreg_refs (regno, offset, mode)
1997 unsigned int regno;
1998 unsigned int offset;
1999 enum machine_mode mode;
2001 unsigned int i;
2002 struct table_elt *p, *next;
2003 unsigned int end = offset + (GET_MODE_SIZE (mode) - 1);
2005 for (i = 0; i < HASH_SIZE; i++)
2006 for (p = table[i]; p; p = next)
2008 rtx exp = p->exp;
2009 next = p->next_same_hash;
2011 if (GET_CODE (exp) != REG
2012 && (GET_CODE (exp) != SUBREG
2013 || GET_CODE (SUBREG_REG (exp)) != REG
2014 || REGNO (SUBREG_REG (exp)) != regno
2015 || (((SUBREG_BYTE (exp)
2016 + (GET_MODE_SIZE (GET_MODE (exp)) - 1)) >= offset)
2017 && SUBREG_BYTE (exp) <= end))
2018 && refers_to_regno_p (regno, regno + 1, p->exp, (rtx *) 0))
2019 remove_from_table (p, i);
2023 /* Recompute the hash codes of any valid entries in the hash table that
2024 reference X, if X is a register, or SUBREG_REG (X) if X is a SUBREG.
2026 This is called when we make a jump equivalence. */
2028 static void
2029 rehash_using_reg (x)
2030 rtx x;
2032 unsigned int i;
2033 struct table_elt *p, *next;
2034 unsigned hash;
2036 if (GET_CODE (x) == SUBREG)
2037 x = SUBREG_REG (x);
2039 /* If X is not a register or if the register is known not to be in any
2040 valid entries in the table, we have no work to do. */
2042 if (GET_CODE (x) != REG
2043 || REG_IN_TABLE (REGNO (x)) < 0
2044 || REG_IN_TABLE (REGNO (x)) != REG_TICK (REGNO (x)))
2045 return;
2047 /* Scan all hash chains looking for valid entries that mention X.
2048 If we find one and it is in the wrong hash chain, move it. We can skip
2049 objects that are registers, since they are handled specially. */
2051 for (i = 0; i < HASH_SIZE; i++)
2052 for (p = table[i]; p; p = next)
2054 next = p->next_same_hash;
2055 if (GET_CODE (p->exp) != REG && reg_mentioned_p (x, p->exp)
2056 && exp_equiv_p (p->exp, p->exp, 1, 0)
2057 && i != (hash = safe_hash (p->exp, p->mode) & HASH_MASK))
2059 if (p->next_same_hash)
2060 p->next_same_hash->prev_same_hash = p->prev_same_hash;
2062 if (p->prev_same_hash)
2063 p->prev_same_hash->next_same_hash = p->next_same_hash;
2064 else
2065 table[i] = p->next_same_hash;
2067 p->next_same_hash = table[hash];
2068 p->prev_same_hash = 0;
2069 if (table[hash])
2070 table[hash]->prev_same_hash = p;
2071 table[hash] = p;
2076 /* Remove from the hash table any expression that is a call-clobbered
2077 register. Also update their TICK values. */
2079 static void
2080 invalidate_for_call ()
2082 unsigned int regno, endregno;
2083 unsigned int i;
2084 unsigned hash;
2085 struct table_elt *p, *next;
2086 int in_table = 0;
2088 /* Go through all the hard registers. For each that is clobbered in
2089 a CALL_INSN, remove the register from quantity chains and update
2090 reg_tick if defined. Also see if any of these registers is currently
2091 in the table. */
2093 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2094 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
2096 delete_reg_equiv (regno);
2097 if (REG_TICK (regno) >= 0)
2099 REG_TICK (regno)++;
2100 SUBREG_TICKED (regno) = -1;
2103 in_table |= (TEST_HARD_REG_BIT (hard_regs_in_table, regno) != 0);
2106 /* In the case where we have no call-clobbered hard registers in the
2107 table, we are done. Otherwise, scan the table and remove any
2108 entry that overlaps a call-clobbered register. */
2110 if (in_table)
2111 for (hash = 0; hash < HASH_SIZE; hash++)
2112 for (p = table[hash]; p; p = next)
2114 next = p->next_same_hash;
2116 if (GET_CODE (p->exp) != REG
2117 || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
2118 continue;
2120 regno = REGNO (p->exp);
2121 endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (p->exp));
2123 for (i = regno; i < endregno; i++)
2124 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
2126 remove_from_table (p, hash);
2127 break;
2132 /* Given an expression X of type CONST,
2133 and ELT which is its table entry (or 0 if it
2134 is not in the hash table),
2135 return an alternate expression for X as a register plus integer.
2136 If none can be found, return 0. */
2138 static rtx
2139 use_related_value (x, elt)
2140 rtx x;
2141 struct table_elt *elt;
2143 struct table_elt *relt = 0;
2144 struct table_elt *p, *q;
2145 HOST_WIDE_INT offset;
2147 /* First, is there anything related known?
2148 If we have a table element, we can tell from that.
2149 Otherwise, must look it up. */
2151 if (elt != 0 && elt->related_value != 0)
2152 relt = elt;
2153 else if (elt == 0 && GET_CODE (x) == CONST)
2155 rtx subexp = get_related_value (x);
2156 if (subexp != 0)
2157 relt = lookup (subexp,
2158 safe_hash (subexp, GET_MODE (subexp)) & HASH_MASK,
2159 GET_MODE (subexp));
2162 if (relt == 0)
2163 return 0;
2165 /* Search all related table entries for one that has an
2166 equivalent register. */
2168 p = relt;
2169 while (1)
2171 /* This loop is strange in that it is executed in two different cases.
2172 The first is when X is already in the table. Then it is searching
2173 the RELATED_VALUE list of X's class (RELT). The second case is when
2174 X is not in the table. Then RELT points to a class for the related
2175 value.
2177 Ensure that, whatever case we are in, that we ignore classes that have
2178 the same value as X. */
2180 if (rtx_equal_p (x, p->exp))
2181 q = 0;
2182 else
2183 for (q = p->first_same_value; q; q = q->next_same_value)
2184 if (GET_CODE (q->exp) == REG)
2185 break;
2187 if (q)
2188 break;
2190 p = p->related_value;
2192 /* We went all the way around, so there is nothing to be found.
2193 Alternatively, perhaps RELT was in the table for some other reason
2194 and it has no related values recorded. */
2195 if (p == relt || p == 0)
2196 break;
2199 if (q == 0)
2200 return 0;
2202 offset = (get_integer_term (x) - get_integer_term (p->exp));
2203 /* Note: OFFSET may be 0 if P->xexp and X are related by commutativity. */
2204 return plus_constant (q->exp, offset);
2207 /* Hash a string. Just add its bytes up. */
2208 static inline unsigned
2209 canon_hash_string (ps)
2210 const char *ps;
2212 unsigned hash = 0;
2213 const unsigned char *p = (const unsigned char *) ps;
2215 if (p)
2216 while (*p)
2217 hash += *p++;
2219 return hash;
2222 /* Hash an rtx. We are careful to make sure the value is never negative.
2223 Equivalent registers hash identically.
2224 MODE is used in hashing for CONST_INTs only;
2225 otherwise the mode of X is used.
2227 Store 1 in do_not_record if any subexpression is volatile.
2229 Store 1 in hash_arg_in_memory if X contains a MEM rtx
2230 which does not have the RTX_UNCHANGING_P bit set.
2232 Note that cse_insn knows that the hash code of a MEM expression
2233 is just (int) MEM plus the hash code of the address. */
2235 static unsigned
2236 canon_hash (x, mode)
2237 rtx x;
2238 enum machine_mode mode;
2240 int i, j;
2241 unsigned hash = 0;
2242 enum rtx_code code;
2243 const char *fmt;
2245 /* repeat is used to turn tail-recursion into iteration. */
2246 repeat:
2247 if (x == 0)
2248 return hash;
2250 code = GET_CODE (x);
2251 switch (code)
2253 case REG:
2255 unsigned int regno = REGNO (x);
2256 bool record;
2258 /* On some machines, we can't record any non-fixed hard register,
2259 because extending its life will cause reload problems. We
2260 consider ap, fp, sp, gp to be fixed for this purpose.
2262 We also consider CCmode registers to be fixed for this purpose;
2263 failure to do so leads to failure to simplify 0<100 type of
2264 conditionals.
2266 On all machines, we can't record any global registers.
2267 Nor should we record any register that is in a small
2268 class, as defined by CLASS_LIKELY_SPILLED_P. */
2270 if (regno >= FIRST_PSEUDO_REGISTER)
2271 record = true;
2272 else if (x == frame_pointer_rtx
2273 || x == hard_frame_pointer_rtx
2274 || x == arg_pointer_rtx
2275 || x == stack_pointer_rtx
2276 || x == pic_offset_table_rtx)
2277 record = true;
2278 else if (global_regs[regno])
2279 record = false;
2280 else if (fixed_regs[regno])
2281 record = true;
2282 else if (GET_MODE_CLASS (GET_MODE (x)) == MODE_CC)
2283 record = true;
2284 else if (SMALL_REGISTER_CLASSES)
2285 record = false;
2286 else if (CLASS_LIKELY_SPILLED_P (REGNO_REG_CLASS (regno)))
2287 record = false;
2288 else
2289 record = true;
2291 if (!record)
2293 do_not_record = 1;
2294 return 0;
2297 hash += ((unsigned) REG << 7) + (unsigned) REG_QTY (regno);
2298 return hash;
2301 /* We handle SUBREG of a REG specially because the underlying
2302 reg changes its hash value with every value change; we don't
2303 want to have to forget unrelated subregs when one subreg changes. */
2304 case SUBREG:
2306 if (GET_CODE (SUBREG_REG (x)) == REG)
2308 hash += (((unsigned) SUBREG << 7)
2309 + REGNO (SUBREG_REG (x))
2310 + (SUBREG_BYTE (x) / UNITS_PER_WORD));
2311 return hash;
2313 break;
2316 case CONST_INT:
2318 unsigned HOST_WIDE_INT tem = INTVAL (x);
2319 hash += ((unsigned) CONST_INT << 7) + (unsigned) mode + tem;
2320 return hash;
2323 case CONST_DOUBLE:
2324 /* This is like the general case, except that it only counts
2325 the integers representing the constant. */
2326 hash += (unsigned) code + (unsigned) GET_MODE (x);
2327 if (GET_MODE (x) != VOIDmode)
2328 hash += real_hash (CONST_DOUBLE_REAL_VALUE (x));
2329 else
2330 hash += ((unsigned) CONST_DOUBLE_LOW (x)
2331 + (unsigned) CONST_DOUBLE_HIGH (x));
2332 return hash;
2334 case CONST_VECTOR:
2336 int units;
2337 rtx elt;
2339 units = CONST_VECTOR_NUNITS (x);
2341 for (i = 0; i < units; ++i)
2343 elt = CONST_VECTOR_ELT (x, i);
2344 hash += canon_hash (elt, GET_MODE (elt));
2347 return hash;
2350 /* Assume there is only one rtx object for any given label. */
2351 case LABEL_REF:
2352 hash += ((unsigned) LABEL_REF << 7) + (unsigned long) XEXP (x, 0);
2353 return hash;
2355 case SYMBOL_REF:
2356 hash += ((unsigned) SYMBOL_REF << 7) + (unsigned long) XSTR (x, 0);
2357 return hash;
2359 case MEM:
2360 /* We don't record if marked volatile or if BLKmode since we don't
2361 know the size of the move. */
2362 if (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode)
2364 do_not_record = 1;
2365 return 0;
2367 if (! RTX_UNCHANGING_P (x) || fixed_base_plus_p (XEXP (x, 0)))
2368 hash_arg_in_memory = 1;
2370 /* Now that we have already found this special case,
2371 might as well speed it up as much as possible. */
2372 hash += (unsigned) MEM;
2373 x = XEXP (x, 0);
2374 goto repeat;
2376 case USE:
2377 /* A USE that mentions non-volatile memory needs special
2378 handling since the MEM may be BLKmode which normally
2379 prevents an entry from being made. Pure calls are
2380 marked by a USE which mentions BLKmode memory. */
2381 if (GET_CODE (XEXP (x, 0)) == MEM
2382 && ! MEM_VOLATILE_P (XEXP (x, 0)))
2384 hash += (unsigned) USE;
2385 x = XEXP (x, 0);
2387 if (! RTX_UNCHANGING_P (x) || fixed_base_plus_p (XEXP (x, 0)))
2388 hash_arg_in_memory = 1;
2390 /* Now that we have already found this special case,
2391 might as well speed it up as much as possible. */
2392 hash += (unsigned) MEM;
2393 x = XEXP (x, 0);
2394 goto repeat;
2396 break;
2398 case PRE_DEC:
2399 case PRE_INC:
2400 case POST_DEC:
2401 case POST_INC:
2402 case PRE_MODIFY:
2403 case POST_MODIFY:
2404 case PC:
2405 case CC0:
2406 case CALL:
2407 case UNSPEC_VOLATILE:
2408 do_not_record = 1;
2409 return 0;
2411 case ASM_OPERANDS:
2412 if (MEM_VOLATILE_P (x))
2414 do_not_record = 1;
2415 return 0;
2417 else
2419 /* We don't want to take the filename and line into account. */
2420 hash += (unsigned) code + (unsigned) GET_MODE (x)
2421 + canon_hash_string (ASM_OPERANDS_TEMPLATE (x))
2422 + canon_hash_string (ASM_OPERANDS_OUTPUT_CONSTRAINT (x))
2423 + (unsigned) ASM_OPERANDS_OUTPUT_IDX (x);
2425 if (ASM_OPERANDS_INPUT_LENGTH (x))
2427 for (i = 1; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
2429 hash += (canon_hash (ASM_OPERANDS_INPUT (x, i),
2430 GET_MODE (ASM_OPERANDS_INPUT (x, i)))
2431 + canon_hash_string (ASM_OPERANDS_INPUT_CONSTRAINT
2432 (x, i)));
2435 hash += canon_hash_string (ASM_OPERANDS_INPUT_CONSTRAINT (x, 0));
2436 x = ASM_OPERANDS_INPUT (x, 0);
2437 mode = GET_MODE (x);
2438 goto repeat;
2441 return hash;
2443 break;
2445 default:
2446 break;
2449 i = GET_RTX_LENGTH (code) - 1;
2450 hash += (unsigned) code + (unsigned) GET_MODE (x);
2451 fmt = GET_RTX_FORMAT (code);
2452 for (; i >= 0; i--)
2454 if (fmt[i] == 'e')
2456 rtx tem = XEXP (x, i);
2458 /* If we are about to do the last recursive call
2459 needed at this level, change it into iteration.
2460 This function is called enough to be worth it. */
2461 if (i == 0)
2463 x = tem;
2464 goto repeat;
2466 hash += canon_hash (tem, 0);
2468 else if (fmt[i] == 'E')
2469 for (j = 0; j < XVECLEN (x, i); j++)
2470 hash += canon_hash (XVECEXP (x, i, j), 0);
2471 else if (fmt[i] == 's')
2472 hash += canon_hash_string (XSTR (x, i));
2473 else if (fmt[i] == 'i')
2475 unsigned tem = XINT (x, i);
2476 hash += tem;
2478 else if (fmt[i] == '0' || fmt[i] == 't')
2479 /* Unused. */
2481 else
2482 abort ();
2484 return hash;
2487 /* Like canon_hash but with no side effects. */
2489 static unsigned
2490 safe_hash (x, mode)
2491 rtx x;
2492 enum machine_mode mode;
2494 int save_do_not_record = do_not_record;
2495 int save_hash_arg_in_memory = hash_arg_in_memory;
2496 unsigned hash = canon_hash (x, mode);
2497 hash_arg_in_memory = save_hash_arg_in_memory;
2498 do_not_record = save_do_not_record;
2499 return hash;
2502 /* Return 1 iff X and Y would canonicalize into the same thing,
2503 without actually constructing the canonicalization of either one.
2504 If VALIDATE is nonzero,
2505 we assume X is an expression being processed from the rtl
2506 and Y was found in the hash table. We check register refs
2507 in Y for being marked as valid.
2509 If EQUAL_VALUES is nonzero, we allow a register to match a constant value
2510 that is known to be in the register. Ordinarily, we don't allow them
2511 to match, because letting them match would cause unpredictable results
2512 in all the places that search a hash table chain for an equivalent
2513 for a given value. A possible equivalent that has different structure
2514 has its hash code computed from different data. Whether the hash code
2515 is the same as that of the given value is pure luck. */
2517 static int
2518 exp_equiv_p (x, y, validate, equal_values)
2519 rtx x, y;
2520 int validate;
2521 int equal_values;
2523 int i, j;
2524 enum rtx_code code;
2525 const char *fmt;
2527 /* Note: it is incorrect to assume an expression is equivalent to itself
2528 if VALIDATE is nonzero. */
2529 if (x == y && !validate)
2530 return 1;
2531 if (x == 0 || y == 0)
2532 return x == y;
2534 code = GET_CODE (x);
2535 if (code != GET_CODE (y))
2537 if (!equal_values)
2538 return 0;
2540 /* If X is a constant and Y is a register or vice versa, they may be
2541 equivalent. We only have to validate if Y is a register. */
2542 if (CONSTANT_P (x) && GET_CODE (y) == REG
2543 && REGNO_QTY_VALID_P (REGNO (y)))
2545 int y_q = REG_QTY (REGNO (y));
2546 struct qty_table_elem *y_ent = &qty_table[y_q];
2548 if (GET_MODE (y) == y_ent->mode
2549 && rtx_equal_p (x, y_ent->const_rtx)
2550 && (! validate || REG_IN_TABLE (REGNO (y)) == REG_TICK (REGNO (y))))
2551 return 1;
2554 if (CONSTANT_P (y) && code == REG
2555 && REGNO_QTY_VALID_P (REGNO (x)))
2557 int x_q = REG_QTY (REGNO (x));
2558 struct qty_table_elem *x_ent = &qty_table[x_q];
2560 if (GET_MODE (x) == x_ent->mode
2561 && rtx_equal_p (y, x_ent->const_rtx))
2562 return 1;
2565 return 0;
2568 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
2569 if (GET_MODE (x) != GET_MODE (y))
2570 return 0;
2572 switch (code)
2574 case PC:
2575 case CC0:
2576 case CONST_INT:
2577 return x == y;
2579 case LABEL_REF:
2580 return XEXP (x, 0) == XEXP (y, 0);
2582 case SYMBOL_REF:
2583 return XSTR (x, 0) == XSTR (y, 0);
2585 case REG:
2587 unsigned int regno = REGNO (y);
2588 unsigned int endregno
2589 = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
2590 : HARD_REGNO_NREGS (regno, GET_MODE (y)));
2591 unsigned int i;
2593 /* If the quantities are not the same, the expressions are not
2594 equivalent. If there are and we are not to validate, they
2595 are equivalent. Otherwise, ensure all regs are up-to-date. */
2597 if (REG_QTY (REGNO (x)) != REG_QTY (regno))
2598 return 0;
2600 if (! validate)
2601 return 1;
2603 for (i = regno; i < endregno; i++)
2604 if (REG_IN_TABLE (i) != REG_TICK (i))
2605 return 0;
2607 return 1;
2610 /* For commutative operations, check both orders. */
2611 case PLUS:
2612 case MULT:
2613 case AND:
2614 case IOR:
2615 case XOR:
2616 case NE:
2617 case EQ:
2618 return ((exp_equiv_p (XEXP (x, 0), XEXP (y, 0), validate, equal_values)
2619 && exp_equiv_p (XEXP (x, 1), XEXP (y, 1),
2620 validate, equal_values))
2621 || (exp_equiv_p (XEXP (x, 0), XEXP (y, 1),
2622 validate, equal_values)
2623 && exp_equiv_p (XEXP (x, 1), XEXP (y, 0),
2624 validate, equal_values)));
2626 case ASM_OPERANDS:
2627 /* We don't use the generic code below because we want to
2628 disregard filename and line numbers. */
2630 /* A volatile asm isn't equivalent to any other. */
2631 if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
2632 return 0;
2634 if (GET_MODE (x) != GET_MODE (y)
2635 || strcmp (ASM_OPERANDS_TEMPLATE (x), ASM_OPERANDS_TEMPLATE (y))
2636 || strcmp (ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
2637 ASM_OPERANDS_OUTPUT_CONSTRAINT (y))
2638 || ASM_OPERANDS_OUTPUT_IDX (x) != ASM_OPERANDS_OUTPUT_IDX (y)
2639 || ASM_OPERANDS_INPUT_LENGTH (x) != ASM_OPERANDS_INPUT_LENGTH (y))
2640 return 0;
2642 if (ASM_OPERANDS_INPUT_LENGTH (x))
2644 for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
2645 if (! exp_equiv_p (ASM_OPERANDS_INPUT (x, i),
2646 ASM_OPERANDS_INPUT (y, i),
2647 validate, equal_values)
2648 || strcmp (ASM_OPERANDS_INPUT_CONSTRAINT (x, i),
2649 ASM_OPERANDS_INPUT_CONSTRAINT (y, i)))
2650 return 0;
2653 return 1;
2655 default:
2656 break;
2659 /* Compare the elements. If any pair of corresponding elements
2660 fail to match, return 0 for the whole things. */
2662 fmt = GET_RTX_FORMAT (code);
2663 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2665 switch (fmt[i])
2667 case 'e':
2668 if (! exp_equiv_p (XEXP (x, i), XEXP (y, i), validate, equal_values))
2669 return 0;
2670 break;
2672 case 'E':
2673 if (XVECLEN (x, i) != XVECLEN (y, i))
2674 return 0;
2675 for (j = 0; j < XVECLEN (x, i); j++)
2676 if (! exp_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j),
2677 validate, equal_values))
2678 return 0;
2679 break;
2681 case 's':
2682 if (strcmp (XSTR (x, i), XSTR (y, i)))
2683 return 0;
2684 break;
2686 case 'i':
2687 if (XINT (x, i) != XINT (y, i))
2688 return 0;
2689 break;
2691 case 'w':
2692 if (XWINT (x, i) != XWINT (y, i))
2693 return 0;
2694 break;
2696 case '0':
2697 case 't':
2698 break;
2700 default:
2701 abort ();
2705 return 1;
2708 /* Return 1 if X has a value that can vary even between two
2709 executions of the program. 0 means X can be compared reliably
2710 against certain constants or near-constants. */
2712 static int
2713 cse_rtx_varies_p (x, from_alias)
2714 rtx x;
2715 int from_alias;
2717 /* We need not check for X and the equivalence class being of the same
2718 mode because if X is equivalent to a constant in some mode, it
2719 doesn't vary in any mode. */
2721 if (GET_CODE (x) == REG
2722 && REGNO_QTY_VALID_P (REGNO (x)))
2724 int x_q = REG_QTY (REGNO (x));
2725 struct qty_table_elem *x_ent = &qty_table[x_q];
2727 if (GET_MODE (x) == x_ent->mode
2728 && x_ent->const_rtx != NULL_RTX)
2729 return 0;
2732 if (GET_CODE (x) == PLUS
2733 && GET_CODE (XEXP (x, 1)) == CONST_INT
2734 && GET_CODE (XEXP (x, 0)) == REG
2735 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
2737 int x0_q = REG_QTY (REGNO (XEXP (x, 0)));
2738 struct qty_table_elem *x0_ent = &qty_table[x0_q];
2740 if ((GET_MODE (XEXP (x, 0)) == x0_ent->mode)
2741 && x0_ent->const_rtx != NULL_RTX)
2742 return 0;
2745 /* This can happen as the result of virtual register instantiation, if
2746 the initial constant is too large to be a valid address. This gives
2747 us a three instruction sequence, load large offset into a register,
2748 load fp minus a constant into a register, then a MEM which is the
2749 sum of the two `constant' registers. */
2750 if (GET_CODE (x) == PLUS
2751 && GET_CODE (XEXP (x, 0)) == REG
2752 && GET_CODE (XEXP (x, 1)) == REG
2753 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0)))
2754 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
2756 int x0_q = REG_QTY (REGNO (XEXP (x, 0)));
2757 int x1_q = REG_QTY (REGNO (XEXP (x, 1)));
2758 struct qty_table_elem *x0_ent = &qty_table[x0_q];
2759 struct qty_table_elem *x1_ent = &qty_table[x1_q];
2761 if ((GET_MODE (XEXP (x, 0)) == x0_ent->mode)
2762 && x0_ent->const_rtx != NULL_RTX
2763 && (GET_MODE (XEXP (x, 1)) == x1_ent->mode)
2764 && x1_ent->const_rtx != NULL_RTX)
2765 return 0;
2768 return rtx_varies_p (x, from_alias);
2771 /* Canonicalize an expression:
2772 replace each register reference inside it
2773 with the "oldest" equivalent register.
2775 If INSN is nonzero and we are replacing a pseudo with a hard register
2776 or vice versa, validate_change is used to ensure that INSN remains valid
2777 after we make our substitution. The calls are made with IN_GROUP nonzero
2778 so apply_change_group must be called upon the outermost return from this
2779 function (unless INSN is zero). The result of apply_change_group can
2780 generally be discarded since the changes we are making are optional. */
2782 static rtx
2783 canon_reg (x, insn)
2784 rtx x;
2785 rtx insn;
2787 int i;
2788 enum rtx_code code;
2789 const char *fmt;
2791 if (x == 0)
2792 return x;
2794 code = GET_CODE (x);
2795 switch (code)
2797 case PC:
2798 case CC0:
2799 case CONST:
2800 case CONST_INT:
2801 case CONST_DOUBLE:
2802 case CONST_VECTOR:
2803 case SYMBOL_REF:
2804 case LABEL_REF:
2805 case ADDR_VEC:
2806 case ADDR_DIFF_VEC:
2807 return x;
2809 case REG:
2811 int first;
2812 int q;
2813 struct qty_table_elem *ent;
2815 /* Never replace a hard reg, because hard regs can appear
2816 in more than one machine mode, and we must preserve the mode
2817 of each occurrence. Also, some hard regs appear in
2818 MEMs that are shared and mustn't be altered. Don't try to
2819 replace any reg that maps to a reg of class NO_REGS. */
2820 if (REGNO (x) < FIRST_PSEUDO_REGISTER
2821 || ! REGNO_QTY_VALID_P (REGNO (x)))
2822 return x;
2824 q = REG_QTY (REGNO (x));
2825 ent = &qty_table[q];
2826 first = ent->first_reg;
2827 return (first >= FIRST_PSEUDO_REGISTER ? regno_reg_rtx[first]
2828 : REGNO_REG_CLASS (first) == NO_REGS ? x
2829 : gen_rtx_REG (ent->mode, first));
2832 default:
2833 break;
2836 fmt = GET_RTX_FORMAT (code);
2837 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2839 int j;
2841 if (fmt[i] == 'e')
2843 rtx new = canon_reg (XEXP (x, i), insn);
2844 int insn_code;
2846 /* If replacing pseudo with hard reg or vice versa, ensure the
2847 insn remains valid. Likewise if the insn has MATCH_DUPs. */
2848 if (insn != 0 && new != 0
2849 && GET_CODE (new) == REG && GET_CODE (XEXP (x, i)) == REG
2850 && (((REGNO (new) < FIRST_PSEUDO_REGISTER)
2851 != (REGNO (XEXP (x, i)) < FIRST_PSEUDO_REGISTER))
2852 || (insn_code = recog_memoized (insn)) < 0
2853 || insn_data[insn_code].n_dups > 0))
2854 validate_change (insn, &XEXP (x, i), new, 1);
2855 else
2856 XEXP (x, i) = new;
2858 else if (fmt[i] == 'E')
2859 for (j = 0; j < XVECLEN (x, i); j++)
2860 XVECEXP (x, i, j) = canon_reg (XVECEXP (x, i, j), insn);
2863 return x;
2866 /* LOC is a location within INSN that is an operand address (the contents of
2867 a MEM). Find the best equivalent address to use that is valid for this
2868 insn.
2870 On most CISC machines, complicated address modes are costly, and rtx_cost
2871 is a good approximation for that cost. However, most RISC machines have
2872 only a few (usually only one) memory reference formats. If an address is
2873 valid at all, it is often just as cheap as any other address. Hence, for
2874 RISC machines, we use the configuration macro `ADDRESS_COST' to compare the
2875 costs of various addresses. For two addresses of equal cost, choose the one
2876 with the highest `rtx_cost' value as that has the potential of eliminating
2877 the most insns. For equal costs, we choose the first in the equivalence
2878 class. Note that we ignore the fact that pseudo registers are cheaper
2879 than hard registers here because we would also prefer the pseudo registers.
2882 static void
2883 find_best_addr (insn, loc, mode)
2884 rtx insn;
2885 rtx *loc;
2886 enum machine_mode mode;
2888 struct table_elt *elt;
2889 rtx addr = *loc;
2890 #ifdef ADDRESS_COST
2891 struct table_elt *p;
2892 int found_better = 1;
2893 #endif
2894 int save_do_not_record = do_not_record;
2895 int save_hash_arg_in_memory = hash_arg_in_memory;
2896 int addr_volatile;
2897 int regno;
2898 unsigned hash;
2900 /* Do not try to replace constant addresses or addresses of local and
2901 argument slots. These MEM expressions are made only once and inserted
2902 in many instructions, as well as being used to control symbol table
2903 output. It is not safe to clobber them.
2905 There are some uncommon cases where the address is already in a register
2906 for some reason, but we cannot take advantage of that because we have
2907 no easy way to unshare the MEM. In addition, looking up all stack
2908 addresses is costly. */
2909 if ((GET_CODE (addr) == PLUS
2910 && GET_CODE (XEXP (addr, 0)) == REG
2911 && GET_CODE (XEXP (addr, 1)) == CONST_INT
2912 && (regno = REGNO (XEXP (addr, 0)),
2913 regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM
2914 || regno == ARG_POINTER_REGNUM))
2915 || (GET_CODE (addr) == REG
2916 && (regno = REGNO (addr), regno == FRAME_POINTER_REGNUM
2917 || regno == HARD_FRAME_POINTER_REGNUM
2918 || regno == ARG_POINTER_REGNUM))
2919 || GET_CODE (addr) == ADDRESSOF
2920 || CONSTANT_ADDRESS_P (addr))
2921 return;
2923 /* If this address is not simply a register, try to fold it. This will
2924 sometimes simplify the expression. Many simplifications
2925 will not be valid, but some, usually applying the associative rule, will
2926 be valid and produce better code. */
2927 if (GET_CODE (addr) != REG)
2929 rtx folded = fold_rtx (copy_rtx (addr), NULL_RTX);
2930 int addr_folded_cost = address_cost (folded, mode);
2931 int addr_cost = address_cost (addr, mode);
2933 if ((addr_folded_cost < addr_cost
2934 || (addr_folded_cost == addr_cost
2935 /* ??? The rtx_cost comparison is left over from an older
2936 version of this code. It is probably no longer helpful. */
2937 && (rtx_cost (folded, MEM) > rtx_cost (addr, MEM)
2938 || approx_reg_cost (folded) < approx_reg_cost (addr))))
2939 && validate_change (insn, loc, folded, 0))
2940 addr = folded;
2943 /* If this address is not in the hash table, we can't look for equivalences
2944 of the whole address. Also, ignore if volatile. */
2946 do_not_record = 0;
2947 hash = HASH (addr, Pmode);
2948 addr_volatile = do_not_record;
2949 do_not_record = save_do_not_record;
2950 hash_arg_in_memory = save_hash_arg_in_memory;
2952 if (addr_volatile)
2953 return;
2955 elt = lookup (addr, hash, Pmode);
2957 #ifndef ADDRESS_COST
2958 if (elt)
2960 int our_cost = elt->cost;
2962 /* Find the lowest cost below ours that works. */
2963 for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
2964 if (elt->cost < our_cost
2965 && (GET_CODE (elt->exp) == REG
2966 || exp_equiv_p (elt->exp, elt->exp, 1, 0))
2967 && validate_change (insn, loc,
2968 canon_reg (copy_rtx (elt->exp), NULL_RTX), 0))
2969 return;
2971 #else
2973 if (elt)
2975 /* We need to find the best (under the criteria documented above) entry
2976 in the class that is valid. We use the `flag' field to indicate
2977 choices that were invalid and iterate until we can't find a better
2978 one that hasn't already been tried. */
2980 for (p = elt->first_same_value; p; p = p->next_same_value)
2981 p->flag = 0;
2983 while (found_better)
2985 int best_addr_cost = address_cost (*loc, mode);
2986 int best_rtx_cost = (elt->cost + 1) >> 1;
2987 int exp_cost;
2988 struct table_elt *best_elt = elt;
2990 found_better = 0;
2991 for (p = elt->first_same_value; p; p = p->next_same_value)
2992 if (! p->flag)
2994 if ((GET_CODE (p->exp) == REG
2995 || exp_equiv_p (p->exp, p->exp, 1, 0))
2996 && ((exp_cost = address_cost (p->exp, mode)) < best_addr_cost
2997 || (exp_cost == best_addr_cost
2998 && ((p->cost + 1) >> 1) > best_rtx_cost)))
3000 found_better = 1;
3001 best_addr_cost = exp_cost;
3002 best_rtx_cost = (p->cost + 1) >> 1;
3003 best_elt = p;
3007 if (found_better)
3009 if (validate_change (insn, loc,
3010 canon_reg (copy_rtx (best_elt->exp),
3011 NULL_RTX), 0))
3012 return;
3013 else
3014 best_elt->flag = 1;
3019 /* If the address is a binary operation with the first operand a register
3020 and the second a constant, do the same as above, but looking for
3021 equivalences of the register. Then try to simplify before checking for
3022 the best address to use. This catches a few cases: First is when we
3023 have REG+const and the register is another REG+const. We can often merge
3024 the constants and eliminate one insn and one register. It may also be
3025 that a machine has a cheap REG+REG+const. Finally, this improves the
3026 code on the Alpha for unaligned byte stores. */
3028 if (flag_expensive_optimizations
3029 && (GET_RTX_CLASS (GET_CODE (*loc)) == '2'
3030 || GET_RTX_CLASS (GET_CODE (*loc)) == 'c')
3031 && GET_CODE (XEXP (*loc, 0)) == REG
3032 && GET_CODE (XEXP (*loc, 1)) == CONST_INT)
3034 rtx c = XEXP (*loc, 1);
3036 do_not_record = 0;
3037 hash = HASH (XEXP (*loc, 0), Pmode);
3038 do_not_record = save_do_not_record;
3039 hash_arg_in_memory = save_hash_arg_in_memory;
3041 elt = lookup (XEXP (*loc, 0), hash, Pmode);
3042 if (elt == 0)
3043 return;
3045 /* We need to find the best (under the criteria documented above) entry
3046 in the class that is valid. We use the `flag' field to indicate
3047 choices that were invalid and iterate until we can't find a better
3048 one that hasn't already been tried. */
3050 for (p = elt->first_same_value; p; p = p->next_same_value)
3051 p->flag = 0;
3053 while (found_better)
3055 int best_addr_cost = address_cost (*loc, mode);
3056 int best_rtx_cost = (COST (*loc) + 1) >> 1;
3057 struct table_elt *best_elt = elt;
3058 rtx best_rtx = *loc;
3059 int count;
3061 /* This is at worst case an O(n^2) algorithm, so limit our search
3062 to the first 32 elements on the list. This avoids trouble
3063 compiling code with very long basic blocks that can easily
3064 call simplify_gen_binary so many times that we run out of
3065 memory. */
3067 found_better = 0;
3068 for (p = elt->first_same_value, count = 0;
3069 p && count < 32;
3070 p = p->next_same_value, count++)
3071 if (! p->flag
3072 && (GET_CODE (p->exp) == REG
3073 || exp_equiv_p (p->exp, p->exp, 1, 0)))
3075 rtx new = simplify_gen_binary (GET_CODE (*loc), Pmode,
3076 p->exp, c);
3077 int new_cost;
3078 new_cost = address_cost (new, mode);
3080 if (new_cost < best_addr_cost
3081 || (new_cost == best_addr_cost
3082 && (COST (new) + 1) >> 1 > best_rtx_cost))
3084 found_better = 1;
3085 best_addr_cost = new_cost;
3086 best_rtx_cost = (COST (new) + 1) >> 1;
3087 best_elt = p;
3088 best_rtx = new;
3092 if (found_better)
3094 if (validate_change (insn, loc,
3095 canon_reg (copy_rtx (best_rtx),
3096 NULL_RTX), 0))
3097 return;
3098 else
3099 best_elt->flag = 1;
3103 #endif
3106 /* Given an operation (CODE, *PARG1, *PARG2), where code is a comparison
3107 operation (EQ, NE, GT, etc.), follow it back through the hash table and
3108 what values are being compared.
3110 *PARG1 and *PARG2 are updated to contain the rtx representing the values
3111 actually being compared. For example, if *PARG1 was (cc0) and *PARG2
3112 was (const_int 0), *PARG1 and *PARG2 will be set to the objects that were
3113 compared to produce cc0.
3115 The return value is the comparison operator and is either the code of
3116 A or the code corresponding to the inverse of the comparison. */
3118 static enum rtx_code
3119 find_comparison_args (code, parg1, parg2, pmode1, pmode2)
3120 enum rtx_code code;
3121 rtx *parg1, *parg2;
3122 enum machine_mode *pmode1, *pmode2;
3124 rtx arg1, arg2;
3126 arg1 = *parg1, arg2 = *parg2;
3128 /* If ARG2 is const0_rtx, see what ARG1 is equivalent to. */
3130 while (arg2 == CONST0_RTX (GET_MODE (arg1)))
3132 /* Set nonzero when we find something of interest. */
3133 rtx x = 0;
3134 int reverse_code = 0;
3135 struct table_elt *p = 0;
3137 /* If arg1 is a COMPARE, extract the comparison arguments from it.
3138 On machines with CC0, this is the only case that can occur, since
3139 fold_rtx will return the COMPARE or item being compared with zero
3140 when given CC0. */
3142 if (GET_CODE (arg1) == COMPARE && arg2 == const0_rtx)
3143 x = arg1;
3145 /* If ARG1 is a comparison operator and CODE is testing for
3146 STORE_FLAG_VALUE, get the inner arguments. */
3148 else if (GET_RTX_CLASS (GET_CODE (arg1)) == '<')
3150 #ifdef FLOAT_STORE_FLAG_VALUE
3151 REAL_VALUE_TYPE fsfv;
3152 #endif
3154 if (code == NE
3155 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
3156 && code == LT && STORE_FLAG_VALUE == -1)
3157 #ifdef FLOAT_STORE_FLAG_VALUE
3158 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_FLOAT
3159 && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
3160 REAL_VALUE_NEGATIVE (fsfv)))
3161 #endif
3163 x = arg1;
3164 else if (code == EQ
3165 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
3166 && code == GE && STORE_FLAG_VALUE == -1)
3167 #ifdef FLOAT_STORE_FLAG_VALUE
3168 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_FLOAT
3169 && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
3170 REAL_VALUE_NEGATIVE (fsfv)))
3171 #endif
3173 x = arg1, reverse_code = 1;
3176 /* ??? We could also check for
3178 (ne (and (eq (...) (const_int 1))) (const_int 0))
3180 and related forms, but let's wait until we see them occurring. */
3182 if (x == 0)
3183 /* Look up ARG1 in the hash table and see if it has an equivalence
3184 that lets us see what is being compared. */
3185 p = lookup (arg1, safe_hash (arg1, GET_MODE (arg1)) & HASH_MASK,
3186 GET_MODE (arg1));
3187 if (p)
3189 p = p->first_same_value;
3191 /* If what we compare is already known to be constant, that is as
3192 good as it gets.
3193 We need to break the loop in this case, because otherwise we
3194 can have an infinite loop when looking at a reg that is known
3195 to be a constant which is the same as a comparison of a reg
3196 against zero which appears later in the insn stream, which in
3197 turn is constant and the same as the comparison of the first reg
3198 against zero... */
3199 if (p->is_const)
3200 break;
3203 for (; p; p = p->next_same_value)
3205 enum machine_mode inner_mode = GET_MODE (p->exp);
3206 #ifdef FLOAT_STORE_FLAG_VALUE
3207 REAL_VALUE_TYPE fsfv;
3208 #endif
3210 /* If the entry isn't valid, skip it. */
3211 if (! exp_equiv_p (p->exp, p->exp, 1, 0))
3212 continue;
3214 if (GET_CODE (p->exp) == COMPARE
3215 /* Another possibility is that this machine has a compare insn
3216 that includes the comparison code. In that case, ARG1 would
3217 be equivalent to a comparison operation that would set ARG1 to
3218 either STORE_FLAG_VALUE or zero. If this is an NE operation,
3219 ORIG_CODE is the actual comparison being done; if it is an EQ,
3220 we must reverse ORIG_CODE. On machine with a negative value
3221 for STORE_FLAG_VALUE, also look at LT and GE operations. */
3222 || ((code == NE
3223 || (code == LT
3224 && GET_MODE_CLASS (inner_mode) == MODE_INT
3225 && (GET_MODE_BITSIZE (inner_mode)
3226 <= HOST_BITS_PER_WIDE_INT)
3227 && (STORE_FLAG_VALUE
3228 & ((HOST_WIDE_INT) 1
3229 << (GET_MODE_BITSIZE (inner_mode) - 1))))
3230 #ifdef FLOAT_STORE_FLAG_VALUE
3231 || (code == LT
3232 && GET_MODE_CLASS (inner_mode) == MODE_FLOAT
3233 && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
3234 REAL_VALUE_NEGATIVE (fsfv)))
3235 #endif
3237 && GET_RTX_CLASS (GET_CODE (p->exp)) == '<'))
3239 x = p->exp;
3240 break;
3242 else if ((code == EQ
3243 || (code == GE
3244 && GET_MODE_CLASS (inner_mode) == MODE_INT
3245 && (GET_MODE_BITSIZE (inner_mode)
3246 <= HOST_BITS_PER_WIDE_INT)
3247 && (STORE_FLAG_VALUE
3248 & ((HOST_WIDE_INT) 1
3249 << (GET_MODE_BITSIZE (inner_mode) - 1))))
3250 #ifdef FLOAT_STORE_FLAG_VALUE
3251 || (code == GE
3252 && GET_MODE_CLASS (inner_mode) == MODE_FLOAT
3253 && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
3254 REAL_VALUE_NEGATIVE (fsfv)))
3255 #endif
3257 && GET_RTX_CLASS (GET_CODE (p->exp)) == '<')
3259 reverse_code = 1;
3260 x = p->exp;
3261 break;
3264 /* If this non-trapping address, e.g. fp + constant, the
3265 equivalent is a better operand since it may let us predict
3266 the value of the comparison. */
3267 else if (!rtx_addr_can_trap_p (p->exp))
3269 arg1 = p->exp;
3270 continue;
3274 /* If we didn't find a useful equivalence for ARG1, we are done.
3275 Otherwise, set up for the next iteration. */
3276 if (x == 0)
3277 break;
3279 /* If we need to reverse the comparison, make sure that that is
3280 possible -- we can't necessarily infer the value of GE from LT
3281 with floating-point operands. */
3282 if (reverse_code)
3284 enum rtx_code reversed = reversed_comparison_code (x, NULL_RTX);
3285 if (reversed == UNKNOWN)
3286 break;
3287 else
3288 code = reversed;
3290 else if (GET_RTX_CLASS (GET_CODE (x)) == '<')
3291 code = GET_CODE (x);
3292 arg1 = XEXP (x, 0), arg2 = XEXP (x, 1);
3295 /* Return our results. Return the modes from before fold_rtx
3296 because fold_rtx might produce const_int, and then it's too late. */
3297 *pmode1 = GET_MODE (arg1), *pmode2 = GET_MODE (arg2);
3298 *parg1 = fold_rtx (arg1, 0), *parg2 = fold_rtx (arg2, 0);
3300 return code;
3303 /* If X is a nontrivial arithmetic operation on an argument
3304 for which a constant value can be determined, return
3305 the result of operating on that value, as a constant.
3306 Otherwise, return X, possibly with one or more operands
3307 modified by recursive calls to this function.
3309 If X is a register whose contents are known, we do NOT
3310 return those contents here. equiv_constant is called to
3311 perform that task.
3313 INSN is the insn that we may be modifying. If it is 0, make a copy
3314 of X before modifying it. */
3316 static rtx
3317 fold_rtx (x, insn)
3318 rtx x;
3319 rtx insn;
3321 enum rtx_code code;
3322 enum machine_mode mode;
3323 const char *fmt;
3324 int i;
3325 rtx new = 0;
3326 int copied = 0;
3327 int must_swap = 0;
3329 /* Folded equivalents of first two operands of X. */
3330 rtx folded_arg0;
3331 rtx folded_arg1;
3333 /* Constant equivalents of first three operands of X;
3334 0 when no such equivalent is known. */
3335 rtx const_arg0;
3336 rtx const_arg1;
3337 rtx const_arg2;
3339 /* The mode of the first operand of X. We need this for sign and zero
3340 extends. */
3341 enum machine_mode mode_arg0;
3343 if (x == 0)
3344 return x;
3346 mode = GET_MODE (x);
3347 code = GET_CODE (x);
3348 switch (code)
3350 case CONST:
3351 case CONST_INT:
3352 case CONST_DOUBLE:
3353 case CONST_VECTOR:
3354 case SYMBOL_REF:
3355 case LABEL_REF:
3356 case REG:
3357 /* No use simplifying an EXPR_LIST
3358 since they are used only for lists of args
3359 in a function call's REG_EQUAL note. */
3360 case EXPR_LIST:
3361 /* Changing anything inside an ADDRESSOF is incorrect; we don't
3362 want to (e.g.,) make (addressof (const_int 0)) just because
3363 the location is known to be zero. */
3364 case ADDRESSOF:
3365 return x;
3367 #ifdef HAVE_cc0
3368 case CC0:
3369 return prev_insn_cc0;
3370 #endif
3372 case PC:
3373 /* If the next insn is a CODE_LABEL followed by a jump table,
3374 PC's value is a LABEL_REF pointing to that label. That
3375 lets us fold switch statements on the VAX. */
3376 if (insn && GET_CODE (insn) == JUMP_INSN)
3378 rtx next = next_nonnote_insn (insn);
3380 if (next && GET_CODE (next) == CODE_LABEL
3381 && NEXT_INSN (next) != 0
3382 && GET_CODE (NEXT_INSN (next)) == JUMP_INSN
3383 && (GET_CODE (PATTERN (NEXT_INSN (next))) == ADDR_VEC
3384 || GET_CODE (PATTERN (NEXT_INSN (next))) == ADDR_DIFF_VEC))
3385 return gen_rtx_LABEL_REF (Pmode, next);
3387 break;
3389 case SUBREG:
3390 /* See if we previously assigned a constant value to this SUBREG. */
3391 if ((new = lookup_as_function (x, CONST_INT)) != 0
3392 || (new = lookup_as_function (x, CONST_DOUBLE)) != 0)
3393 return new;
3395 /* If this is a paradoxical SUBREG, we have no idea what value the
3396 extra bits would have. However, if the operand is equivalent
3397 to a SUBREG whose operand is the same as our mode, and all the
3398 modes are within a word, we can just use the inner operand
3399 because these SUBREGs just say how to treat the register.
3401 Similarly if we find an integer constant. */
3403 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3405 enum machine_mode imode = GET_MODE (SUBREG_REG (x));
3406 struct table_elt *elt;
3408 if (GET_MODE_SIZE (mode) <= UNITS_PER_WORD
3409 && GET_MODE_SIZE (imode) <= UNITS_PER_WORD
3410 && (elt = lookup (SUBREG_REG (x), HASH (SUBREG_REG (x), imode),
3411 imode)) != 0)
3412 for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
3414 if (CONSTANT_P (elt->exp)
3415 && GET_MODE (elt->exp) == VOIDmode)
3416 return elt->exp;
3418 if (GET_CODE (elt->exp) == SUBREG
3419 && GET_MODE (SUBREG_REG (elt->exp)) == mode
3420 && exp_equiv_p (elt->exp, elt->exp, 1, 0))
3421 return copy_rtx (SUBREG_REG (elt->exp));
3424 return x;
3427 /* Fold SUBREG_REG. If it changed, see if we can simplify the SUBREG.
3428 We might be able to if the SUBREG is extracting a single word in an
3429 integral mode or extracting the low part. */
3431 folded_arg0 = fold_rtx (SUBREG_REG (x), insn);
3432 const_arg0 = equiv_constant (folded_arg0);
3433 if (const_arg0)
3434 folded_arg0 = const_arg0;
3436 if (folded_arg0 != SUBREG_REG (x))
3438 new = simplify_subreg (mode, folded_arg0,
3439 GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
3440 if (new)
3441 return new;
3444 /* If this is a narrowing SUBREG and our operand is a REG, see if
3445 we can find an equivalence for REG that is an arithmetic operation
3446 in a wider mode where both operands are paradoxical SUBREGs
3447 from objects of our result mode. In that case, we couldn't report
3448 an equivalent value for that operation, since we don't know what the
3449 extra bits will be. But we can find an equivalence for this SUBREG
3450 by folding that operation is the narrow mode. This allows us to
3451 fold arithmetic in narrow modes when the machine only supports
3452 word-sized arithmetic.
3454 Also look for a case where we have a SUBREG whose operand is the
3455 same as our result. If both modes are smaller than a word, we
3456 are simply interpreting a register in different modes and we
3457 can use the inner value. */
3459 if (GET_CODE (folded_arg0) == REG
3460 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (folded_arg0))
3461 && subreg_lowpart_p (x))
3463 struct table_elt *elt;
3465 /* We can use HASH here since we know that canon_hash won't be
3466 called. */
3467 elt = lookup (folded_arg0,
3468 HASH (folded_arg0, GET_MODE (folded_arg0)),
3469 GET_MODE (folded_arg0));
3471 if (elt)
3472 elt = elt->first_same_value;
3474 for (; elt; elt = elt->next_same_value)
3476 enum rtx_code eltcode = GET_CODE (elt->exp);
3478 /* Just check for unary and binary operations. */
3479 if (GET_RTX_CLASS (GET_CODE (elt->exp)) == '1'
3480 && GET_CODE (elt->exp) != SIGN_EXTEND
3481 && GET_CODE (elt->exp) != ZERO_EXTEND
3482 && GET_CODE (XEXP (elt->exp, 0)) == SUBREG
3483 && GET_MODE (SUBREG_REG (XEXP (elt->exp, 0))) == mode
3484 && (GET_MODE_CLASS (mode)
3485 == GET_MODE_CLASS (GET_MODE (XEXP (elt->exp, 0)))))
3487 rtx op0 = SUBREG_REG (XEXP (elt->exp, 0));
3489 if (GET_CODE (op0) != REG && ! CONSTANT_P (op0))
3490 op0 = fold_rtx (op0, NULL_RTX);
3492 op0 = equiv_constant (op0);
3493 if (op0)
3494 new = simplify_unary_operation (GET_CODE (elt->exp), mode,
3495 op0, mode);
3497 else if ((GET_RTX_CLASS (GET_CODE (elt->exp)) == '2'
3498 || GET_RTX_CLASS (GET_CODE (elt->exp)) == 'c')
3499 && eltcode != DIV && eltcode != MOD
3500 && eltcode != UDIV && eltcode != UMOD
3501 && eltcode != ASHIFTRT && eltcode != LSHIFTRT
3502 && eltcode != ROTATE && eltcode != ROTATERT
3503 && ((GET_CODE (XEXP (elt->exp, 0)) == SUBREG
3504 && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 0)))
3505 == mode))
3506 || CONSTANT_P (XEXP (elt->exp, 0)))
3507 && ((GET_CODE (XEXP (elt->exp, 1)) == SUBREG
3508 && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 1)))
3509 == mode))
3510 || CONSTANT_P (XEXP (elt->exp, 1))))
3512 rtx op0 = gen_lowpart_common (mode, XEXP (elt->exp, 0));
3513 rtx op1 = gen_lowpart_common (mode, XEXP (elt->exp, 1));
3515 if (op0 && GET_CODE (op0) != REG && ! CONSTANT_P (op0))
3516 op0 = fold_rtx (op0, NULL_RTX);
3518 if (op0)
3519 op0 = equiv_constant (op0);
3521 if (op1 && GET_CODE (op1) != REG && ! CONSTANT_P (op1))
3522 op1 = fold_rtx (op1, NULL_RTX);
3524 if (op1)
3525 op1 = equiv_constant (op1);
3527 /* If we are looking for the low SImode part of
3528 (ashift:DI c (const_int 32)), it doesn't work
3529 to compute that in SImode, because a 32-bit shift
3530 in SImode is unpredictable. We know the value is 0. */
3531 if (op0 && op1
3532 && GET_CODE (elt->exp) == ASHIFT
3533 && GET_CODE (op1) == CONST_INT
3534 && INTVAL (op1) >= GET_MODE_BITSIZE (mode))
3536 if (INTVAL (op1) < GET_MODE_BITSIZE (GET_MODE (elt->exp)))
3538 /* If the count fits in the inner mode's width,
3539 but exceeds the outer mode's width,
3540 the value will get truncated to 0
3541 by the subreg. */
3542 new = const0_rtx;
3543 else
3544 /* If the count exceeds even the inner mode's width,
3545 don't fold this expression. */
3546 new = 0;
3548 else if (op0 && op1)
3549 new = simplify_binary_operation (GET_CODE (elt->exp), mode,
3550 op0, op1);
3553 else if (GET_CODE (elt->exp) == SUBREG
3554 && GET_MODE (SUBREG_REG (elt->exp)) == mode
3555 && (GET_MODE_SIZE (GET_MODE (folded_arg0))
3556 <= UNITS_PER_WORD)
3557 && exp_equiv_p (elt->exp, elt->exp, 1, 0))
3558 new = copy_rtx (SUBREG_REG (elt->exp));
3560 if (new)
3561 return new;
3565 return x;
3567 case NOT:
3568 case NEG:
3569 /* If we have (NOT Y), see if Y is known to be (NOT Z).
3570 If so, (NOT Y) simplifies to Z. Similarly for NEG. */
3571 new = lookup_as_function (XEXP (x, 0), code);
3572 if (new)
3573 return fold_rtx (copy_rtx (XEXP (new, 0)), insn);
3574 break;
3576 case MEM:
3577 /* If we are not actually processing an insn, don't try to find the
3578 best address. Not only don't we care, but we could modify the
3579 MEM in an invalid way since we have no insn to validate against. */
3580 if (insn != 0)
3581 find_best_addr (insn, &XEXP (x, 0), GET_MODE (x));
3584 /* Even if we don't fold in the insn itself,
3585 we can safely do so here, in hopes of getting a constant. */
3586 rtx addr = fold_rtx (XEXP (x, 0), NULL_RTX);
3587 rtx base = 0;
3588 HOST_WIDE_INT offset = 0;
3590 if (GET_CODE (addr) == REG
3591 && REGNO_QTY_VALID_P (REGNO (addr)))
3593 int addr_q = REG_QTY (REGNO (addr));
3594 struct qty_table_elem *addr_ent = &qty_table[addr_q];
3596 if (GET_MODE (addr) == addr_ent->mode
3597 && addr_ent->const_rtx != NULL_RTX)
3598 addr = addr_ent->const_rtx;
3601 /* If address is constant, split it into a base and integer offset. */
3602 if (GET_CODE (addr) == SYMBOL_REF || GET_CODE (addr) == LABEL_REF)
3603 base = addr;
3604 else if (GET_CODE (addr) == CONST && GET_CODE (XEXP (addr, 0)) == PLUS
3605 && GET_CODE (XEXP (XEXP (addr, 0), 1)) == CONST_INT)
3607 base = XEXP (XEXP (addr, 0), 0);
3608 offset = INTVAL (XEXP (XEXP (addr, 0), 1));
3610 else if (GET_CODE (addr) == LO_SUM
3611 && GET_CODE (XEXP (addr, 1)) == SYMBOL_REF)
3612 base = XEXP (addr, 1);
3613 else if (GET_CODE (addr) == ADDRESSOF)
3614 return change_address (x, VOIDmode, addr);
3616 /* If this is a constant pool reference, we can fold it into its
3617 constant to allow better value tracking. */
3618 if (base && GET_CODE (base) == SYMBOL_REF
3619 && CONSTANT_POOL_ADDRESS_P (base))
3621 rtx constant = get_pool_constant (base);
3622 enum machine_mode const_mode = get_pool_mode (base);
3623 rtx new;
3625 if (CONSTANT_P (constant) && GET_CODE (constant) != CONST_INT)
3626 constant_pool_entries_cost = COST (constant);
3628 /* If we are loading the full constant, we have an equivalence. */
3629 if (offset == 0 && mode == const_mode)
3630 return constant;
3632 /* If this actually isn't a constant (weird!), we can't do
3633 anything. Otherwise, handle the two most common cases:
3634 extracting a word from a multi-word constant, and extracting
3635 the low-order bits. Other cases don't seem common enough to
3636 worry about. */
3637 if (! CONSTANT_P (constant))
3638 return x;
3640 if (GET_MODE_CLASS (mode) == MODE_INT
3641 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
3642 && offset % UNITS_PER_WORD == 0
3643 && (new = operand_subword (constant,
3644 offset / UNITS_PER_WORD,
3645 0, const_mode)) != 0)
3646 return new;
3648 if (((BYTES_BIG_ENDIAN
3649 && offset == GET_MODE_SIZE (GET_MODE (constant)) - 1)
3650 || (! BYTES_BIG_ENDIAN && offset == 0))
3651 && (new = gen_lowpart_if_possible (mode, constant)) != 0)
3652 return new;
3655 /* If this is a reference to a label at a known position in a jump
3656 table, we also know its value. */
3657 if (base && GET_CODE (base) == LABEL_REF)
3659 rtx label = XEXP (base, 0);
3660 rtx table_insn = NEXT_INSN (label);
3662 if (table_insn && GET_CODE (table_insn) == JUMP_INSN
3663 && GET_CODE (PATTERN (table_insn)) == ADDR_VEC)
3665 rtx table = PATTERN (table_insn);
3667 if (offset >= 0
3668 && (offset / GET_MODE_SIZE (GET_MODE (table))
3669 < XVECLEN (table, 0)))
3670 return XVECEXP (table, 0,
3671 offset / GET_MODE_SIZE (GET_MODE (table)));
3673 if (table_insn && GET_CODE (table_insn) == JUMP_INSN
3674 && GET_CODE (PATTERN (table_insn)) == ADDR_DIFF_VEC)
3676 rtx table = PATTERN (table_insn);
3678 if (offset >= 0
3679 && (offset / GET_MODE_SIZE (GET_MODE (table))
3680 < XVECLEN (table, 1)))
3682 offset /= GET_MODE_SIZE (GET_MODE (table));
3683 new = gen_rtx_MINUS (Pmode, XVECEXP (table, 1, offset),
3684 XEXP (table, 0));
3686 if (GET_MODE (table) != Pmode)
3687 new = gen_rtx_TRUNCATE (GET_MODE (table), new);
3689 /* Indicate this is a constant. This isn't a
3690 valid form of CONST, but it will only be used
3691 to fold the next insns and then discarded, so
3692 it should be safe.
3694 Note this expression must be explicitly discarded,
3695 by cse_insn, else it may end up in a REG_EQUAL note
3696 and "escape" to cause problems elsewhere. */
3697 return gen_rtx_CONST (GET_MODE (new), new);
3702 return x;
3705 #ifdef NO_FUNCTION_CSE
3706 case CALL:
3707 if (CONSTANT_P (XEXP (XEXP (x, 0), 0)))
3708 return x;
3709 break;
3710 #endif
3712 case ASM_OPERANDS:
3713 for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
3714 validate_change (insn, &ASM_OPERANDS_INPUT (x, i),
3715 fold_rtx (ASM_OPERANDS_INPUT (x, i), insn), 0);
3716 break;
3718 default:
3719 break;
3722 const_arg0 = 0;
3723 const_arg1 = 0;
3724 const_arg2 = 0;
3725 mode_arg0 = VOIDmode;
3727 /* Try folding our operands.
3728 Then see which ones have constant values known. */
3730 fmt = GET_RTX_FORMAT (code);
3731 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3732 if (fmt[i] == 'e')
3734 rtx arg = XEXP (x, i);
3735 rtx folded_arg = arg, const_arg = 0;
3736 enum machine_mode mode_arg = GET_MODE (arg);
3737 rtx cheap_arg, expensive_arg;
3738 rtx replacements[2];
3739 int j;
3740 int old_cost = COST_IN (XEXP (x, i), code);
3742 /* Most arguments are cheap, so handle them specially. */
3743 switch (GET_CODE (arg))
3745 case REG:
3746 /* This is the same as calling equiv_constant; it is duplicated
3747 here for speed. */
3748 if (REGNO_QTY_VALID_P (REGNO (arg)))
3750 int arg_q = REG_QTY (REGNO (arg));
3751 struct qty_table_elem *arg_ent = &qty_table[arg_q];
3753 if (arg_ent->const_rtx != NULL_RTX
3754 && GET_CODE (arg_ent->const_rtx) != REG
3755 && GET_CODE (arg_ent->const_rtx) != PLUS)
3756 const_arg
3757 = gen_lowpart_if_possible (GET_MODE (arg),
3758 arg_ent->const_rtx);
3760 break;
3762 case CONST:
3763 case CONST_INT:
3764 case SYMBOL_REF:
3765 case LABEL_REF:
3766 case CONST_DOUBLE:
3767 case CONST_VECTOR:
3768 const_arg = arg;
3769 break;
3771 #ifdef HAVE_cc0
3772 case CC0:
3773 folded_arg = prev_insn_cc0;
3774 mode_arg = prev_insn_cc0_mode;
3775 const_arg = equiv_constant (folded_arg);
3776 break;
3777 #endif
3779 default:
3780 folded_arg = fold_rtx (arg, insn);
3781 const_arg = equiv_constant (folded_arg);
3784 /* For the first three operands, see if the operand
3785 is constant or equivalent to a constant. */
3786 switch (i)
3788 case 0:
3789 folded_arg0 = folded_arg;
3790 const_arg0 = const_arg;
3791 mode_arg0 = mode_arg;
3792 break;
3793 case 1:
3794 folded_arg1 = folded_arg;
3795 const_arg1 = const_arg;
3796 break;
3797 case 2:
3798 const_arg2 = const_arg;
3799 break;
3802 /* Pick the least expensive of the folded argument and an
3803 equivalent constant argument. */
3804 if (const_arg == 0 || const_arg == folded_arg
3805 || COST_IN (const_arg, code) > COST_IN (folded_arg, code))
3806 cheap_arg = folded_arg, expensive_arg = const_arg;
3807 else
3808 cheap_arg = const_arg, expensive_arg = folded_arg;
3810 /* Try to replace the operand with the cheapest of the two
3811 possibilities. If it doesn't work and this is either of the first
3812 two operands of a commutative operation, try swapping them.
3813 If THAT fails, try the more expensive, provided it is cheaper
3814 than what is already there. */
3816 if (cheap_arg == XEXP (x, i))
3817 continue;
3819 if (insn == 0 && ! copied)
3821 x = copy_rtx (x);
3822 copied = 1;
3825 /* Order the replacements from cheapest to most expensive. */
3826 replacements[0] = cheap_arg;
3827 replacements[1] = expensive_arg;
3829 for (j = 0; j < 2 && replacements[j]; j++)
3831 int new_cost = COST_IN (replacements[j], code);
3833 /* Stop if what existed before was cheaper. Prefer constants
3834 in the case of a tie. */
3835 if (new_cost > old_cost
3836 || (new_cost == old_cost && CONSTANT_P (XEXP (x, i))))
3837 break;
3839 if (validate_change (insn, &XEXP (x, i), replacements[j], 0))
3840 break;
3842 if (code == NE || code == EQ || GET_RTX_CLASS (code) == 'c'
3843 || code == LTGT || code == UNEQ || code == ORDERED
3844 || code == UNORDERED)
3846 validate_change (insn, &XEXP (x, i), XEXP (x, 1 - i), 1);
3847 validate_change (insn, &XEXP (x, 1 - i), replacements[j], 1);
3849 if (apply_change_group ())
3851 /* Swap them back to be invalid so that this loop can
3852 continue and flag them to be swapped back later. */
3853 rtx tem;
3855 tem = XEXP (x, 0); XEXP (x, 0) = XEXP (x, 1);
3856 XEXP (x, 1) = tem;
3857 must_swap = 1;
3858 break;
3864 else
3866 if (fmt[i] == 'E')
3867 /* Don't try to fold inside of a vector of expressions.
3868 Doing nothing is harmless. */
3872 /* If a commutative operation, place a constant integer as the second
3873 operand unless the first operand is also a constant integer. Otherwise,
3874 place any constant second unless the first operand is also a constant. */
3876 if (code == EQ || code == NE || GET_RTX_CLASS (code) == 'c'
3877 || code == LTGT || code == UNEQ || code == ORDERED
3878 || code == UNORDERED)
3880 if (must_swap || (const_arg0
3881 && (const_arg1 == 0
3882 || (GET_CODE (const_arg0) == CONST_INT
3883 && GET_CODE (const_arg1) != CONST_INT))))
3885 rtx tem = XEXP (x, 0);
3887 if (insn == 0 && ! copied)
3889 x = copy_rtx (x);
3890 copied = 1;
3893 validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
3894 validate_change (insn, &XEXP (x, 1), tem, 1);
3895 if (apply_change_group ())
3897 tem = const_arg0, const_arg0 = const_arg1, const_arg1 = tem;
3898 tem = folded_arg0, folded_arg0 = folded_arg1, folded_arg1 = tem;
3903 /* If X is an arithmetic operation, see if we can simplify it. */
3905 switch (GET_RTX_CLASS (code))
3907 case '1':
3909 int is_const = 0;
3911 /* We can't simplify extension ops unless we know the
3912 original mode. */
3913 if ((code == ZERO_EXTEND || code == SIGN_EXTEND)
3914 && mode_arg0 == VOIDmode)
3915 break;
3917 /* If we had a CONST, strip it off and put it back later if we
3918 fold. */
3919 if (const_arg0 != 0 && GET_CODE (const_arg0) == CONST)
3920 is_const = 1, const_arg0 = XEXP (const_arg0, 0);
3922 new = simplify_unary_operation (code, mode,
3923 const_arg0 ? const_arg0 : folded_arg0,
3924 mode_arg0);
3925 if (new != 0 && is_const)
3926 new = gen_rtx_CONST (mode, new);
3928 break;
3930 case '<':
3931 /* See what items are actually being compared and set FOLDED_ARG[01]
3932 to those values and CODE to the actual comparison code. If any are
3933 constant, set CONST_ARG0 and CONST_ARG1 appropriately. We needn't
3934 do anything if both operands are already known to be constant. */
3936 if (const_arg0 == 0 || const_arg1 == 0)
3938 struct table_elt *p0, *p1;
3939 rtx true_rtx = const_true_rtx, false_rtx = const0_rtx;
3940 enum machine_mode mode_arg1;
3942 #ifdef FLOAT_STORE_FLAG_VALUE
3943 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
3945 true_rtx = (CONST_DOUBLE_FROM_REAL_VALUE
3946 (FLOAT_STORE_FLAG_VALUE (mode), mode));
3947 false_rtx = CONST0_RTX (mode);
3949 #endif
3951 code = find_comparison_args (code, &folded_arg0, &folded_arg1,
3952 &mode_arg0, &mode_arg1);
3953 const_arg0 = equiv_constant (folded_arg0);
3954 const_arg1 = equiv_constant (folded_arg1);
3956 /* If the mode is VOIDmode or a MODE_CC mode, we don't know
3957 what kinds of things are being compared, so we can't do
3958 anything with this comparison. */
3960 if (mode_arg0 == VOIDmode || GET_MODE_CLASS (mode_arg0) == MODE_CC)
3961 break;
3963 /* If we do not now have two constants being compared, see
3964 if we can nevertheless deduce some things about the
3965 comparison. */
3966 if (const_arg0 == 0 || const_arg1 == 0)
3968 /* Some addresses are known to be nonzero. We don't know
3969 their sign, but equality comparisons are known. */
3970 if (const_arg1 == const0_rtx
3971 && nonzero_address_p (folded_arg0))
3973 if (code == EQ)
3974 return false_rtx;
3975 else if (code == NE)
3976 return true_rtx;
3979 /* See if the two operands are the same. */
3981 if (folded_arg0 == folded_arg1
3982 || (GET_CODE (folded_arg0) == REG
3983 && GET_CODE (folded_arg1) == REG
3984 && (REG_QTY (REGNO (folded_arg0))
3985 == REG_QTY (REGNO (folded_arg1))))
3986 || ((p0 = lookup (folded_arg0,
3987 (safe_hash (folded_arg0, mode_arg0)
3988 & HASH_MASK), mode_arg0))
3989 && (p1 = lookup (folded_arg1,
3990 (safe_hash (folded_arg1, mode_arg0)
3991 & HASH_MASK), mode_arg0))
3992 && p0->first_same_value == p1->first_same_value))
3994 /* Sadly two equal NaNs are not equivalent. */
3995 if (!HONOR_NANS (mode_arg0))
3996 return ((code == EQ || code == LE || code == GE
3997 || code == LEU || code == GEU || code == UNEQ
3998 || code == UNLE || code == UNGE
3999 || code == ORDERED)
4000 ? true_rtx : false_rtx);
4001 /* Take care for the FP compares we can resolve. */
4002 if (code == UNEQ || code == UNLE || code == UNGE)
4003 return true_rtx;
4004 if (code == LTGT || code == LT || code == GT)
4005 return false_rtx;
4008 /* If FOLDED_ARG0 is a register, see if the comparison we are
4009 doing now is either the same as we did before or the reverse
4010 (we only check the reverse if not floating-point). */
4011 else if (GET_CODE (folded_arg0) == REG)
4013 int qty = REG_QTY (REGNO (folded_arg0));
4015 if (REGNO_QTY_VALID_P (REGNO (folded_arg0)))
4017 struct qty_table_elem *ent = &qty_table[qty];
4019 if ((comparison_dominates_p (ent->comparison_code, code)
4020 || (! FLOAT_MODE_P (mode_arg0)
4021 && comparison_dominates_p (ent->comparison_code,
4022 reverse_condition (code))))
4023 && (rtx_equal_p (ent->comparison_const, folded_arg1)
4024 || (const_arg1
4025 && rtx_equal_p (ent->comparison_const,
4026 const_arg1))
4027 || (GET_CODE (folded_arg1) == REG
4028 && (REG_QTY (REGNO (folded_arg1)) == ent->comparison_qty))))
4029 return (comparison_dominates_p (ent->comparison_code, code)
4030 ? true_rtx : false_rtx);
4036 /* If we are comparing against zero, see if the first operand is
4037 equivalent to an IOR with a constant. If so, we may be able to
4038 determine the result of this comparison. */
4040 if (const_arg1 == const0_rtx)
4042 rtx y = lookup_as_function (folded_arg0, IOR);
4043 rtx inner_const;
4045 if (y != 0
4046 && (inner_const = equiv_constant (XEXP (y, 1))) != 0
4047 && GET_CODE (inner_const) == CONST_INT
4048 && INTVAL (inner_const) != 0)
4050 int sign_bitnum = GET_MODE_BITSIZE (mode_arg0) - 1;
4051 int has_sign = (HOST_BITS_PER_WIDE_INT >= sign_bitnum
4052 && (INTVAL (inner_const)
4053 & ((HOST_WIDE_INT) 1 << sign_bitnum)));
4054 rtx true_rtx = const_true_rtx, false_rtx = const0_rtx;
4056 #ifdef FLOAT_STORE_FLAG_VALUE
4057 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
4059 true_rtx = (CONST_DOUBLE_FROM_REAL_VALUE
4060 (FLOAT_STORE_FLAG_VALUE (mode), mode));
4061 false_rtx = CONST0_RTX (mode);
4063 #endif
4065 switch (code)
4067 case EQ:
4068 return false_rtx;
4069 case NE:
4070 return true_rtx;
4071 case LT: case LE:
4072 if (has_sign)
4073 return true_rtx;
4074 break;
4075 case GT: case GE:
4076 if (has_sign)
4077 return false_rtx;
4078 break;
4079 default:
4080 break;
4085 new = simplify_relational_operation (code,
4086 (mode_arg0 != VOIDmode
4087 ? mode_arg0
4088 : (GET_MODE (const_arg0
4089 ? const_arg0
4090 : folded_arg0)
4091 != VOIDmode)
4092 ? GET_MODE (const_arg0
4093 ? const_arg0
4094 : folded_arg0)
4095 : GET_MODE (const_arg1
4096 ? const_arg1
4097 : folded_arg1)),
4098 const_arg0 ? const_arg0 : folded_arg0,
4099 const_arg1 ? const_arg1 : folded_arg1);
4100 #ifdef FLOAT_STORE_FLAG_VALUE
4101 if (new != 0 && GET_MODE_CLASS (mode) == MODE_FLOAT)
4103 if (new == const0_rtx)
4104 new = CONST0_RTX (mode);
4105 else
4106 new = (CONST_DOUBLE_FROM_REAL_VALUE
4107 (FLOAT_STORE_FLAG_VALUE (mode), mode));
4109 #endif
4110 break;
4112 case '2':
4113 case 'c':
4114 switch (code)
4116 case PLUS:
4117 /* If the second operand is a LABEL_REF, see if the first is a MINUS
4118 with that LABEL_REF as its second operand. If so, the result is
4119 the first operand of that MINUS. This handles switches with an
4120 ADDR_DIFF_VEC table. */
4121 if (const_arg1 && GET_CODE (const_arg1) == LABEL_REF)
4123 rtx y
4124 = GET_CODE (folded_arg0) == MINUS ? folded_arg0
4125 : lookup_as_function (folded_arg0, MINUS);
4127 if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
4128 && XEXP (XEXP (y, 1), 0) == XEXP (const_arg1, 0))
4129 return XEXP (y, 0);
4131 /* Now try for a CONST of a MINUS like the above. */
4132 if ((y = (GET_CODE (folded_arg0) == CONST ? folded_arg0
4133 : lookup_as_function (folded_arg0, CONST))) != 0
4134 && GET_CODE (XEXP (y, 0)) == MINUS
4135 && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
4136 && XEXP (XEXP (XEXP (y, 0), 1), 0) == XEXP (const_arg1, 0))
4137 return XEXP (XEXP (y, 0), 0);
4140 /* Likewise if the operands are in the other order. */
4141 if (const_arg0 && GET_CODE (const_arg0) == LABEL_REF)
4143 rtx y
4144 = GET_CODE (folded_arg1) == MINUS ? folded_arg1
4145 : lookup_as_function (folded_arg1, MINUS);
4147 if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
4148 && XEXP (XEXP (y, 1), 0) == XEXP (const_arg0, 0))
4149 return XEXP (y, 0);
4151 /* Now try for a CONST of a MINUS like the above. */
4152 if ((y = (GET_CODE (folded_arg1) == CONST ? folded_arg1
4153 : lookup_as_function (folded_arg1, CONST))) != 0
4154 && GET_CODE (XEXP (y, 0)) == MINUS
4155 && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
4156 && XEXP (XEXP (XEXP (y, 0), 1), 0) == XEXP (const_arg0, 0))
4157 return XEXP (XEXP (y, 0), 0);
4160 /* If second operand is a register equivalent to a negative
4161 CONST_INT, see if we can find a register equivalent to the
4162 positive constant. Make a MINUS if so. Don't do this for
4163 a non-negative constant since we might then alternate between
4164 choosing positive and negative constants. Having the positive
4165 constant previously-used is the more common case. Be sure
4166 the resulting constant is non-negative; if const_arg1 were
4167 the smallest negative number this would overflow: depending
4168 on the mode, this would either just be the same value (and
4169 hence not save anything) or be incorrect. */
4170 if (const_arg1 != 0 && GET_CODE (const_arg1) == CONST_INT
4171 && INTVAL (const_arg1) < 0
4172 /* This used to test
4174 -INTVAL (const_arg1) >= 0
4176 But The Sun V5.0 compilers mis-compiled that test. So
4177 instead we test for the problematic value in a more direct
4178 manner and hope the Sun compilers get it correct. */
4179 && INTVAL (const_arg1) !=
4180 ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1))
4181 && GET_CODE (folded_arg1) == REG)
4183 rtx new_const = GEN_INT (-INTVAL (const_arg1));
4184 struct table_elt *p
4185 = lookup (new_const, safe_hash (new_const, mode) & HASH_MASK,
4186 mode);
4188 if (p)
4189 for (p = p->first_same_value; p; p = p->next_same_value)
4190 if (GET_CODE (p->exp) == REG)
4191 return simplify_gen_binary (MINUS, mode, folded_arg0,
4192 canon_reg (p->exp, NULL_RTX));
4194 goto from_plus;
4196 case MINUS:
4197 /* If we have (MINUS Y C), see if Y is known to be (PLUS Z C2).
4198 If so, produce (PLUS Z C2-C). */
4199 if (const_arg1 != 0 && GET_CODE (const_arg1) == CONST_INT)
4201 rtx y = lookup_as_function (XEXP (x, 0), PLUS);
4202 if (y && GET_CODE (XEXP (y, 1)) == CONST_INT)
4203 return fold_rtx (plus_constant (copy_rtx (y),
4204 -INTVAL (const_arg1)),
4205 NULL_RTX);
4208 /* Fall through. */
4210 from_plus:
4211 case SMIN: case SMAX: case UMIN: case UMAX:
4212 case IOR: case AND: case XOR:
4213 case MULT:
4214 case ASHIFT: case LSHIFTRT: case ASHIFTRT:
4215 /* If we have (<op> <reg> <const_int>) for an associative OP and REG
4216 is known to be of similar form, we may be able to replace the
4217 operation with a combined operation. This may eliminate the
4218 intermediate operation if every use is simplified in this way.
4219 Note that the similar optimization done by combine.c only works
4220 if the intermediate operation's result has only one reference. */
4222 if (GET_CODE (folded_arg0) == REG
4223 && const_arg1 && GET_CODE (const_arg1) == CONST_INT)
4225 int is_shift
4226 = (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT);
4227 rtx y = lookup_as_function (folded_arg0, code);
4228 rtx inner_const;
4229 enum rtx_code associate_code;
4230 rtx new_const;
4232 if (y == 0
4233 || 0 == (inner_const
4234 = equiv_constant (fold_rtx (XEXP (y, 1), 0)))
4235 || GET_CODE (inner_const) != CONST_INT
4236 /* If we have compiled a statement like
4237 "if (x == (x & mask1))", and now are looking at
4238 "x & mask2", we will have a case where the first operand
4239 of Y is the same as our first operand. Unless we detect
4240 this case, an infinite loop will result. */
4241 || XEXP (y, 0) == folded_arg0)
4242 break;
4244 /* Don't associate these operations if they are a PLUS with the
4245 same constant and it is a power of two. These might be doable
4246 with a pre- or post-increment. Similarly for two subtracts of
4247 identical powers of two with post decrement. */
4249 if (code == PLUS && INTVAL (const_arg1) == INTVAL (inner_const)
4250 && ((HAVE_PRE_INCREMENT
4251 && exact_log2 (INTVAL (const_arg1)) >= 0)
4252 || (HAVE_POST_INCREMENT
4253 && exact_log2 (INTVAL (const_arg1)) >= 0)
4254 || (HAVE_PRE_DECREMENT
4255 && exact_log2 (- INTVAL (const_arg1)) >= 0)
4256 || (HAVE_POST_DECREMENT
4257 && exact_log2 (- INTVAL (const_arg1)) >= 0)))
4258 break;
4260 /* Compute the code used to compose the constants. For example,
4261 A-C1-C2 is A-(C1 + C2), so if CODE == MINUS, we want PLUS. */
4263 associate_code = (is_shift || code == MINUS ? PLUS : code);
4265 new_const = simplify_binary_operation (associate_code, mode,
4266 const_arg1, inner_const);
4268 if (new_const == 0)
4269 break;
4271 /* If we are associating shift operations, don't let this
4272 produce a shift of the size of the object or larger.
4273 This could occur when we follow a sign-extend by a right
4274 shift on a machine that does a sign-extend as a pair
4275 of shifts. */
4277 if (is_shift && GET_CODE (new_const) == CONST_INT
4278 && INTVAL (new_const) >= GET_MODE_BITSIZE (mode))
4280 /* As an exception, we can turn an ASHIFTRT of this
4281 form into a shift of the number of bits - 1. */
4282 if (code == ASHIFTRT)
4283 new_const = GEN_INT (GET_MODE_BITSIZE (mode) - 1);
4284 else
4285 break;
4288 y = copy_rtx (XEXP (y, 0));
4290 /* If Y contains our first operand (the most common way this
4291 can happen is if Y is a MEM), we would do into an infinite
4292 loop if we tried to fold it. So don't in that case. */
4294 if (! reg_mentioned_p (folded_arg0, y))
4295 y = fold_rtx (y, insn);
4297 return simplify_gen_binary (code, mode, y, new_const);
4299 break;
4301 case DIV: case UDIV:
4302 /* ??? The associative optimization performed immediately above is
4303 also possible for DIV and UDIV using associate_code of MULT.
4304 However, we would need extra code to verify that the
4305 multiplication does not overflow, that is, there is no overflow
4306 in the calculation of new_const. */
4307 break;
4309 default:
4310 break;
4313 new = simplify_binary_operation (code, mode,
4314 const_arg0 ? const_arg0 : folded_arg0,
4315 const_arg1 ? const_arg1 : folded_arg1);
4316 break;
4318 case 'o':
4319 /* (lo_sum (high X) X) is simply X. */
4320 if (code == LO_SUM && const_arg0 != 0
4321 && GET_CODE (const_arg0) == HIGH
4322 && rtx_equal_p (XEXP (const_arg0, 0), const_arg1))
4323 return const_arg1;
4324 break;
4326 case '3':
4327 case 'b':
4328 new = simplify_ternary_operation (code, mode, mode_arg0,
4329 const_arg0 ? const_arg0 : folded_arg0,
4330 const_arg1 ? const_arg1 : folded_arg1,
4331 const_arg2 ? const_arg2 : XEXP (x, 2));
4332 break;
4334 case 'x':
4335 /* Always eliminate CONSTANT_P_RTX at this stage. */
4336 if (code == CONSTANT_P_RTX)
4337 return (const_arg0 ? const1_rtx : const0_rtx);
4338 break;
4341 return new ? new : x;
4344 /* Return a constant value currently equivalent to X.
4345 Return 0 if we don't know one. */
4347 static rtx
4348 equiv_constant (x)
4349 rtx x;
4351 if (GET_CODE (x) == REG
4352 && REGNO_QTY_VALID_P (REGNO (x)))
4354 int x_q = REG_QTY (REGNO (x));
4355 struct qty_table_elem *x_ent = &qty_table[x_q];
4357 if (x_ent->const_rtx)
4358 x = gen_lowpart_if_possible (GET_MODE (x), x_ent->const_rtx);
4361 if (x == 0 || CONSTANT_P (x))
4362 return x;
4364 /* If X is a MEM, try to fold it outside the context of any insn to see if
4365 it might be equivalent to a constant. That handles the case where it
4366 is a constant-pool reference. Then try to look it up in the hash table
4367 in case it is something whose value we have seen before. */
4369 if (GET_CODE (x) == MEM)
4371 struct table_elt *elt;
4373 x = fold_rtx (x, NULL_RTX);
4374 if (CONSTANT_P (x))
4375 return x;
4377 elt = lookup (x, safe_hash (x, GET_MODE (x)) & HASH_MASK, GET_MODE (x));
4378 if (elt == 0)
4379 return 0;
4381 for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
4382 if (elt->is_const && CONSTANT_P (elt->exp))
4383 return elt->exp;
4386 return 0;
4389 /* Assuming that X is an rtx (e.g., MEM, REG or SUBREG) for a fixed-point
4390 number, return an rtx (MEM, SUBREG, or CONST_INT) that refers to the
4391 least-significant part of X.
4392 MODE specifies how big a part of X to return.
4394 If the requested operation cannot be done, 0 is returned.
4396 This is similar to gen_lowpart in emit-rtl.c. */
4399 gen_lowpart_if_possible (mode, x)
4400 enum machine_mode mode;
4401 rtx x;
4403 rtx result = gen_lowpart_common (mode, x);
4405 if (result)
4406 return result;
4407 else if (GET_CODE (x) == MEM)
4409 /* This is the only other case we handle. */
4410 int offset = 0;
4411 rtx new;
4413 if (WORDS_BIG_ENDIAN)
4414 offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
4415 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
4416 if (BYTES_BIG_ENDIAN)
4417 /* Adjust the address so that the address-after-the-data is
4418 unchanged. */
4419 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
4420 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
4422 new = adjust_address_nv (x, mode, offset);
4423 if (! memory_address_p (mode, XEXP (new, 0)))
4424 return 0;
4426 return new;
4428 else
4429 return 0;
4432 /* Given INSN, a jump insn, TAKEN indicates if we are following the "taken"
4433 branch. It will be zero if not.
4435 In certain cases, this can cause us to add an equivalence. For example,
4436 if we are following the taken case of
4437 if (i == 2)
4438 we can add the fact that `i' and '2' are now equivalent.
4440 In any case, we can record that this comparison was passed. If the same
4441 comparison is seen later, we will know its value. */
4443 static void
4444 record_jump_equiv (insn, taken)
4445 rtx insn;
4446 int taken;
4448 int cond_known_true;
4449 rtx op0, op1;
4450 rtx set;
4451 enum machine_mode mode, mode0, mode1;
4452 int reversed_nonequality = 0;
4453 enum rtx_code code;
4455 /* Ensure this is the right kind of insn. */
4456 if (! any_condjump_p (insn))
4457 return;
4458 set = pc_set (insn);
4460 /* See if this jump condition is known true or false. */
4461 if (taken)
4462 cond_known_true = (XEXP (SET_SRC (set), 2) == pc_rtx);
4463 else
4464 cond_known_true = (XEXP (SET_SRC (set), 1) == pc_rtx);
4466 /* Get the type of comparison being done and the operands being compared.
4467 If we had to reverse a non-equality condition, record that fact so we
4468 know that it isn't valid for floating-point. */
4469 code = GET_CODE (XEXP (SET_SRC (set), 0));
4470 op0 = fold_rtx (XEXP (XEXP (SET_SRC (set), 0), 0), insn);
4471 op1 = fold_rtx (XEXP (XEXP (SET_SRC (set), 0), 1), insn);
4473 code = find_comparison_args (code, &op0, &op1, &mode0, &mode1);
4474 if (! cond_known_true)
4476 code = reversed_comparison_code_parts (code, op0, op1, insn);
4478 /* Don't remember if we can't find the inverse. */
4479 if (code == UNKNOWN)
4480 return;
4483 /* The mode is the mode of the non-constant. */
4484 mode = mode0;
4485 if (mode1 != VOIDmode)
4486 mode = mode1;
4488 record_jump_cond (code, mode, op0, op1, reversed_nonequality);
4491 /* We know that comparison CODE applied to OP0 and OP1 in MODE is true.
4492 REVERSED_NONEQUALITY is nonzero if CODE had to be swapped.
4493 Make any useful entries we can with that information. Called from
4494 above function and called recursively. */
4496 static void
4497 record_jump_cond (code, mode, op0, op1, reversed_nonequality)
4498 enum rtx_code code;
4499 enum machine_mode mode;
4500 rtx op0, op1;
4501 int reversed_nonequality;
4503 unsigned op0_hash, op1_hash;
4504 int op0_in_memory, op1_in_memory;
4505 struct table_elt *op0_elt, *op1_elt;
4507 /* If OP0 and OP1 are known equal, and either is a paradoxical SUBREG,
4508 we know that they are also equal in the smaller mode (this is also
4509 true for all smaller modes whether or not there is a SUBREG, but
4510 is not worth testing for with no SUBREG). */
4512 /* Note that GET_MODE (op0) may not equal MODE. */
4513 if (code == EQ && GET_CODE (op0) == SUBREG
4514 && (GET_MODE_SIZE (GET_MODE (op0))
4515 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
4517 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
4518 rtx tem = gen_lowpart_if_possible (inner_mode, op1);
4520 record_jump_cond (code, mode, SUBREG_REG (op0),
4521 tem ? tem : gen_rtx_SUBREG (inner_mode, op1, 0),
4522 reversed_nonequality);
4525 if (code == EQ && GET_CODE (op1) == SUBREG
4526 && (GET_MODE_SIZE (GET_MODE (op1))
4527 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
4529 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
4530 rtx tem = gen_lowpart_if_possible (inner_mode, op0);
4532 record_jump_cond (code, mode, SUBREG_REG (op1),
4533 tem ? tem : gen_rtx_SUBREG (inner_mode, op0, 0),
4534 reversed_nonequality);
4537 /* Similarly, if this is an NE comparison, and either is a SUBREG
4538 making a smaller mode, we know the whole thing is also NE. */
4540 /* Note that GET_MODE (op0) may not equal MODE;
4541 if we test MODE instead, we can get an infinite recursion
4542 alternating between two modes each wider than MODE. */
4544 if (code == NE && GET_CODE (op0) == SUBREG
4545 && subreg_lowpart_p (op0)
4546 && (GET_MODE_SIZE (GET_MODE (op0))
4547 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
4549 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
4550 rtx tem = gen_lowpart_if_possible (inner_mode, op1);
4552 record_jump_cond (code, mode, SUBREG_REG (op0),
4553 tem ? tem : gen_rtx_SUBREG (inner_mode, op1, 0),
4554 reversed_nonequality);
4557 if (code == NE && GET_CODE (op1) == SUBREG
4558 && subreg_lowpart_p (op1)
4559 && (GET_MODE_SIZE (GET_MODE (op1))
4560 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
4562 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
4563 rtx tem = gen_lowpart_if_possible (inner_mode, op0);
4565 record_jump_cond (code, mode, SUBREG_REG (op1),
4566 tem ? tem : gen_rtx_SUBREG (inner_mode, op0, 0),
4567 reversed_nonequality);
4570 /* Hash both operands. */
4572 do_not_record = 0;
4573 hash_arg_in_memory = 0;
4574 op0_hash = HASH (op0, mode);
4575 op0_in_memory = hash_arg_in_memory;
4577 if (do_not_record)
4578 return;
4580 do_not_record = 0;
4581 hash_arg_in_memory = 0;
4582 op1_hash = HASH (op1, mode);
4583 op1_in_memory = hash_arg_in_memory;
4585 if (do_not_record)
4586 return;
4588 /* Look up both operands. */
4589 op0_elt = lookup (op0, op0_hash, mode);
4590 op1_elt = lookup (op1, op1_hash, mode);
4592 /* If both operands are already equivalent or if they are not in the
4593 table but are identical, do nothing. */
4594 if ((op0_elt != 0 && op1_elt != 0
4595 && op0_elt->first_same_value == op1_elt->first_same_value)
4596 || op0 == op1 || rtx_equal_p (op0, op1))
4597 return;
4599 /* If we aren't setting two things equal all we can do is save this
4600 comparison. Similarly if this is floating-point. In the latter
4601 case, OP1 might be zero and both -0.0 and 0.0 are equal to it.
4602 If we record the equality, we might inadvertently delete code
4603 whose intent was to change -0 to +0. */
4605 if (code != EQ || FLOAT_MODE_P (GET_MODE (op0)))
4607 struct qty_table_elem *ent;
4608 int qty;
4610 /* If we reversed a floating-point comparison, if OP0 is not a
4611 register, or if OP1 is neither a register or constant, we can't
4612 do anything. */
4614 if (GET_CODE (op1) != REG)
4615 op1 = equiv_constant (op1);
4617 if ((reversed_nonequality && FLOAT_MODE_P (mode))
4618 || GET_CODE (op0) != REG || op1 == 0)
4619 return;
4621 /* Put OP0 in the hash table if it isn't already. This gives it a
4622 new quantity number. */
4623 if (op0_elt == 0)
4625 if (insert_regs (op0, NULL, 0))
4627 rehash_using_reg (op0);
4628 op0_hash = HASH (op0, mode);
4630 /* If OP0 is contained in OP1, this changes its hash code
4631 as well. Faster to rehash than to check, except
4632 for the simple case of a constant. */
4633 if (! CONSTANT_P (op1))
4634 op1_hash = HASH (op1,mode);
4637 op0_elt = insert (op0, NULL, op0_hash, mode);
4638 op0_elt->in_memory = op0_in_memory;
4641 qty = REG_QTY (REGNO (op0));
4642 ent = &qty_table[qty];
4644 ent->comparison_code = code;
4645 if (GET_CODE (op1) == REG)
4647 /* Look it up again--in case op0 and op1 are the same. */
4648 op1_elt = lookup (op1, op1_hash, mode);
4650 /* Put OP1 in the hash table so it gets a new quantity number. */
4651 if (op1_elt == 0)
4653 if (insert_regs (op1, NULL, 0))
4655 rehash_using_reg (op1);
4656 op1_hash = HASH (op1, mode);
4659 op1_elt = insert (op1, NULL, op1_hash, mode);
4660 op1_elt->in_memory = op1_in_memory;
4663 ent->comparison_const = NULL_RTX;
4664 ent->comparison_qty = REG_QTY (REGNO (op1));
4666 else
4668 ent->comparison_const = op1;
4669 ent->comparison_qty = -1;
4672 return;
4675 /* If either side is still missing an equivalence, make it now,
4676 then merge the equivalences. */
4678 if (op0_elt == 0)
4680 if (insert_regs (op0, NULL, 0))
4682 rehash_using_reg (op0);
4683 op0_hash = HASH (op0, mode);
4686 op0_elt = insert (op0, NULL, op0_hash, mode);
4687 op0_elt->in_memory = op0_in_memory;
4690 if (op1_elt == 0)
4692 if (insert_regs (op1, NULL, 0))
4694 rehash_using_reg (op1);
4695 op1_hash = HASH (op1, mode);
4698 op1_elt = insert (op1, NULL, op1_hash, mode);
4699 op1_elt->in_memory = op1_in_memory;
4702 merge_equiv_classes (op0_elt, op1_elt);
4703 last_jump_equiv_class = op0_elt;
4706 /* CSE processing for one instruction.
4707 First simplify sources and addresses of all assignments
4708 in the instruction, using previously-computed equivalents values.
4709 Then install the new sources and destinations in the table
4710 of available values.
4712 If LIBCALL_INSN is nonzero, don't record any equivalence made in
4713 the insn. It means that INSN is inside libcall block. In this
4714 case LIBCALL_INSN is the corresponding insn with REG_LIBCALL. */
4716 /* Data on one SET contained in the instruction. */
4718 struct set
4720 /* The SET rtx itself. */
4721 rtx rtl;
4722 /* The SET_SRC of the rtx (the original value, if it is changing). */
4723 rtx src;
4724 /* The hash-table element for the SET_SRC of the SET. */
4725 struct table_elt *src_elt;
4726 /* Hash value for the SET_SRC. */
4727 unsigned src_hash;
4728 /* Hash value for the SET_DEST. */
4729 unsigned dest_hash;
4730 /* The SET_DEST, with SUBREG, etc., stripped. */
4731 rtx inner_dest;
4732 /* Nonzero if the SET_SRC is in memory. */
4733 char src_in_memory;
4734 /* Nonzero if the SET_SRC contains something
4735 whose value cannot be predicted and understood. */
4736 char src_volatile;
4737 /* Original machine mode, in case it becomes a CONST_INT. */
4738 enum machine_mode mode;
4739 /* A constant equivalent for SET_SRC, if any. */
4740 rtx src_const;
4741 /* Original SET_SRC value used for libcall notes. */
4742 rtx orig_src;
4743 /* Hash value of constant equivalent for SET_SRC. */
4744 unsigned src_const_hash;
4745 /* Table entry for constant equivalent for SET_SRC, if any. */
4746 struct table_elt *src_const_elt;
4749 static void
4750 cse_insn (insn, libcall_insn)
4751 rtx insn;
4752 rtx libcall_insn;
4754 rtx x = PATTERN (insn);
4755 int i;
4756 rtx tem;
4757 int n_sets = 0;
4759 #ifdef HAVE_cc0
4760 /* Records what this insn does to set CC0. */
4761 rtx this_insn_cc0 = 0;
4762 enum machine_mode this_insn_cc0_mode = VOIDmode;
4763 #endif
4765 rtx src_eqv = 0;
4766 struct table_elt *src_eqv_elt = 0;
4767 int src_eqv_volatile = 0;
4768 int src_eqv_in_memory = 0;
4769 unsigned src_eqv_hash = 0;
4771 struct set *sets = (struct set *) 0;
4773 this_insn = insn;
4775 /* Find all the SETs and CLOBBERs in this instruction.
4776 Record all the SETs in the array `set' and count them.
4777 Also determine whether there is a CLOBBER that invalidates
4778 all memory references, or all references at varying addresses. */
4780 if (GET_CODE (insn) == CALL_INSN)
4782 for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
4784 if (GET_CODE (XEXP (tem, 0)) == CLOBBER)
4785 invalidate (SET_DEST (XEXP (tem, 0)), VOIDmode);
4786 XEXP (tem, 0) = canon_reg (XEXP (tem, 0), insn);
4790 if (GET_CODE (x) == SET)
4792 sets = (struct set *) alloca (sizeof (struct set));
4793 sets[0].rtl = x;
4795 /* Ignore SETs that are unconditional jumps.
4796 They never need cse processing, so this does not hurt.
4797 The reason is not efficiency but rather
4798 so that we can test at the end for instructions
4799 that have been simplified to unconditional jumps
4800 and not be misled by unchanged instructions
4801 that were unconditional jumps to begin with. */
4802 if (SET_DEST (x) == pc_rtx
4803 && GET_CODE (SET_SRC (x)) == LABEL_REF)
4806 /* Don't count call-insns, (set (reg 0) (call ...)), as a set.
4807 The hard function value register is used only once, to copy to
4808 someplace else, so it isn't worth cse'ing (and on 80386 is unsafe)!
4809 Ensure we invalidate the destination register. On the 80386 no
4810 other code would invalidate it since it is a fixed_reg.
4811 We need not check the return of apply_change_group; see canon_reg. */
4813 else if (GET_CODE (SET_SRC (x)) == CALL)
4815 canon_reg (SET_SRC (x), insn);
4816 apply_change_group ();
4817 fold_rtx (SET_SRC (x), insn);
4818 invalidate (SET_DEST (x), VOIDmode);
4820 else
4821 n_sets = 1;
4823 else if (GET_CODE (x) == PARALLEL)
4825 int lim = XVECLEN (x, 0);
4827 sets = (struct set *) alloca (lim * sizeof (struct set));
4829 /* Find all regs explicitly clobbered in this insn,
4830 and ensure they are not replaced with any other regs
4831 elsewhere in this insn.
4832 When a reg that is clobbered is also used for input,
4833 we should presume that that is for a reason,
4834 and we should not substitute some other register
4835 which is not supposed to be clobbered.
4836 Therefore, this loop cannot be merged into the one below
4837 because a CALL may precede a CLOBBER and refer to the
4838 value clobbered. We must not let a canonicalization do
4839 anything in that case. */
4840 for (i = 0; i < lim; i++)
4842 rtx y = XVECEXP (x, 0, i);
4843 if (GET_CODE (y) == CLOBBER)
4845 rtx clobbered = XEXP (y, 0);
4847 if (GET_CODE (clobbered) == REG
4848 || GET_CODE (clobbered) == SUBREG)
4849 invalidate (clobbered, VOIDmode);
4850 else if (GET_CODE (clobbered) == STRICT_LOW_PART
4851 || GET_CODE (clobbered) == ZERO_EXTRACT)
4852 invalidate (XEXP (clobbered, 0), GET_MODE (clobbered));
4856 for (i = 0; i < lim; i++)
4858 rtx y = XVECEXP (x, 0, i);
4859 if (GET_CODE (y) == SET)
4861 /* As above, we ignore unconditional jumps and call-insns and
4862 ignore the result of apply_change_group. */
4863 if (GET_CODE (SET_SRC (y)) == CALL)
4865 canon_reg (SET_SRC (y), insn);
4866 apply_change_group ();
4867 fold_rtx (SET_SRC (y), insn);
4868 invalidate (SET_DEST (y), VOIDmode);
4870 else if (SET_DEST (y) == pc_rtx
4871 && GET_CODE (SET_SRC (y)) == LABEL_REF)
4873 else
4874 sets[n_sets++].rtl = y;
4876 else if (GET_CODE (y) == CLOBBER)
4878 /* If we clobber memory, canon the address.
4879 This does nothing when a register is clobbered
4880 because we have already invalidated the reg. */
4881 if (GET_CODE (XEXP (y, 0)) == MEM)
4882 canon_reg (XEXP (y, 0), NULL_RTX);
4884 else if (GET_CODE (y) == USE
4885 && ! (GET_CODE (XEXP (y, 0)) == REG
4886 && REGNO (XEXP (y, 0)) < FIRST_PSEUDO_REGISTER))
4887 canon_reg (y, NULL_RTX);
4888 else if (GET_CODE (y) == CALL)
4890 /* The result of apply_change_group can be ignored; see
4891 canon_reg. */
4892 canon_reg (y, insn);
4893 apply_change_group ();
4894 fold_rtx (y, insn);
4898 else if (GET_CODE (x) == CLOBBER)
4900 if (GET_CODE (XEXP (x, 0)) == MEM)
4901 canon_reg (XEXP (x, 0), NULL_RTX);
4904 /* Canonicalize a USE of a pseudo register or memory location. */
4905 else if (GET_CODE (x) == USE
4906 && ! (GET_CODE (XEXP (x, 0)) == REG
4907 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER))
4908 canon_reg (XEXP (x, 0), NULL_RTX);
4909 else if (GET_CODE (x) == CALL)
4911 /* The result of apply_change_group can be ignored; see canon_reg. */
4912 canon_reg (x, insn);
4913 apply_change_group ();
4914 fold_rtx (x, insn);
4917 /* Store the equivalent value in SRC_EQV, if different, or if the DEST
4918 is a STRICT_LOW_PART. The latter condition is necessary because SRC_EQV
4919 is handled specially for this case, and if it isn't set, then there will
4920 be no equivalence for the destination. */
4921 if (n_sets == 1 && REG_NOTES (insn) != 0
4922 && (tem = find_reg_note (insn, REG_EQUAL, NULL_RTX)) != 0
4923 && (! rtx_equal_p (XEXP (tem, 0), SET_SRC (sets[0].rtl))
4924 || GET_CODE (SET_DEST (sets[0].rtl)) == STRICT_LOW_PART))
4926 src_eqv = fold_rtx (canon_reg (XEXP (tem, 0), NULL_RTX), insn);
4927 XEXP (tem, 0) = src_eqv;
4930 /* Canonicalize sources and addresses of destinations.
4931 We do this in a separate pass to avoid problems when a MATCH_DUP is
4932 present in the insn pattern. In that case, we want to ensure that
4933 we don't break the duplicate nature of the pattern. So we will replace
4934 both operands at the same time. Otherwise, we would fail to find an
4935 equivalent substitution in the loop calling validate_change below.
4937 We used to suppress canonicalization of DEST if it appears in SRC,
4938 but we don't do this any more. */
4940 for (i = 0; i < n_sets; i++)
4942 rtx dest = SET_DEST (sets[i].rtl);
4943 rtx src = SET_SRC (sets[i].rtl);
4944 rtx new = canon_reg (src, insn);
4945 int insn_code;
4947 sets[i].orig_src = src;
4948 if ((GET_CODE (new) == REG && GET_CODE (src) == REG
4949 && ((REGNO (new) < FIRST_PSEUDO_REGISTER)
4950 != (REGNO (src) < FIRST_PSEUDO_REGISTER)))
4951 || (insn_code = recog_memoized (insn)) < 0
4952 || insn_data[insn_code].n_dups > 0)
4953 validate_change (insn, &SET_SRC (sets[i].rtl), new, 1);
4954 else
4955 SET_SRC (sets[i].rtl) = new;
4957 if (GET_CODE (dest) == ZERO_EXTRACT || GET_CODE (dest) == SIGN_EXTRACT)
4959 validate_change (insn, &XEXP (dest, 1),
4960 canon_reg (XEXP (dest, 1), insn), 1);
4961 validate_change (insn, &XEXP (dest, 2),
4962 canon_reg (XEXP (dest, 2), insn), 1);
4965 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
4966 || GET_CODE (dest) == ZERO_EXTRACT
4967 || GET_CODE (dest) == SIGN_EXTRACT)
4968 dest = XEXP (dest, 0);
4970 if (GET_CODE (dest) == MEM)
4971 canon_reg (dest, insn);
4974 /* Now that we have done all the replacements, we can apply the change
4975 group and see if they all work. Note that this will cause some
4976 canonicalizations that would have worked individually not to be applied
4977 because some other canonicalization didn't work, but this should not
4978 occur often.
4980 The result of apply_change_group can be ignored; see canon_reg. */
4982 apply_change_group ();
4984 /* Set sets[i].src_elt to the class each source belongs to.
4985 Detect assignments from or to volatile things
4986 and set set[i] to zero so they will be ignored
4987 in the rest of this function.
4989 Nothing in this loop changes the hash table or the register chains. */
4991 for (i = 0; i < n_sets; i++)
4993 rtx src, dest;
4994 rtx src_folded;
4995 struct table_elt *elt = 0, *p;
4996 enum machine_mode mode;
4997 rtx src_eqv_here;
4998 rtx src_const = 0;
4999 rtx src_related = 0;
5000 struct table_elt *src_const_elt = 0;
5001 int src_cost = MAX_COST;
5002 int src_eqv_cost = MAX_COST;
5003 int src_folded_cost = MAX_COST;
5004 int src_related_cost = MAX_COST;
5005 int src_elt_cost = MAX_COST;
5006 int src_regcost = MAX_COST;
5007 int src_eqv_regcost = MAX_COST;
5008 int src_folded_regcost = MAX_COST;
5009 int src_related_regcost = MAX_COST;
5010 int src_elt_regcost = MAX_COST;
5011 /* Set nonzero if we need to call force_const_mem on with the
5012 contents of src_folded before using it. */
5013 int src_folded_force_flag = 0;
5015 dest = SET_DEST (sets[i].rtl);
5016 src = SET_SRC (sets[i].rtl);
5018 /* If SRC is a constant that has no machine mode,
5019 hash it with the destination's machine mode.
5020 This way we can keep different modes separate. */
5022 mode = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
5023 sets[i].mode = mode;
5025 if (src_eqv)
5027 enum machine_mode eqvmode = mode;
5028 if (GET_CODE (dest) == STRICT_LOW_PART)
5029 eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
5030 do_not_record = 0;
5031 hash_arg_in_memory = 0;
5032 src_eqv_hash = HASH (src_eqv, eqvmode);
5034 /* Find the equivalence class for the equivalent expression. */
5036 if (!do_not_record)
5037 src_eqv_elt = lookup (src_eqv, src_eqv_hash, eqvmode);
5039 src_eqv_volatile = do_not_record;
5040 src_eqv_in_memory = hash_arg_in_memory;
5043 /* If this is a STRICT_LOW_PART assignment, src_eqv corresponds to the
5044 value of the INNER register, not the destination. So it is not
5045 a valid substitution for the source. But save it for later. */
5046 if (GET_CODE (dest) == STRICT_LOW_PART)
5047 src_eqv_here = 0;
5048 else
5049 src_eqv_here = src_eqv;
5051 /* Simplify and foldable subexpressions in SRC. Then get the fully-
5052 simplified result, which may not necessarily be valid. */
5053 src_folded = fold_rtx (src, insn);
5055 #if 0
5056 /* ??? This caused bad code to be generated for the m68k port with -O2.
5057 Suppose src is (CONST_INT -1), and that after truncation src_folded
5058 is (CONST_INT 3). Suppose src_folded is then used for src_const.
5059 At the end we will add src and src_const to the same equivalence
5060 class. We now have 3 and -1 on the same equivalence class. This
5061 causes later instructions to be mis-optimized. */
5062 /* If storing a constant in a bitfield, pre-truncate the constant
5063 so we will be able to record it later. */
5064 if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
5065 || GET_CODE (SET_DEST (sets[i].rtl)) == SIGN_EXTRACT)
5067 rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
5069 if (GET_CODE (src) == CONST_INT
5070 && GET_CODE (width) == CONST_INT
5071 && INTVAL (width) < HOST_BITS_PER_WIDE_INT
5072 && (INTVAL (src) & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
5073 src_folded
5074 = GEN_INT (INTVAL (src) & (((HOST_WIDE_INT) 1
5075 << INTVAL (width)) - 1));
5077 #endif
5079 /* Compute SRC's hash code, and also notice if it
5080 should not be recorded at all. In that case,
5081 prevent any further processing of this assignment. */
5082 do_not_record = 0;
5083 hash_arg_in_memory = 0;
5085 sets[i].src = src;
5086 sets[i].src_hash = HASH (src, mode);
5087 sets[i].src_volatile = do_not_record;
5088 sets[i].src_in_memory = hash_arg_in_memory;
5090 /* If SRC is a MEM, there is a REG_EQUIV note for SRC, and DEST is
5091 a pseudo, do not record SRC. Using SRC as a replacement for
5092 anything else will be incorrect in that situation. Note that
5093 this usually occurs only for stack slots, in which case all the
5094 RTL would be referring to SRC, so we don't lose any optimization
5095 opportunities by not having SRC in the hash table. */
5097 if (GET_CODE (src) == MEM
5098 && find_reg_note (insn, REG_EQUIV, NULL_RTX) != 0
5099 && GET_CODE (dest) == REG
5100 && REGNO (dest) >= FIRST_PSEUDO_REGISTER)
5101 sets[i].src_volatile = 1;
5103 #if 0
5104 /* It is no longer clear why we used to do this, but it doesn't
5105 appear to still be needed. So let's try without it since this
5106 code hurts cse'ing widened ops. */
5107 /* If source is a perverse subreg (such as QI treated as an SI),
5108 treat it as volatile. It may do the work of an SI in one context
5109 where the extra bits are not being used, but cannot replace an SI
5110 in general. */
5111 if (GET_CODE (src) == SUBREG
5112 && (GET_MODE_SIZE (GET_MODE (src))
5113 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))))
5114 sets[i].src_volatile = 1;
5115 #endif
5117 /* Locate all possible equivalent forms for SRC. Try to replace
5118 SRC in the insn with each cheaper equivalent.
5120 We have the following types of equivalents: SRC itself, a folded
5121 version, a value given in a REG_EQUAL note, or a value related
5122 to a constant.
5124 Each of these equivalents may be part of an additional class
5125 of equivalents (if more than one is in the table, they must be in
5126 the same class; we check for this).
5128 If the source is volatile, we don't do any table lookups.
5130 We note any constant equivalent for possible later use in a
5131 REG_NOTE. */
5133 if (!sets[i].src_volatile)
5134 elt = lookup (src, sets[i].src_hash, mode);
5136 sets[i].src_elt = elt;
5138 if (elt && src_eqv_here && src_eqv_elt)
5140 if (elt->first_same_value != src_eqv_elt->first_same_value)
5142 /* The REG_EQUAL is indicating that two formerly distinct
5143 classes are now equivalent. So merge them. */
5144 merge_equiv_classes (elt, src_eqv_elt);
5145 src_eqv_hash = HASH (src_eqv, elt->mode);
5146 src_eqv_elt = lookup (src_eqv, src_eqv_hash, elt->mode);
5149 src_eqv_here = 0;
5152 else if (src_eqv_elt)
5153 elt = src_eqv_elt;
5155 /* Try to find a constant somewhere and record it in `src_const'.
5156 Record its table element, if any, in `src_const_elt'. Look in
5157 any known equivalences first. (If the constant is not in the
5158 table, also set `sets[i].src_const_hash'). */
5159 if (elt)
5160 for (p = elt->first_same_value; p; p = p->next_same_value)
5161 if (p->is_const)
5163 src_const = p->exp;
5164 src_const_elt = elt;
5165 break;
5168 if (src_const == 0
5169 && (CONSTANT_P (src_folded)
5170 /* Consider (minus (label_ref L1) (label_ref L2)) as
5171 "constant" here so we will record it. This allows us
5172 to fold switch statements when an ADDR_DIFF_VEC is used. */
5173 || (GET_CODE (src_folded) == MINUS
5174 && GET_CODE (XEXP (src_folded, 0)) == LABEL_REF
5175 && GET_CODE (XEXP (src_folded, 1)) == LABEL_REF)))
5176 src_const = src_folded, src_const_elt = elt;
5177 else if (src_const == 0 && src_eqv_here && CONSTANT_P (src_eqv_here))
5178 src_const = src_eqv_here, src_const_elt = src_eqv_elt;
5180 /* If we don't know if the constant is in the table, get its
5181 hash code and look it up. */
5182 if (src_const && src_const_elt == 0)
5184 sets[i].src_const_hash = HASH (src_const, mode);
5185 src_const_elt = lookup (src_const, sets[i].src_const_hash, mode);
5188 sets[i].src_const = src_const;
5189 sets[i].src_const_elt = src_const_elt;
5191 /* If the constant and our source are both in the table, mark them as
5192 equivalent. Otherwise, if a constant is in the table but the source
5193 isn't, set ELT to it. */
5194 if (src_const_elt && elt
5195 && src_const_elt->first_same_value != elt->first_same_value)
5196 merge_equiv_classes (elt, src_const_elt);
5197 else if (src_const_elt && elt == 0)
5198 elt = src_const_elt;
5200 /* See if there is a register linearly related to a constant
5201 equivalent of SRC. */
5202 if (src_const
5203 && (GET_CODE (src_const) == CONST
5204 || (src_const_elt && src_const_elt->related_value != 0)))
5206 src_related = use_related_value (src_const, src_const_elt);
5207 if (src_related)
5209 struct table_elt *src_related_elt
5210 = lookup (src_related, HASH (src_related, mode), mode);
5211 if (src_related_elt && elt)
5213 if (elt->first_same_value
5214 != src_related_elt->first_same_value)
5215 /* This can occur when we previously saw a CONST
5216 involving a SYMBOL_REF and then see the SYMBOL_REF
5217 twice. Merge the involved classes. */
5218 merge_equiv_classes (elt, src_related_elt);
5220 src_related = 0;
5221 src_related_elt = 0;
5223 else if (src_related_elt && elt == 0)
5224 elt = src_related_elt;
5228 /* See if we have a CONST_INT that is already in a register in a
5229 wider mode. */
5231 if (src_const && src_related == 0 && GET_CODE (src_const) == CONST_INT
5232 && GET_MODE_CLASS (mode) == MODE_INT
5233 && GET_MODE_BITSIZE (mode) < BITS_PER_WORD)
5235 enum machine_mode wider_mode;
5237 for (wider_mode = GET_MODE_WIDER_MODE (mode);
5238 GET_MODE_BITSIZE (wider_mode) <= BITS_PER_WORD
5239 && src_related == 0;
5240 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
5242 struct table_elt *const_elt
5243 = lookup (src_const, HASH (src_const, wider_mode), wider_mode);
5245 if (const_elt == 0)
5246 continue;
5248 for (const_elt = const_elt->first_same_value;
5249 const_elt; const_elt = const_elt->next_same_value)
5250 if (GET_CODE (const_elt->exp) == REG)
5252 src_related = gen_lowpart_if_possible (mode,
5253 const_elt->exp);
5254 break;
5259 /* Another possibility is that we have an AND with a constant in
5260 a mode narrower than a word. If so, it might have been generated
5261 as part of an "if" which would narrow the AND. If we already
5262 have done the AND in a wider mode, we can use a SUBREG of that
5263 value. */
5265 if (flag_expensive_optimizations && ! src_related
5266 && GET_CODE (src) == AND && GET_CODE (XEXP (src, 1)) == CONST_INT
5267 && GET_MODE_SIZE (mode) < UNITS_PER_WORD)
5269 enum machine_mode tmode;
5270 rtx new_and = gen_rtx_AND (VOIDmode, NULL_RTX, XEXP (src, 1));
5272 for (tmode = GET_MODE_WIDER_MODE (mode);
5273 GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
5274 tmode = GET_MODE_WIDER_MODE (tmode))
5276 rtx inner = gen_lowpart_if_possible (tmode, XEXP (src, 0));
5277 struct table_elt *larger_elt;
5279 if (inner)
5281 PUT_MODE (new_and, tmode);
5282 XEXP (new_and, 0) = inner;
5283 larger_elt = lookup (new_and, HASH (new_and, tmode), tmode);
5284 if (larger_elt == 0)
5285 continue;
5287 for (larger_elt = larger_elt->first_same_value;
5288 larger_elt; larger_elt = larger_elt->next_same_value)
5289 if (GET_CODE (larger_elt->exp) == REG)
5291 src_related
5292 = gen_lowpart_if_possible (mode, larger_elt->exp);
5293 break;
5296 if (src_related)
5297 break;
5302 #ifdef LOAD_EXTEND_OP
5303 /* See if a MEM has already been loaded with a widening operation;
5304 if it has, we can use a subreg of that. Many CISC machines
5305 also have such operations, but this is only likely to be
5306 beneficial these machines. */
5308 if (flag_expensive_optimizations && src_related == 0
5309 && (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
5310 && GET_MODE_CLASS (mode) == MODE_INT
5311 && GET_CODE (src) == MEM && ! do_not_record
5312 && LOAD_EXTEND_OP (mode) != NIL)
5314 enum machine_mode tmode;
5316 /* Set what we are trying to extend and the operation it might
5317 have been extended with. */
5318 PUT_CODE (memory_extend_rtx, LOAD_EXTEND_OP (mode));
5319 XEXP (memory_extend_rtx, 0) = src;
5321 for (tmode = GET_MODE_WIDER_MODE (mode);
5322 GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
5323 tmode = GET_MODE_WIDER_MODE (tmode))
5325 struct table_elt *larger_elt;
5327 PUT_MODE (memory_extend_rtx, tmode);
5328 larger_elt = lookup (memory_extend_rtx,
5329 HASH (memory_extend_rtx, tmode), tmode);
5330 if (larger_elt == 0)
5331 continue;
5333 for (larger_elt = larger_elt->first_same_value;
5334 larger_elt; larger_elt = larger_elt->next_same_value)
5335 if (GET_CODE (larger_elt->exp) == REG)
5337 src_related = gen_lowpart_if_possible (mode,
5338 larger_elt->exp);
5339 break;
5342 if (src_related)
5343 break;
5346 #endif /* LOAD_EXTEND_OP */
5348 if (src == src_folded)
5349 src_folded = 0;
5351 /* At this point, ELT, if nonzero, points to a class of expressions
5352 equivalent to the source of this SET and SRC, SRC_EQV, SRC_FOLDED,
5353 and SRC_RELATED, if nonzero, each contain additional equivalent
5354 expressions. Prune these latter expressions by deleting expressions
5355 already in the equivalence class.
5357 Check for an equivalent identical to the destination. If found,
5358 this is the preferred equivalent since it will likely lead to
5359 elimination of the insn. Indicate this by placing it in
5360 `src_related'. */
5362 if (elt)
5363 elt = elt->first_same_value;
5364 for (p = elt; p; p = p->next_same_value)
5366 enum rtx_code code = GET_CODE (p->exp);
5368 /* If the expression is not valid, ignore it. Then we do not
5369 have to check for validity below. In most cases, we can use
5370 `rtx_equal_p', since canonicalization has already been done. */
5371 if (code != REG && ! exp_equiv_p (p->exp, p->exp, 1, 0))
5372 continue;
5374 /* Also skip paradoxical subregs, unless that's what we're
5375 looking for. */
5376 if (code == SUBREG
5377 && (GET_MODE_SIZE (GET_MODE (p->exp))
5378 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))
5379 && ! (src != 0
5380 && GET_CODE (src) == SUBREG
5381 && GET_MODE (src) == GET_MODE (p->exp)
5382 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
5383 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))))
5384 continue;
5386 if (src && GET_CODE (src) == code && rtx_equal_p (src, p->exp))
5387 src = 0;
5388 else if (src_folded && GET_CODE (src_folded) == code
5389 && rtx_equal_p (src_folded, p->exp))
5390 src_folded = 0;
5391 else if (src_eqv_here && GET_CODE (src_eqv_here) == code
5392 && rtx_equal_p (src_eqv_here, p->exp))
5393 src_eqv_here = 0;
5394 else if (src_related && GET_CODE (src_related) == code
5395 && rtx_equal_p (src_related, p->exp))
5396 src_related = 0;
5398 /* This is the same as the destination of the insns, we want
5399 to prefer it. Copy it to src_related. The code below will
5400 then give it a negative cost. */
5401 if (GET_CODE (dest) == code && rtx_equal_p (p->exp, dest))
5402 src_related = dest;
5405 /* Find the cheapest valid equivalent, trying all the available
5406 possibilities. Prefer items not in the hash table to ones
5407 that are when they are equal cost. Note that we can never
5408 worsen an insn as the current contents will also succeed.
5409 If we find an equivalent identical to the destination, use it as best,
5410 since this insn will probably be eliminated in that case. */
5411 if (src)
5413 if (rtx_equal_p (src, dest))
5414 src_cost = src_regcost = -1;
5415 else
5417 src_cost = COST (src);
5418 src_regcost = approx_reg_cost (src);
5422 if (src_eqv_here)
5424 if (rtx_equal_p (src_eqv_here, dest))
5425 src_eqv_cost = src_eqv_regcost = -1;
5426 else
5428 src_eqv_cost = COST (src_eqv_here);
5429 src_eqv_regcost = approx_reg_cost (src_eqv_here);
5433 if (src_folded)
5435 if (rtx_equal_p (src_folded, dest))
5436 src_folded_cost = src_folded_regcost = -1;
5437 else
5439 src_folded_cost = COST (src_folded);
5440 src_folded_regcost = approx_reg_cost (src_folded);
5444 if (src_related)
5446 if (rtx_equal_p (src_related, dest))
5447 src_related_cost = src_related_regcost = -1;
5448 else
5450 src_related_cost = COST (src_related);
5451 src_related_regcost = approx_reg_cost (src_related);
5455 /* If this was an indirect jump insn, a known label will really be
5456 cheaper even though it looks more expensive. */
5457 if (dest == pc_rtx && src_const && GET_CODE (src_const) == LABEL_REF)
5458 src_folded = src_const, src_folded_cost = src_folded_regcost = -1;
5460 /* Terminate loop when replacement made. This must terminate since
5461 the current contents will be tested and will always be valid. */
5462 while (1)
5464 rtx trial;
5466 /* Skip invalid entries. */
5467 while (elt && GET_CODE (elt->exp) != REG
5468 && ! exp_equiv_p (elt->exp, elt->exp, 1, 0))
5469 elt = elt->next_same_value;
5471 /* A paradoxical subreg would be bad here: it'll be the right
5472 size, but later may be adjusted so that the upper bits aren't
5473 what we want. So reject it. */
5474 if (elt != 0
5475 && GET_CODE (elt->exp) == SUBREG
5476 && (GET_MODE_SIZE (GET_MODE (elt->exp))
5477 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))
5478 /* It is okay, though, if the rtx we're trying to match
5479 will ignore any of the bits we can't predict. */
5480 && ! (src != 0
5481 && GET_CODE (src) == SUBREG
5482 && GET_MODE (src) == GET_MODE (elt->exp)
5483 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
5484 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))))
5486 elt = elt->next_same_value;
5487 continue;
5490 if (elt)
5492 src_elt_cost = elt->cost;
5493 src_elt_regcost = elt->regcost;
5496 /* Find cheapest and skip it for the next time. For items
5497 of equal cost, use this order:
5498 src_folded, src, src_eqv, src_related and hash table entry. */
5499 if (src_folded
5500 && preferrable (src_folded_cost, src_folded_regcost,
5501 src_cost, src_regcost) <= 0
5502 && preferrable (src_folded_cost, src_folded_regcost,
5503 src_eqv_cost, src_eqv_regcost) <= 0
5504 && preferrable (src_folded_cost, src_folded_regcost,
5505 src_related_cost, src_related_regcost) <= 0
5506 && preferrable (src_folded_cost, src_folded_regcost,
5507 src_elt_cost, src_elt_regcost) <= 0)
5509 trial = src_folded, src_folded_cost = MAX_COST;
5510 if (src_folded_force_flag)
5511 trial = force_const_mem (mode, trial);
5513 else if (src
5514 && preferrable (src_cost, src_regcost,
5515 src_eqv_cost, src_eqv_regcost) <= 0
5516 && preferrable (src_cost, src_regcost,
5517 src_related_cost, src_related_regcost) <= 0
5518 && preferrable (src_cost, src_regcost,
5519 src_elt_cost, src_elt_regcost) <= 0)
5520 trial = src, src_cost = MAX_COST;
5521 else if (src_eqv_here
5522 && preferrable (src_eqv_cost, src_eqv_regcost,
5523 src_related_cost, src_related_regcost) <= 0
5524 && preferrable (src_eqv_cost, src_eqv_regcost,
5525 src_elt_cost, src_elt_regcost) <= 0)
5526 trial = copy_rtx (src_eqv_here), src_eqv_cost = MAX_COST;
5527 else if (src_related
5528 && preferrable (src_related_cost, src_related_regcost,
5529 src_elt_cost, src_elt_regcost) <= 0)
5530 trial = copy_rtx (src_related), src_related_cost = MAX_COST;
5531 else
5533 trial = copy_rtx (elt->exp);
5534 elt = elt->next_same_value;
5535 src_elt_cost = MAX_COST;
5538 /* We don't normally have an insn matching (set (pc) (pc)), so
5539 check for this separately here. We will delete such an
5540 insn below.
5542 For other cases such as a table jump or conditional jump
5543 where we know the ultimate target, go ahead and replace the
5544 operand. While that may not make a valid insn, we will
5545 reemit the jump below (and also insert any necessary
5546 barriers). */
5547 if (n_sets == 1 && dest == pc_rtx
5548 && (trial == pc_rtx
5549 || (GET_CODE (trial) == LABEL_REF
5550 && ! condjump_p (insn))))
5552 SET_SRC (sets[i].rtl) = trial;
5553 cse_jumps_altered = 1;
5554 break;
5557 /* Look for a substitution that makes a valid insn. */
5558 else if (validate_change (insn, &SET_SRC (sets[i].rtl), trial, 0))
5560 /* If we just made a substitution inside a libcall, then we
5561 need to make the same substitution in any notes attached
5562 to the RETVAL insn. */
5563 if (libcall_insn
5564 && (GET_CODE (sets[i].orig_src) == REG
5565 || GET_CODE (sets[i].orig_src) == SUBREG
5566 || GET_CODE (sets[i].orig_src) == MEM))
5567 replace_rtx (REG_NOTES (libcall_insn), sets[i].orig_src,
5568 canon_reg (SET_SRC (sets[i].rtl), insn));
5570 /* The result of apply_change_group can be ignored; see
5571 canon_reg. */
5573 validate_change (insn, &SET_SRC (sets[i].rtl),
5574 canon_reg (SET_SRC (sets[i].rtl), insn),
5576 apply_change_group ();
5577 break;
5580 /* If we previously found constant pool entries for
5581 constants and this is a constant, try making a
5582 pool entry. Put it in src_folded unless we already have done
5583 this since that is where it likely came from. */
5585 else if (constant_pool_entries_cost
5586 && CONSTANT_P (trial)
5587 /* Reject cases that will abort in decode_rtx_const.
5588 On the alpha when simplifying a switch, we get
5589 (const (truncate (minus (label_ref) (label_ref)))). */
5590 && ! (GET_CODE (trial) == CONST
5591 && GET_CODE (XEXP (trial, 0)) == TRUNCATE)
5592 /* Likewise on IA-64, except without the truncate. */
5593 && ! (GET_CODE (trial) == CONST
5594 && GET_CODE (XEXP (trial, 0)) == MINUS
5595 && GET_CODE (XEXP (XEXP (trial, 0), 0)) == LABEL_REF
5596 && GET_CODE (XEXP (XEXP (trial, 0), 1)) == LABEL_REF)
5597 && (src_folded == 0
5598 || (GET_CODE (src_folded) != MEM
5599 && ! src_folded_force_flag))
5600 && GET_MODE_CLASS (mode) != MODE_CC
5601 && mode != VOIDmode)
5603 src_folded_force_flag = 1;
5604 src_folded = trial;
5605 src_folded_cost = constant_pool_entries_cost;
5609 src = SET_SRC (sets[i].rtl);
5611 /* In general, it is good to have a SET with SET_SRC == SET_DEST.
5612 However, there is an important exception: If both are registers
5613 that are not the head of their equivalence class, replace SET_SRC
5614 with the head of the class. If we do not do this, we will have
5615 both registers live over a portion of the basic block. This way,
5616 their lifetimes will likely abut instead of overlapping. */
5617 if (GET_CODE (dest) == REG
5618 && REGNO_QTY_VALID_P (REGNO (dest)))
5620 int dest_q = REG_QTY (REGNO (dest));
5621 struct qty_table_elem *dest_ent = &qty_table[dest_q];
5623 if (dest_ent->mode == GET_MODE (dest)
5624 && dest_ent->first_reg != REGNO (dest)
5625 && GET_CODE (src) == REG && REGNO (src) == REGNO (dest)
5626 /* Don't do this if the original insn had a hard reg as
5627 SET_SRC or SET_DEST. */
5628 && (GET_CODE (sets[i].src) != REG
5629 || REGNO (sets[i].src) >= FIRST_PSEUDO_REGISTER)
5630 && (GET_CODE (dest) != REG || REGNO (dest) >= FIRST_PSEUDO_REGISTER))
5631 /* We can't call canon_reg here because it won't do anything if
5632 SRC is a hard register. */
5634 int src_q = REG_QTY (REGNO (src));
5635 struct qty_table_elem *src_ent = &qty_table[src_q];
5636 int first = src_ent->first_reg;
5637 rtx new_src
5638 = (first >= FIRST_PSEUDO_REGISTER
5639 ? regno_reg_rtx[first] : gen_rtx_REG (GET_MODE (src), first));
5641 /* We must use validate-change even for this, because this
5642 might be a special no-op instruction, suitable only to
5643 tag notes onto. */
5644 if (validate_change (insn, &SET_SRC (sets[i].rtl), new_src, 0))
5646 src = new_src;
5647 /* If we had a constant that is cheaper than what we are now
5648 setting SRC to, use that constant. We ignored it when we
5649 thought we could make this into a no-op. */
5650 if (src_const && COST (src_const) < COST (src)
5651 && validate_change (insn, &SET_SRC (sets[i].rtl),
5652 src_const, 0))
5653 src = src_const;
5658 /* If we made a change, recompute SRC values. */
5659 if (src != sets[i].src)
5661 cse_altered = 1;
5662 do_not_record = 0;
5663 hash_arg_in_memory = 0;
5664 sets[i].src = src;
5665 sets[i].src_hash = HASH (src, mode);
5666 sets[i].src_volatile = do_not_record;
5667 sets[i].src_in_memory = hash_arg_in_memory;
5668 sets[i].src_elt = lookup (src, sets[i].src_hash, mode);
5671 /* If this is a single SET, we are setting a register, and we have an
5672 equivalent constant, we want to add a REG_NOTE. We don't want
5673 to write a REG_EQUAL note for a constant pseudo since verifying that
5674 that pseudo hasn't been eliminated is a pain. Such a note also
5675 won't help anything.
5677 Avoid a REG_EQUAL note for (CONST (MINUS (LABEL_REF) (LABEL_REF)))
5678 which can be created for a reference to a compile time computable
5679 entry in a jump table. */
5681 if (n_sets == 1 && src_const && GET_CODE (dest) == REG
5682 && GET_CODE (src_const) != REG
5683 && ! (GET_CODE (src_const) == CONST
5684 && GET_CODE (XEXP (src_const, 0)) == MINUS
5685 && GET_CODE (XEXP (XEXP (src_const, 0), 0)) == LABEL_REF
5686 && GET_CODE (XEXP (XEXP (src_const, 0), 1)) == LABEL_REF))
5688 /* Make sure that the rtx is not shared with any other insn. */
5689 src_const = copy_rtx (src_const);
5691 /* Record the actual constant value in a REG_EQUAL note, making
5692 a new one if one does not already exist. */
5693 set_unique_reg_note (insn, REG_EQUAL, src_const);
5695 /* If storing a constant value in a register that
5696 previously held the constant value 0,
5697 record this fact with a REG_WAS_0 note on this insn.
5699 Note that the *register* is required to have previously held 0,
5700 not just any register in the quantity and we must point to the
5701 insn that set that register to zero.
5703 Rather than track each register individually, we just see if
5704 the last set for this quantity was for this register. */
5706 if (REGNO_QTY_VALID_P (REGNO (dest)))
5708 int dest_q = REG_QTY (REGNO (dest));
5709 struct qty_table_elem *dest_ent = &qty_table[dest_q];
5711 if (dest_ent->const_rtx == const0_rtx)
5713 /* See if we previously had a REG_WAS_0 note. */
5714 rtx note = find_reg_note (insn, REG_WAS_0, NULL_RTX);
5715 rtx const_insn = dest_ent->const_insn;
5717 if ((tem = single_set (const_insn)) != 0
5718 && rtx_equal_p (SET_DEST (tem), dest))
5720 if (note)
5721 XEXP (note, 0) = const_insn;
5722 else
5723 REG_NOTES (insn)
5724 = gen_rtx_INSN_LIST (REG_WAS_0, const_insn,
5725 REG_NOTES (insn));
5731 /* Now deal with the destination. */
5732 do_not_record = 0;
5734 /* Look within any SIGN_EXTRACT or ZERO_EXTRACT
5735 to the MEM or REG within it. */
5736 while (GET_CODE (dest) == SIGN_EXTRACT
5737 || GET_CODE (dest) == ZERO_EXTRACT
5738 || GET_CODE (dest) == SUBREG
5739 || GET_CODE (dest) == STRICT_LOW_PART)
5740 dest = XEXP (dest, 0);
5742 sets[i].inner_dest = dest;
5744 if (GET_CODE (dest) == MEM)
5746 #ifdef PUSH_ROUNDING
5747 /* Stack pushes invalidate the stack pointer. */
5748 rtx addr = XEXP (dest, 0);
5749 if (GET_RTX_CLASS (GET_CODE (addr)) == 'a'
5750 && XEXP (addr, 0) == stack_pointer_rtx)
5751 invalidate (stack_pointer_rtx, Pmode);
5752 #endif
5753 dest = fold_rtx (dest, insn);
5756 /* Compute the hash code of the destination now,
5757 before the effects of this instruction are recorded,
5758 since the register values used in the address computation
5759 are those before this instruction. */
5760 sets[i].dest_hash = HASH (dest, mode);
5762 /* Don't enter a bit-field in the hash table
5763 because the value in it after the store
5764 may not equal what was stored, due to truncation. */
5766 if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
5767 || GET_CODE (SET_DEST (sets[i].rtl)) == SIGN_EXTRACT)
5769 rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
5771 if (src_const != 0 && GET_CODE (src_const) == CONST_INT
5772 && GET_CODE (width) == CONST_INT
5773 && INTVAL (width) < HOST_BITS_PER_WIDE_INT
5774 && ! (INTVAL (src_const)
5775 & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
5776 /* Exception: if the value is constant,
5777 and it won't be truncated, record it. */
5779 else
5781 /* This is chosen so that the destination will be invalidated
5782 but no new value will be recorded.
5783 We must invalidate because sometimes constant
5784 values can be recorded for bitfields. */
5785 sets[i].src_elt = 0;
5786 sets[i].src_volatile = 1;
5787 src_eqv = 0;
5788 src_eqv_elt = 0;
5792 /* If only one set in a JUMP_INSN and it is now a no-op, we can delete
5793 the insn. */
5794 else if (n_sets == 1 && dest == pc_rtx && src == pc_rtx)
5796 /* One less use of the label this insn used to jump to. */
5797 delete_insn (insn);
5798 cse_jumps_altered = 1;
5799 /* No more processing for this set. */
5800 sets[i].rtl = 0;
5803 /* If this SET is now setting PC to a label, we know it used to
5804 be a conditional or computed branch. */
5805 else if (dest == pc_rtx && GET_CODE (src) == LABEL_REF)
5807 /* Now emit a BARRIER after the unconditional jump. */
5808 if (NEXT_INSN (insn) == 0
5809 || GET_CODE (NEXT_INSN (insn)) != BARRIER)
5810 emit_barrier_after (insn);
5812 /* We reemit the jump in as many cases as possible just in
5813 case the form of an unconditional jump is significantly
5814 different than a computed jump or conditional jump.
5816 If this insn has multiple sets, then reemitting the
5817 jump is nontrivial. So instead we just force rerecognition
5818 and hope for the best. */
5819 if (n_sets == 1)
5821 rtx new = emit_jump_insn_after (gen_jump (XEXP (src, 0)), insn);
5823 JUMP_LABEL (new) = XEXP (src, 0);
5824 LABEL_NUSES (XEXP (src, 0))++;
5825 delete_insn (insn);
5826 insn = new;
5828 /* Now emit a BARRIER after the unconditional jump. */
5829 if (NEXT_INSN (insn) == 0
5830 || GET_CODE (NEXT_INSN (insn)) != BARRIER)
5831 emit_barrier_after (insn);
5833 else
5834 INSN_CODE (insn) = -1;
5836 never_reached_warning (insn, NULL);
5838 /* Do not bother deleting any unreachable code,
5839 let jump/flow do that. */
5841 cse_jumps_altered = 1;
5842 sets[i].rtl = 0;
5845 /* If destination is volatile, invalidate it and then do no further
5846 processing for this assignment. */
5848 else if (do_not_record)
5850 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG)
5851 invalidate (dest, VOIDmode);
5852 else if (GET_CODE (dest) == MEM)
5854 /* Outgoing arguments for a libcall don't
5855 affect any recorded expressions. */
5856 if (! libcall_insn || insn == libcall_insn)
5857 invalidate (dest, VOIDmode);
5859 else if (GET_CODE (dest) == STRICT_LOW_PART
5860 || GET_CODE (dest) == ZERO_EXTRACT)
5861 invalidate (XEXP (dest, 0), GET_MODE (dest));
5862 sets[i].rtl = 0;
5865 if (sets[i].rtl != 0 && dest != SET_DEST (sets[i].rtl))
5866 sets[i].dest_hash = HASH (SET_DEST (sets[i].rtl), mode);
5868 #ifdef HAVE_cc0
5869 /* If setting CC0, record what it was set to, or a constant, if it
5870 is equivalent to a constant. If it is being set to a floating-point
5871 value, make a COMPARE with the appropriate constant of 0. If we
5872 don't do this, later code can interpret this as a test against
5873 const0_rtx, which can cause problems if we try to put it into an
5874 insn as a floating-point operand. */
5875 if (dest == cc0_rtx)
5877 this_insn_cc0 = src_const && mode != VOIDmode ? src_const : src;
5878 this_insn_cc0_mode = mode;
5879 if (FLOAT_MODE_P (mode))
5880 this_insn_cc0 = gen_rtx_COMPARE (VOIDmode, this_insn_cc0,
5881 CONST0_RTX (mode));
5883 #endif
5886 /* Now enter all non-volatile source expressions in the hash table
5887 if they are not already present.
5888 Record their equivalence classes in src_elt.
5889 This way we can insert the corresponding destinations into
5890 the same classes even if the actual sources are no longer in them
5891 (having been invalidated). */
5893 if (src_eqv && src_eqv_elt == 0 && sets[0].rtl != 0 && ! src_eqv_volatile
5894 && ! rtx_equal_p (src_eqv, SET_DEST (sets[0].rtl)))
5896 struct table_elt *elt;
5897 struct table_elt *classp = sets[0].src_elt;
5898 rtx dest = SET_DEST (sets[0].rtl);
5899 enum machine_mode eqvmode = GET_MODE (dest);
5901 if (GET_CODE (dest) == STRICT_LOW_PART)
5903 eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
5904 classp = 0;
5906 if (insert_regs (src_eqv, classp, 0))
5908 rehash_using_reg (src_eqv);
5909 src_eqv_hash = HASH (src_eqv, eqvmode);
5911 elt = insert (src_eqv, classp, src_eqv_hash, eqvmode);
5912 elt->in_memory = src_eqv_in_memory;
5913 src_eqv_elt = elt;
5915 /* Check to see if src_eqv_elt is the same as a set source which
5916 does not yet have an elt, and if so set the elt of the set source
5917 to src_eqv_elt. */
5918 for (i = 0; i < n_sets; i++)
5919 if (sets[i].rtl && sets[i].src_elt == 0
5920 && rtx_equal_p (SET_SRC (sets[i].rtl), src_eqv))
5921 sets[i].src_elt = src_eqv_elt;
5924 for (i = 0; i < n_sets; i++)
5925 if (sets[i].rtl && ! sets[i].src_volatile
5926 && ! rtx_equal_p (SET_SRC (sets[i].rtl), SET_DEST (sets[i].rtl)))
5928 if (GET_CODE (SET_DEST (sets[i].rtl)) == STRICT_LOW_PART)
5930 /* REG_EQUAL in setting a STRICT_LOW_PART
5931 gives an equivalent for the entire destination register,
5932 not just for the subreg being stored in now.
5933 This is a more interesting equivalence, so we arrange later
5934 to treat the entire reg as the destination. */
5935 sets[i].src_elt = src_eqv_elt;
5936 sets[i].src_hash = src_eqv_hash;
5938 else
5940 /* Insert source and constant equivalent into hash table, if not
5941 already present. */
5942 struct table_elt *classp = src_eqv_elt;
5943 rtx src = sets[i].src;
5944 rtx dest = SET_DEST (sets[i].rtl);
5945 enum machine_mode mode
5946 = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
5948 if (sets[i].src_elt == 0)
5950 /* Don't put a hard register source into the table if this is
5951 the last insn of a libcall. In this case, we only need
5952 to put src_eqv_elt in src_elt. */
5953 if (! find_reg_note (insn, REG_RETVAL, NULL_RTX))
5955 struct table_elt *elt;
5957 /* Note that these insert_regs calls cannot remove
5958 any of the src_elt's, because they would have failed to
5959 match if not still valid. */
5960 if (insert_regs (src, classp, 0))
5962 rehash_using_reg (src);
5963 sets[i].src_hash = HASH (src, mode);
5965 elt = insert (src, classp, sets[i].src_hash, mode);
5966 elt->in_memory = sets[i].src_in_memory;
5967 sets[i].src_elt = classp = elt;
5969 else
5970 sets[i].src_elt = classp;
5972 if (sets[i].src_const && sets[i].src_const_elt == 0
5973 && src != sets[i].src_const
5974 && ! rtx_equal_p (sets[i].src_const, src))
5975 sets[i].src_elt = insert (sets[i].src_const, classp,
5976 sets[i].src_const_hash, mode);
5979 else if (sets[i].src_elt == 0)
5980 /* If we did not insert the source into the hash table (e.g., it was
5981 volatile), note the equivalence class for the REG_EQUAL value, if any,
5982 so that the destination goes into that class. */
5983 sets[i].src_elt = src_eqv_elt;
5985 invalidate_from_clobbers (x);
5987 /* Some registers are invalidated by subroutine calls. Memory is
5988 invalidated by non-constant calls. */
5990 if (GET_CODE (insn) == CALL_INSN)
5992 if (! CONST_OR_PURE_CALL_P (insn))
5993 invalidate_memory ();
5994 invalidate_for_call ();
5997 /* Now invalidate everything set by this instruction.
5998 If a SUBREG or other funny destination is being set,
5999 sets[i].rtl is still nonzero, so here we invalidate the reg
6000 a part of which is being set. */
6002 for (i = 0; i < n_sets; i++)
6003 if (sets[i].rtl)
6005 /* We can't use the inner dest, because the mode associated with
6006 a ZERO_EXTRACT is significant. */
6007 rtx dest = SET_DEST (sets[i].rtl);
6009 /* Needed for registers to remove the register from its
6010 previous quantity's chain.
6011 Needed for memory if this is a nonvarying address, unless
6012 we have just done an invalidate_memory that covers even those. */
6013 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG)
6014 invalidate (dest, VOIDmode);
6015 else if (GET_CODE (dest) == MEM)
6017 /* Outgoing arguments for a libcall don't
6018 affect any recorded expressions. */
6019 if (! libcall_insn || insn == libcall_insn)
6020 invalidate (dest, VOIDmode);
6022 else if (GET_CODE (dest) == STRICT_LOW_PART
6023 || GET_CODE (dest) == ZERO_EXTRACT)
6024 invalidate (XEXP (dest, 0), GET_MODE (dest));
6027 /* A volatile ASM invalidates everything. */
6028 if (GET_CODE (insn) == INSN
6029 && GET_CODE (PATTERN (insn)) == ASM_OPERANDS
6030 && MEM_VOLATILE_P (PATTERN (insn)))
6031 flush_hash_table ();
6033 /* Make sure registers mentioned in destinations
6034 are safe for use in an expression to be inserted.
6035 This removes from the hash table
6036 any invalid entry that refers to one of these registers.
6038 We don't care about the return value from mention_regs because
6039 we are going to hash the SET_DEST values unconditionally. */
6041 for (i = 0; i < n_sets; i++)
6043 if (sets[i].rtl)
6045 rtx x = SET_DEST (sets[i].rtl);
6047 if (GET_CODE (x) != REG)
6048 mention_regs (x);
6049 else
6051 /* We used to rely on all references to a register becoming
6052 inaccessible when a register changes to a new quantity,
6053 since that changes the hash code. However, that is not
6054 safe, since after HASH_SIZE new quantities we get a
6055 hash 'collision' of a register with its own invalid
6056 entries. And since SUBREGs have been changed not to
6057 change their hash code with the hash code of the register,
6058 it wouldn't work any longer at all. So we have to check
6059 for any invalid references lying around now.
6060 This code is similar to the REG case in mention_regs,
6061 but it knows that reg_tick has been incremented, and
6062 it leaves reg_in_table as -1 . */
6063 unsigned int regno = REGNO (x);
6064 unsigned int endregno
6065 = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
6066 : HARD_REGNO_NREGS (regno, GET_MODE (x)));
6067 unsigned int i;
6069 for (i = regno; i < endregno; i++)
6071 if (REG_IN_TABLE (i) >= 0)
6073 remove_invalid_refs (i);
6074 REG_IN_TABLE (i) = -1;
6081 /* We may have just removed some of the src_elt's from the hash table.
6082 So replace each one with the current head of the same class. */
6084 for (i = 0; i < n_sets; i++)
6085 if (sets[i].rtl)
6087 if (sets[i].src_elt && sets[i].src_elt->first_same_value == 0)
6088 /* If elt was removed, find current head of same class,
6089 or 0 if nothing remains of that class. */
6091 struct table_elt *elt = sets[i].src_elt;
6093 while (elt && elt->prev_same_value)
6094 elt = elt->prev_same_value;
6096 while (elt && elt->first_same_value == 0)
6097 elt = elt->next_same_value;
6098 sets[i].src_elt = elt ? elt->first_same_value : 0;
6102 /* Now insert the destinations into their equivalence classes. */
6104 for (i = 0; i < n_sets; i++)
6105 if (sets[i].rtl)
6107 rtx dest = SET_DEST (sets[i].rtl);
6108 rtx inner_dest = sets[i].inner_dest;
6109 struct table_elt *elt;
6111 /* Don't record value if we are not supposed to risk allocating
6112 floating-point values in registers that might be wider than
6113 memory. */
6114 if ((flag_float_store
6115 && GET_CODE (dest) == MEM
6116 && FLOAT_MODE_P (GET_MODE (dest)))
6117 /* Don't record BLKmode values, because we don't know the
6118 size of it, and can't be sure that other BLKmode values
6119 have the same or smaller size. */
6120 || GET_MODE (dest) == BLKmode
6121 /* Don't record values of destinations set inside a libcall block
6122 since we might delete the libcall. Things should have been set
6123 up so we won't want to reuse such a value, but we play it safe
6124 here. */
6125 || libcall_insn
6126 /* If we didn't put a REG_EQUAL value or a source into the hash
6127 table, there is no point is recording DEST. */
6128 || sets[i].src_elt == 0
6129 /* If DEST is a paradoxical SUBREG and SRC is a ZERO_EXTEND
6130 or SIGN_EXTEND, don't record DEST since it can cause
6131 some tracking to be wrong.
6133 ??? Think about this more later. */
6134 || (GET_CODE (dest) == SUBREG
6135 && (GET_MODE_SIZE (GET_MODE (dest))
6136 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
6137 && (GET_CODE (sets[i].src) == SIGN_EXTEND
6138 || GET_CODE (sets[i].src) == ZERO_EXTEND)))
6139 continue;
6141 /* STRICT_LOW_PART isn't part of the value BEING set,
6142 and neither is the SUBREG inside it.
6143 Note that in this case SETS[I].SRC_ELT is really SRC_EQV_ELT. */
6144 if (GET_CODE (dest) == STRICT_LOW_PART)
6145 dest = SUBREG_REG (XEXP (dest, 0));
6147 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG)
6148 /* Registers must also be inserted into chains for quantities. */
6149 if (insert_regs (dest, sets[i].src_elt, 1))
6151 /* If `insert_regs' changes something, the hash code must be
6152 recalculated. */
6153 rehash_using_reg (dest);
6154 sets[i].dest_hash = HASH (dest, GET_MODE (dest));
6157 if (GET_CODE (inner_dest) == MEM
6158 && GET_CODE (XEXP (inner_dest, 0)) == ADDRESSOF)
6159 /* Given (SET (MEM (ADDRESSOF (X))) Y) we don't want to say
6160 that (MEM (ADDRESSOF (X))) is equivalent to Y.
6161 Consider the case in which the address of the MEM is
6162 passed to a function, which alters the MEM. Then, if we
6163 later use Y instead of the MEM we'll miss the update. */
6164 elt = insert (dest, 0, sets[i].dest_hash, GET_MODE (dest));
6165 else
6166 elt = insert (dest, sets[i].src_elt,
6167 sets[i].dest_hash, GET_MODE (dest));
6169 elt->in_memory = (GET_CODE (sets[i].inner_dest) == MEM
6170 && (! RTX_UNCHANGING_P (sets[i].inner_dest)
6171 || fixed_base_plus_p (XEXP (sets[i].inner_dest,
6172 0))));
6174 /* If we have (set (subreg:m1 (reg:m2 foo) 0) (bar:m1)), M1 is no
6175 narrower than M2, and both M1 and M2 are the same number of words,
6176 we are also doing (set (reg:m2 foo) (subreg:m2 (bar:m1) 0)) so
6177 make that equivalence as well.
6179 However, BAR may have equivalences for which gen_lowpart_if_possible
6180 will produce a simpler value than gen_lowpart_if_possible applied to
6181 BAR (e.g., if BAR was ZERO_EXTENDed from M2), so we will scan all
6182 BAR's equivalences. If we don't get a simplified form, make
6183 the SUBREG. It will not be used in an equivalence, but will
6184 cause two similar assignments to be detected.
6186 Note the loop below will find SUBREG_REG (DEST) since we have
6187 already entered SRC and DEST of the SET in the table. */
6189 if (GET_CODE (dest) == SUBREG
6190 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) - 1)
6191 / UNITS_PER_WORD)
6192 == (GET_MODE_SIZE (GET_MODE (dest)) - 1) / UNITS_PER_WORD)
6193 && (GET_MODE_SIZE (GET_MODE (dest))
6194 >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
6195 && sets[i].src_elt != 0)
6197 enum machine_mode new_mode = GET_MODE (SUBREG_REG (dest));
6198 struct table_elt *elt, *classp = 0;
6200 for (elt = sets[i].src_elt->first_same_value; elt;
6201 elt = elt->next_same_value)
6203 rtx new_src = 0;
6204 unsigned src_hash;
6205 struct table_elt *src_elt;
6206 int byte = 0;
6208 /* Ignore invalid entries. */
6209 if (GET_CODE (elt->exp) != REG
6210 && ! exp_equiv_p (elt->exp, elt->exp, 1, 0))
6211 continue;
6213 /* We may have already been playing subreg games. If the
6214 mode is already correct for the destination, use it. */
6215 if (GET_MODE (elt->exp) == new_mode)
6216 new_src = elt->exp;
6217 else
6219 /* Calculate big endian correction for the SUBREG_BYTE.
6220 We have already checked that M1 (GET_MODE (dest))
6221 is not narrower than M2 (new_mode). */
6222 if (BYTES_BIG_ENDIAN)
6223 byte = (GET_MODE_SIZE (GET_MODE (dest))
6224 - GET_MODE_SIZE (new_mode));
6226 new_src = simplify_gen_subreg (new_mode, elt->exp,
6227 GET_MODE (dest), byte);
6230 /* The call to simplify_gen_subreg fails if the value
6231 is VOIDmode, yet we can't do any simplification, e.g.
6232 for EXPR_LISTs denoting function call results.
6233 It is invalid to construct a SUBREG with a VOIDmode
6234 SUBREG_REG, hence a zero new_src means we can't do
6235 this substitution. */
6236 if (! new_src)
6237 continue;
6239 src_hash = HASH (new_src, new_mode);
6240 src_elt = lookup (new_src, src_hash, new_mode);
6242 /* Put the new source in the hash table is if isn't
6243 already. */
6244 if (src_elt == 0)
6246 if (insert_regs (new_src, classp, 0))
6248 rehash_using_reg (new_src);
6249 src_hash = HASH (new_src, new_mode);
6251 src_elt = insert (new_src, classp, src_hash, new_mode);
6252 src_elt->in_memory = elt->in_memory;
6254 else if (classp && classp != src_elt->first_same_value)
6255 /* Show that two things that we've seen before are
6256 actually the same. */
6257 merge_equiv_classes (src_elt, classp);
6259 classp = src_elt->first_same_value;
6260 /* Ignore invalid entries. */
6261 while (classp
6262 && GET_CODE (classp->exp) != REG
6263 && ! exp_equiv_p (classp->exp, classp->exp, 1, 0))
6264 classp = classp->next_same_value;
6269 /* Special handling for (set REG0 REG1) where REG0 is the
6270 "cheapest", cheaper than REG1. After cse, REG1 will probably not
6271 be used in the sequel, so (if easily done) change this insn to
6272 (set REG1 REG0) and replace REG1 with REG0 in the previous insn
6273 that computed their value. Then REG1 will become a dead store
6274 and won't cloud the situation for later optimizations.
6276 Do not make this change if REG1 is a hard register, because it will
6277 then be used in the sequel and we may be changing a two-operand insn
6278 into a three-operand insn.
6280 Also do not do this if we are operating on a copy of INSN.
6282 Also don't do this if INSN ends a libcall; this would cause an unrelated
6283 register to be set in the middle of a libcall, and we then get bad code
6284 if the libcall is deleted. */
6286 if (n_sets == 1 && sets[0].rtl && GET_CODE (SET_DEST (sets[0].rtl)) == REG
6287 && NEXT_INSN (PREV_INSN (insn)) == insn
6288 && GET_CODE (SET_SRC (sets[0].rtl)) == REG
6289 && REGNO (SET_SRC (sets[0].rtl)) >= FIRST_PSEUDO_REGISTER
6290 && REGNO_QTY_VALID_P (REGNO (SET_SRC (sets[0].rtl))))
6292 int src_q = REG_QTY (REGNO (SET_SRC (sets[0].rtl)));
6293 struct qty_table_elem *src_ent = &qty_table[src_q];
6295 if ((src_ent->first_reg == REGNO (SET_DEST (sets[0].rtl)))
6296 && ! find_reg_note (insn, REG_RETVAL, NULL_RTX))
6298 rtx prev = insn;
6299 /* Scan for the previous nonnote insn, but stop at a basic
6300 block boundary. */
6303 prev = PREV_INSN (prev);
6305 while (prev && GET_CODE (prev) == NOTE
6306 && NOTE_LINE_NUMBER (prev) != NOTE_INSN_BASIC_BLOCK);
6308 /* Do not swap the registers around if the previous instruction
6309 attaches a REG_EQUIV note to REG1.
6311 ??? It's not entirely clear whether we can transfer a REG_EQUIV
6312 from the pseudo that originally shadowed an incoming argument
6313 to another register. Some uses of REG_EQUIV might rely on it
6314 being attached to REG1 rather than REG2.
6316 This section previously turned the REG_EQUIV into a REG_EQUAL
6317 note. We cannot do that because REG_EQUIV may provide an
6318 uninitialized stack slot when REG_PARM_STACK_SPACE is used. */
6320 if (prev != 0 && GET_CODE (prev) == INSN
6321 && GET_CODE (PATTERN (prev)) == SET
6322 && SET_DEST (PATTERN (prev)) == SET_SRC (sets[0].rtl)
6323 && ! find_reg_note (prev, REG_EQUIV, NULL_RTX))
6325 rtx dest = SET_DEST (sets[0].rtl);
6326 rtx src = SET_SRC (sets[0].rtl);
6327 rtx note;
6329 validate_change (prev, &SET_DEST (PATTERN (prev)), dest, 1);
6330 validate_change (insn, &SET_DEST (sets[0].rtl), src, 1);
6331 validate_change (insn, &SET_SRC (sets[0].rtl), dest, 1);
6332 apply_change_group ();
6334 /* If there was a REG_WAS_0 note on PREV, remove it. Move
6335 any REG_WAS_0 note on INSN to PREV. */
6336 note = find_reg_note (prev, REG_WAS_0, NULL_RTX);
6337 if (note)
6338 remove_note (prev, note);
6340 note = find_reg_note (insn, REG_WAS_0, NULL_RTX);
6341 if (note)
6343 remove_note (insn, note);
6344 XEXP (note, 1) = REG_NOTES (prev);
6345 REG_NOTES (prev) = note;
6348 /* If INSN has a REG_EQUAL note, and this note mentions
6349 REG0, then we must delete it, because the value in
6350 REG0 has changed. If the note's value is REG1, we must
6351 also delete it because that is now this insn's dest. */
6352 note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
6353 if (note != 0
6354 && (reg_mentioned_p (dest, XEXP (note, 0))
6355 || rtx_equal_p (src, XEXP (note, 0))))
6356 remove_note (insn, note);
6361 /* If this is a conditional jump insn, record any known equivalences due to
6362 the condition being tested. */
6364 last_jump_equiv_class = 0;
6365 if (GET_CODE (insn) == JUMP_INSN
6366 && n_sets == 1 && GET_CODE (x) == SET
6367 && GET_CODE (SET_SRC (x)) == IF_THEN_ELSE)
6368 record_jump_equiv (insn, 0);
6370 #ifdef HAVE_cc0
6371 /* If the previous insn set CC0 and this insn no longer references CC0,
6372 delete the previous insn. Here we use the fact that nothing expects CC0
6373 to be valid over an insn, which is true until the final pass. */
6374 if (prev_insn && GET_CODE (prev_insn) == INSN
6375 && (tem = single_set (prev_insn)) != 0
6376 && SET_DEST (tem) == cc0_rtx
6377 && ! reg_mentioned_p (cc0_rtx, x))
6378 delete_insn (prev_insn);
6380 prev_insn_cc0 = this_insn_cc0;
6381 prev_insn_cc0_mode = this_insn_cc0_mode;
6382 prev_insn = insn;
6383 #endif
6386 /* Remove from the hash table all expressions that reference memory. */
6388 static void
6389 invalidate_memory ()
6391 int i;
6392 struct table_elt *p, *next;
6394 for (i = 0; i < HASH_SIZE; i++)
6395 for (p = table[i]; p; p = next)
6397 next = p->next_same_hash;
6398 if (p->in_memory)
6399 remove_from_table (p, i);
6403 /* If ADDR is an address that implicitly affects the stack pointer, return
6404 1 and update the register tables to show the effect. Else, return 0. */
6406 static int
6407 addr_affects_sp_p (addr)
6408 rtx addr;
6410 if (GET_RTX_CLASS (GET_CODE (addr)) == 'a'
6411 && GET_CODE (XEXP (addr, 0)) == REG
6412 && REGNO (XEXP (addr, 0)) == STACK_POINTER_REGNUM)
6414 if (REG_TICK (STACK_POINTER_REGNUM) >= 0)
6416 REG_TICK (STACK_POINTER_REGNUM)++;
6417 /* Is it possible to use a subreg of SP? */
6418 SUBREG_TICKED (STACK_POINTER_REGNUM) = -1;
6421 /* This should be *very* rare. */
6422 if (TEST_HARD_REG_BIT (hard_regs_in_table, STACK_POINTER_REGNUM))
6423 invalidate (stack_pointer_rtx, VOIDmode);
6425 return 1;
6428 return 0;
6431 /* Perform invalidation on the basis of everything about an insn
6432 except for invalidating the actual places that are SET in it.
6433 This includes the places CLOBBERed, and anything that might
6434 alias with something that is SET or CLOBBERed.
6436 X is the pattern of the insn. */
6438 static void
6439 invalidate_from_clobbers (x)
6440 rtx x;
6442 if (GET_CODE (x) == CLOBBER)
6444 rtx ref = XEXP (x, 0);
6445 if (ref)
6447 if (GET_CODE (ref) == REG || GET_CODE (ref) == SUBREG
6448 || GET_CODE (ref) == MEM)
6449 invalidate (ref, VOIDmode);
6450 else if (GET_CODE (ref) == STRICT_LOW_PART
6451 || GET_CODE (ref) == ZERO_EXTRACT)
6452 invalidate (XEXP (ref, 0), GET_MODE (ref));
6455 else if (GET_CODE (x) == PARALLEL)
6457 int i;
6458 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
6460 rtx y = XVECEXP (x, 0, i);
6461 if (GET_CODE (y) == CLOBBER)
6463 rtx ref = XEXP (y, 0);
6464 if (GET_CODE (ref) == REG || GET_CODE (ref) == SUBREG
6465 || GET_CODE (ref) == MEM)
6466 invalidate (ref, VOIDmode);
6467 else if (GET_CODE (ref) == STRICT_LOW_PART
6468 || GET_CODE (ref) == ZERO_EXTRACT)
6469 invalidate (XEXP (ref, 0), GET_MODE (ref));
6475 /* Process X, part of the REG_NOTES of an insn. Look at any REG_EQUAL notes
6476 and replace any registers in them with either an equivalent constant
6477 or the canonical form of the register. If we are inside an address,
6478 only do this if the address remains valid.
6480 OBJECT is 0 except when within a MEM in which case it is the MEM.
6482 Return the replacement for X. */
6484 static rtx
6485 cse_process_notes (x, object)
6486 rtx x;
6487 rtx object;
6489 enum rtx_code code = GET_CODE (x);
6490 const char *fmt = GET_RTX_FORMAT (code);
6491 int i;
6493 switch (code)
6495 case CONST_INT:
6496 case CONST:
6497 case SYMBOL_REF:
6498 case LABEL_REF:
6499 case CONST_DOUBLE:
6500 case CONST_VECTOR:
6501 case PC:
6502 case CC0:
6503 case LO_SUM:
6504 return x;
6506 case MEM:
6507 validate_change (x, &XEXP (x, 0),
6508 cse_process_notes (XEXP (x, 0), x), 0);
6509 return x;
6511 case EXPR_LIST:
6512 case INSN_LIST:
6513 if (REG_NOTE_KIND (x) == REG_EQUAL)
6514 XEXP (x, 0) = cse_process_notes (XEXP (x, 0), NULL_RTX);
6515 if (XEXP (x, 1))
6516 XEXP (x, 1) = cse_process_notes (XEXP (x, 1), NULL_RTX);
6517 return x;
6519 case SIGN_EXTEND:
6520 case ZERO_EXTEND:
6521 case SUBREG:
6523 rtx new = cse_process_notes (XEXP (x, 0), object);
6524 /* We don't substitute VOIDmode constants into these rtx,
6525 since they would impede folding. */
6526 if (GET_MODE (new) != VOIDmode)
6527 validate_change (object, &XEXP (x, 0), new, 0);
6528 return x;
6531 case REG:
6532 i = REG_QTY (REGNO (x));
6534 /* Return a constant or a constant register. */
6535 if (REGNO_QTY_VALID_P (REGNO (x)))
6537 struct qty_table_elem *ent = &qty_table[i];
6539 if (ent->const_rtx != NULL_RTX
6540 && (CONSTANT_P (ent->const_rtx)
6541 || GET_CODE (ent->const_rtx) == REG))
6543 rtx new = gen_lowpart_if_possible (GET_MODE (x), ent->const_rtx);
6544 if (new)
6545 return new;
6549 /* Otherwise, canonicalize this register. */
6550 return canon_reg (x, NULL_RTX);
6552 default:
6553 break;
6556 for (i = 0; i < GET_RTX_LENGTH (code); i++)
6557 if (fmt[i] == 'e')
6558 validate_change (object, &XEXP (x, i),
6559 cse_process_notes (XEXP (x, i), object), 0);
6561 return x;
6564 /* Find common subexpressions between the end test of a loop and the beginning
6565 of the loop. LOOP_START is the CODE_LABEL at the start of a loop.
6567 Often we have a loop where an expression in the exit test is used
6568 in the body of the loop. For example "while (*p) *q++ = *p++;".
6569 Because of the way we duplicate the loop exit test in front of the loop,
6570 however, we don't detect that common subexpression. This will be caught
6571 when global cse is implemented, but this is a quite common case.
6573 This function handles the most common cases of these common expressions.
6574 It is called after we have processed the basic block ending with the
6575 NOTE_INSN_LOOP_END note that ends a loop and the previous JUMP_INSN
6576 jumps to a label used only once. */
6578 static void
6579 cse_around_loop (loop_start)
6580 rtx loop_start;
6582 rtx insn;
6583 int i;
6584 struct table_elt *p;
6586 /* If the jump at the end of the loop doesn't go to the start, we don't
6587 do anything. */
6588 for (insn = PREV_INSN (loop_start);
6589 insn && (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) >= 0);
6590 insn = PREV_INSN (insn))
6593 if (insn == 0
6594 || GET_CODE (insn) != NOTE
6595 || NOTE_LINE_NUMBER (insn) != NOTE_INSN_LOOP_BEG)
6596 return;
6598 /* If the last insn of the loop (the end test) was an NE comparison,
6599 we will interpret it as an EQ comparison, since we fell through
6600 the loop. Any equivalences resulting from that comparison are
6601 therefore not valid and must be invalidated. */
6602 if (last_jump_equiv_class)
6603 for (p = last_jump_equiv_class->first_same_value; p;
6604 p = p->next_same_value)
6606 if (GET_CODE (p->exp) == MEM || GET_CODE (p->exp) == REG
6607 || (GET_CODE (p->exp) == SUBREG
6608 && GET_CODE (SUBREG_REG (p->exp)) == REG))
6609 invalidate (p->exp, VOIDmode);
6610 else if (GET_CODE (p->exp) == STRICT_LOW_PART
6611 || GET_CODE (p->exp) == ZERO_EXTRACT)
6612 invalidate (XEXP (p->exp, 0), GET_MODE (p->exp));
6615 /* Process insns starting after LOOP_START until we hit a CALL_INSN or
6616 a CODE_LABEL (we could handle a CALL_INSN, but it isn't worth it).
6618 The only thing we do with SET_DEST is invalidate entries, so we
6619 can safely process each SET in order. It is slightly less efficient
6620 to do so, but we only want to handle the most common cases.
6622 The gen_move_insn call in cse_set_around_loop may create new pseudos.
6623 These pseudos won't have valid entries in any of the tables indexed
6624 by register number, such as reg_qty. We avoid out-of-range array
6625 accesses by not processing any instructions created after cse started. */
6627 for (insn = NEXT_INSN (loop_start);
6628 GET_CODE (insn) != CALL_INSN && GET_CODE (insn) != CODE_LABEL
6629 && INSN_UID (insn) < max_insn_uid
6630 && ! (GET_CODE (insn) == NOTE
6631 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END);
6632 insn = NEXT_INSN (insn))
6634 if (INSN_P (insn)
6635 && (GET_CODE (PATTERN (insn)) == SET
6636 || GET_CODE (PATTERN (insn)) == CLOBBER))
6637 cse_set_around_loop (PATTERN (insn), insn, loop_start);
6638 else if (INSN_P (insn) && GET_CODE (PATTERN (insn)) == PARALLEL)
6639 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
6640 if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET
6641 || GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == CLOBBER)
6642 cse_set_around_loop (XVECEXP (PATTERN (insn), 0, i), insn,
6643 loop_start);
6647 /* Process one SET of an insn that was skipped. We ignore CLOBBERs
6648 since they are done elsewhere. This function is called via note_stores. */
6650 static void
6651 invalidate_skipped_set (dest, set, data)
6652 rtx set;
6653 rtx dest;
6654 void *data ATTRIBUTE_UNUSED;
6656 enum rtx_code code = GET_CODE (dest);
6658 if (code == MEM
6659 && ! addr_affects_sp_p (dest) /* If this is not a stack push ... */
6660 /* There are times when an address can appear varying and be a PLUS
6661 during this scan when it would be a fixed address were we to know
6662 the proper equivalences. So invalidate all memory if there is
6663 a BLKmode or nonscalar memory reference or a reference to a
6664 variable address. */
6665 && (MEM_IN_STRUCT_P (dest) || GET_MODE (dest) == BLKmode
6666 || cse_rtx_varies_p (XEXP (dest, 0), 0)))
6668 invalidate_memory ();
6669 return;
6672 if (GET_CODE (set) == CLOBBER
6673 #ifdef HAVE_cc0
6674 || dest == cc0_rtx
6675 #endif
6676 || dest == pc_rtx)
6677 return;
6679 if (code == STRICT_LOW_PART || code == ZERO_EXTRACT)
6680 invalidate (XEXP (dest, 0), GET_MODE (dest));
6681 else if (code == REG || code == SUBREG || code == MEM)
6682 invalidate (dest, VOIDmode);
6685 /* Invalidate all insns from START up to the end of the function or the
6686 next label. This called when we wish to CSE around a block that is
6687 conditionally executed. */
6689 static void
6690 invalidate_skipped_block (start)
6691 rtx start;
6693 rtx insn;
6695 for (insn = start; insn && GET_CODE (insn) != CODE_LABEL;
6696 insn = NEXT_INSN (insn))
6698 if (! INSN_P (insn))
6699 continue;
6701 if (GET_CODE (insn) == CALL_INSN)
6703 if (! CONST_OR_PURE_CALL_P (insn))
6704 invalidate_memory ();
6705 invalidate_for_call ();
6708 invalidate_from_clobbers (PATTERN (insn));
6709 note_stores (PATTERN (insn), invalidate_skipped_set, NULL);
6713 /* If modifying X will modify the value in *DATA (which is really an
6714 `rtx *'), indicate that fact by setting the pointed to value to
6715 NULL_RTX. */
6717 static void
6718 cse_check_loop_start (x, set, data)
6719 rtx x;
6720 rtx set ATTRIBUTE_UNUSED;
6721 void *data;
6723 rtx *cse_check_loop_start_value = (rtx *) data;
6725 if (*cse_check_loop_start_value == NULL_RTX
6726 || GET_CODE (x) == CC0 || GET_CODE (x) == PC)
6727 return;
6729 if ((GET_CODE (x) == MEM && GET_CODE (*cse_check_loop_start_value) == MEM)
6730 || reg_overlap_mentioned_p (x, *cse_check_loop_start_value))
6731 *cse_check_loop_start_value = NULL_RTX;
6734 /* X is a SET or CLOBBER contained in INSN that was found near the start of
6735 a loop that starts with the label at LOOP_START.
6737 If X is a SET, we see if its SET_SRC is currently in our hash table.
6738 If so, we see if it has a value equal to some register used only in the
6739 loop exit code (as marked by jump.c).
6741 If those two conditions are true, we search backwards from the start of
6742 the loop to see if that same value was loaded into a register that still
6743 retains its value at the start of the loop.
6745 If so, we insert an insn after the load to copy the destination of that
6746 load into the equivalent register and (try to) replace our SET_SRC with that
6747 register.
6749 In any event, we invalidate whatever this SET or CLOBBER modifies. */
6751 static void
6752 cse_set_around_loop (x, insn, loop_start)
6753 rtx x;
6754 rtx insn;
6755 rtx loop_start;
6757 struct table_elt *src_elt;
6759 /* If this is a SET, see if we can replace SET_SRC, but ignore SETs that
6760 are setting PC or CC0 or whose SET_SRC is already a register. */
6761 if (GET_CODE (x) == SET
6762 && GET_CODE (SET_DEST (x)) != PC && GET_CODE (SET_DEST (x)) != CC0
6763 && GET_CODE (SET_SRC (x)) != REG)
6765 src_elt = lookup (SET_SRC (x),
6766 HASH (SET_SRC (x), GET_MODE (SET_DEST (x))),
6767 GET_MODE (SET_DEST (x)));
6769 if (src_elt)
6770 for (src_elt = src_elt->first_same_value; src_elt;
6771 src_elt = src_elt->next_same_value)
6772 if (GET_CODE (src_elt->exp) == REG && REG_LOOP_TEST_P (src_elt->exp)
6773 && COST (src_elt->exp) < COST (SET_SRC (x)))
6775 rtx p, set;
6777 /* Look for an insn in front of LOOP_START that sets
6778 something in the desired mode to SET_SRC (x) before we hit
6779 a label or CALL_INSN. */
6781 for (p = prev_nonnote_insn (loop_start);
6782 p && GET_CODE (p) != CALL_INSN
6783 && GET_CODE (p) != CODE_LABEL;
6784 p = prev_nonnote_insn (p))
6785 if ((set = single_set (p)) != 0
6786 && GET_CODE (SET_DEST (set)) == REG
6787 && GET_MODE (SET_DEST (set)) == src_elt->mode
6788 && rtx_equal_p (SET_SRC (set), SET_SRC (x)))
6790 /* We now have to ensure that nothing between P
6791 and LOOP_START modified anything referenced in
6792 SET_SRC (x). We know that nothing within the loop
6793 can modify it, or we would have invalidated it in
6794 the hash table. */
6795 rtx q;
6796 rtx cse_check_loop_start_value = SET_SRC (x);
6797 for (q = p; q != loop_start; q = NEXT_INSN (q))
6798 if (INSN_P (q))
6799 note_stores (PATTERN (q),
6800 cse_check_loop_start,
6801 &cse_check_loop_start_value);
6803 /* If nothing was changed and we can replace our
6804 SET_SRC, add an insn after P to copy its destination
6805 to what we will be replacing SET_SRC with. */
6806 if (cse_check_loop_start_value
6807 && validate_change (insn, &SET_SRC (x),
6808 src_elt->exp, 0))
6810 /* If this creates new pseudos, this is unsafe,
6811 because the regno of new pseudo is unsuitable
6812 to index into reg_qty when cse_insn processes
6813 the new insn. Therefore, if a new pseudo was
6814 created, discard this optimization. */
6815 int nregs = max_reg_num ();
6816 rtx move
6817 = gen_move_insn (src_elt->exp, SET_DEST (set));
6818 if (nregs != max_reg_num ())
6820 if (! validate_change (insn, &SET_SRC (x),
6821 SET_SRC (set), 0))
6822 abort ();
6824 else
6825 emit_insn_after (move, p);
6827 break;
6832 /* Deal with the destination of X affecting the stack pointer. */
6833 addr_affects_sp_p (SET_DEST (x));
6835 /* See comment on similar code in cse_insn for explanation of these
6836 tests. */
6837 if (GET_CODE (SET_DEST (x)) == REG || GET_CODE (SET_DEST (x)) == SUBREG
6838 || GET_CODE (SET_DEST (x)) == MEM)
6839 invalidate (SET_DEST (x), VOIDmode);
6840 else if (GET_CODE (SET_DEST (x)) == STRICT_LOW_PART
6841 || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
6842 invalidate (XEXP (SET_DEST (x), 0), GET_MODE (SET_DEST (x)));
6845 /* Find the end of INSN's basic block and return its range,
6846 the total number of SETs in all the insns of the block, the last insn of the
6847 block, and the branch path.
6849 The branch path indicates which branches should be followed. If a nonzero
6850 path size is specified, the block should be rescanned and a different set
6851 of branches will be taken. The branch path is only used if
6852 FLAG_CSE_FOLLOW_JUMPS or FLAG_CSE_SKIP_BLOCKS is nonzero.
6854 DATA is a pointer to a struct cse_basic_block_data, defined below, that is
6855 used to describe the block. It is filled in with the information about
6856 the current block. The incoming structure's branch path, if any, is used
6857 to construct the output branch path. */
6859 void
6860 cse_end_of_basic_block (insn, data, follow_jumps, after_loop, skip_blocks)
6861 rtx insn;
6862 struct cse_basic_block_data *data;
6863 int follow_jumps;
6864 int after_loop;
6865 int skip_blocks;
6867 rtx p = insn, q;
6868 int nsets = 0;
6869 int low_cuid = INSN_CUID (insn), high_cuid = INSN_CUID (insn);
6870 rtx next = INSN_P (insn) ? insn : next_real_insn (insn);
6871 int path_size = data->path_size;
6872 int path_entry = 0;
6873 int i;
6875 /* Update the previous branch path, if any. If the last branch was
6876 previously TAKEN, mark it NOT_TAKEN. If it was previously NOT_TAKEN,
6877 shorten the path by one and look at the previous branch. We know that
6878 at least one branch must have been taken if PATH_SIZE is nonzero. */
6879 while (path_size > 0)
6881 if (data->path[path_size - 1].status != NOT_TAKEN)
6883 data->path[path_size - 1].status = NOT_TAKEN;
6884 break;
6886 else
6887 path_size--;
6890 /* If the first instruction is marked with QImode, that means we've
6891 already processed this block. Our caller will look at DATA->LAST
6892 to figure out where to go next. We want to return the next block
6893 in the instruction stream, not some branched-to block somewhere
6894 else. We accomplish this by pretending our called forbid us to
6895 follow jumps, or skip blocks. */
6896 if (GET_MODE (insn) == QImode)
6897 follow_jumps = skip_blocks = 0;
6899 /* Scan to end of this basic block. */
6900 while (p && GET_CODE (p) != CODE_LABEL)
6902 /* Don't cse out the end of a loop. This makes a difference
6903 only for the unusual loops that always execute at least once;
6904 all other loops have labels there so we will stop in any case.
6905 Cse'ing out the end of the loop is dangerous because it
6906 might cause an invariant expression inside the loop
6907 to be reused after the end of the loop. This would make it
6908 hard to move the expression out of the loop in loop.c,
6909 especially if it is one of several equivalent expressions
6910 and loop.c would like to eliminate it.
6912 If we are running after loop.c has finished, we can ignore
6913 the NOTE_INSN_LOOP_END. */
6915 if (! after_loop && GET_CODE (p) == NOTE
6916 && NOTE_LINE_NUMBER (p) == NOTE_INSN_LOOP_END)
6917 break;
6919 /* Don't cse over a call to setjmp; on some machines (eg VAX)
6920 the regs restored by the longjmp come from
6921 a later time than the setjmp. */
6922 if (PREV_INSN (p) && GET_CODE (PREV_INSN (p)) == CALL_INSN
6923 && find_reg_note (PREV_INSN (p), REG_SETJMP, NULL))
6924 break;
6926 /* A PARALLEL can have lots of SETs in it,
6927 especially if it is really an ASM_OPERANDS. */
6928 if (INSN_P (p) && GET_CODE (PATTERN (p)) == PARALLEL)
6929 nsets += XVECLEN (PATTERN (p), 0);
6930 else if (GET_CODE (p) != NOTE)
6931 nsets += 1;
6933 /* Ignore insns made by CSE; they cannot affect the boundaries of
6934 the basic block. */
6936 if (INSN_UID (p) <= max_uid && INSN_CUID (p) > high_cuid)
6937 high_cuid = INSN_CUID (p);
6938 if (INSN_UID (p) <= max_uid && INSN_CUID (p) < low_cuid)
6939 low_cuid = INSN_CUID (p);
6941 /* See if this insn is in our branch path. If it is and we are to
6942 take it, do so. */
6943 if (path_entry < path_size && data->path[path_entry].branch == p)
6945 if (data->path[path_entry].status != NOT_TAKEN)
6946 p = JUMP_LABEL (p);
6948 /* Point to next entry in path, if any. */
6949 path_entry++;
6952 /* If this is a conditional jump, we can follow it if -fcse-follow-jumps
6953 was specified, we haven't reached our maximum path length, there are
6954 insns following the target of the jump, this is the only use of the
6955 jump label, and the target label is preceded by a BARRIER.
6957 Alternatively, we can follow the jump if it branches around a
6958 block of code and there are no other branches into the block.
6959 In this case invalidate_skipped_block will be called to invalidate any
6960 registers set in the block when following the jump. */
6962 else if ((follow_jumps || skip_blocks) && path_size < PATHLENGTH - 1
6963 && GET_CODE (p) == JUMP_INSN
6964 && GET_CODE (PATTERN (p)) == SET
6965 && GET_CODE (SET_SRC (PATTERN (p))) == IF_THEN_ELSE
6966 && JUMP_LABEL (p) != 0
6967 && LABEL_NUSES (JUMP_LABEL (p)) == 1
6968 && NEXT_INSN (JUMP_LABEL (p)) != 0)
6970 for (q = PREV_INSN (JUMP_LABEL (p)); q; q = PREV_INSN (q))
6971 if ((GET_CODE (q) != NOTE
6972 || NOTE_LINE_NUMBER (q) == NOTE_INSN_LOOP_END
6973 || (PREV_INSN (q) && GET_CODE (PREV_INSN (q)) == CALL_INSN
6974 && find_reg_note (PREV_INSN (q), REG_SETJMP, NULL)))
6975 && (GET_CODE (q) != CODE_LABEL || LABEL_NUSES (q) != 0))
6976 break;
6978 /* If we ran into a BARRIER, this code is an extension of the
6979 basic block when the branch is taken. */
6980 if (follow_jumps && q != 0 && GET_CODE (q) == BARRIER)
6982 /* Don't allow ourself to keep walking around an
6983 always-executed loop. */
6984 if (next_real_insn (q) == next)
6986 p = NEXT_INSN (p);
6987 continue;
6990 /* Similarly, don't put a branch in our path more than once. */
6991 for (i = 0; i < path_entry; i++)
6992 if (data->path[i].branch == p)
6993 break;
6995 if (i != path_entry)
6996 break;
6998 data->path[path_entry].branch = p;
6999 data->path[path_entry++].status = TAKEN;
7001 /* This branch now ends our path. It was possible that we
7002 didn't see this branch the last time around (when the
7003 insn in front of the target was a JUMP_INSN that was
7004 turned into a no-op). */
7005 path_size = path_entry;
7007 p = JUMP_LABEL (p);
7008 /* Mark block so we won't scan it again later. */
7009 PUT_MODE (NEXT_INSN (p), QImode);
7011 /* Detect a branch around a block of code. */
7012 else if (skip_blocks && q != 0 && GET_CODE (q) != CODE_LABEL)
7014 rtx tmp;
7016 if (next_real_insn (q) == next)
7018 p = NEXT_INSN (p);
7019 continue;
7022 for (i = 0; i < path_entry; i++)
7023 if (data->path[i].branch == p)
7024 break;
7026 if (i != path_entry)
7027 break;
7029 /* This is no_labels_between_p (p, q) with an added check for
7030 reaching the end of a function (in case Q precedes P). */
7031 for (tmp = NEXT_INSN (p); tmp && tmp != q; tmp = NEXT_INSN (tmp))
7032 if (GET_CODE (tmp) == CODE_LABEL)
7033 break;
7035 if (tmp == q)
7037 data->path[path_entry].branch = p;
7038 data->path[path_entry++].status = AROUND;
7040 path_size = path_entry;
7042 p = JUMP_LABEL (p);
7043 /* Mark block so we won't scan it again later. */
7044 PUT_MODE (NEXT_INSN (p), QImode);
7048 p = NEXT_INSN (p);
7051 data->low_cuid = low_cuid;
7052 data->high_cuid = high_cuid;
7053 data->nsets = nsets;
7054 data->last = p;
7056 /* If all jumps in the path are not taken, set our path length to zero
7057 so a rescan won't be done. */
7058 for (i = path_size - 1; i >= 0; i--)
7059 if (data->path[i].status != NOT_TAKEN)
7060 break;
7062 if (i == -1)
7063 data->path_size = 0;
7064 else
7065 data->path_size = path_size;
7067 /* End the current branch path. */
7068 data->path[path_size].branch = 0;
7071 /* Perform cse on the instructions of a function.
7072 F is the first instruction.
7073 NREGS is one plus the highest pseudo-reg number used in the instruction.
7075 AFTER_LOOP is 1 if this is the cse call done after loop optimization
7076 (only if -frerun-cse-after-loop).
7078 Returns 1 if jump_optimize should be redone due to simplifications
7079 in conditional jump instructions. */
7082 cse_main (f, nregs, after_loop, file)
7083 rtx f;
7084 int nregs;
7085 int after_loop;
7086 FILE *file;
7088 struct cse_basic_block_data val;
7089 rtx insn = f;
7090 int i;
7092 cse_jumps_altered = 0;
7093 recorded_label_ref = 0;
7094 constant_pool_entries_cost = 0;
7095 val.path_size = 0;
7097 init_recog ();
7098 init_alias_analysis ();
7100 max_reg = nregs;
7102 max_insn_uid = get_max_uid ();
7104 reg_eqv_table = (struct reg_eqv_elem *)
7105 xmalloc (nregs * sizeof (struct reg_eqv_elem));
7107 #ifdef LOAD_EXTEND_OP
7109 /* Allocate scratch rtl here. cse_insn will fill in the memory reference
7110 and change the code and mode as appropriate. */
7111 memory_extend_rtx = gen_rtx_ZERO_EXTEND (VOIDmode, NULL_RTX);
7112 #endif
7114 /* Reset the counter indicating how many elements have been made
7115 thus far. */
7116 n_elements_made = 0;
7118 /* Find the largest uid. */
7120 max_uid = get_max_uid ();
7121 uid_cuid = (int *) xcalloc (max_uid + 1, sizeof (int));
7123 /* Compute the mapping from uids to cuids.
7124 CUIDs are numbers assigned to insns, like uids,
7125 except that cuids increase monotonically through the code.
7126 Don't assign cuids to line-number NOTEs, so that the distance in cuids
7127 between two insns is not affected by -g. */
7129 for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
7131 if (GET_CODE (insn) != NOTE
7132 || NOTE_LINE_NUMBER (insn) < 0)
7133 INSN_CUID (insn) = ++i;
7134 else
7135 /* Give a line number note the same cuid as preceding insn. */
7136 INSN_CUID (insn) = i;
7139 ggc_push_context ();
7141 /* Loop over basic blocks.
7142 Compute the maximum number of qty's needed for each basic block
7143 (which is 2 for each SET). */
7144 insn = f;
7145 while (insn)
7147 cse_altered = 0;
7148 cse_end_of_basic_block (insn, &val, flag_cse_follow_jumps, after_loop,
7149 flag_cse_skip_blocks);
7151 /* If this basic block was already processed or has no sets, skip it. */
7152 if (val.nsets == 0 || GET_MODE (insn) == QImode)
7154 PUT_MODE (insn, VOIDmode);
7155 insn = (val.last ? NEXT_INSN (val.last) : 0);
7156 val.path_size = 0;
7157 continue;
7160 cse_basic_block_start = val.low_cuid;
7161 cse_basic_block_end = val.high_cuid;
7162 max_qty = val.nsets * 2;
7164 if (file)
7165 fnotice (file, ";; Processing block from %d to %d, %d sets.\n",
7166 INSN_UID (insn), val.last ? INSN_UID (val.last) : 0,
7167 val.nsets);
7169 /* Make MAX_QTY bigger to give us room to optimize
7170 past the end of this basic block, if that should prove useful. */
7171 if (max_qty < 500)
7172 max_qty = 500;
7174 max_qty += max_reg;
7176 /* If this basic block is being extended by following certain jumps,
7177 (see `cse_end_of_basic_block'), we reprocess the code from the start.
7178 Otherwise, we start after this basic block. */
7179 if (val.path_size > 0)
7180 cse_basic_block (insn, val.last, val.path, 0);
7181 else
7183 int old_cse_jumps_altered = cse_jumps_altered;
7184 rtx temp;
7186 /* When cse changes a conditional jump to an unconditional
7187 jump, we want to reprocess the block, since it will give
7188 us a new branch path to investigate. */
7189 cse_jumps_altered = 0;
7190 temp = cse_basic_block (insn, val.last, val.path, ! after_loop);
7191 if (cse_jumps_altered == 0
7192 || (flag_cse_follow_jumps == 0 && flag_cse_skip_blocks == 0))
7193 insn = temp;
7195 cse_jumps_altered |= old_cse_jumps_altered;
7198 if (cse_altered)
7199 ggc_collect ();
7201 #ifdef USE_C_ALLOCA
7202 alloca (0);
7203 #endif
7206 ggc_pop_context ();
7208 if (max_elements_made < n_elements_made)
7209 max_elements_made = n_elements_made;
7211 /* Clean up. */
7212 end_alias_analysis ();
7213 free (uid_cuid);
7214 free (reg_eqv_table);
7216 return cse_jumps_altered || recorded_label_ref;
7219 /* Process a single basic block. FROM and TO and the limits of the basic
7220 block. NEXT_BRANCH points to the branch path when following jumps or
7221 a null path when not following jumps.
7223 AROUND_LOOP is nonzero if we are to try to cse around to the start of a
7224 loop. This is true when we are being called for the last time on a
7225 block and this CSE pass is before loop.c. */
7227 static rtx
7228 cse_basic_block (from, to, next_branch, around_loop)
7229 rtx from, to;
7230 struct branch_path *next_branch;
7231 int around_loop;
7233 rtx insn;
7234 int to_usage = 0;
7235 rtx libcall_insn = NULL_RTX;
7236 int num_insns = 0;
7238 /* This array is undefined before max_reg, so only allocate
7239 the space actually needed and adjust the start. */
7241 qty_table
7242 = (struct qty_table_elem *) xmalloc ((max_qty - max_reg)
7243 * sizeof (struct qty_table_elem));
7244 qty_table -= max_reg;
7246 new_basic_block ();
7248 /* TO might be a label. If so, protect it from being deleted. */
7249 if (to != 0 && GET_CODE (to) == CODE_LABEL)
7250 ++LABEL_NUSES (to);
7252 for (insn = from; insn != to; insn = NEXT_INSN (insn))
7254 enum rtx_code code = GET_CODE (insn);
7256 /* If we have processed 1,000 insns, flush the hash table to
7257 avoid extreme quadratic behavior. We must not include NOTEs
7258 in the count since there may be more of them when generating
7259 debugging information. If we clear the table at different
7260 times, code generated with -g -O might be different than code
7261 generated with -O but not -g.
7263 ??? This is a real kludge and needs to be done some other way.
7264 Perhaps for 2.9. */
7265 if (code != NOTE && num_insns++ > 1000)
7267 flush_hash_table ();
7268 num_insns = 0;
7271 /* See if this is a branch that is part of the path. If so, and it is
7272 to be taken, do so. */
7273 if (next_branch->branch == insn)
7275 enum taken status = next_branch++->status;
7276 if (status != NOT_TAKEN)
7278 if (status == TAKEN)
7279 record_jump_equiv (insn, 1);
7280 else
7281 invalidate_skipped_block (NEXT_INSN (insn));
7283 /* Set the last insn as the jump insn; it doesn't affect cc0.
7284 Then follow this branch. */
7285 #ifdef HAVE_cc0
7286 prev_insn_cc0 = 0;
7287 prev_insn = insn;
7288 #endif
7289 insn = JUMP_LABEL (insn);
7290 continue;
7294 if (GET_MODE (insn) == QImode)
7295 PUT_MODE (insn, VOIDmode);
7297 if (GET_RTX_CLASS (code) == 'i')
7299 rtx p;
7301 /* Process notes first so we have all notes in canonical forms when
7302 looking for duplicate operations. */
7304 if (REG_NOTES (insn))
7305 REG_NOTES (insn) = cse_process_notes (REG_NOTES (insn), NULL_RTX);
7307 /* Track when we are inside in LIBCALL block. Inside such a block,
7308 we do not want to record destinations. The last insn of a
7309 LIBCALL block is not considered to be part of the block, since
7310 its destination is the result of the block and hence should be
7311 recorded. */
7313 if (REG_NOTES (insn) != 0)
7315 if ((p = find_reg_note (insn, REG_LIBCALL, NULL_RTX)))
7316 libcall_insn = XEXP (p, 0);
7317 else if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
7318 libcall_insn = 0;
7321 cse_insn (insn, libcall_insn);
7323 /* If we haven't already found an insn where we added a LABEL_REF,
7324 check this one. */
7325 if (GET_CODE (insn) == INSN && ! recorded_label_ref
7326 && for_each_rtx (&PATTERN (insn), check_for_label_ref,
7327 (void *) insn))
7328 recorded_label_ref = 1;
7331 /* If INSN is now an unconditional jump, skip to the end of our
7332 basic block by pretending that we just did the last insn in the
7333 basic block. If we are jumping to the end of our block, show
7334 that we can have one usage of TO. */
7336 if (any_uncondjump_p (insn))
7338 if (to == 0)
7340 free (qty_table + max_reg);
7341 return 0;
7344 if (JUMP_LABEL (insn) == to)
7345 to_usage = 1;
7347 /* Maybe TO was deleted because the jump is unconditional.
7348 If so, there is nothing left in this basic block. */
7349 /* ??? Perhaps it would be smarter to set TO
7350 to whatever follows this insn,
7351 and pretend the basic block had always ended here. */
7352 if (INSN_DELETED_P (to))
7353 break;
7355 insn = PREV_INSN (to);
7358 /* See if it is ok to keep on going past the label
7359 which used to end our basic block. Remember that we incremented
7360 the count of that label, so we decrement it here. If we made
7361 a jump unconditional, TO_USAGE will be one; in that case, we don't
7362 want to count the use in that jump. */
7364 if (to != 0 && NEXT_INSN (insn) == to
7365 && GET_CODE (to) == CODE_LABEL && --LABEL_NUSES (to) == to_usage)
7367 struct cse_basic_block_data val;
7368 rtx prev;
7370 insn = NEXT_INSN (to);
7372 /* If TO was the last insn in the function, we are done. */
7373 if (insn == 0)
7375 free (qty_table + max_reg);
7376 return 0;
7379 /* If TO was preceded by a BARRIER we are done with this block
7380 because it has no continuation. */
7381 prev = prev_nonnote_insn (to);
7382 if (prev && GET_CODE (prev) == BARRIER)
7384 free (qty_table + max_reg);
7385 return insn;
7388 /* Find the end of the following block. Note that we won't be
7389 following branches in this case. */
7390 to_usage = 0;
7391 val.path_size = 0;
7392 cse_end_of_basic_block (insn, &val, 0, 0, 0);
7394 /* If the tables we allocated have enough space left
7395 to handle all the SETs in the next basic block,
7396 continue through it. Otherwise, return,
7397 and that block will be scanned individually. */
7398 if (val.nsets * 2 + next_qty > max_qty)
7399 break;
7401 cse_basic_block_start = val.low_cuid;
7402 cse_basic_block_end = val.high_cuid;
7403 to = val.last;
7405 /* Prevent TO from being deleted if it is a label. */
7406 if (to != 0 && GET_CODE (to) == CODE_LABEL)
7407 ++LABEL_NUSES (to);
7409 /* Back up so we process the first insn in the extension. */
7410 insn = PREV_INSN (insn);
7414 if (next_qty > max_qty)
7415 abort ();
7417 /* If we are running before loop.c, we stopped on a NOTE_INSN_LOOP_END, and
7418 the previous insn is the only insn that branches to the head of a loop,
7419 we can cse into the loop. Don't do this if we changed the jump
7420 structure of a loop unless we aren't going to be following jumps. */
7422 insn = prev_nonnote_insn (to);
7423 if ((cse_jumps_altered == 0
7424 || (flag_cse_follow_jumps == 0 && flag_cse_skip_blocks == 0))
7425 && around_loop && to != 0
7426 && GET_CODE (to) == NOTE && NOTE_LINE_NUMBER (to) == NOTE_INSN_LOOP_END
7427 && GET_CODE (insn) == JUMP_INSN
7428 && JUMP_LABEL (insn) != 0
7429 && LABEL_NUSES (JUMP_LABEL (insn)) == 1)
7430 cse_around_loop (JUMP_LABEL (insn));
7432 free (qty_table + max_reg);
7434 return to ? NEXT_INSN (to) : 0;
7437 /* Called via for_each_rtx to see if an insn is using a LABEL_REF for which
7438 there isn't a REG_LABEL note. Return one if so. DATA is the insn. */
7440 static int
7441 check_for_label_ref (rtl, data)
7442 rtx *rtl;
7443 void *data;
7445 rtx insn = (rtx) data;
7447 /* If this insn uses a LABEL_REF and there isn't a REG_LABEL note for it,
7448 we must rerun jump since it needs to place the note. If this is a
7449 LABEL_REF for a CODE_LABEL that isn't in the insn chain, don't do this
7450 since no REG_LABEL will be added. */
7451 return (GET_CODE (*rtl) == LABEL_REF
7452 && ! LABEL_REF_NONLOCAL_P (*rtl)
7453 && LABEL_P (XEXP (*rtl, 0))
7454 && INSN_UID (XEXP (*rtl, 0)) != 0
7455 && ! find_reg_note (insn, REG_LABEL, XEXP (*rtl, 0)));
7458 /* Count the number of times registers are used (not set) in X.
7459 COUNTS is an array in which we accumulate the count, INCR is how much
7460 we count each register usage.
7462 Don't count a usage of DEST, which is the SET_DEST of a SET which
7463 contains X in its SET_SRC. This is because such a SET does not
7464 modify the liveness of DEST. */
7466 static void
7467 count_reg_usage (x, counts, dest, incr)
7468 rtx x;
7469 int *counts;
7470 rtx dest;
7471 int incr;
7473 enum rtx_code code;
7474 const char *fmt;
7475 int i, j;
7477 if (x == 0)
7478 return;
7480 switch (code = GET_CODE (x))
7482 case REG:
7483 if (x != dest)
7484 counts[REGNO (x)] += incr;
7485 return;
7487 case PC:
7488 case CC0:
7489 case CONST:
7490 case CONST_INT:
7491 case CONST_DOUBLE:
7492 case CONST_VECTOR:
7493 case SYMBOL_REF:
7494 case LABEL_REF:
7495 return;
7497 case CLOBBER:
7498 /* If we are clobbering a MEM, mark any registers inside the address
7499 as being used. */
7500 if (GET_CODE (XEXP (x, 0)) == MEM)
7501 count_reg_usage (XEXP (XEXP (x, 0), 0), counts, NULL_RTX, incr);
7502 return;
7504 case SET:
7505 /* Unless we are setting a REG, count everything in SET_DEST. */
7506 if (GET_CODE (SET_DEST (x)) != REG)
7507 count_reg_usage (SET_DEST (x), counts, NULL_RTX, incr);
7509 /* If SRC has side-effects, then we can't delete this insn, so the
7510 usage of SET_DEST inside SRC counts.
7512 ??? Strictly-speaking, we might be preserving this insn
7513 because some other SET has side-effects, but that's hard
7514 to do and can't happen now. */
7515 count_reg_usage (SET_SRC (x), counts,
7516 side_effects_p (SET_SRC (x)) ? NULL_RTX : SET_DEST (x),
7517 incr);
7518 return;
7520 case CALL_INSN:
7521 count_reg_usage (CALL_INSN_FUNCTION_USAGE (x), counts, NULL_RTX, incr);
7522 /* Fall through. */
7524 case INSN:
7525 case JUMP_INSN:
7526 count_reg_usage (PATTERN (x), counts, NULL_RTX, incr);
7528 /* Things used in a REG_EQUAL note aren't dead since loop may try to
7529 use them. */
7531 count_reg_usage (REG_NOTES (x), counts, NULL_RTX, incr);
7532 return;
7534 case EXPR_LIST:
7535 case INSN_LIST:
7536 if (REG_NOTE_KIND (x) == REG_EQUAL
7537 || (REG_NOTE_KIND (x) != REG_NONNEG && GET_CODE (XEXP (x,0)) == USE))
7538 count_reg_usage (XEXP (x, 0), counts, NULL_RTX, incr);
7539 count_reg_usage (XEXP (x, 1), counts, NULL_RTX, incr);
7540 return;
7542 default:
7543 break;
7546 fmt = GET_RTX_FORMAT (code);
7547 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7549 if (fmt[i] == 'e')
7550 count_reg_usage (XEXP (x, i), counts, dest, incr);
7551 else if (fmt[i] == 'E')
7552 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
7553 count_reg_usage (XVECEXP (x, i, j), counts, dest, incr);
7557 /* Return true if set is live. */
7558 static bool
7559 set_live_p (set, insn, counts)
7560 rtx set;
7561 rtx insn ATTRIBUTE_UNUSED; /* Only used with HAVE_cc0. */
7562 int *counts;
7564 #ifdef HAVE_cc0
7565 rtx tem;
7566 #endif
7568 if (set_noop_p (set))
7571 #ifdef HAVE_cc0
7572 else if (GET_CODE (SET_DEST (set)) == CC0
7573 && !side_effects_p (SET_SRC (set))
7574 && ((tem = next_nonnote_insn (insn)) == 0
7575 || !INSN_P (tem)
7576 || !reg_referenced_p (cc0_rtx, PATTERN (tem))))
7577 return false;
7578 #endif
7579 else if (GET_CODE (SET_DEST (set)) != REG
7580 || REGNO (SET_DEST (set)) < FIRST_PSEUDO_REGISTER
7581 || counts[REGNO (SET_DEST (set))] != 0
7582 || side_effects_p (SET_SRC (set))
7583 /* An ADDRESSOF expression can turn into a use of the
7584 internal arg pointer, so always consider the
7585 internal arg pointer live. If it is truly dead,
7586 flow will delete the initializing insn. */
7587 || (SET_DEST (set) == current_function_internal_arg_pointer))
7588 return true;
7589 return false;
7592 /* Return true if insn is live. */
7594 static bool
7595 insn_live_p (insn, counts)
7596 rtx insn;
7597 int *counts;
7599 int i;
7600 if (flag_non_call_exceptions && may_trap_p (PATTERN (insn)))
7601 return true;
7602 else if (GET_CODE (PATTERN (insn)) == SET)
7603 return set_live_p (PATTERN (insn), insn, counts);
7604 else if (GET_CODE (PATTERN (insn)) == PARALLEL)
7606 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
7608 rtx elt = XVECEXP (PATTERN (insn), 0, i);
7610 if (GET_CODE (elt) == SET)
7612 if (set_live_p (elt, insn, counts))
7613 return true;
7615 else if (GET_CODE (elt) != CLOBBER && GET_CODE (elt) != USE)
7616 return true;
7618 return false;
7620 else
7621 return true;
7624 /* Return true if libcall is dead as a whole. */
7626 static bool
7627 dead_libcall_p (insn, counts)
7628 rtx insn;
7629 int *counts;
7631 rtx note;
7632 /* See if there's a REG_EQUAL note on this insn and try to
7633 replace the source with the REG_EQUAL expression.
7635 We assume that insns with REG_RETVALs can only be reg->reg
7636 copies at this point. */
7637 note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
7638 if (note)
7640 rtx set = single_set (insn);
7641 rtx new = simplify_rtx (XEXP (note, 0));
7643 if (!new)
7644 new = XEXP (note, 0);
7646 /* While changing insn, we must update the counts accordingly. */
7647 count_reg_usage (insn, counts, NULL_RTX, -1);
7649 if (set && validate_change (insn, &SET_SRC (set), new, 0))
7651 count_reg_usage (insn, counts, NULL_RTX, 1);
7652 remove_note (insn, find_reg_note (insn, REG_RETVAL, NULL_RTX));
7653 remove_note (insn, note);
7654 return true;
7656 count_reg_usage (insn, counts, NULL_RTX, 1);
7658 return false;
7661 /* Scan all the insns and delete any that are dead; i.e., they store a register
7662 that is never used or they copy a register to itself.
7664 This is used to remove insns made obviously dead by cse, loop or other
7665 optimizations. It improves the heuristics in loop since it won't try to
7666 move dead invariants out of loops or make givs for dead quantities. The
7667 remaining passes of the compilation are also sped up. */
7670 delete_trivially_dead_insns (insns, nreg)
7671 rtx insns;
7672 int nreg;
7674 int *counts;
7675 rtx insn, prev;
7676 int in_libcall = 0, dead_libcall = 0;
7677 int ndead = 0, nlastdead, niterations = 0;
7679 timevar_push (TV_DELETE_TRIVIALLY_DEAD);
7680 /* First count the number of times each register is used. */
7681 counts = (int *) xcalloc (nreg, sizeof (int));
7682 for (insn = next_real_insn (insns); insn; insn = next_real_insn (insn))
7683 count_reg_usage (insn, counts, NULL_RTX, 1);
7687 nlastdead = ndead;
7688 niterations++;
7689 /* Go from the last insn to the first and delete insns that only set unused
7690 registers or copy a register to itself. As we delete an insn, remove
7691 usage counts for registers it uses.
7693 The first jump optimization pass may leave a real insn as the last
7694 insn in the function. We must not skip that insn or we may end
7695 up deleting code that is not really dead. */
7696 insn = get_last_insn ();
7697 if (! INSN_P (insn))
7698 insn = prev_real_insn (insn);
7700 for (; insn; insn = prev)
7702 int live_insn = 0;
7704 prev = prev_real_insn (insn);
7706 /* Don't delete any insns that are part of a libcall block unless
7707 we can delete the whole libcall block.
7709 Flow or loop might get confused if we did that. Remember
7710 that we are scanning backwards. */
7711 if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
7713 in_libcall = 1;
7714 live_insn = 1;
7715 dead_libcall = dead_libcall_p (insn, counts);
7717 else if (in_libcall)
7718 live_insn = ! dead_libcall;
7719 else
7720 live_insn = insn_live_p (insn, counts);
7722 /* If this is a dead insn, delete it and show registers in it aren't
7723 being used. */
7725 if (! live_insn)
7727 count_reg_usage (insn, counts, NULL_RTX, -1);
7728 delete_insn_and_edges (insn);
7729 ndead++;
7732 if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
7734 in_libcall = 0;
7735 dead_libcall = 0;
7739 while (ndead != nlastdead);
7741 if (rtl_dump_file && ndead)
7742 fprintf (rtl_dump_file, "Deleted %i trivially dead insns; %i iterations\n",
7743 ndead, niterations);
7744 /* Clean up. */
7745 free (counts);
7746 timevar_pop (TV_DELETE_TRIVIALLY_DEAD);
7747 return ndead;