/* Common subexpression elimination for GNU compiler.
   Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
#include "config.h"
/* stdio.h must precede rtl.h for FFS.  */
#include "system.h"

#include "rtl.h"
#include "tm_p.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "flags.h"
#include "real.h"
#include "insn-config.h"
#include "recog.h"
#include "function.h"
#include "expr.h"
#include "toplev.h"
#include "output.h"
#include "ggc.h"
/* The basic idea of common subexpression elimination is to go
   through the code, keeping a record of expressions that would
   have the same value at the current scan point, and replacing
   expressions encountered with the cheapest equivalent expression.

   It is too complicated to keep track of the different possibilities
   when control paths merge in this code; so, at each label, we forget all
   that is known and start fresh.  This can be described as processing each
   extended basic block separately.  We have a separate pass to perform
   global CSE.

   Note CSE can turn a conditional or computed jump into a nop or
   an unconditional jump.  When this occurs we arrange to run the jump
   optimizer after CSE to delete the unreachable code.

   We use two data structures to record the equivalent expressions:
   a hash table for most expressions, and a vector of "quantity
   numbers" to record equivalent (pseudo) registers.

   The use of the special data structure for registers is desirable
   because it is faster.  It is possible because register references
   contain a fairly small number, the register number, taken from
   a contiguously allocated series, and two register references are
   identical if they have the same number.  General expressions
   do not have any such thing, so the only way to retrieve the
   information recorded on an expression other than a register
   is to keep it in a hash table.

Registers and "quantity numbers":

   At the start of each basic block, all of the (hardware and pseudo)
   registers used in the function are given distinct quantity
   numbers to indicate their contents.  During scan, when the code
   copies one register into another, we copy the quantity number.
   When a register is loaded in any other way, we allocate a new
   quantity number to describe the value generated by this operation.
   `reg_qty' records what quantity a register is currently thought
   of as containing.

   All real quantity numbers are greater than or equal to `max_reg'.
   If register N has not been assigned a quantity, reg_qty[N] will equal N.

   Quantity numbers below `max_reg' do not exist and none of the `qty_table'
   entries should be referenced with an index below `max_reg'.

   We also maintain a bidirectional chain of registers for each
   quantity number.  The `qty_table' members `first_reg' and `last_reg',
   and `reg_eqv_table' members `next' and `prev' hold these chains.

   The first register in a chain is the one whose lifespan is least local.
   Among equals, it is the one that was seen first.
   We replace any equivalent register with that one.

   If two registers have the same quantity number, it must be true that
   REG expressions with qty_table `mode' must be in the hash table for both
   registers and must be in the same class.

   The converse is not true.  Since hard registers may be referenced in
   any mode, two REG expressions might be equivalent in the hash table
   but not have the same quantity number if the quantity number of one
   of the registers is not the same mode as those expressions.

Constants and quantity numbers:

   When a quantity has a known constant value, that value is stored
   in the appropriate qty_table `const_rtx'.  This is in addition to
   putting the constant in the hash table as is usual for non-regs.

   Whether a reg or a constant is preferred is determined by the configuration
   macro CONST_COSTS and will often depend on the constant value.  In any
   event, expressions containing constants can be simplified, by fold_rtx.

   When a quantity has a known nearly constant value (such as an address
   of a stack slot), that value is stored in the appropriate qty_table
   `const_rtx'.

   Integer constants don't have a machine mode.  However, cse
   determines the intended machine mode from the destination
   of the instruction that moves the constant.  The machine mode
   is recorded in the hash table along with the actual RTL
   constant expression so that different modes are kept separate.

Other expressions:

   To record known equivalences among expressions in general
   we use a hash table called `table'.  It has a fixed number of buckets
   that contain chains of `struct table_elt' elements for expressions.
   These chains connect the elements whose expressions have the same
   hash codes.

   Other chains through the same elements connect the elements which
   currently have equivalent values.

   Register references in an expression are canonicalized before hashing
   the expression.  This is done using `reg_qty' and qty_table `first_reg'.
   The hash code of a register reference is computed using the quantity
   number, not the register number.

   When the value of an expression changes, it is necessary to remove from the
   hash table not just that expression but all expressions whose values
   could be different as a result.

     1. If the value changing is in memory, except in special cases
     ANYTHING referring to memory could be changed.  That is because
     nobody knows where a pointer does not point.
     The function `invalidate_memory' removes what is necessary.

     The special cases are when the address is constant or is
     a constant plus a fixed register such as the frame pointer
     or a static chain pointer.  When such addresses are stored in,
     we can tell exactly which other such addresses must be invalidated
     due to overlap.  `invalidate' does this.
     All expressions that refer to non-constant
     memory addresses are also invalidated.  `invalidate_memory' does this.

     2. If the value changing is a register, all expressions
     containing references to that register, and only those,
     must be removed.

   Because searching the entire hash table for expressions that contain
   a register is very slow, we try to figure out when it isn't necessary.
   Precisely, this is necessary only when expressions have been
   entered in the hash table using this register, and then the value has
   changed, and then another expression wants to be added to refer to
   the register's new value.  This sequence of circumstances is rare
   within any one basic block.

   The vectors `reg_tick' and `reg_in_table' are used to detect this case.
   reg_tick[i] is incremented whenever a value is stored in register i.
   reg_in_table[i] holds -1 if no references to register i have been
   entered in the table; otherwise, it contains the value reg_tick[i] had
   when the references were entered.  If we want to enter a reference
   and reg_in_table[i] != reg_tick[i], we must scan and remove old references.
   Until we want to enter a new entry, the mere fact that the two vectors
   don't match makes the entries be ignored if anyone tries to match them.

   Registers themselves are entered in the hash table as well as in
   the equivalent-register chains.  However, the vectors `reg_tick'
   and `reg_in_table' do not apply to expressions which are simple
   register references.  These expressions are removed from the table
   immediately when they become invalid, and this can be done even if
   we do not immediately search for all the expressions that refer to
   the register.

   A CLOBBER rtx in an instruction invalidates its operand for further
   reuse.  A CLOBBER or SET rtx whose operand is a MEM:BLK
   invalidates everything that resides in memory.

Related expressions:

   Constant expressions that differ only by an additive integer
   are called related.  When a constant expression is put in
   the table, the related expression with no constant term
   is also entered.  These are made to point at each other
   so that it is possible to find out if there exists any
   register equivalent to an expression related to a given expression.  */
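
/* As a hedged illustration (not part of the pass itself), suppose the
   function uses pseudo registers 40 and 41, so max_reg == 42 and
   new_basic_block sets next_qty to 42.  Scanning

	(set (reg:SI 40) (const_int 7))
	(set (reg:SI 41) (reg:SI 40))
	(set (reg:SI 40) (const_int 9))

   the first insn allocates qty 42 for reg 40 and records (const_int 7)
   as its constant value; the register copy makes REG_QTY (41) == 42 as
   well; the third insn allocates a fresh qty 43 for reg 40.  Reg 41
   keeps qty 42, so a later use of (reg:SI 41) can still be folded to
   (const_int 7) through qty_table[42].const_rtx.  */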
/* One plus largest register number used in this function.  */

static int max_reg;

/* One plus largest instruction UID used in this function at time of
   cse_main call.  */

static int max_insn_uid;

/* Length of qty_table vector.  We know in advance we will not need
   a quantity number this big.  */

static int max_qty;

/* Next quantity number to be allocated.
   This is 1 + the largest number needed so far.  */

static int next_qty;

/* Per-qty information tracking.

   `first_reg' and `last_reg' track the head and tail of the
   chain of registers which currently contain this quantity.

   `mode' contains the machine mode of this quantity.

   `const_rtx' holds the rtx of the constant value of this
   quantity, if known.  A sum of the frame/arg pointer
   and a constant can also be entered here.  When this holds
   a known value, `const_insn' is the insn which stored the
   constant value.

   `comparison_{code,const,qty}' are used to track when a
   comparison between a quantity and some constant or register has
   been passed.  In such a case, we know the results of the comparison
   in case we see it again.  These members record a comparison that
   is known to be true.  `comparison_code' holds the rtx code of such
   a comparison, else it is set to UNKNOWN and the other two
   comparison members are undefined.  `comparison_const' holds
   the constant being compared against, or zero if the comparison
   is not against a constant.  `comparison_qty' holds the quantity
   being compared against when the result is known.  If the comparison
   is not with a register, `comparison_qty' is -1.  */

struct qty_table_elem
{
  rtx const_rtx;
  rtx const_insn;
  rtx comparison_const;
  int comparison_qty;
  unsigned int first_reg, last_reg;
  enum machine_mode mode;
  enum rtx_code comparison_code;
};
/* The table of all qtys, indexed by qty number.  */
static struct qty_table_elem *qty_table;

#ifdef HAVE_cc0
/* For machines that have a CC0, we do not record its value in the hash
   table since its use is guaranteed to be the insn immediately following
   its definition and any other insn is presumed to invalidate it.

   Instead, we store below the value last assigned to CC0.  If it should
   happen to be a constant, it is stored in preference to the actual
   assigned value.  In case it is a constant, we store the mode in which
   the constant should be interpreted.  */

static rtx prev_insn_cc0;
static enum machine_mode prev_insn_cc0_mode;
#endif

/* Previous actual insn.  0 if at first insn of basic block.  */

static rtx prev_insn;

/* Insn being scanned.  */

static rtx this_insn;

/* Indexed by register number; gives the number of the next (or
   previous) register in the chain of registers sharing the same
   value.

   Or -1 if this register is at the end of the chain.

   If reg_qty[N] == N, reg_eqv_table[N].next is undefined.  */

/* Per-register equivalence chain.  */
struct reg_eqv_elem
{
  int next, prev;
};

/* The table of all register equivalence chains.  */
static struct reg_eqv_elem *reg_eqv_table;
struct cse_reg_info
{
  /* Next in hash chain.  */
  struct cse_reg_info *hash_next;

  /* The next cse_reg_info structure in the free or used list.  */
  struct cse_reg_info *next;

  /* Search key.  */
  unsigned int regno;

  /* The quantity number of the register's current contents.  */
  int reg_qty;

  /* The number of times the register has been altered in the current
     basic block.  */
  int reg_tick;

  /* The REG_TICK value at which rtx's containing this register are
     valid in the hash table.  If this does not equal the current
     reg_tick value, such expressions existing in the hash table are
     invalid.  */
  int reg_in_table;
};

/* A free list of cse_reg_info entries.  */
static struct cse_reg_info *cse_reg_info_free_list;

/* A used list of cse_reg_info entries.  */
static struct cse_reg_info *cse_reg_info_used_list;
static struct cse_reg_info *cse_reg_info_used_list_end;

/* A mapping from registers to cse_reg_info data structures.  */
#define REGHASH_SHIFT	7
#define REGHASH_SIZE	(1 << REGHASH_SHIFT)
#define REGHASH_MASK	(REGHASH_SIZE - 1)
static struct cse_reg_info *reg_hash[REGHASH_SIZE];

#define REGHASH_FN(REGNO)	\
	(((REGNO) ^ ((REGNO) >> REGHASH_SHIFT)) & REGHASH_MASK)
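
/* A hypothetical probe, for illustration only: register 150 hashes to
   bucket (150 ^ (150 >> 7)) & 127 == 23, and its chain would be walked
   as in this sketch (get_cse_reg_info below does exactly this before
   allocating a fresh entry).  */
#if 0
  struct cse_reg_info *p;
  for (p = reg_hash[REGHASH_FN (150)]; p != NULL; p = p->hash_next)
    if (p->regno == 150)
      break;
#endif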
/* The last lookup we did into the cse_reg_info hash table.  This allows
   us to cache repeated lookups.  */
static unsigned int cached_regno;
static struct cse_reg_info *cached_cse_reg_info;
/* A HARD_REG_SET containing all the hard registers for which there is
   currently a REG expression in the hash table.  Note the difference
   from the above variables, which indicate if the REG is mentioned in some
   expression in the table.  */

static HARD_REG_SET hard_regs_in_table;

/* CUID of insn that starts the basic block currently being cse-processed.  */

static int cse_basic_block_start;

/* CUID of insn that ends the basic block currently being cse-processed.  */

static int cse_basic_block_end;

/* Vector mapping INSN_UIDs to cuids.
   The cuids are like uids but increase monotonically always.
   We use them to see whether a reg is used outside a given basic block.  */

static int *uid_cuid;

/* Highest UID in UID_CUID.  */
static int max_uid;

/* Get the cuid of an insn.  */

#define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])

/* Nonzero if this pass has made changes, and therefore it's
   worthwhile to run the garbage collector.  */

static int cse_altered;

/* Nonzero if cse has altered conditional jump insns
   in such a way that jump optimization should be redone.  */

static int cse_jumps_altered;

/* Nonzero if we put a LABEL_REF into the hash table for an INSN without a
   REG_LABEL; in that case we have to rerun jump after CSE to put in the
   note.  */
static int recorded_label_ref;

/* canon_hash stores 1 in do_not_record
   if it notices a reference to CC0, PC, or some other volatile
   subexpression.  */

static int do_not_record;

#ifdef LOAD_EXTEND_OP

/* Scratch rtl used when looking for load-extended copy of a MEM.  */
static rtx memory_extend_rtx;
#endif

/* canon_hash stores 1 in hash_arg_in_memory
   if it notices a reference to memory within the expression being hashed.  */

static int hash_arg_in_memory;
/* The hash table contains buckets which are chains of `struct table_elt's,
   each recording one expression's information.
   That expression is in the `exp' field.

   The `canon_exp' field contains a canonical (from the point of view of
   alias analysis) version of the `exp' field.

   Those elements with the same hash code are chained in both directions
   through the `next_same_hash' and `prev_same_hash' fields.

   Each set of expressions with equivalent values
   is on a two-way chain through the `next_same_value'
   and `prev_same_value' fields, and all point with
   the `first_same_value' field at the first element in
   that chain.  The chain is in order of increasing cost.
   Each element's cost value is in its `cost' field.

   The `in_memory' field is nonzero for elements that
   involve any reference to memory.  These elements are removed
   whenever a write is done to an unidentified location in memory.
   To be safe, we assume that a memory address is unidentified unless
   the address is either a symbol constant or a constant plus
   the frame pointer or argument pointer.

   The `related_value' field is used to connect related expressions
   (that differ by adding an integer).
   The related expressions are chained in a circular fashion.
   `related_value' is zero for expressions for which this
   chain is not useful.

   The `cost' field stores the cost of this element's expression.
   The `regcost' field stores the value returned by approx_reg_cost for
   this element's expression.

   The `is_const' flag is set if the element is a constant (including
   a fixed address).

   The `flag' field is used as a temporary during some search routines.

   The `mode' field is usually the same as GET_MODE (`exp'), but
   if `exp' is a CONST_INT and has no machine mode then the `mode'
   field is the mode it was being used as.  Each constant is
   recorded separately for each mode it is used with.  */

struct table_elt
{
  rtx exp;
  rtx canon_exp;
  struct table_elt *next_same_hash;
  struct table_elt *prev_same_hash;
  struct table_elt *next_same_value;
  struct table_elt *prev_same_value;
  struct table_elt *first_same_value;
  struct table_elt *related_value;
  int cost;
  int regcost;
  enum machine_mode mode;
  char in_memory;
  char is_const;
  char flag;
};
/* We don't want a lot of buckets, because we rarely have very many
   things stored in the hash table, and a lot of buckets slows
   down a lot of loops that happen frequently.  */
#define HASH_SHIFT	5
#define HASH_SIZE	(1 << HASH_SHIFT)
#define HASH_MASK	(HASH_SIZE - 1)

/* Compute hash code of X in mode M.  Special-case the case where X is
   a pseudo register (hard registers may require `do_not_record' to be
   set).  */

#define HASH(X, M)	\
 ((GET_CODE (X) == REG && REGNO (X) >= FIRST_PSEUDO_REGISTER	\
  ? (((unsigned) REG << 7) + (unsigned) REG_QTY (REGNO (X)))	\
  : canon_hash (X, M)) & HASH_MASK)
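
/* Illustrative sketch only (hypothetical variables): for a pseudo
   register the bucket depends on its quantity number alone, while any
   other rtx goes through canon_hash.  */
#if 0
  unsigned h = HASH (x, GET_MODE (x));	/* bucket index, 0 .. HASH_SIZE - 1 */
  struct table_elt *elt = lookup (x, h, GET_MODE (x));
#endif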
/* Determine whether register number N is considered a fixed register for the
   purpose of approximating register costs.
   It is desirable to replace other regs with fixed regs, to reduce need for
   non-fixed hard regs.
   A reg wins if it is either the frame pointer or designated as fixed.  */
#define FIXED_REGNO_P(N)  \
  ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
   || fixed_regs[N] || global_regs[N])

/* Compute cost of X, as stored in the `cost' field of a table_elt.  Fixed
   hard registers and pointers into the frame are the cheapest with a cost
   of 0.  Next come pseudos with a cost of one and other hard registers with
   a cost of 2.  Aside from these special cases, call `rtx_cost'.  */

#define CHEAP_REGNO(N) \
  ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM	\
   || (N) == STACK_POINTER_REGNUM || (N) == ARG_POINTER_REGNUM		\
   || ((N) >= FIRST_VIRTUAL_REGISTER && (N) <= LAST_VIRTUAL_REGISTER)	\
   || ((N) < FIRST_PSEUDO_REGISTER					\
       && FIXED_REGNO_P (N) && REGNO_REG_CLASS (N) != NO_REGS))

#define COST(X) (GET_CODE (X) == REG ? 0 : notreg_cost (X, SET))
#define COST_IN(X,OUTER) (GET_CODE (X) == REG ? 0 : notreg_cost (X, OUTER))

/* Get the info associated with register N.  */

#define GET_CSE_REG_INFO(N)			\
  (((N) == cached_regno && cached_cse_reg_info)	\
   ? cached_cse_reg_info : get_cse_reg_info ((N)))

/* Get the number of times this register has been updated in this
   basic block.  */

#define REG_TICK(N) ((GET_CSE_REG_INFO (N))->reg_tick)

/* Get the point at which REG was recorded in the table.  */

#define REG_IN_TABLE(N) ((GET_CSE_REG_INFO (N))->reg_in_table)

/* Get the quantity number for REG.  */

#define REG_QTY(N) ((GET_CSE_REG_INFO (N))->reg_qty)

/* Determine if the quantity number for register X represents a valid index
   into the qty_table.  */

#define REGNO_QTY_VALID_P(N) (REG_QTY (N) != (int) (N))
static struct table_elt *table[HASH_SIZE];

/* Chain of `struct table_elt's made so far for this function
   but currently removed from the table.  */

static struct table_elt *free_element_chain;

/* Number of `struct table_elt' structures made so far for this function.  */

static int n_elements_made;

/* Maximum value `n_elements_made' has had so far in this compilation
   for functions previously processed.  */

static int max_elements_made;

/* Surviving equivalence class when two equivalence classes are merged
   by recording the effects of a jump in the last insn.  Zero if the
   last insn was not a conditional jump.  */

static struct table_elt *last_jump_equiv_class;

/* Set to the cost of a constant pool reference if one was found for a
   symbolic constant.  If this was found, it means we should try to
   convert constants into constant pool entries if they don't fit in
   the insn.  */

static int constant_pool_entries_cost;

/* Define maximum length of a branch path.  */

#define PATHLENGTH	10

/* This data describes a block that will be processed by cse_basic_block.  */

struct cse_basic_block_data
{
  /* Lowest CUID value of insns in block.  */
  int low_cuid;
  /* Highest CUID value of insns in block.  */
  int high_cuid;
  /* Total number of SETs in block.  */
  int nsets;
  /* Last insn in the block.  */
  rtx last;
  /* Size of current branch path, if any.  */
  int path_size;
  /* Current branch path, indicating which branches will be taken.  */
  struct branch_path
    {
      /* The branch insn.  */
      rtx branch;
      /* Whether it should be taken or not.  AROUND is the same as taken
	 except that it is used when the destination label is not preceded
	 by a BARRIER.  */
      enum taken {TAKEN, NOT_TAKEN, AROUND} status;
    } path[PATHLENGTH];
};
/* Nonzero if X has the form (PLUS frame-pointer integer).  We check for
   virtual regs here because the simplify_*_operation routines are called
   by integrate.c, which is called before virtual register instantiation.

   ?!? FIXED_BASE_PLUS_P and NONZERO_BASE_PLUS_P need to move into
   a header file so that their definitions can be shared with the
   simplification routines in simplify-rtx.c.  Until then, do not
   change these macros without also changing the copy in simplify-rtx.c.  */

#define FIXED_BASE_PLUS_P(X)					\
  ((X) == frame_pointer_rtx || (X) == hard_frame_pointer_rtx	\
   || ((X) == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])\
   || (X) == virtual_stack_vars_rtx				\
   || (X) == virtual_incoming_args_rtx				\
   || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
       && (XEXP (X, 0) == frame_pointer_rtx			\
	   || XEXP (X, 0) == hard_frame_pointer_rtx		\
	   || ((X) == arg_pointer_rtx				\
	       && fixed_regs[ARG_POINTER_REGNUM])		\
	   || XEXP (X, 0) == virtual_stack_vars_rtx		\
	   || XEXP (X, 0) == virtual_incoming_args_rtx))	\
   || GET_CODE (X) == ADDRESSOF)

/* Similar, but also allows reference to the stack pointer.

   This used to include FIXED_BASE_PLUS_P, however, we can't assume that
   arg_pointer_rtx by itself is nonzero, because on at least one machine,
   the i960, the arg pointer is zero when it is unused.  */

#define NONZERO_BASE_PLUS_P(X)					\
  ((X) == frame_pointer_rtx || (X) == hard_frame_pointer_rtx	\
   || (X) == virtual_stack_vars_rtx				\
   || (X) == virtual_incoming_args_rtx				\
   || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
       && (XEXP (X, 0) == frame_pointer_rtx			\
	   || XEXP (X, 0) == hard_frame_pointer_rtx		\
	   || ((X) == arg_pointer_rtx				\
	       && fixed_regs[ARG_POINTER_REGNUM])		\
	   || XEXP (X, 0) == virtual_stack_vars_rtx		\
	   || XEXP (X, 0) == virtual_incoming_args_rtx))	\
   || (X) == stack_pointer_rtx					\
   || (X) == virtual_stack_dynamic_rtx				\
   || (X) == virtual_outgoing_args_rtx				\
   || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
       && (XEXP (X, 0) == stack_pointer_rtx			\
	   || XEXP (X, 0) == virtual_stack_dynamic_rtx		\
	   || XEXP (X, 0) == virtual_outgoing_args_rtx))	\
   || GET_CODE (X) == ADDRESSOF)
static int notreg_cost		PARAMS ((rtx, enum rtx_code));
static int approx_reg_cost_1	PARAMS ((rtx *, void *));
static int approx_reg_cost	PARAMS ((rtx));
static int preferrable		PARAMS ((int, int, int, int));
static void new_basic_block	PARAMS ((void));
static void make_new_qty	PARAMS ((unsigned int, enum machine_mode));
static void make_regs_eqv	PARAMS ((unsigned int, unsigned int));
static void delete_reg_equiv	PARAMS ((unsigned int));
static int mention_regs	PARAMS ((rtx));
static int insert_regs		PARAMS ((rtx, struct table_elt *, int));
static void remove_from_table	PARAMS ((struct table_elt *, unsigned));
static struct table_elt *lookup	PARAMS ((rtx, unsigned, enum machine_mode)),
       *lookup_for_remove PARAMS ((rtx, unsigned, enum machine_mode));
static rtx lookup_as_function	PARAMS ((rtx, enum rtx_code));
static struct table_elt *insert PARAMS ((rtx, struct table_elt *, unsigned,
					 enum machine_mode));
static void merge_equiv_classes PARAMS ((struct table_elt *,
					 struct table_elt *));
static void invalidate		PARAMS ((rtx, enum machine_mode));
static int cse_rtx_varies_p	PARAMS ((rtx, int));
static void remove_invalid_refs	PARAMS ((unsigned int));
static void remove_invalid_subreg_refs	PARAMS ((unsigned int, unsigned int,
						 enum machine_mode));
static void rehash_using_reg	PARAMS ((rtx));
static void invalidate_memory	PARAMS ((void));
static void invalidate_for_call	PARAMS ((void));
static rtx use_related_value	PARAMS ((rtx, struct table_elt *));
static unsigned canon_hash	PARAMS ((rtx, enum machine_mode));
static unsigned canon_hash_string PARAMS ((const char *));
static unsigned safe_hash	PARAMS ((rtx, enum machine_mode));
static int exp_equiv_p		PARAMS ((rtx, rtx, int, int));
static rtx canon_reg		PARAMS ((rtx, rtx));
static void find_best_addr	PARAMS ((rtx, rtx *, enum machine_mode));
static enum rtx_code find_comparison_args PARAMS ((enum rtx_code, rtx *, rtx *,
						   enum machine_mode *,
						   enum machine_mode *));
static rtx fold_rtx		PARAMS ((rtx, rtx));
static rtx equiv_constant	PARAMS ((rtx));
static void record_jump_equiv	PARAMS ((rtx, int));
static void record_jump_cond	PARAMS ((enum rtx_code, enum machine_mode,
					 rtx, rtx, int));
static void cse_insn		PARAMS ((rtx, rtx));
static int addr_affects_sp_p	PARAMS ((rtx));
static void invalidate_from_clobbers PARAMS ((rtx));
static rtx cse_process_notes	PARAMS ((rtx, rtx));
static void cse_around_loop	PARAMS ((rtx));
static void invalidate_skipped_set PARAMS ((rtx, rtx, void *));
static void invalidate_skipped_block PARAMS ((rtx));
static void cse_check_loop_start PARAMS ((rtx, rtx, void *));
static void cse_set_around_loop	PARAMS ((rtx, rtx, rtx));
static rtx cse_basic_block	PARAMS ((rtx, rtx, struct branch_path *, int));
static void count_reg_usage	PARAMS ((rtx, int *, rtx, int));
static int check_for_label_ref	PARAMS ((rtx *, void *));
extern void dump_class		PARAMS ((struct table_elt*));
static struct cse_reg_info * get_cse_reg_info PARAMS ((unsigned int));
static int check_dependence	PARAMS ((rtx *, void *));

static void flush_hash_table	PARAMS ((void));
static bool insn_live_p		PARAMS ((rtx, int *));
static bool set_live_p		PARAMS ((rtx, rtx, int *));
static bool dead_libcall_p	PARAMS ((rtx));
/* Dump the expressions in the equivalence class indicated by CLASSP.
   This function is used only for debugging.  */
void
dump_class (classp)
     struct table_elt *classp;
{
  struct table_elt *elt;

  fprintf (stderr, "Equivalence chain for ");
  print_rtl (stderr, classp->exp);
  fprintf (stderr, ": \n");

  for (elt = classp->first_same_value; elt; elt = elt->next_same_value)
    {
      print_rtl (stderr, elt->exp);
      fprintf (stderr, "\n");
    }
}
/* Subroutine of approx_reg_cost; called through for_each_rtx.  */

static int
approx_reg_cost_1 (xp, data)
     rtx *xp;
     void *data;
{
  rtx x = *xp;
  regset set = (regset) data;

  if (x && GET_CODE (x) == REG)
    SET_REGNO_REG_SET (set, REGNO (x));
  return 0;
}
/* Return an estimate of the cost of the registers used in an rtx.
   This is mostly the number of different REG expressions in the rtx;
   however for some exceptions like fixed registers we use a cost of
   0.  If any other hard register reference occurs, return MAX_COST.  */

static int
approx_reg_cost (x)
     rtx x;
{
  regset_head set;
  int i;
  int cost = 0;
  int hardregs = 0;

  INIT_REG_SET (&set);
  for_each_rtx (&x, approx_reg_cost_1, (void *) &set);

  EXECUTE_IF_SET_IN_REG_SET
    (&set, 0, i,
     {
       if (! CHEAP_REGNO (i))
	 {
	   if (i < FIRST_PSEUDO_REGISTER)
	     hardregs++;

	   cost += i < FIRST_PSEUDO_REGISTER ? 2 : 1;
	 }
     });

  CLEAR_REG_SET (&set);
  return hardregs && SMALL_REGISTER_CLASSES ? MAX_COST : cost;
}
/* Return a negative value if an rtx A, whose costs are given by COST_A
   and REGCOST_A, is more desirable than an rtx B.
   Return a positive value if A is less desirable, or 0 if the two are
   equally good.  */
static int
preferrable (cost_a, regcost_a, cost_b, regcost_b)
     int cost_a, regcost_a, cost_b, regcost_b;
{
  /* First, get rid of cases involving expressions that are entirely
     unwanted.  */
  if (cost_a != cost_b)
    {
      if (cost_a == MAX_COST)
	return 1;
      if (cost_b == MAX_COST)
	return -1;
    }

  /* Avoid extending lifetimes of hardregs.  */
  if (regcost_a != regcost_b)
    {
      if (regcost_a == MAX_COST)
	return 1;
      if (regcost_b == MAX_COST)
	return -1;
    }

  /* Normal operation costs take precedence.  */
  if (cost_a != cost_b)
    return cost_a - cost_b;
  /* Only if these are identical consider effects on register pressure.  */
  if (regcost_a != regcost_b)
    return regcost_a - regcost_b;
  return 0;
}
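
/* A sketch of how the comparison above is meant to be used (this is
   what the CHEAPER macro farther down does); A and B are hypothetical
   rtxen, KEEP a hypothetical variable.  */
#if 0
  if (preferrable (COST (a), approx_reg_cost (a),
		   COST (b), approx_reg_cost (b)) < 0)
    /* A is the better expression to keep: lower rtx cost wins first,
       and register pressure only breaks ties.  */
    keep = a;
#endif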
/* Internal function, to compute cost when X is not a register; called
   from COST macro to keep it simple.  */

static int
notreg_cost (x, outer)
     rtx x;
     enum rtx_code outer;
{
  return ((GET_CODE (x) == SUBREG
	   && GET_CODE (SUBREG_REG (x)) == REG
	   && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
	   && GET_MODE_CLASS (GET_MODE (SUBREG_REG (x))) == MODE_INT
	   && (GET_MODE_SIZE (GET_MODE (x))
	       < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
	   && subreg_lowpart_p (x)
	   && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (GET_MODE (x)),
				     GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))))
	  ? 0
	  : rtx_cost (x, outer) * 2);
}
/* Return an estimate of the cost of computing rtx X.
   One use is in cse, to decide which expression to keep in the hash table.
   Another is in rtl generation, to pick the cheapest way to multiply.
   Other uses like the latter are expected in the future.  */

int
rtx_cost (x, outer_code)
     rtx x;
     enum rtx_code outer_code ATTRIBUTE_UNUSED;
{
  int i, j;
  enum rtx_code code;
  const char *fmt;
  int total;

  if (x == 0)
    return 0;

  /* Compute the default costs of certain things.
     Note that RTX_COSTS can override the defaults.  */

  code = GET_CODE (x);
  switch (code)
    {
    case MULT:
      /* Count multiplication by 2**n as a shift,
	 because if we are considering it, we would output it as a shift.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
	  && exact_log2 (INTVAL (XEXP (x, 1))) >= 0)
	total = 2;
      else
	total = COSTS_N_INSNS (5);
      break;
    case DIV:
    case UDIV:
    case MOD:
    case UMOD:
      total = COSTS_N_INSNS (7);
      break;
    case USE:
      /* Used in loop.c and combine.c as a marker.  */
      total = 0;
      break;
    default:
      total = COSTS_N_INSNS (1);
    }

  switch (code)
    {
    case REG:
      return 0;

    case SUBREG:
      /* If we can't tie these modes, make this expensive.  The larger
	 the mode, the more expensive it is.  */
      if (! MODES_TIEABLE_P (GET_MODE (x), GET_MODE (SUBREG_REG (x))))
	return COSTS_N_INSNS (2
			      + GET_MODE_SIZE (GET_MODE (x)) / UNITS_PER_WORD);
      break;

#ifdef RTX_COSTS
      RTX_COSTS (x, code, outer_code);
#endif
#ifdef CONST_COSTS
      CONST_COSTS (x, code, outer_code);
#endif

    default:
#ifdef DEFAULT_RTX_COSTS
      DEFAULT_RTX_COSTS (x, code, outer_code);
#endif
      break;
    }

  /* Sum the costs of the sub-rtx's, plus cost of this operation,
     which is already in total.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      total += rtx_cost (XEXP (x, i), code);
    else if (fmt[i] == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
	total += rtx_cost (XVECEXP (x, i, j), code);

  return total;
}
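
/* A hedged example, assuming default costs and a hypothetical pseudo R:
   (plus:SI (reg:SI R) (const_int 4)) costs COSTS_N_INSNS (1) for the
   PLUS, 0 for the REG, plus whatever CONST_COSTS assigns the constant.  */
#if 0
  int c = rtx_cost (gen_rtx_PLUS (SImode, r, GEN_INT (4)), SET);
#endif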
/* Return cost of address expression X.
   Expect that X is a properly formed address reference.  */

int
address_cost (x, mode)
     rtx x;
     enum machine_mode mode;
{
  /* The ADDRESS_COST macro does not deal with ADDRESSOF nodes.  But,
     during CSE, such nodes are present.  Using an ADDRESSOF node which
     refers to the address of a REG is a good thing because we can then
     turn (MEM (ADDRESSOF (REG))) into just plain REG.  */

  if (GET_CODE (x) == ADDRESSOF && REG_P (XEXP ((x), 0)))
    return -1;

  /* We may be asked for cost of various unusual addresses, such as operands
     of push instruction.  It is not worthwhile to complicate writing
     of ADDRESS_COST macro by such cases.  */

  if (!memory_address_p (mode, x))
    return 1000;
#ifdef ADDRESS_COST
  return ADDRESS_COST (x);
#else
  return rtx_cost (x, MEM);
#endif
}
static struct cse_reg_info *
get_cse_reg_info (regno)
     unsigned int regno;
{
  struct cse_reg_info **hash_head = &reg_hash[REGHASH_FN (regno)];
  struct cse_reg_info *p;

  for (p = *hash_head; p != NULL; p = p->hash_next)
    if (p->regno == regno)
      break;

  if (p == NULL)
    {
      /* Get a new cse_reg_info structure.  */
      if (cse_reg_info_free_list)
	{
	  p = cse_reg_info_free_list;
	  cse_reg_info_free_list = p->next;
	}
      else
	p = (struct cse_reg_info *) xmalloc (sizeof (struct cse_reg_info));

      /* Insert into hash table.  */
      p->hash_next = *hash_head;
      *hash_head = p;

      /* Initialize it.  */
      p->reg_tick = 1;
      p->reg_in_table = -1;
      p->reg_qty = regno;
      p->regno = regno;
      p->next = cse_reg_info_used_list;
      cse_reg_info_used_list = p;
      if (!cse_reg_info_used_list_end)
	cse_reg_info_used_list_end = p;
    }

  /* Cache this lookup; we tend to be looking up information about the
     same register several times in a row.  */
  cached_regno = regno;
  cached_cse_reg_info = p;

  return p;
}
/* Clear the hash table and initialize each register with its own quantity,
   for a new basic block.  */

static void
new_basic_block ()
{
  int i;

  next_qty = max_reg;

  /* Clear out hash table state for this pass.  */

  memset ((char *) reg_hash, 0, sizeof reg_hash);

  if (cse_reg_info_used_list)
    {
      cse_reg_info_used_list_end->next = cse_reg_info_free_list;
      cse_reg_info_free_list = cse_reg_info_used_list;
      cse_reg_info_used_list = cse_reg_info_used_list_end = 0;
    }
  cached_cse_reg_info = 0;

  CLEAR_HARD_REG_SET (hard_regs_in_table);

  /* The per-quantity values used to be initialized here, but it is
     much faster to initialize each as it is made in `make_new_qty'.  */

  for (i = 0; i < HASH_SIZE; i++)
    {
      struct table_elt *first;

      first = table[i];
      if (first != NULL)
	{
	  struct table_elt *last = first;

	  table[i] = NULL;

	  while (last->next_same_hash != NULL)
	    last = last->next_same_hash;

	  /* Now relink this entire hash chain into
	     the free element list.  */

	  last->next_same_hash = free_element_chain;
	  free_element_chain = first;
	}
    }

  prev_insn = 0;

#ifdef HAVE_cc0
  prev_insn_cc0 = 0;
#endif
}
/* Say that register REG contains a quantity in mode MODE not in any
   register before and initialize that quantity.  */

static void
make_new_qty (reg, mode)
     unsigned int reg;
     enum machine_mode mode;
{
  int q;
  struct qty_table_elem *ent;
  struct reg_eqv_elem *eqv;

  if (next_qty >= max_qty)
    abort ();

  q = REG_QTY (reg) = next_qty++;
  ent = &qty_table[q];
  ent->first_reg = reg;
  ent->last_reg = reg;
  ent->mode = mode;
  ent->const_rtx = ent->const_insn = NULL_RTX;
  ent->comparison_code = UNKNOWN;

  eqv = &reg_eqv_table[reg];
  eqv->next = eqv->prev = -1;
}
/* Make reg NEW equivalent to reg OLD.
   OLD is not changing; NEW is.  */

static void
make_regs_eqv (new, old)
     unsigned int new, old;
{
  unsigned int lastr, firstr;
  int q = REG_QTY (old);
  struct qty_table_elem *ent;

  ent = &qty_table[q];

  /* Nothing should become eqv until it has a "non-invalid" qty number.  */
  if (! REGNO_QTY_VALID_P (old))
    abort ();

  REG_QTY (new) = q;
  firstr = ent->first_reg;
  lastr = ent->last_reg;

  /* Prefer fixed hard registers to anything.  Prefer pseudo regs to other
     hard regs.  Among pseudos, if NEW will live longer than any other reg
     of the same qty, and that is beyond the current basic block,
     make it the new canonical replacement for this qty.  */
  if (! (firstr < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (firstr))
      /* Certain fixed registers might be of the class NO_REGS.  This means
	 that not only can they not be allocated by the compiler, but
	 they cannot be used in substitutions or canonicalizations
	 either.  */
      && (new >= FIRST_PSEUDO_REGISTER || REGNO_REG_CLASS (new) != NO_REGS)
      && ((new < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (new))
	  || (new >= FIRST_PSEUDO_REGISTER
	      && (firstr < FIRST_PSEUDO_REGISTER
		  || ((uid_cuid[REGNO_LAST_UID (new)] > cse_basic_block_end
		       || (uid_cuid[REGNO_FIRST_UID (new)]
			   < cse_basic_block_start))
		      && (uid_cuid[REGNO_LAST_UID (new)]
			  > uid_cuid[REGNO_LAST_UID (firstr)]))))))
    {
      reg_eqv_table[firstr].prev = new;
      reg_eqv_table[new].next = firstr;
      reg_eqv_table[new].prev = -1;
      ent->first_reg = new;
    }
  else
    {
      /* If NEW is a hard reg (known to be non-fixed), insert at end.
	 Otherwise, insert before any non-fixed hard regs that are at the
	 end.  Registers of class NO_REGS cannot be used as an
	 equivalent for anything.  */
      while (lastr < FIRST_PSEUDO_REGISTER && reg_eqv_table[lastr].prev >= 0
	     && (REGNO_REG_CLASS (lastr) == NO_REGS || ! FIXED_REGNO_P (lastr))
	     && new >= FIRST_PSEUDO_REGISTER)
	lastr = reg_eqv_table[lastr].prev;
      reg_eqv_table[new].next = reg_eqv_table[lastr].next;
      if (reg_eqv_table[lastr].next >= 0)
	reg_eqv_table[reg_eqv_table[lastr].next].prev = new;
      else
	qty_table[q].last_reg = new;
      reg_eqv_table[lastr].next = new;
      reg_eqv_table[new].prev = lastr;
    }
}
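
/* For instance (a hypothetical chain): if qty Q currently holds
   frame-pointer -> pseudo 40 and a non-fixed hard reg r3 is made
   equivalent, r3 is appended at the tail, giving fp -> 40 -> r3.
   Fixed hard regs stay first, pseudos next, other hard regs last,
   matching the preference order described above.  */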
/* Remove REG from its equivalence class.  */

static void
delete_reg_equiv (reg)
     unsigned int reg;
{
  struct qty_table_elem *ent;
  int q = REG_QTY (reg);
  int p, n;

  /* If invalid, do nothing.  */
  if (q == (int) reg)
    return;

  ent = &qty_table[q];

  p = reg_eqv_table[reg].prev;
  n = reg_eqv_table[reg].next;

  if (n != -1)
    reg_eqv_table[n].prev = p;
  else
    ent->last_reg = p;
  if (p != -1)
    reg_eqv_table[p].next = n;
  else
    ent->first_reg = n;

  REG_QTY (reg) = reg;
}
/* Remove any invalid expressions from the hash table
   that refer to any of the registers contained in expression X.

   Make sure that newly inserted references to those registers
   as subexpressions will be considered valid.

   mention_regs is not called when a register itself
   is being stored in the table.

   Return 1 if we have done something that may have changed the hash code
   of X.  */

static int
mention_regs (x)
     rtx x;
{
  enum rtx_code code;
  int i, j;
  const char *fmt;
  int changed = 0;

  if (x == 0)
    return 0;

  code = GET_CODE (x);
  if (code == REG)
    {
      unsigned int regno = REGNO (x);
      unsigned int endregno
	= regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
		   : HARD_REGNO_NREGS (regno, GET_MODE (x)));
      unsigned int i;

      for (i = regno; i < endregno; i++)
	{
	  if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
	    remove_invalid_refs (i);

	  REG_IN_TABLE (i) = REG_TICK (i);
	}

      return 0;
    }

  /* If this is a SUBREG, we don't want to discard other SUBREGs of the same
     pseudo if they don't use overlapping words.  We handle only pseudos
     here for simplicity.  */
  if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == REG
      && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER)
    {
      unsigned int i = REGNO (SUBREG_REG (x));

      if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
	{
	  /* If reg_tick has been incremented more than once since
	     reg_in_table was last set, that means that the entire
	     register has been set before, so discard anything memorized
	     for the entire register, including all SUBREG expressions.  */
	  if (REG_IN_TABLE (i) != REG_TICK (i) - 1)
	    remove_invalid_refs (i);
	  else
	    remove_invalid_subreg_refs (i, SUBREG_BYTE (x), GET_MODE (x));
	}

      REG_IN_TABLE (i) = REG_TICK (i);
      return 0;
    }

  /* If X is a comparison or a COMPARE and either operand is a register
     that does not have a quantity, give it one.  This is so that a later
     call to record_jump_equiv won't cause X to be assigned a different
     hash code and not found in the table after that call.

     It is not necessary to do this here, since rehash_using_reg can
     fix up the table later, but doing this here eliminates the need to
     call that expensive function in the most common case where the only
     use of the register is in the comparison.  */

  if (code == COMPARE || GET_RTX_CLASS (code) == '<')
    {
      if (GET_CODE (XEXP (x, 0)) == REG
	  && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
	if (insert_regs (XEXP (x, 0), NULL, 0))
	  {
	    rehash_using_reg (XEXP (x, 0));
	    changed = 1;
	  }

      if (GET_CODE (XEXP (x, 1)) == REG
	  && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
	if (insert_regs (XEXP (x, 1), NULL, 0))
	  {
	    rehash_using_reg (XEXP (x, 1));
	    changed = 1;
	  }
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      changed |= mention_regs (XEXP (x, i));
    else if (fmt[i] == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
	changed |= mention_regs (XVECEXP (x, i, j));

  return changed;
}
/* Update the register quantities for inserting X into the hash table
   with a value equivalent to CLASSP.
   (If the class does not contain a REG, it is irrelevant.)
   If MODIFIED is nonzero, X is a destination; it is being modified.
   Note that delete_reg_equiv should be called on a register
   before insert_regs is done on that register with MODIFIED != 0.

   Nonzero value means that elements of reg_qty have changed
   so X's hash code may be different.  */

static int
insert_regs (x, classp, modified)
     rtx x;
     struct table_elt *classp;
     int modified;
{
  if (GET_CODE (x) == REG)
    {
      unsigned int regno = REGNO (x);
      int qty_valid;

      /* If REGNO is in the equivalence table already but is of the
	 wrong mode for that equivalence, don't do anything here.  */

      qty_valid = REGNO_QTY_VALID_P (regno);
      if (qty_valid)
	{
	  struct qty_table_elem *ent = &qty_table[REG_QTY (regno)];

	  if (ent->mode != GET_MODE (x))
	    return 0;
	}

      if (modified || ! qty_valid)
	{
	  if (classp)
	    for (classp = classp->first_same_value;
		 classp != 0;
		 classp = classp->next_same_value)
	      if (GET_CODE (classp->exp) == REG
		  && GET_MODE (classp->exp) == GET_MODE (x))
		{
		  make_regs_eqv (regno, REGNO (classp->exp));
		  return 1;
		}

	  /* Mention_regs for a SUBREG checks if REG_TICK is exactly one larger
	     than REG_IN_TABLE to find out if there was only a single preceding
	     invalidation - for the SUBREG - or another one, which would be
	     for the full register.  However, if we find here that REG_TICK
	     indicates that the register is invalid, it means that it has
	     been invalidated in a separate operation.  The SUBREG might be used
	     now (then this is a recursive call), or we might use the full REG
	     now and a SUBREG of it later.  So bump up REG_TICK so that
	     mention_regs will do the right thing.  */
	  if (! modified
	      && REG_IN_TABLE (regno) >= 0
	      && REG_TICK (regno) == REG_IN_TABLE (regno) + 1)
	    REG_TICK (regno)++;
	  make_new_qty (regno, GET_MODE (x));
	  return 1;
	}

      return 0;
    }

  /* If X is a SUBREG, we will likely be inserting the inner register in the
     table.  If that register doesn't have an assigned quantity number at
     this point but does later, the insertion that we will be doing now will
     not be accessible because its hash code will have changed.  So assign
     a quantity number now.  */

  else if (GET_CODE (x) == SUBREG && GET_CODE (SUBREG_REG (x)) == REG
	   && ! REGNO_QTY_VALID_P (REGNO (SUBREG_REG (x))))
    {
      insert_regs (SUBREG_REG (x), NULL, 0);
      mention_regs (x);
      return 1;
    }
  else
    return mention_regs (x);
}
/* Look in or update the hash table.  */

/* Remove table element ELT from use in the table.
   HASH is its hash code, made using the HASH macro.
   It's an argument because often that is known in advance
   and we save much time not recomputing it.  */

static void
remove_from_table (elt, hash)
     struct table_elt *elt;
     unsigned hash;
{
  if (elt == 0)
    return;

  /* Mark this element as removed.  See cse_insn.  */
  elt->first_same_value = 0;

  /* Remove the table element from its equivalence class.  */

  {
    struct table_elt *prev = elt->prev_same_value;
    struct table_elt *next = elt->next_same_value;

    if (next)
      next->prev_same_value = prev;

    if (prev)
      prev->next_same_value = next;
    else
      {
	struct table_elt *newfirst = next;
	while (next)
	  {
	    next->first_same_value = newfirst;
	    next = next->next_same_value;
	  }
      }
  }

  /* Remove the table element from its hash bucket.  */

  {
    struct table_elt *prev = elt->prev_same_hash;
    struct table_elt *next = elt->next_same_hash;

    if (next)
      next->prev_same_hash = prev;

    if (prev)
      prev->next_same_hash = next;
    else if (table[hash] == elt)
      table[hash] = next;
    else
      {
	/* This entry is not in the proper hash bucket.  This can happen
	   when two classes were merged by `merge_equiv_classes'.  Search
	   for the hash bucket that it heads.  This happens only very
	   rarely, so the cost is acceptable.  */
	for (hash = 0; hash < HASH_SIZE; hash++)
	  if (table[hash] == elt)
	    table[hash] = next;
      }
  }

  /* Remove the table element from its related-value circular chain.  */

  if (elt->related_value != 0 && elt->related_value != elt)
    {
      struct table_elt *p = elt->related_value;

      while (p->related_value != elt)
	p = p->related_value;
      p->related_value = elt->related_value;
      if (p->related_value == p)
	p->related_value = 0;
    }

  /* Now add it to the free element chain.  */
  elt->next_same_hash = free_element_chain;
  free_element_chain = elt;
}
/* Look up X in the hash table and return its table element,
   or 0 if X is not in the table.

   MODE is the machine-mode of X, or if X is an integer constant
   with VOIDmode then MODE is the mode with which X will be used.

   Here we are satisfied to find an expression whose tree structure
   looks like X.  */

static struct table_elt *
lookup (x, hash, mode)
     rtx x;
     unsigned hash;
     enum machine_mode mode;
{
  struct table_elt *p;

  for (p = table[hash]; p; p = p->next_same_hash)
    if (mode == p->mode && ((x == p->exp && GET_CODE (x) == REG)
			    || exp_equiv_p (x, p->exp, GET_CODE (x) != REG, 0)))
      return p;

  return 0;
}
/* Like `lookup' but don't care whether the table element uses invalid regs.
   Also ignore discrepancies in the machine mode of a register.  */

static struct table_elt *
lookup_for_remove (x, hash, mode)
     rtx x;
     unsigned hash;
     enum machine_mode mode;
{
  struct table_elt *p;

  if (GET_CODE (x) == REG)
    {
      unsigned int regno = REGNO (x);

      /* Don't check the machine mode when comparing registers;
	 invalidating (REG:SI 0) also invalidates (REG:DF 0).  */
      for (p = table[hash]; p; p = p->next_same_hash)
	if (GET_CODE (p->exp) == REG
	    && REGNO (p->exp) == regno)
	  return p;
    }
  else
    {
      for (p = table[hash]; p; p = p->next_same_hash)
	if (mode == p->mode && (x == p->exp || exp_equiv_p (x, p->exp, 0, 0)))
	  return p;
    }

  return 0;
}
/* Look for an expression equivalent to X and with code CODE.
   If one is found, return that expression.  */

static rtx
lookup_as_function (x, code)
     rtx x;
     enum rtx_code code;
{
  struct table_elt *p
    = lookup (x, safe_hash (x, VOIDmode) & HASH_MASK, GET_MODE (x));

  /* If we are looking for a CONST_INT, the mode doesn't really matter, as
     long as we are narrowing.  So if we looked in vain for a mode narrower
     than word_mode before, look for word_mode now.  */
  if (p == 0 && code == CONST_INT
      && GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (word_mode))
    {
      x = copy_rtx (x);
      PUT_MODE (x, word_mode);
      p = lookup (x, safe_hash (x, VOIDmode) & HASH_MASK, word_mode);
    }

  if (p == 0)
    return 0;

  for (p = p->first_same_value; p; p = p->next_same_value)
    if (GET_CODE (p->exp) == code
	/* Make sure this is a valid entry in the table.  */
	&& exp_equiv_p (p->exp, p->exp, 1, 0))
      return p->exp;

  return 0;
}
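
/* Hedged usage sketch (hypothetical rtx Y and hypothetical consumer):
   fetch a known-equivalent constant form of Y straight from its
   equivalence class.  */
#if 0
  rtx c = lookup_as_function (y, CONST_INT);
  if (c != 0)
    /* Y's class contains the constant C; Y may be replaced by it.  */
    use_constant (c);
#endif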
1526 /* Insert X in the hash table, assuming HASH is its hash code
1527 and CLASSP is an element of the class it should go in
1528 (or 0 if a new class should be made).
1529 It is inserted at the proper position to keep the class in
1530 the order cheapest first.
1532 MODE is the machine-mode of X, or if X is an integer constant
1533 with VOIDmode then MODE is the mode with which X will be used.
1535 For elements of equal cheapness, the most recent one
1536 goes in front, except that the first element in the list
1537 remains first unless a cheaper element is added. The order of
1538 pseudo-registers does not matter, as canon_reg will be called to
1539 find the cheapest when a register is retrieved from the table.
1541 The in_memory field in the hash table element is set to 0.
1542 The caller must set it nonzero if appropriate.
1544 You should call insert_regs (X, CLASSP, MODIFY) before calling here,
1545 and if insert_regs returns a nonzero value
1546 you must then recompute its hash code before calling here.
1548 If necessary, update table showing constant values of quantities. */
1550 #define CHEAPER(X, Y) \
1551 (preferrable ((X)->cost, (X)->regcost, (Y)->cost, (Y)->regcost) < 0)
1553 static struct table_elt *
1554 insert (x, classp, hash, mode)
1555 rtx x;
1556 struct table_elt *classp;
1557 unsigned hash;
1558 enum machine_mode mode;
1560 struct table_elt *elt;
1562 /* If X is a register and we haven't made a quantity for it,
1563 something is wrong. */
1564 if (GET_CODE (x) == REG && ! REGNO_QTY_VALID_P (REGNO (x)))
1565 abort ();
1567 /* If X is a hard register, show it is being put in the table. */
1568 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
1570 unsigned int regno = REGNO (x);
1571 unsigned int endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
1572 unsigned int i;
1574 for (i = regno; i < endregno; i++)
1575 SET_HARD_REG_BIT (hard_regs_in_table, i);
1578 /* Put an element for X into the right hash bucket. */
1580 elt = free_element_chain;
1581 if (elt)
1582 free_element_chain = elt->next_same_hash;
1583 else
1585 n_elements_made++;
1586 elt = (struct table_elt *) xmalloc (sizeof (struct table_elt));
1589 elt->exp = x;
1590 elt->canon_exp = NULL_RTX;
1591 elt->cost = COST (x);
1592 elt->regcost = approx_reg_cost (x);
1593 elt->next_same_value = 0;
1594 elt->prev_same_value = 0;
1595 elt->next_same_hash = table[hash];
1596 elt->prev_same_hash = 0;
1597 elt->related_value = 0;
1598 elt->in_memory = 0;
1599 elt->mode = mode;
1600 elt->is_const = (CONSTANT_P (x)
1601 /* GNU C++ takes advantage of this for `this'
1602 (and other const values). */
1603 || (RTX_UNCHANGING_P (x)
1604 && GET_CODE (x) == REG
1605 && REGNO (x) >= FIRST_PSEUDO_REGISTER)
1606 || FIXED_BASE_PLUS_P (x));
1608 if (table[hash])
1609 table[hash]->prev_same_hash = elt;
1610 table[hash] = elt;
1612 /* Put it into the proper value-class. */
1613 if (classp)
1615 classp = classp->first_same_value;
1616 if (CHEAPER (elt, classp))
1617 /* Insert at the head of the class */
1619 struct table_elt *p;
1620 elt->next_same_value = classp;
1621 classp->prev_same_value = elt;
1622 elt->first_same_value = elt;
1624 for (p = classp; p; p = p->next_same_value)
1625 p->first_same_value = elt;
1627 else
1629 /* Insert not at head of the class. */
1630 /* Put it after the last element cheaper than X. */
1631 struct table_elt *p, *next;
1633 for (p = classp; (next = p->next_same_value) && CHEAPER (next, elt);
1634 p = next);
1636 /* Put it after P and before NEXT. */
1637 elt->next_same_value = next;
1638 if (next)
1639 next->prev_same_value = elt;
1641 elt->prev_same_value = p;
1642 p->next_same_value = elt;
1643 elt->first_same_value = classp;
1646 else
1647 elt->first_same_value = elt;
1649 /* If this is a constant being set equivalent to a register or a register
1650 being set equivalent to a constant, note the constant equivalence.
1652 If this is a constant, it cannot be equivalent to a different constant,
1653 and a constant is the only thing that can be cheaper than a register. So
1654 we know the register is the head of the class (before the constant was
1655 inserted).
1657 If this is a register that is not already known equivalent to a
1658 constant, we must check the entire class.
1660 If this is a register that is already known equivalent to an insn,
1661 update the qtys `const_insn' to show that `this_insn' is the latest
1662 insn making that quantity equivalent to the constant. */
1664 if (elt->is_const && classp && GET_CODE (classp->exp) == REG
1665 && GET_CODE (x) != REG)
1667 int exp_q = REG_QTY (REGNO (classp->exp));
1668 struct qty_table_elem *exp_ent = &qty_table[exp_q];
1670 exp_ent->const_rtx = gen_lowpart_if_possible (exp_ent->mode, x);
1671 exp_ent->const_insn = this_insn;
1674 else if (GET_CODE (x) == REG
1675 && classp
1676 && ! qty_table[REG_QTY (REGNO (x))].const_rtx
1677 && ! elt->is_const)
1679 struct table_elt *p;
1681 for (p = classp; p != 0; p = p->next_same_value)
1683 if (p->is_const && GET_CODE (p->exp) != REG)
1685 int x_q = REG_QTY (REGNO (x));
1686 struct qty_table_elem *x_ent = &qty_table[x_q];
1688 x_ent->const_rtx
1689 = gen_lowpart_if_possible (GET_MODE (x), p->exp);
1690 x_ent->const_insn = this_insn;
1691 break;
1696 else if (GET_CODE (x) == REG
1697 && qty_table[REG_QTY (REGNO (x))].const_rtx
1698 && GET_MODE (x) == qty_table[REG_QTY (REGNO (x))].mode)
1699 qty_table[REG_QTY (REGNO (x))].const_insn = this_insn;
1701 /* If this is a constant with symbolic value,
1702 and it has a term with an explicit integer value,
1703 link it up with related expressions. */
1704 if (GET_CODE (x) == CONST)
1706 rtx subexp = get_related_value (x);
1707 unsigned subhash;
1708 struct table_elt *subelt, *subelt_prev;
1710 if (subexp != 0)
1712 /* Get the integer-free subexpression in the hash table. */
1713 subhash = safe_hash (subexp, mode) & HASH_MASK;
1714 subelt = lookup (subexp, subhash, mode);
1715 if (subelt == 0)
1716 subelt = insert (subexp, NULL, subhash, mode);
1717 /* Initialize SUBELT's circular chain if it has none. */
1718 if (subelt->related_value == 0)
1719 subelt->related_value = subelt;
1720 /* Find the element in the circular chain that precedes SUBELT. */
1721 subelt_prev = subelt;
1722 while (subelt_prev->related_value != subelt)
1723 subelt_prev = subelt_prev->related_value;
1724 /* Put new ELT into SUBELT's circular chain just before SUBELT.
1725 This way the element that follows SUBELT is the oldest one. */
1726 elt->related_value = subelt_prev->related_value;
1727 subelt_prev->related_value = elt;
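	  /* Illustrative sketch (hypothetical expressions, not from a real
	     run): after (const (plus (symbol_ref "a") (const_int 4))) and
	     then (const (plus (symbol_ref "a") (const_int 8))) are
	     inserted, the ring rooted at the (symbol_ref "a") entry reads

	       (symbol_ref "a") -> the +4 entry (oldest)
		 -> the +8 entry (newest) -> back to (symbol_ref "a")

	     which is the order use_related_value walks.  */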
1731 return elt;
1734 /* Given two equivalence classes, CLASS1 and CLASS2, put all the entries from
1735 CLASS2 into CLASS1. This is done when we have reached an insn which makes
1736 the two classes equivalent.
1738 CLASS1 will be the surviving class; CLASS2 should not be used after this
1739 call.
1741 Any invalid entries in CLASS2 will not be copied. */
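
/* For illustration (hypothetical classes): if CLASS1 is
   { (reg 100), (mem (reg 101)) } and CLASS2 is { (reg 102), (const_int 0) },
   merging leaves a single class kept in cost order by insert,

     { (const_int 0), (reg 100), (reg 102), (mem (reg 101)) },

   with the constant at the head because constants are cheapest.  */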
1743 static void
1744 merge_equiv_classes (class1, class2)
1745 struct table_elt *class1, *class2;
1747 struct table_elt *elt, *next, *new;
1749 /* Ensure we start with the head of the classes. */
1750 class1 = class1->first_same_value;
1751 class2 = class2->first_same_value;
1753 /* If they were already equal, forget it. */
1754 if (class1 == class2)
1755 return;
1757 for (elt = class2; elt; elt = next)
1759 unsigned int hash;
1760 rtx exp = elt->exp;
1761 enum machine_mode mode = elt->mode;
1763 next = elt->next_same_value;
1765 /* Remove old entry, make a new one in CLASS1's class.
1766 Don't do this for invalid entries as we cannot find their
1767 hash code (it also isn't necessary). */
1768 if (GET_CODE (exp) == REG || exp_equiv_p (exp, exp, 1, 0))
1770 hash_arg_in_memory = 0;
1771 hash = HASH (exp, mode);
1773 if (GET_CODE (exp) == REG)
1774 delete_reg_equiv (REGNO (exp));
1776 remove_from_table (elt, hash);
1778 if (insert_regs (exp, class1, 0))
1780 rehash_using_reg (exp);
1781 hash = HASH (exp, mode);
1783 new = insert (exp, class1, hash, mode);
1784 new->in_memory = hash_arg_in_memory;
1789 /* Flush the entire hash table. */
1791 static void
1792 flush_hash_table ()
1794 int i;
1795 struct table_elt *p;
1797 for (i = 0; i < HASH_SIZE; i++)
1798 for (p = table[i]; p; p = table[i])
1800 /* Note that invalidate can remove elements
1801 after P in the current hash chain. */
1802 if (GET_CODE (p->exp) == REG)
1803 invalidate (p->exp, p->mode);
1804 else
1805 remove_from_table (p, i);
1809 /* Function called for each rtx to check whether a true dependence exists.  */
1810 struct check_dependence_data
1812 enum machine_mode mode;
1813 rtx exp;
1816 static int
1817 check_dependence (x, data)
1818 rtx *x;
1819 void *data;
1821 struct check_dependence_data *d = (struct check_dependence_data *) data;
1822 if (*x && GET_CODE (*x) == MEM)
1823 return true_dependence (d->exp, d->mode, *x, cse_rtx_varies_p);
1824 else
1825 return 0;
1828 /* Remove from the hash table, or mark as invalid, all expressions whose
1829 values could be altered by storing in X. X is a register, a subreg, or
1830 a memory reference with nonvarying address (because, when a memory
1831 reference with a varying address is stored in, all memory references are
1832 removed by invalidate_memory so specific invalidation is superfluous).
1833 FULL_MODE, if not VOIDmode, indicates that this much should be
1834 invalidated instead of just the amount indicated by the mode of X. This
1835 is only used for bitfield stores into memory.
1837 A nonvarying address may be just a register or just a symbol reference,
1838 or it may be either of those plus a numeric offset. */
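
/* Example (hypothetical): invalidating (reg:SI 100) removes the table
   entry for the register itself and bumps REG_TICK (100), so a stale
   entry such as (plus:SI (reg:SI 100) (const_int 4)) stops passing the
   REG_IN_TABLE/REG_TICK validity check.  Invalidating
   (mem:SI (symbol_ref "x")) removes only MEM entries that may overlap
   those bytes.  */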
1840 static void
1841 invalidate (x, full_mode)
1842 rtx x;
1843 enum machine_mode full_mode;
1845 int i;
1846 struct table_elt *p;
1848 switch (GET_CODE (x))
1850 case REG:
1852 /* If X is a register, dependencies on its contents are recorded
1853 through the qty number mechanism. Just change the qty number of
1854 the register, mark it as invalid for expressions that refer to it,
1855 and remove it itself. */
1856 unsigned int regno = REGNO (x);
1857 unsigned int hash = HASH (x, GET_MODE (x));
1859 /* Remove REGNO from any quantity list it might be on and indicate
1860 that its value might have changed. If it is a pseudo, remove its
1861 entry from the hash table.
1863 For a hard register, we do the first two actions above for any
1864 additional hard registers corresponding to X. Then, if any of these
1865 registers are in the table, we must remove any REG entries that
1866 overlap these registers. */
1868 delete_reg_equiv (regno);
1869 REG_TICK (regno)++;
1871 if (regno >= FIRST_PSEUDO_REGISTER)
1873 /* Because a register can be referenced in more than one mode,
1874 we might have to remove more than one table entry. */
1875 struct table_elt *elt;
1877 while ((elt = lookup_for_remove (x, hash, GET_MODE (x))))
1878 remove_from_table (elt, hash);
1880 else
1882 HOST_WIDE_INT in_table
1883 = TEST_HARD_REG_BIT (hard_regs_in_table, regno);
1884 unsigned int endregno
1885 = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
1886 unsigned int tregno, tendregno, rn;
1887 struct table_elt *p, *next;
1889 CLEAR_HARD_REG_BIT (hard_regs_in_table, regno);
1891 for (rn = regno + 1; rn < endregno; rn++)
1893 in_table |= TEST_HARD_REG_BIT (hard_regs_in_table, rn);
1894 CLEAR_HARD_REG_BIT (hard_regs_in_table, rn);
1895 delete_reg_equiv (rn);
1896 REG_TICK (rn)++;
1899 if (in_table)
1900 for (hash = 0; hash < HASH_SIZE; hash++)
1901 for (p = table[hash]; p; p = next)
1903 next = p->next_same_hash;
1905 if (GET_CODE (p->exp) != REG
1906 || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
1907 continue;
1909 tregno = REGNO (p->exp);
1910 tendregno
1911 = tregno + HARD_REGNO_NREGS (tregno, GET_MODE (p->exp));
1912 if (tendregno > regno && tregno < endregno)
1913 remove_from_table (p, hash);
1917 return;
1919 case SUBREG:
1920 invalidate (SUBREG_REG (x), VOIDmode);
1921 return;
1923 case PARALLEL:
1924 for (i = XVECLEN (x, 0) - 1; i >= 0; --i)
1925 invalidate (XVECEXP (x, 0, i), VOIDmode);
1926 return;
1928 case EXPR_LIST:
1929 /* This is part of a disjoint return value; extract the location in
1930 question ignoring the offset. */
1931 invalidate (XEXP (x, 0), VOIDmode);
1932 return;
1934 case MEM:
1935 /* Calculate the canonical version of X here so that
1936 true_dependence doesn't generate new RTL for X on each call. */
1937 x = canon_rtx (x);
1939 /* Remove all hash table elements that refer to overlapping pieces of
1940 memory. */
1941 if (full_mode == VOIDmode)
1942 full_mode = GET_MODE (x);
1944 for (i = 0; i < HASH_SIZE; i++)
1946 struct table_elt *next;
1948 for (p = table[i]; p; p = next)
1950 next = p->next_same_hash;
1951 if (p->in_memory)
1953 struct check_dependence_data d;
1955 /* Just canonicalize the expression once;
1956 otherwise each time we call invalidate
1957 true_dependence will canonicalize the
1958 expression again. */
1959 if (!p->canon_exp)
1960 p->canon_exp = canon_rtx (p->exp);
1961 d.exp = x;
1962 d.mode = full_mode;
1963 if (for_each_rtx (&p->canon_exp, check_dependence, &d))
1964 remove_from_table (p, i);
1968 return;
1970 default:
1971 abort ();
1975 /* Remove all expressions that refer to register REGNO,
1976 since they are already invalid, and we are about to
1977 mark that register valid again and don't want the old
1978 expressions to reappear as valid. */
1980 static void
1981 remove_invalid_refs (regno)
1982 unsigned int regno;
1984 unsigned int i;
1985 struct table_elt *p, *next;
1987 for (i = 0; i < HASH_SIZE; i++)
1988 for (p = table[i]; p; p = next)
1990 next = p->next_same_hash;
1991 if (GET_CODE (p->exp) != REG
1992 && refers_to_regno_p (regno, regno + 1, p->exp, (rtx*)0))
1993 remove_from_table (p, i);
1997 /* Likewise for a subreg with subreg_reg REGNO, subreg_byte OFFSET,
1998 and mode MODE. */
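
/* Example (hypothetical, little-endian): with REGNO 100, OFFSET 4 and
   MODE SImode, bytes 4..7 of (reg:DI 100) go stale.  An entry for
   (subreg:SI (reg:DI 100) 0) covers bytes 0..3 only and may be kept,
   while entries mentioning the full (reg:DI 100), or a subreg
   overlapping bytes 4..7, are removed.  */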
1999 static void
2000 remove_invalid_subreg_refs (regno, offset, mode)
2001 unsigned int regno;
2002 unsigned int offset;
2003 enum machine_mode mode;
2005 unsigned int i;
2006 struct table_elt *p, *next;
2007 unsigned int end = offset + (GET_MODE_SIZE (mode) - 1);
2009 for (i = 0; i < HASH_SIZE; i++)
2010 for (p = table[i]; p; p = next)
2012 rtx exp = p->exp;
2013 next = p->next_same_hash;
2015 if (GET_CODE (exp) != REG
2016 && (GET_CODE (exp) != SUBREG
2017 || GET_CODE (SUBREG_REG (exp)) != REG
2018 || REGNO (SUBREG_REG (exp)) != regno
2019 || (((SUBREG_BYTE (exp)
2020 + (GET_MODE_SIZE (GET_MODE (exp)) - 1)) >= offset)
2021 && SUBREG_BYTE (exp) <= end))
2022 && refers_to_regno_p (regno, regno + 1, p->exp, (rtx*)0))
2023 remove_from_table (p, i);
2027 /* Recompute the hash codes of any valid entries in the hash table that
2028 reference X, if X is a register, or SUBREG_REG (X) if X is a SUBREG.
2030 This is called when we make a jump equivalence. */
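
/* Sketch of the situation handled here (hypothetical): suppose
   (plus (reg 100) (const_int 4)) was hashed while (reg 100) had
   quantity 5, and a jump equivalence then changes REG_QTY (100).
   Since canon_hash mixes REG_QTY into a register's hash, the entry
   now belongs in a different bucket and must be relinked at the
   chain safe_hash currently selects.  */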
2032 static void
2033 rehash_using_reg (x)
2034 rtx x;
2036 unsigned int i;
2037 struct table_elt *p, *next;
2038 unsigned hash;
2040 if (GET_CODE (x) == SUBREG)
2041 x = SUBREG_REG (x);
2043 /* If X is not a register or if the register is known not to be in any
2044 valid entries in the table, we have no work to do. */
2046 if (GET_CODE (x) != REG
2047 || REG_IN_TABLE (REGNO (x)) < 0
2048 || REG_IN_TABLE (REGNO (x)) != REG_TICK (REGNO (x)))
2049 return;
2051 /* Scan all hash chains looking for valid entries that mention X.
2052 If we find one and it is in the wrong hash chain, move it. We can skip
2053 objects that are registers, since they are handled specially. */
2055 for (i = 0; i < HASH_SIZE; i++)
2056 for (p = table[i]; p; p = next)
2058 next = p->next_same_hash;
2059 if (GET_CODE (p->exp) != REG && reg_mentioned_p (x, p->exp)
2060 && exp_equiv_p (p->exp, p->exp, 1, 0)
2061 && i != (hash = safe_hash (p->exp, p->mode) & HASH_MASK))
2063 if (p->next_same_hash)
2064 p->next_same_hash->prev_same_hash = p->prev_same_hash;
2066 if (p->prev_same_hash)
2067 p->prev_same_hash->next_same_hash = p->next_same_hash;
2068 else
2069 table[i] = p->next_same_hash;
2071 p->next_same_hash = table[hash];
2072 p->prev_same_hash = 0;
2073 if (table[hash])
2074 table[hash]->prev_same_hash = p;
2075 table[hash] = p;
2080 /* Remove from the hash table any expression that is a call-clobbered
2081 register. Also update their TICK values. */
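
/* Example (hypothetical, target-dependent): if hard register 0 is in
   regs_invalidated_by_call, any entry for (reg:SI 0) is dropped when a
   CALL_INSN is scanned, and REG_TICK (0) is bumped so composite
   expressions that mention it stop validating.  */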
2083 static void
2084 invalidate_for_call ()
2086 unsigned int regno, endregno;
2087 unsigned int i;
2088 unsigned hash;
2089 struct table_elt *p, *next;
2090 int in_table = 0;
2092 /* Go through all the hard registers. For each that is clobbered in
2093 a CALL_INSN, remove the register from quantity chains and update
2094 reg_tick if defined. Also see if any of these registers is currently
2095 in the table. */
2097 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2098 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
2100 delete_reg_equiv (regno);
2101 if (REG_TICK (regno) >= 0)
2102 REG_TICK (regno)++;
2104 in_table |= (TEST_HARD_REG_BIT (hard_regs_in_table, regno) != 0);
2107 /* In the case where we have no call-clobbered hard registers in the
2108 table, we are done. Otherwise, scan the table and remove any
2109 entry that overlaps a call-clobbered register. */
2111 if (in_table)
2112 for (hash = 0; hash < HASH_SIZE; hash++)
2113 for (p = table[hash]; p; p = next)
2115 next = p->next_same_hash;
2117 if (GET_CODE (p->exp) != REG
2118 || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
2119 continue;
2121 regno = REGNO (p->exp);
2122 endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (p->exp));
2124 for (i = regno; i < endregno; i++)
2125 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
2127 remove_from_table (p, hash);
2128 break;
2133 /* Given an expression X of type CONST,
2134 and ELT which is its table entry (or 0 if it
2135 is not in the hash table),
2136 return an alternate expression for X as a register plus integer.
2137 If none can be found, return 0. */
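
/* Worked example (hypothetical): X = (const (plus (symbol_ref "a")
   (const_int 12))).  If the table records that
   (const (plus (symbol_ref "a") (const_int 4))) lives in (reg 7), the
   related-value ring leads us to that class and we return
   (plus (reg 7) (const_int 8)), saving a load of the full constant.  */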
2139 static rtx
2140 use_related_value (x, elt)
2141 rtx x;
2142 struct table_elt *elt;
2144 struct table_elt *relt = 0;
2145 struct table_elt *p, *q;
2146 HOST_WIDE_INT offset;
2148 /* First, is there anything related known?
2149 If we have a table element, we can tell from that.
2150 Otherwise, must look it up. */
2152 if (elt != 0 && elt->related_value != 0)
2153 relt = elt;
2154 else if (elt == 0 && GET_CODE (x) == CONST)
2156 rtx subexp = get_related_value (x);
2157 if (subexp != 0)
2158 relt = lookup (subexp,
2159 safe_hash (subexp, GET_MODE (subexp)) & HASH_MASK,
2160 GET_MODE (subexp));
2163 if (relt == 0)
2164 return 0;
2166 /* Search all related table entries for one that has an
2167 equivalent register. */
2169 p = relt;
2170 while (1)
2172 /* This loop is strange in that it is executed in two different cases.
2173 The first is when X is already in the table. Then it is searching
2174 the RELATED_VALUE list of X's class (RELT). The second case is when
2175 X is not in the table. Then RELT points to a class for the related
2176 value.
2178 	 Ensure that, whatever case we are in, we ignore classes that have
2179 	 the same value as X.  */
2181 if (rtx_equal_p (x, p->exp))
2182 q = 0;
2183 else
2184 for (q = p->first_same_value; q; q = q->next_same_value)
2185 if (GET_CODE (q->exp) == REG)
2186 break;
2188 if (q)
2189 break;
2191 p = p->related_value;
2193 /* We went all the way around, so there is nothing to be found.
2194 Alternatively, perhaps RELT was in the table for some other reason
2195 and it has no related values recorded. */
2196 if (p == relt || p == 0)
2197 break;
2200 if (q == 0)
2201 return 0;
2203 offset = (get_integer_term (x) - get_integer_term (p->exp));
2204   /* Note: OFFSET may be 0 if P->exp and X are related by commutativity.  */
2205 return plus_constant (q->exp, offset);
2208 /* Hash a string. Just add its bytes up. */
2209 static inline unsigned
2210 canon_hash_string (ps)
2211 const char *ps;
2213 unsigned hash = 0;
2214 const unsigned char *p = (const unsigned char *)ps;
2216 if (p)
2217 while (*p)
2218 hash += *p++;
2220 return hash;
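
/* For instance, canon_hash_string ("ab") is 'a' + 'b' = 97 + 98 = 195.
   The function only needs to be cheap and deterministic; the result is
   folded into a wider hash and masked with HASH_MASK before use.  */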
2223 /* Hash an rtx. We are careful to make sure the value is never negative.
2224 Equivalent registers hash identically.
2225 MODE is used in hashing for CONST_INTs only;
2226 otherwise the mode of X is used.
2228 Store 1 in do_not_record if any subexpression is volatile.
2230 Store 1 in hash_arg_in_memory if X contains a MEM rtx
2231 which does not have the RTX_UNCHANGING_P bit set.
2233 Note that cse_insn knows that the hash code of a MEM expression
2234 is just (int) MEM plus the hash code of the address. */
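
/* Illustration: (mem:SI (reg:SI 100)) hashes to (unsigned) MEM plus
   the hash of (reg:SI 100), which is itself
   ((unsigned) REG << 7) + REG_QTY (100); two registers in the same
   quantity therefore hash equal, as the comment above requires.  */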
2236 static unsigned
2237 canon_hash (x, mode)
2238 rtx x;
2239 enum machine_mode mode;
2241 int i, j;
2242 unsigned hash = 0;
2243 enum rtx_code code;
2244 const char *fmt;
2246 /* repeat is used to turn tail-recursion into iteration. */
2247 repeat:
2248 if (x == 0)
2249 return hash;
2251 code = GET_CODE (x);
2252 switch (code)
2254 case REG:
2256 unsigned int regno = REGNO (x);
2258 /* On some machines, we can't record any non-fixed hard register,
2259 because extending its life will cause reload problems. We
2260 consider ap, fp, and sp to be fixed for this purpose.
2262 We also consider CCmode registers to be fixed for this purpose;
2263 failure to do so leads to failure to simplify 0<100 type of
2264 conditionals.
2266 On all machines, we can't record any global registers.
2267 Nor should we record any register that is in a small
2268 class, as defined by CLASS_LIKELY_SPILLED_P. */
2270 if (regno < FIRST_PSEUDO_REGISTER
2271 && (global_regs[regno]
2272 || CLASS_LIKELY_SPILLED_P (REGNO_REG_CLASS (regno))
2273 || (SMALL_REGISTER_CLASSES
2274 && ! fixed_regs[regno]
2275 && regno != FRAME_POINTER_REGNUM
2276 && regno != HARD_FRAME_POINTER_REGNUM
2277 && regno != ARG_POINTER_REGNUM
2278 && regno != STACK_POINTER_REGNUM
2279 && GET_MODE_CLASS (GET_MODE (x)) != MODE_CC)))
2281 do_not_record = 1;
2282 return 0;
2285 hash += ((unsigned) REG << 7) + (unsigned) REG_QTY (regno);
2286 return hash;
2289 /* We handle SUBREG of a REG specially because the underlying
2290 reg changes its hash value with every value change; we don't
2291 want to have to forget unrelated subregs when one subreg changes. */
2292 case SUBREG:
2294 if (GET_CODE (SUBREG_REG (x)) == REG)
2296 hash += (((unsigned) SUBREG << 7)
2297 + REGNO (SUBREG_REG (x))
2298 + (SUBREG_BYTE (x) / UNITS_PER_WORD));
2299 return hash;
2301 break;
2304 case CONST_INT:
2306 unsigned HOST_WIDE_INT tem = INTVAL (x);
2307 hash += ((unsigned) CONST_INT << 7) + (unsigned) mode + tem;
2308 return hash;
2311 case CONST_DOUBLE:
2312 /* This is like the general case, except that it only counts
2313 the integers representing the constant. */
2314 hash += (unsigned) code + (unsigned) GET_MODE (x);
2315 if (GET_MODE (x) != VOIDmode)
2316 for (i = 2; i < GET_RTX_LENGTH (CONST_DOUBLE); i++)
2318 unsigned HOST_WIDE_INT tem = XWINT (x, i);
2319 hash += tem;
2321 else
2322 hash += ((unsigned) CONST_DOUBLE_LOW (x)
2323 + (unsigned) CONST_DOUBLE_HIGH (x));
2324 return hash;
2326 /* Assume there is only one rtx object for any given label. */
2327 case LABEL_REF:
2328 hash += ((unsigned) LABEL_REF << 7) + (unsigned long) XEXP (x, 0);
2329 return hash;
2331 case SYMBOL_REF:
2332 hash += ((unsigned) SYMBOL_REF << 7) + (unsigned long) XSTR (x, 0);
2333 return hash;
2335 case MEM:
2336 /* We don't record if marked volatile or if BLKmode since we don't
2337 know the size of the move. */
2338 if (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode)
2340 do_not_record = 1;
2341 return 0;
2343 if (! RTX_UNCHANGING_P (x) || FIXED_BASE_PLUS_P (XEXP (x, 0)))
2345 hash_arg_in_memory = 1;
2347 /* Now that we have already found this special case,
2348 might as well speed it up as much as possible. */
2349 hash += (unsigned) MEM;
2350 x = XEXP (x, 0);
2351 goto repeat;
2353 case USE:
2354 /* A USE that mentions non-volatile memory needs special
2355 handling since the MEM may be BLKmode which normally
2356 prevents an entry from being made. Pure calls are
2357 marked by a USE which mentions BLKmode memory. */
2358 if (GET_CODE (XEXP (x, 0)) == MEM
2359 && ! MEM_VOLATILE_P (XEXP (x, 0)))
2361 hash += (unsigned)USE;
2362 x = XEXP (x, 0);
2364 if (! RTX_UNCHANGING_P (x) || FIXED_BASE_PLUS_P (XEXP (x, 0)))
2365 hash_arg_in_memory = 1;
2367 /* Now that we have already found this special case,
2368 might as well speed it up as much as possible. */
2369 hash += (unsigned) MEM;
2370 x = XEXP (x, 0);
2371 goto repeat;
2373 break;
2375 case PRE_DEC:
2376 case PRE_INC:
2377 case POST_DEC:
2378 case POST_INC:
2379 case PRE_MODIFY:
2380 case POST_MODIFY:
2381 case PC:
2382 case CC0:
2383 case CALL:
2384 case UNSPEC_VOLATILE:
2385 do_not_record = 1;
2386 return 0;
2388 case ASM_OPERANDS:
2389 if (MEM_VOLATILE_P (x))
2391 do_not_record = 1;
2392 return 0;
2394 else
2396 /* We don't want to take the filename and line into account. */
2397 hash += (unsigned) code + (unsigned) GET_MODE (x)
2398 + canon_hash_string (ASM_OPERANDS_TEMPLATE (x))
2399 + canon_hash_string (ASM_OPERANDS_OUTPUT_CONSTRAINT (x))
2400 + (unsigned) ASM_OPERANDS_OUTPUT_IDX (x);
2402 if (ASM_OPERANDS_INPUT_LENGTH (x))
2404 for (i = 1; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
2406 hash += (canon_hash (ASM_OPERANDS_INPUT (x, i),
2407 GET_MODE (ASM_OPERANDS_INPUT (x, i)))
2408 + canon_hash_string (ASM_OPERANDS_INPUT_CONSTRAINT
2409 (x, i)));
2412 hash += canon_hash_string (ASM_OPERANDS_INPUT_CONSTRAINT (x, 0));
2413 x = ASM_OPERANDS_INPUT (x, 0);
2414 mode = GET_MODE (x);
2415 goto repeat;
2418 return hash;
2420 break;
2422 default:
2423 break;
2426 i = GET_RTX_LENGTH (code) - 1;
2427 hash += (unsigned) code + (unsigned) GET_MODE (x);
2428 fmt = GET_RTX_FORMAT (code);
2429 for (; i >= 0; i--)
2431 if (fmt[i] == 'e')
2433 rtx tem = XEXP (x, i);
2435 /* If we are about to do the last recursive call
2436 needed at this level, change it into iteration.
2437 This function is called enough to be worth it. */
2438 if (i == 0)
2440 x = tem;
2441 goto repeat;
2443 hash += canon_hash (tem, 0);
2445 else if (fmt[i] == 'E')
2446 for (j = 0; j < XVECLEN (x, i); j++)
2447 hash += canon_hash (XVECEXP (x, i, j), 0);
2448 else if (fmt[i] == 's')
2449 hash += canon_hash_string (XSTR (x, i));
2450 else if (fmt[i] == 'i')
2452 unsigned tem = XINT (x, i);
2453 hash += tem;
2455 else if (fmt[i] == '0' || fmt[i] == 't')
2456 /* Unused. */
2458 else
2459 abort ();
2461 return hash;
2464 /* Like canon_hash but with no side effects. */
2466 static unsigned
2467 safe_hash (x, mode)
2468 rtx x;
2469 enum machine_mode mode;
2471 int save_do_not_record = do_not_record;
2472 int save_hash_arg_in_memory = hash_arg_in_memory;
2473 unsigned hash = canon_hash (x, mode);
2474 hash_arg_in_memory = save_hash_arg_in_memory;
2475 do_not_record = save_do_not_record;
2476 return hash;
2479 /* Return 1 iff X and Y would canonicalize into the same thing,
2480 without actually constructing the canonicalization of either one.
2481 If VALIDATE is nonzero,
2482 we assume X is an expression being processed from the rtl
2483 and Y was found in the hash table. We check register refs
2484 in Y for being marked as valid.
2486 If EQUAL_VALUES is nonzero, we allow a register to match a constant value
2487 that is known to be in the register. Ordinarily, we don't allow them
2488 to match, because letting them match would cause unpredictable results
2489 in all the places that search a hash table chain for an equivalent
2490 for a given value. A possible equivalent that has different structure
2491 has its hash code computed from different data. Whether the hash code
2492 is the same as that of the given value is pure luck. */
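
/* Two illustrative cases (hypothetical): (plus:SI (reg 100) (reg 101))
   matches (plus:SI (reg 101) (reg 100)) through the commutative check
   below when the registers' quantities agree; and with EQUAL_VALUES
   nonzero, (const_int 4) can match (reg:SI 102) if that register's
   quantity records (const_int 4) as its const_rtx.  */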
2494 static int
2495 exp_equiv_p (x, y, validate, equal_values)
2496 rtx x, y;
2497 int validate;
2498 int equal_values;
2500 int i, j;
2501 enum rtx_code code;
2502 const char *fmt;
2504 /* Note: it is incorrect to assume an expression is equivalent to itself
2505 if VALIDATE is nonzero. */
2506 if (x == y && !validate)
2507 return 1;
2508 if (x == 0 || y == 0)
2509 return x == y;
2511 code = GET_CODE (x);
2512 if (code != GET_CODE (y))
2514 if (!equal_values)
2515 return 0;
2517 /* If X is a constant and Y is a register or vice versa, they may be
2518 equivalent. We only have to validate if Y is a register. */
2519 if (CONSTANT_P (x) && GET_CODE (y) == REG
2520 && REGNO_QTY_VALID_P (REGNO (y)))
2522 int y_q = REG_QTY (REGNO (y));
2523 struct qty_table_elem *y_ent = &qty_table[y_q];
2525 if (GET_MODE (y) == y_ent->mode
2526 && rtx_equal_p (x, y_ent->const_rtx)
2527 && (! validate || REG_IN_TABLE (REGNO (y)) == REG_TICK (REGNO (y))))
2528 return 1;
2531 if (CONSTANT_P (y) && code == REG
2532 && REGNO_QTY_VALID_P (REGNO (x)))
2534 int x_q = REG_QTY (REGNO (x));
2535 struct qty_table_elem *x_ent = &qty_table[x_q];
2537 if (GET_MODE (x) == x_ent->mode
2538 && rtx_equal_p (y, x_ent->const_rtx))
2539 return 1;
2542 return 0;
2545 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
2546 if (GET_MODE (x) != GET_MODE (y))
2547 return 0;
2549 switch (code)
2551 case PC:
2552 case CC0:
2553 case CONST_INT:
2554 return x == y;
2556 case LABEL_REF:
2557 return XEXP (x, 0) == XEXP (y, 0);
2559 case SYMBOL_REF:
2560 return XSTR (x, 0) == XSTR (y, 0);
2562 case REG:
2564 unsigned int regno = REGNO (y);
2565 unsigned int endregno
2566 = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
2567 : HARD_REGNO_NREGS (regno, GET_MODE (y)));
2568 unsigned int i;
2570 	/* If the quantities are not the same, the expressions are not
2571 	   equivalent.  If they are the same and we are not to validate,
2572 	   they are equivalent.  Otherwise, ensure all regs are up-to-date.  */
2574 if (REG_QTY (REGNO (x)) != REG_QTY (regno))
2575 return 0;
2577 if (! validate)
2578 return 1;
2580 for (i = regno; i < endregno; i++)
2581 if (REG_IN_TABLE (i) != REG_TICK (i))
2582 return 0;
2584 return 1;
2587 /* For commutative operations, check both orders. */
2588 case PLUS:
2589 case MULT:
2590 case AND:
2591 case IOR:
2592 case XOR:
2593 case NE:
2594 case EQ:
2595 return ((exp_equiv_p (XEXP (x, 0), XEXP (y, 0), validate, equal_values)
2596 && exp_equiv_p (XEXP (x, 1), XEXP (y, 1),
2597 validate, equal_values))
2598 || (exp_equiv_p (XEXP (x, 0), XEXP (y, 1),
2599 validate, equal_values)
2600 && exp_equiv_p (XEXP (x, 1), XEXP (y, 0),
2601 validate, equal_values)));
2603 case ASM_OPERANDS:
2604 /* We don't use the generic code below because we want to
2605 disregard filename and line numbers. */
2607 /* A volatile asm isn't equivalent to any other. */
2608 if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
2609 return 0;
2611 if (GET_MODE (x) != GET_MODE (y)
2612 || strcmp (ASM_OPERANDS_TEMPLATE (x), ASM_OPERANDS_TEMPLATE (y))
2613 || strcmp (ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
2614 ASM_OPERANDS_OUTPUT_CONSTRAINT (y))
2615 || ASM_OPERANDS_OUTPUT_IDX (x) != ASM_OPERANDS_OUTPUT_IDX (y)
2616 || ASM_OPERANDS_INPUT_LENGTH (x) != ASM_OPERANDS_INPUT_LENGTH (y))
2617 return 0;
2619 if (ASM_OPERANDS_INPUT_LENGTH (x))
2621 for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
2622 if (! exp_equiv_p (ASM_OPERANDS_INPUT (x, i),
2623 ASM_OPERANDS_INPUT (y, i),
2624 validate, equal_values)
2625 || strcmp (ASM_OPERANDS_INPUT_CONSTRAINT (x, i),
2626 ASM_OPERANDS_INPUT_CONSTRAINT (y, i)))
2627 return 0;
2630 return 1;
2632 default:
2633 break;
2636   /* Compare the elements.  If any pair of corresponding elements
2637      fails to match, return 0 for the whole thing.  */
2639 fmt = GET_RTX_FORMAT (code);
2640 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2642 switch (fmt[i])
2644 case 'e':
2645 if (! exp_equiv_p (XEXP (x, i), XEXP (y, i), validate, equal_values))
2646 return 0;
2647 break;
2649 case 'E':
2650 if (XVECLEN (x, i) != XVECLEN (y, i))
2651 return 0;
2652 for (j = 0; j < XVECLEN (x, i); j++)
2653 if (! exp_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j),
2654 validate, equal_values))
2655 return 0;
2656 break;
2658 case 's':
2659 if (strcmp (XSTR (x, i), XSTR (y, i)))
2660 return 0;
2661 break;
2663 case 'i':
2664 if (XINT (x, i) != XINT (y, i))
2665 return 0;
2666 break;
2668 case 'w':
2669 if (XWINT (x, i) != XWINT (y, i))
2670 return 0;
2671 break;
2673 case '0':
2674 case 't':
2675 break;
2677 default:
2678 abort ();
2682 return 1;
2685 /* Return 1 if X has a value that can vary even between two
2686 executions of the program. 0 means X can be compared reliably
2687 against certain constants or near-constants. */
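
/* Example (hypothetical): (reg:SI 100) does not vary if its quantity
   records a const_rtx such as (const_int 40); nor does
   (plus:SI (reg:SI 100) (const_int 4)) in that case.  Anything not
   matched below is referred to rtx_varies_p.  */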
2689 static int
2690 cse_rtx_varies_p (x, from_alias)
2691 rtx x;
2692 int from_alias;
2694 /* We need not check for X and the equivalence class being of the same
2695 mode because if X is equivalent to a constant in some mode, it
2696 doesn't vary in any mode. */
2698 if (GET_CODE (x) == REG
2699 && REGNO_QTY_VALID_P (REGNO (x)))
2701 int x_q = REG_QTY (REGNO (x));
2702 struct qty_table_elem *x_ent = &qty_table[x_q];
2704 if (GET_MODE (x) == x_ent->mode
2705 && x_ent->const_rtx != NULL_RTX)
2706 return 0;
2709 if (GET_CODE (x) == PLUS
2710 && GET_CODE (XEXP (x, 1)) == CONST_INT
2711 && GET_CODE (XEXP (x, 0)) == REG
2712 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
2714 int x0_q = REG_QTY (REGNO (XEXP (x, 0)));
2715 struct qty_table_elem *x0_ent = &qty_table[x0_q];
2717 if ((GET_MODE (XEXP (x, 0)) == x0_ent->mode)
2718 && x0_ent->const_rtx != NULL_RTX)
2719 return 0;
2722 /* This can happen as the result of virtual register instantiation, if
2723 the initial constant is too large to be a valid address. This gives
2724 us a three instruction sequence, load large offset into a register,
2725 load fp minus a constant into a register, then a MEM which is the
2726 sum of the two `constant' registers. */
2727 if (GET_CODE (x) == PLUS
2728 && GET_CODE (XEXP (x, 0)) == REG
2729 && GET_CODE (XEXP (x, 1)) == REG
2730 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0)))
2731 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
2733 int x0_q = REG_QTY (REGNO (XEXP (x, 0)));
2734 int x1_q = REG_QTY (REGNO (XEXP (x, 1)));
2735 struct qty_table_elem *x0_ent = &qty_table[x0_q];
2736 struct qty_table_elem *x1_ent = &qty_table[x1_q];
2738 if ((GET_MODE (XEXP (x, 0)) == x0_ent->mode)
2739 && x0_ent->const_rtx != NULL_RTX
2740 && (GET_MODE (XEXP (x, 1)) == x1_ent->mode)
2741 && x1_ent->const_rtx != NULL_RTX)
2742 return 0;
2745 return rtx_varies_p (x, from_alias);
2748 /* Canonicalize an expression:
2749 replace each register reference inside it
2750 with the "oldest" equivalent register.
2752 If INSN is non-zero and we are replacing a pseudo with a hard register
2753 or vice versa, validate_change is used to ensure that INSN remains valid
2754 after we make our substitution. The calls are made with IN_GROUP non-zero
2755 so apply_change_group must be called upon the outermost return from this
2756 function (unless INSN is zero). The result of apply_change_group can
2757 generally be discarded since the changes we are making are optional. */
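
/* Example (hypothetical): after (set (reg 105) (reg 100)), both
   registers share a quantity whose first_reg is 100, so a later
   (plus (reg 105) (const_int 4)) is canonicalized to
   (plus (reg 100) (const_int 4)) and the two forms hash and compare
   as one expression.  */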
2759 static rtx
2760 canon_reg (x, insn)
2761 rtx x;
2762 rtx insn;
2764 int i;
2765 enum rtx_code code;
2766 const char *fmt;
2768 if (x == 0)
2769 return x;
2771 code = GET_CODE (x);
2772 switch (code)
2774 case PC:
2775 case CC0:
2776 case CONST:
2777 case CONST_INT:
2778 case CONST_DOUBLE:
2779 case SYMBOL_REF:
2780 case LABEL_REF:
2781 case ADDR_VEC:
2782 case ADDR_DIFF_VEC:
2783 return x;
2785 case REG:
2787 int first;
2788 int q;
2789 struct qty_table_elem *ent;
2791 /* Never replace a hard reg, because hard regs can appear
2792 in more than one machine mode, and we must preserve the mode
2793 of each occurrence. Also, some hard regs appear in
2794 MEMs that are shared and mustn't be altered. Don't try to
2795 replace any reg that maps to a reg of class NO_REGS. */
2796 if (REGNO (x) < FIRST_PSEUDO_REGISTER
2797 || ! REGNO_QTY_VALID_P (REGNO (x)))
2798 return x;
2800 q = REG_QTY (REGNO (x));
2801 ent = &qty_table[q];
2802 first = ent->first_reg;
2803 return (first >= FIRST_PSEUDO_REGISTER ? regno_reg_rtx[first]
2804 : REGNO_REG_CLASS (first) == NO_REGS ? x
2805 : gen_rtx_REG (ent->mode, first));
2808 default:
2809 break;
2812 fmt = GET_RTX_FORMAT (code);
2813 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2815 int j;
2817 if (fmt[i] == 'e')
2819 rtx new = canon_reg (XEXP (x, i), insn);
2820 int insn_code;
2822 /* If replacing pseudo with hard reg or vice versa, ensure the
2823 insn remains valid. Likewise if the insn has MATCH_DUPs. */
2824 if (insn != 0 && new != 0
2825 && GET_CODE (new) == REG && GET_CODE (XEXP (x, i)) == REG
2826 && (((REGNO (new) < FIRST_PSEUDO_REGISTER)
2827 != (REGNO (XEXP (x, i)) < FIRST_PSEUDO_REGISTER))
2828 || (insn_code = recog_memoized (insn)) < 0
2829 || insn_data[insn_code].n_dups > 0))
2830 validate_change (insn, &XEXP (x, i), new, 1);
2831 else
2832 XEXP (x, i) = new;
2834 else if (fmt[i] == 'E')
2835 for (j = 0; j < XVECLEN (x, i); j++)
2836 XVECEXP (x, i, j) = canon_reg (XVECEXP (x, i, j), insn);
2839 return x;
2842 /* LOC is a location within INSN that is an operand address (the contents of
2843 a MEM). Find the best equivalent address to use that is valid for this
2844 insn.
2846 On most CISC machines, complicated address modes are costly, and rtx_cost
2847 is a good approximation for that cost. However, most RISC machines have
2848 only a few (usually only one) memory reference formats. If an address is
2849 valid at all, it is often just as cheap as any other address. Hence, for
2850 RISC machines, we use the configuration macro `ADDRESS_COST' to compare the
2851 costs of various addresses. For two addresses of equal cost, choose the one
2852 with the highest `rtx_cost' value as that has the potential of eliminating
2853 the most insns. For equal costs, we choose the first in the equivalence
2854 class. Note that we ignore the fact that pseudo registers are cheaper
2855    than hard registers here because we would also prefer the pseudo registers.  */
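
/* Example (hypothetical): if *LOC is (plus (reg 100) (const_int 4))
   and the table shows (reg 100) equivalent to a cheaper (reg 101),
   we try validate_change to (plus (reg 101) (const_int 4)); when a
   candidate is rejected, its `flag' is set and the search resumes.  */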
2858 static void
2859 find_best_addr (insn, loc, mode)
2860 rtx insn;
2861 rtx *loc;
2862 enum machine_mode mode;
2864 struct table_elt *elt;
2865 rtx addr = *loc;
2866 #ifdef ADDRESS_COST
2867 struct table_elt *p;
2868 int found_better = 1;
2869 #endif
2870 int save_do_not_record = do_not_record;
2871 int save_hash_arg_in_memory = hash_arg_in_memory;
2872 int addr_volatile;
2873 int regno;
2874 unsigned hash;
2876 /* Do not try to replace constant addresses or addresses of local and
2877 argument slots. These MEM expressions are made only once and inserted
2878 in many instructions, as well as being used to control symbol table
2879 output. It is not safe to clobber them.
2881 There are some uncommon cases where the address is already in a register
2882 for some reason, but we cannot take advantage of that because we have
2883 no easy way to unshare the MEM. In addition, looking up all stack
2884 addresses is costly. */
2885 if ((GET_CODE (addr) == PLUS
2886 && GET_CODE (XEXP (addr, 0)) == REG
2887 && GET_CODE (XEXP (addr, 1)) == CONST_INT
2888 && (regno = REGNO (XEXP (addr, 0)),
2889 regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM
2890 || regno == ARG_POINTER_REGNUM))
2891 || (GET_CODE (addr) == REG
2892 && (regno = REGNO (addr), regno == FRAME_POINTER_REGNUM
2893 || regno == HARD_FRAME_POINTER_REGNUM
2894 || regno == ARG_POINTER_REGNUM))
2895 || GET_CODE (addr) == ADDRESSOF
2896 || CONSTANT_ADDRESS_P (addr))
2897 return;
2899 /* If this address is not simply a register, try to fold it. This will
2900 sometimes simplify the expression. Many simplifications
2901 will not be valid, but some, usually applying the associative rule, will
2902 be valid and produce better code. */
2903 if (GET_CODE (addr) != REG)
2905 rtx folded = fold_rtx (copy_rtx (addr), NULL_RTX);
2906 int addr_folded_cost = address_cost (folded, mode);
2907 int addr_cost = address_cost (addr, mode);
2909 if ((addr_folded_cost < addr_cost
2910 || (addr_folded_cost == addr_cost
2911 /* ??? The rtx_cost comparison is left over from an older
2912 version of this code. It is probably no longer helpful. */
2913 && (rtx_cost (folded, MEM) > rtx_cost (addr, MEM)
2914 || approx_reg_cost (folded) < approx_reg_cost (addr))))
2915 && validate_change (insn, loc, folded, 0))
2916 addr = folded;
2919 /* If this address is not in the hash table, we can't look for equivalences
2920 of the whole address. Also, ignore if volatile. */
2922 do_not_record = 0;
2923 hash = HASH (addr, Pmode);
2924 addr_volatile = do_not_record;
2925 do_not_record = save_do_not_record;
2926 hash_arg_in_memory = save_hash_arg_in_memory;
2928 if (addr_volatile)
2929 return;
2931 elt = lookup (addr, hash, Pmode);
2933 #ifndef ADDRESS_COST
2934 if (elt)
2936 int our_cost = elt->cost;
2938 /* Find the lowest cost below ours that works. */
2939 for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
2940 if (elt->cost < our_cost
2941 && (GET_CODE (elt->exp) == REG
2942 || exp_equiv_p (elt->exp, elt->exp, 1, 0))
2943 && validate_change (insn, loc,
2944 canon_reg (copy_rtx (elt->exp), NULL_RTX), 0))
2945 return;
2947 #else
2949 if (elt)
2951 /* We need to find the best (under the criteria documented above) entry
2952 in the class that is valid. We use the `flag' field to indicate
2953 choices that were invalid and iterate until we can't find a better
2954 one that hasn't already been tried. */
2956 for (p = elt->first_same_value; p; p = p->next_same_value)
2957 p->flag = 0;
2959 while (found_better)
2961 int best_addr_cost = address_cost (*loc, mode);
2962 int best_rtx_cost = (elt->cost + 1) >> 1;
2963 int exp_cost;
2964 struct table_elt *best_elt = elt;
2966 found_better = 0;
2967 for (p = elt->first_same_value; p; p = p->next_same_value)
2968 if (! p->flag)
2970 if ((GET_CODE (p->exp) == REG
2971 || exp_equiv_p (p->exp, p->exp, 1, 0))
2972 && ((exp_cost = address_cost (p->exp, mode)) < best_addr_cost
2973 || (exp_cost == best_addr_cost
2974 && ((p->cost + 1) >> 1) > best_rtx_cost)))
2976 found_better = 1;
2977 best_addr_cost = exp_cost;
2978 best_rtx_cost = (p->cost + 1) >> 1;
2979 best_elt = p;
2983 if (found_better)
2985 if (validate_change (insn, loc,
2986 canon_reg (copy_rtx (best_elt->exp),
2987 NULL_RTX), 0))
2988 return;
2989 else
2990 best_elt->flag = 1;
2995 /* If the address is a binary operation with the first operand a register
2996 and the second a constant, do the same as above, but looking for
2997 equivalences of the register. Then try to simplify before checking for
2998    the best address to use.  This catches a few cases: the first is when we
2999    have REG+const and the register is itself equivalent to another REG+const.
3000    We can often merge the constants and eliminate one insn and one register.  It may also be
3001 that a machine has a cheap REG+REG+const. Finally, this improves the
3002 code on the Alpha for unaligned byte stores. */
3004 if (flag_expensive_optimizations
3005 && (GET_RTX_CLASS (GET_CODE (*loc)) == '2'
3006 || GET_RTX_CLASS (GET_CODE (*loc)) == 'c')
3007 && GET_CODE (XEXP (*loc, 0)) == REG
3008 && GET_CODE (XEXP (*loc, 1)) == CONST_INT)
3010 rtx c = XEXP (*loc, 1);
3012 do_not_record = 0;
3013 hash = HASH (XEXP (*loc, 0), Pmode);
3014 do_not_record = save_do_not_record;
3015 hash_arg_in_memory = save_hash_arg_in_memory;
3017 elt = lookup (XEXP (*loc, 0), hash, Pmode);
3018 if (elt == 0)
3019 return;
3021 /* We need to find the best (under the criteria documented above) entry
3022 in the class that is valid. We use the `flag' field to indicate
3023 choices that were invalid and iterate until we can't find a better
3024 one that hasn't already been tried. */
3026 for (p = elt->first_same_value; p; p = p->next_same_value)
3027 p->flag = 0;
3029 while (found_better)
3031 int best_addr_cost = address_cost (*loc, mode);
3032 int best_rtx_cost = (COST (*loc) + 1) >> 1;
3033 struct table_elt *best_elt = elt;
3034 rtx best_rtx = *loc;
3035 int count;
3037 	  /* In the worst case this is an O(n^2) algorithm, so limit our search
3038 to the first 32 elements on the list. This avoids trouble
3039 compiling code with very long basic blocks that can easily
3040 call simplify_gen_binary so many times that we run out of
3041 memory. */
3043 found_better = 0;
3044 for (p = elt->first_same_value, count = 0;
3045 p && count < 32;
3046 p = p->next_same_value, count++)
3047 if (! p->flag
3048 && (GET_CODE (p->exp) == REG
3049 || exp_equiv_p (p->exp, p->exp, 1, 0)))
3051 rtx new = simplify_gen_binary (GET_CODE (*loc), Pmode,
3052 p->exp, c);
3053 int new_cost;
3054 new_cost = address_cost (new, mode);
3056 if (new_cost < best_addr_cost
3057 || (new_cost == best_addr_cost
3058 && (COST (new) + 1) >> 1 > best_rtx_cost))
3060 found_better = 1;
3061 best_addr_cost = new_cost;
3062 best_rtx_cost = (COST (new) + 1) >> 1;
3063 best_elt = p;
3064 best_rtx = new;
3068 if (found_better)
3070 if (validate_change (insn, loc,
3071 canon_reg (copy_rtx (best_rtx),
3072 NULL_RTX), 0))
3073 return;
3074 else
3075 best_elt->flag = 1;
3079 #endif
3082 /* Given an operation (CODE, *PARG1, *PARG2), where code is a comparison
3083    operation (EQ, NE, GT, etc.), follow it back through the hash table to
3084    find what values are actually being compared.
3086 *PARG1 and *PARG2 are updated to contain the rtx representing the values
3087 actually being compared. For example, if *PARG1 was (cc0) and *PARG2
3088 was (const_int 0), *PARG1 and *PARG2 will be set to the objects that were
3089 compared to produce cc0.
3091    The return value is the comparison operator: either CODE itself or the
3092    code corresponding to the inverse of the comparison.  */
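
/* Worked example (hypothetical): given (eq (reg 100) (const_int 0))
   where the table shows (reg 100) equivalent to (lt (reg 101) (reg 102)),
   the loop below finds that comparison, flips EQ to the reverse of LT,
   and returns with *PARG1 = (reg 101) and *PARG2 = (reg 102).  */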
3094 static enum rtx_code
3095 find_comparison_args (code, parg1, parg2, pmode1, pmode2)
3096 enum rtx_code code;
3097 rtx *parg1, *parg2;
3098 enum machine_mode *pmode1, *pmode2;
3100 rtx arg1, arg2;
3102 arg1 = *parg1, arg2 = *parg2;
3104 /* If ARG2 is const0_rtx, see what ARG1 is equivalent to. */
3106 while (arg2 == CONST0_RTX (GET_MODE (arg1)))
3108 /* Set non-zero when we find something of interest. */
3109 rtx x = 0;
3110 int reverse_code = 0;
3111 struct table_elt *p = 0;
3113 /* If arg1 is a COMPARE, extract the comparison arguments from it.
3114 On machines with CC0, this is the only case that can occur, since
3115 fold_rtx will return the COMPARE or item being compared with zero
3116 when given CC0. */
3118 if (GET_CODE (arg1) == COMPARE && arg2 == const0_rtx)
3119 x = arg1;
3121 /* If ARG1 is a comparison operator and CODE is testing for
3122 STORE_FLAG_VALUE, get the inner arguments. */
3124 else if (GET_RTX_CLASS (GET_CODE (arg1)) == '<')
3126 if (code == NE
3127 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
3128 && code == LT && STORE_FLAG_VALUE == -1)
3129 #ifdef FLOAT_STORE_FLAG_VALUE
3130 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_FLOAT
3131 && (REAL_VALUE_NEGATIVE
3132 (FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)))))
3133 #endif
3135 x = arg1;
3136 else if (code == EQ
3137 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
3138 && code == GE && STORE_FLAG_VALUE == -1)
3139 #ifdef FLOAT_STORE_FLAG_VALUE
3140 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_FLOAT
3141 && (REAL_VALUE_NEGATIVE
3142 (FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)))))
3143 #endif
3145 x = arg1, reverse_code = 1;
3148 /* ??? We could also check for
3150 (ne (and (eq (...) (const_int 1))) (const_int 0))
3152 and related forms, but let's wait until we see them occurring. */
3154 if (x == 0)
3155 /* Look up ARG1 in the hash table and see if it has an equivalence
3156 that lets us see what is being compared. */
3157 p = lookup (arg1, safe_hash (arg1, GET_MODE (arg1)) & HASH_MASK,
3158 GET_MODE (arg1));
3159 if (p)
3161 p = p->first_same_value;
3163 /* If what we compare is already known to be constant, that is as
3164 good as it gets.
3165 We need to break the loop in this case, because otherwise we
3166 can have an infinite loop when looking at a reg that is known
3167 to be a constant which is the same as a comparison of a reg
3168 against zero which appears later in the insn stream, which in
3169 turn is constant and the same as the comparison of the first reg
3170 against zero... */
3171 if (p->is_const)
3172 break;
3175 for (; p; p = p->next_same_value)
3177 enum machine_mode inner_mode = GET_MODE (p->exp);
3179 /* If the entry isn't valid, skip it. */
3180 if (! exp_equiv_p (p->exp, p->exp, 1, 0))
3181 continue;
3183 if (GET_CODE (p->exp) == COMPARE
3184 /* Another possibility is that this machine has a compare insn
3185 that includes the comparison code. In that case, ARG1 would
3186 be equivalent to a comparison operation that would set ARG1 to
3187 either STORE_FLAG_VALUE or zero. If this is an NE operation,
3188 ORIG_CODE is the actual comparison being done; if it is an EQ,
3189 	     we must reverse ORIG_CODE.  On machines with a negative value
3190 for STORE_FLAG_VALUE, also look at LT and GE operations. */
3191 || ((code == NE
3192 || (code == LT
3193 && GET_MODE_CLASS (inner_mode) == MODE_INT
3194 && (GET_MODE_BITSIZE (inner_mode)
3195 <= HOST_BITS_PER_WIDE_INT)
3196 && (STORE_FLAG_VALUE
3197 & ((HOST_WIDE_INT) 1
3198 << (GET_MODE_BITSIZE (inner_mode) - 1))))
3199 #ifdef FLOAT_STORE_FLAG_VALUE
3200 || (code == LT
3201 && GET_MODE_CLASS (inner_mode) == MODE_FLOAT
3202 && (REAL_VALUE_NEGATIVE
3203 (FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)))))
3204 #endif
3206 && GET_RTX_CLASS (GET_CODE (p->exp)) == '<'))
3208 x = p->exp;
3209 break;
3211 else if ((code == EQ
3212 || (code == GE
3213 && GET_MODE_CLASS (inner_mode) == MODE_INT
3214 && (GET_MODE_BITSIZE (inner_mode)
3215 <= HOST_BITS_PER_WIDE_INT)
3216 && (STORE_FLAG_VALUE
3217 & ((HOST_WIDE_INT) 1
3218 << (GET_MODE_BITSIZE (inner_mode) - 1))))
3219 #ifdef FLOAT_STORE_FLAG_VALUE
3220 || (code == GE
3221 && GET_MODE_CLASS (inner_mode) == MODE_FLOAT
3222 && (REAL_VALUE_NEGATIVE
3223 (FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)))))
3224 #endif
3226 && GET_RTX_CLASS (GET_CODE (p->exp)) == '<')
3228 reverse_code = 1;
3229 x = p->exp;
3230 break;
3233 /* If this is fp + constant, the equivalent is a better operand since
3234 it may let us predict the value of the comparison. */
3235 else if (NONZERO_BASE_PLUS_P (p->exp))
3237 arg1 = p->exp;
3238 continue;
3242 /* If we didn't find a useful equivalence for ARG1, we are done.
3243 Otherwise, set up for the next iteration. */
3244 if (x == 0)
3245 break;
3247 /* If we need to reverse the comparison, make sure that that is
3248 possible -- we can't necessarily infer the value of GE from LT
3249 with floating-point operands. */
3250 if (reverse_code)
3252 enum rtx_code reversed = reversed_comparison_code (x, NULL_RTX);
3253 if (reversed == UNKNOWN)
3254 break;
3255 else code = reversed;
3257 else if (GET_RTX_CLASS (GET_CODE (x)) == '<')
3258 code = GET_CODE (x);
3259 arg1 = XEXP (x, 0), arg2 = XEXP (x, 1);
3262 /* Return our results. Return the modes from before fold_rtx
3263 because fold_rtx might produce const_int, and then it's too late. */
3264 *pmode1 = GET_MODE (arg1), *pmode2 = GET_MODE (arg2);
3265 *parg1 = fold_rtx (arg1, 0), *parg2 = fold_rtx (arg2, 0);
3267 return code;
3270 /* If X is a nontrivial arithmetic operation on an argument
3271 for which a constant value can be determined, return
3272 the result of operating on that value, as a constant.
3273 Otherwise, return X, possibly with one or more operands
3274 modified by recursive calls to this function.
3276 If X is a register whose contents are known, we do NOT
3277 return those contents here. equiv_constant is called to
3278 perform that task.
3280 INSN is the insn that we may be modifying. If it is 0, make a copy
3281 of X before modifying it. */
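
/* Example (hypothetical): (mult:SI (const_int 4) (const_int 8)) folds
   to (const_int 32), and constant operands inside a larger expression
   are substituted recursively; but a bare (reg 100) is returned
   unchanged even when its value is known -- that lookup is
   equiv_constant's job.  */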
3283 static rtx
3284 fold_rtx (x, insn)
3285 rtx x;
3286 rtx insn;
3288 enum rtx_code code;
3289 enum machine_mode mode;
3290 const char *fmt;
3291 int i;
3292 rtx new = 0;
3293 int copied = 0;
3294 int must_swap = 0;
3296 /* Folded equivalents of first two operands of X. */
3297 rtx folded_arg0;
3298 rtx folded_arg1;
3300 /* Constant equivalents of first three operands of X;
3301 0 when no such equivalent is known. */
3302 rtx const_arg0;
3303 rtx const_arg1;
3304 rtx const_arg2;
3306 /* The mode of the first operand of X. We need this for sign and zero
3307 extends. */
3308 enum machine_mode mode_arg0;
3310 if (x == 0)
3311 return x;
3313 mode = GET_MODE (x);
3314 code = GET_CODE (x);
3315 switch (code)
3317 case CONST:
3318 case CONST_INT:
3319 case CONST_DOUBLE:
3320 case SYMBOL_REF:
3321 case LABEL_REF:
3322 case REG:
3323       /* No use simplifying an EXPR_LIST,
3324 	 since EXPR_LISTs are used only for lists of args
3325 	 in a function call's REG_EQUAL note.  */
3326 case EXPR_LIST:
3327 /* Changing anything inside an ADDRESSOF is incorrect; we don't
3328 want to (e.g.,) make (addressof (const_int 0)) just because
3329 the location is known to be zero. */
3330 case ADDRESSOF:
3331 return x;
3333 #ifdef HAVE_cc0
3334 case CC0:
3335 return prev_insn_cc0;
3336 #endif
3338 case PC:
3339 /* If the next insn is a CODE_LABEL followed by a jump table,
3340 PC's value is a LABEL_REF pointing to that label. That
3341 lets us fold switch statements on the VAX. */
3342 if (insn && GET_CODE (insn) == JUMP_INSN)
3344 rtx next = next_nonnote_insn (insn);
3346 if (next && GET_CODE (next) == CODE_LABEL
3347 && NEXT_INSN (next) != 0
3348 && GET_CODE (NEXT_INSN (next)) == JUMP_INSN
3349 && (GET_CODE (PATTERN (NEXT_INSN (next))) == ADDR_VEC
3350 || GET_CODE (PATTERN (NEXT_INSN (next))) == ADDR_DIFF_VEC))
3351 return gen_rtx_LABEL_REF (Pmode, next);
3353 break;
3355 case SUBREG:
3356 /* See if we previously assigned a constant value to this SUBREG. */
3357 if ((new = lookup_as_function (x, CONST_INT)) != 0
3358 || (new = lookup_as_function (x, CONST_DOUBLE)) != 0)
3359 return new;
3361 /* If this is a paradoxical SUBREG, we have no idea what value the
3362 	 extra bits would have.  However, if the operand is equivalent
3363 	 to a SUBREG whose inner operand has our result mode, and all the
3364 modes are within a word, we can just use the inner operand
3365 because these SUBREGs just say how to treat the register.
3367 Similarly if we find an integer constant. */
3369 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3371 enum machine_mode imode = GET_MODE (SUBREG_REG (x));
3372 struct table_elt *elt;
3374 if (GET_MODE_SIZE (mode) <= UNITS_PER_WORD
3375 && GET_MODE_SIZE (imode) <= UNITS_PER_WORD
3376 && (elt = lookup (SUBREG_REG (x), HASH (SUBREG_REG (x), imode),
3377 imode)) != 0)
3378 for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
3380 if (CONSTANT_P (elt->exp)
3381 && GET_MODE (elt->exp) == VOIDmode)
3382 return elt->exp;
3384 if (GET_CODE (elt->exp) == SUBREG
3385 && GET_MODE (SUBREG_REG (elt->exp)) == mode
3386 && exp_equiv_p (elt->exp, elt->exp, 1, 0))
3387 return copy_rtx (SUBREG_REG (elt->exp));
3390 return x;
3393 /* Fold SUBREG_REG. If it changed, see if we can simplify the SUBREG.
3394 We might be able to if the SUBREG is extracting a single word in an
3395 integral mode or extracting the low part. */
3397 folded_arg0 = fold_rtx (SUBREG_REG (x), insn);
3398 const_arg0 = equiv_constant (folded_arg0);
3399 if (const_arg0)
3400 folded_arg0 = const_arg0;
3402 if (folded_arg0 != SUBREG_REG (x))
3404 new = simplify_subreg (mode, folded_arg0,
3405 GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
3406 if (new)
3407 return new;
3410 /* If this is a narrowing SUBREG and our operand is a REG, see if
3411 we can find an equivalence for REG that is an arithmetic operation
3412 in a wider mode where both operands are paradoxical SUBREGs
3413 from objects of our result mode. In that case, we couldn't report
3414 an equivalent value for that operation, since we don't know what the
3415 extra bits will be. But we can find an equivalence for this SUBREG
3416        by folding that operation in the narrow mode.  This allows us to
3417 fold arithmetic in narrow modes when the machine only supports
3418 word-sized arithmetic.
3420 Also look for a case where we have a SUBREG whose operand is the
3421 same as our result. If both modes are smaller than a word, we
3422 are simply interpreting a register in different modes and we
3423 can use the inner value. */
3425 if (GET_CODE (folded_arg0) == REG
3426 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (folded_arg0))
3427 && subreg_lowpart_p (x))
3429 struct table_elt *elt;
3431 /* We can use HASH here since we know that canon_hash won't be
3432 called. */
3433 elt = lookup (folded_arg0,
3434 HASH (folded_arg0, GET_MODE (folded_arg0)),
3435 GET_MODE (folded_arg0));
3437 if (elt)
3438 elt = elt->first_same_value;
3440 for (; elt; elt = elt->next_same_value)
3442 enum rtx_code eltcode = GET_CODE (elt->exp);
3444 /* Just check for unary and binary operations. */
3445 if (GET_RTX_CLASS (GET_CODE (elt->exp)) == '1'
3446 && GET_CODE (elt->exp) != SIGN_EXTEND
3447 && GET_CODE (elt->exp) != ZERO_EXTEND
3448 && GET_CODE (XEXP (elt->exp, 0)) == SUBREG
3449 && GET_MODE (SUBREG_REG (XEXP (elt->exp, 0))) == mode)
3451 rtx op0 = SUBREG_REG (XEXP (elt->exp, 0));
3453 if (GET_CODE (op0) != REG && ! CONSTANT_P (op0))
3454 op0 = fold_rtx (op0, NULL_RTX);
3456 op0 = equiv_constant (op0);
3457 if (op0)
3458 new = simplify_unary_operation (GET_CODE (elt->exp), mode,
3459 op0, mode);
3461 else if ((GET_RTX_CLASS (GET_CODE (elt->exp)) == '2'
3462 || GET_RTX_CLASS (GET_CODE (elt->exp)) == 'c')
3463 && eltcode != DIV && eltcode != MOD
3464 && eltcode != UDIV && eltcode != UMOD
3465 && eltcode != ASHIFTRT && eltcode != LSHIFTRT
3466 && eltcode != ROTATE && eltcode != ROTATERT
3467 && ((GET_CODE (XEXP (elt->exp, 0)) == SUBREG
3468 && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 0)))
3469 == mode))
3470 || CONSTANT_P (XEXP (elt->exp, 0)))
3471 && ((GET_CODE (XEXP (elt->exp, 1)) == SUBREG
3472 && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 1)))
3473 == mode))
3474 || CONSTANT_P (XEXP (elt->exp, 1))))
3476 rtx op0 = gen_lowpart_common (mode, XEXP (elt->exp, 0));
3477 rtx op1 = gen_lowpart_common (mode, XEXP (elt->exp, 1));
3479 if (op0 && GET_CODE (op0) != REG && ! CONSTANT_P (op0))
3480 op0 = fold_rtx (op0, NULL_RTX);
3482 if (op0)
3483 op0 = equiv_constant (op0);
3485 if (op1 && GET_CODE (op1) != REG && ! CONSTANT_P (op1))
3486 op1 = fold_rtx (op1, NULL_RTX);
3488 if (op1)
3489 op1 = equiv_constant (op1);
3491 /* If we are looking for the low SImode part of
3492 (ashift:DI c (const_int 32)), it doesn't work
3493 to compute that in SImode, because a 32-bit shift
3494 in SImode is unpredictable. We know the value is 0. */
3495 if (op0 && op1
3496 && GET_CODE (elt->exp) == ASHIFT
3497 && GET_CODE (op1) == CONST_INT
3498 && INTVAL (op1) >= GET_MODE_BITSIZE (mode))
3500 if (INTVAL (op1) < GET_MODE_BITSIZE (GET_MODE (elt->exp)))
3502 /* If the count fits in the inner mode's width,
3503 but exceeds the outer mode's width,
3504 the value will get truncated to 0
3505 by the subreg. */
3506 new = const0_rtx;
3507 else
3508 /* If the count exceeds even the inner mode's width,
3509 don't fold this expression. */
3510 new = 0;
3512 else if (op0 && op1)
3513 new = simplify_binary_operation (GET_CODE (elt->exp), mode,
3514 op0, op1);
3517 else if (GET_CODE (elt->exp) == SUBREG
3518 && GET_MODE (SUBREG_REG (elt->exp)) == mode
3519 && (GET_MODE_SIZE (GET_MODE (folded_arg0))
3520 <= UNITS_PER_WORD)
3521 && exp_equiv_p (elt->exp, elt->exp, 1, 0))
3522 new = copy_rtx (SUBREG_REG (elt->exp));
3524 if (new)
3525 return new;
3529 return x;
3531 case NOT:
3532 case NEG:
3533 /* If we have (NOT Y), see if Y is known to be (NOT Z).
3534 If so, (NOT Y) simplifies to Z. Similarly for NEG. */
3535 new = lookup_as_function (XEXP (x, 0), code);
3536 if (new)
3537 return fold_rtx (copy_rtx (XEXP (new, 0)), insn);
3538 break;
3540 case MEM:
3541 /* If we are not actually processing an insn, don't try to find the
3542 best address. Not only don't we care, but we could modify the
3543 MEM in an invalid way since we have no insn to validate against. */
3544 if (insn != 0)
3545 find_best_addr (insn, &XEXP (x, 0), GET_MODE (x));
3548 /* Even if we don't fold in the insn itself,
3549 we can safely do so here, in hopes of getting a constant. */
3550 rtx addr = fold_rtx (XEXP (x, 0), NULL_RTX);
3551 rtx base = 0;
3552 HOST_WIDE_INT offset = 0;
3554 if (GET_CODE (addr) == REG
3555 && REGNO_QTY_VALID_P (REGNO (addr)))
3557 int addr_q = REG_QTY (REGNO (addr));
3558 struct qty_table_elem *addr_ent = &qty_table[addr_q];
3560 if (GET_MODE (addr) == addr_ent->mode
3561 && addr_ent->const_rtx != NULL_RTX)
3562 addr = addr_ent->const_rtx;
3565 /* If address is constant, split it into a base and integer offset. */
3566 if (GET_CODE (addr) == SYMBOL_REF || GET_CODE (addr) == LABEL_REF)
3567 base = addr;
3568 else if (GET_CODE (addr) == CONST && GET_CODE (XEXP (addr, 0)) == PLUS
3569 && GET_CODE (XEXP (XEXP (addr, 0), 1)) == CONST_INT)
3571 base = XEXP (XEXP (addr, 0), 0);
3572 offset = INTVAL (XEXP (XEXP (addr, 0), 1));
3574 else if (GET_CODE (addr) == LO_SUM
3575 && GET_CODE (XEXP (addr, 1)) == SYMBOL_REF)
3576 base = XEXP (addr, 1);
3577 else if (GET_CODE (addr) == ADDRESSOF)
3578 return change_address (x, VOIDmode, addr);
3580 /* If this is a constant pool reference, we can fold it into its
3581 constant to allow better value tracking. */
3582 if (base && GET_CODE (base) == SYMBOL_REF
3583 && CONSTANT_POOL_ADDRESS_P (base))
3585 rtx constant = get_pool_constant (base);
3586 enum machine_mode const_mode = get_pool_mode (base);
3587 rtx new;
3589 if (CONSTANT_P (constant) && GET_CODE (constant) != CONST_INT)
3590 constant_pool_entries_cost = COST (constant);
3592 /* If we are loading the full constant, we have an equivalence. */
3593 if (offset == 0 && mode == const_mode)
3594 return constant;
3596 /* If this actually isn't a constant (weird!), we can't do
3597 anything. Otherwise, handle the two most common cases:
3598 extracting a word from a multi-word constant, and extracting
3599 the low-order bits. Other cases don't seem common enough to
3600 worry about. */
3601 if (! CONSTANT_P (constant))
3602 return x;
3604 if (GET_MODE_CLASS (mode) == MODE_INT
3605 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
3606 && offset % UNITS_PER_WORD == 0
3607 && (new = operand_subword (constant,
3608 offset / UNITS_PER_WORD,
3609 0, const_mode)) != 0)
3610 return new;
3612 if (((BYTES_BIG_ENDIAN
3613 && offset == GET_MODE_SIZE (GET_MODE (constant)) - 1)
3614 || (! BYTES_BIG_ENDIAN && offset == 0))
3615 && (new = gen_lowpart_if_possible (mode, constant)) != 0)
3616 return new;
3619 /* If this is a reference to a label at a known position in a jump
3620 table, we also know its value. */
3621 if (base && GET_CODE (base) == LABEL_REF)
3623 rtx label = XEXP (base, 0);
3624 rtx table_insn = NEXT_INSN (label);
3626 if (table_insn && GET_CODE (table_insn) == JUMP_INSN
3627 && GET_CODE (PATTERN (table_insn)) == ADDR_VEC)
3629 rtx table = PATTERN (table_insn);
3631 if (offset >= 0
3632 && (offset / GET_MODE_SIZE (GET_MODE (table))
3633 < XVECLEN (table, 0)))
3634 return XVECEXP (table, 0,
3635 offset / GET_MODE_SIZE (GET_MODE (table)));
3637 if (table_insn && GET_CODE (table_insn) == JUMP_INSN
3638 && GET_CODE (PATTERN (table_insn)) == ADDR_DIFF_VEC)
3640 rtx table = PATTERN (table_insn);
3642 if (offset >= 0
3643 && (offset / GET_MODE_SIZE (GET_MODE (table))
3644 < XVECLEN (table, 1)))
3646 offset /= GET_MODE_SIZE (GET_MODE (table));
3647 new = gen_rtx_MINUS (Pmode, XVECEXP (table, 1, offset),
3648 XEXP (table, 0));
3650 if (GET_MODE (table) != Pmode)
3651 new = gen_rtx_TRUNCATE (GET_MODE (table), new);
3653 /* Indicate this is a constant. This isn't a
3654 valid form of CONST, but it will only be used
3655 to fold the next insns and then discarded, so
3656 it should be safe.
3658 Note this expression must be explicitly discarded,
3659 by cse_insn, else it may end up in a REG_EQUAL note
3660 and "escape" to cause problems elsewhere. */
3661 return gen_rtx_CONST (GET_MODE (new), new);
3666 return x;
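/* So a load from a dispatch table at a known offset folds to the
   label stored there: e.g. with 4-byte entries, offset 8 into an
   ADDR_VEC yields its third LABEL_REF, and the same offset into an
   ADDR_DIFF_VEC yields the (const (minus (label_ref L) ...)) form
   constructed above.  */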
3669 #ifdef NO_FUNCTION_CSE
3670 case CALL:
3671 if (CONSTANT_P (XEXP (XEXP (x, 0), 0)))
3672 return x;
3673 break;
3674 #endif
3676 case ASM_OPERANDS:
3677 for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
3678 validate_change (insn, &ASM_OPERANDS_INPUT (x, i),
3679 fold_rtx (ASM_OPERANDS_INPUT (x, i), insn), 0);
3680 break;
3682 default:
3683 break;
3686 const_arg0 = 0;
3687 const_arg1 = 0;
3688 const_arg2 = 0;
3689 mode_arg0 = VOIDmode;
3691 /* Try folding our operands.
3692 Then see which ones have constant values known. */
3694 fmt = GET_RTX_FORMAT (code);
3695 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3696 if (fmt[i] == 'e')
3698 rtx arg = XEXP (x, i);
3699 rtx folded_arg = arg, const_arg = 0;
3700 enum machine_mode mode_arg = GET_MODE (arg);
3701 rtx cheap_arg, expensive_arg;
3702 rtx replacements[2];
3703 int j;
3705 /* Most arguments are cheap, so handle them specially. */
3706 switch (GET_CODE (arg))
3708 case REG:
3709 /* This is the same as calling equiv_constant; it is duplicated
3710 here for speed. */
3711 if (REGNO_QTY_VALID_P (REGNO (arg)))
3713 int arg_q = REG_QTY (REGNO (arg));
3714 struct qty_table_elem *arg_ent = &qty_table[arg_q];
3716 if (arg_ent->const_rtx != NULL_RTX
3717 && GET_CODE (arg_ent->const_rtx) != REG
3718 && GET_CODE (arg_ent->const_rtx) != PLUS)
3719 const_arg
3720 = gen_lowpart_if_possible (GET_MODE (arg),
3721 arg_ent->const_rtx);
3723 break;
3725 case CONST:
3726 case CONST_INT:
3727 case SYMBOL_REF:
3728 case LABEL_REF:
3729 case CONST_DOUBLE:
3730 const_arg = arg;
3731 break;
3733 #ifdef HAVE_cc0
3734 case CC0:
3735 folded_arg = prev_insn_cc0;
3736 mode_arg = prev_insn_cc0_mode;
3737 const_arg = equiv_constant (folded_arg);
3738 break;
3739 #endif
3741 default:
3742 folded_arg = fold_rtx (arg, insn);
3743 const_arg = equiv_constant (folded_arg);
3746 /* For the first three operands, see if the operand
3747 is constant or equivalent to a constant. */
3748 switch (i)
3750 case 0:
3751 folded_arg0 = folded_arg;
3752 const_arg0 = const_arg;
3753 mode_arg0 = mode_arg;
3754 break;
3755 case 1:
3756 folded_arg1 = folded_arg;
3757 const_arg1 = const_arg;
3758 break;
3759 case 2:
3760 const_arg2 = const_arg;
3761 break;
3764 /* Pick the least expensive of the folded argument and an
3765 equivalent constant argument. */
3766 if (const_arg == 0 || const_arg == folded_arg
3767 || COST_IN (const_arg, code) > COST_IN (folded_arg, code))
3768 cheap_arg = folded_arg, expensive_arg = const_arg;
3769 else
3770 cheap_arg = const_arg, expensive_arg = folded_arg;
3772 /* Try to replace the operand with the cheapest of the two
3773 possibilities. If it doesn't work and this is either of the first
3774 two operands of a commutative operation, try swapping them.
3775 If THAT fails, try the more expensive, provided it is cheaper
3776 than what is already there. */
3778 if (cheap_arg == XEXP (x, i))
3779 continue;
3781 if (insn == 0 && ! copied)
3783 x = copy_rtx (x);
3784 copied = 1;
3787 /* Order the replacements from cheapest to most expensive. */
3788 replacements[0] = cheap_arg;
3789 replacements[1] = expensive_arg;
3791 for (j = 0; j < 2 && replacements[j]; j++)
3793 int old_cost = COST_IN (XEXP (x, i), code);
3794 int new_cost = COST_IN (replacements[j], code);
3796 /* Stop if what existed before was cheaper. Prefer constants
3797 in the case of a tie. */
3798 if (new_cost > old_cost
3799 || (new_cost == old_cost && CONSTANT_P (XEXP (x, i))))
3800 break;
3802 if (validate_change (insn, &XEXP (x, i), replacements[j], 0))
3803 break;
3805 if (code == NE || code == EQ || GET_RTX_CLASS (code) == 'c'
3806 || code == LTGT || code == UNEQ || code == ORDERED
3807 || code == UNORDERED)
3809 validate_change (insn, &XEXP (x, i), XEXP (x, 1 - i), 1);
3810 validate_change (insn, &XEXP (x, 1 - i), replacements[j], 1);
3812 if (apply_change_group ())
3814 /* Swap them back to be invalid so that this loop can
3815 continue and flag them to be swapped back later. */
3816 rtx tem;
3818 tem = XEXP (x, 0); XEXP (x, 0) = XEXP (x, 1);
3819 XEXP (x, 1) = tem;
3820 must_swap = 1;
3821 break;
3827 else
3829 if (fmt[i] == 'E')
3830 /* Don't try to fold inside of a vector of expressions.
3831 Doing nothing is harmless. */
3835 /* If a commutative operation, place a constant integer as the second
3836 operand unless the first operand is also a constant integer. Otherwise,
3837 place any constant second unless the first operand is also a constant. */
3839 if (code == EQ || code == NE || GET_RTX_CLASS (code) == 'c'
3840 || code == LTGT || code == UNEQ || code == ORDERED
3841 || code == UNORDERED)
3843 if (must_swap || (const_arg0
3844 && (const_arg1 == 0
3845 || (GET_CODE (const_arg0) == CONST_INT
3846 && GET_CODE (const_arg1) != CONST_INT))))
3848 rtx tem = XEXP (x, 0);
3850 if (insn == 0 && ! copied)
3852 x = copy_rtx (x);
3853 copied = 1;
3856 validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
3857 validate_change (insn, &XEXP (x, 1), tem, 1);
3858 if (apply_change_group ())
3860 tem = const_arg0, const_arg0 = const_arg1, const_arg1 = tem;
3861 tem = folded_arg0, folded_arg0 = folded_arg1, folded_arg1 = tem;
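/* E.g. (plus (const_int 4) (reg 60)) becomes
   (plus (reg 60) (const_int 4)) here, so equivalent commutative
   expressions take a single canonical form and hash to the same
   table entry.  */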
3866 /* If X is an arithmetic operation, see if we can simplify it. */
3868 switch (GET_RTX_CLASS (code))
3870 case '1':
3872 int is_const = 0;
3874 /* We can't simplify extension ops unless we know the
3875 original mode. */
3876 if ((code == ZERO_EXTEND || code == SIGN_EXTEND)
3877 && mode_arg0 == VOIDmode)
3878 break;
3880 /* If we had a CONST, strip it off and put it back later if we
3881 fold. */
3882 if (const_arg0 != 0 && GET_CODE (const_arg0) == CONST)
3883 is_const = 1, const_arg0 = XEXP (const_arg0, 0);
3885 new = simplify_unary_operation (code, mode,
3886 const_arg0 ? const_arg0 : folded_arg0,
3887 mode_arg0);
3888 if (new != 0 && is_const)
3889 new = gen_rtx_CONST (mode, new);
3891 break;
3893 case '<':
3894 /* See what items are actually being compared and set FOLDED_ARG[01]
3895 to those values and CODE to the actual comparison code. If any are
3896 constant, set CONST_ARG0 and CONST_ARG1 appropriately. We needn't
3897 do anything if both operands are already known to be constant. */
3899 if (const_arg0 == 0 || const_arg1 == 0)
3901 struct table_elt *p0, *p1;
3902 rtx true_rtx = const_true_rtx, false_rtx = const0_rtx;
3903 enum machine_mode mode_arg1;
3905 #ifdef FLOAT_STORE_FLAG_VALUE
3906 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
3908 true_rtx = (CONST_DOUBLE_FROM_REAL_VALUE
3909 (FLOAT_STORE_FLAG_VALUE (mode), mode));
3910 false_rtx = CONST0_RTX (mode);
3912 #endif
3914 code = find_comparison_args (code, &folded_arg0, &folded_arg1,
3915 &mode_arg0, &mode_arg1);
3916 const_arg0 = equiv_constant (folded_arg0);
3917 const_arg1 = equiv_constant (folded_arg1);
3919 /* If the mode is VOIDmode or a MODE_CC mode, we don't know
3920 what kinds of things are being compared, so we can't do
3921 anything with this comparison. */
3923 if (mode_arg0 == VOIDmode || GET_MODE_CLASS (mode_arg0) == MODE_CC)
3924 break;
3926 /* If we do not now have two constants being compared, see
3927 if we can nevertheless deduce some things about the
3928 comparison. */
3929 if (const_arg0 == 0 || const_arg1 == 0)
3931 /* Is FOLDED_ARG0 frame-pointer plus a constant? Or
3932 non-explicit constant? These aren't zero, but we
3933 don't know their sign. */
3934 if (const_arg1 == const0_rtx
3935 && (NONZERO_BASE_PLUS_P (folded_arg0)
3936 #if 0 /* Sad to say, on sysvr4, #pragma weak can make a symbol address
3937 come out as 0. */
3938 || GET_CODE (folded_arg0) == SYMBOL_REF
3939 #endif
3940 || GET_CODE (folded_arg0) == LABEL_REF
3941 || GET_CODE (folded_arg0) == CONST))
3943 if (code == EQ)
3944 return false_rtx;
3945 else if (code == NE)
3946 return true_rtx;
3949 /* See if the two operands are the same. */
3951 if (folded_arg0 == folded_arg1
3952 || (GET_CODE (folded_arg0) == REG
3953 && GET_CODE (folded_arg1) == REG
3954 && (REG_QTY (REGNO (folded_arg0))
3955 == REG_QTY (REGNO (folded_arg1))))
3956 || ((p0 = lookup (folded_arg0,
3957 (safe_hash (folded_arg0, mode_arg0)
3958 & HASH_MASK), mode_arg0))
3959 && (p1 = lookup (folded_arg1,
3960 (safe_hash (folded_arg1, mode_arg0)
3961 & HASH_MASK), mode_arg0))
3962 && p0->first_same_value == p1->first_same_value))
3964 /* Sadly two equal NaNs are not equivalent. */
3965 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3966 || ! FLOAT_MODE_P (mode_arg0)
3967 || flag_unsafe_math_optimizations)
3968 return ((code == EQ || code == LE || code == GE
3969 || code == LEU || code == GEU || code == UNEQ
3970 || code == UNLE || code == UNGE || code == ORDERED)
3971 ? true_rtx : false_rtx);
3972 /* Take care for the FP compares we can resolve. */
3973 if (code == UNEQ || code == UNLE || code == UNGE)
3974 return true_rtx;
3975 if (code == LTGT || code == LT || code == GT)
3976 return false_rtx;
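/* Thus (eq (reg 60) (reg 61)) folds to the "true" value when both
   registers share a quantity number, i.e. are known to hold the same
   value; for floating point we resolve only the compares that remain
   safe when an operand might be a NaN.  */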
3979 /* If FOLDED_ARG0 is a register, see if the comparison we are
3980 doing now is either the same as we did before or the reverse
3981 (we only check the reverse if not floating-point). */
3982 else if (GET_CODE (folded_arg0) == REG)
3984 int qty = REG_QTY (REGNO (folded_arg0));
3986 if (REGNO_QTY_VALID_P (REGNO (folded_arg0)))
3988 struct qty_table_elem *ent = &qty_table[qty];
3990 if ((comparison_dominates_p (ent->comparison_code, code)
3991 || (! FLOAT_MODE_P (mode_arg0)
3992 && comparison_dominates_p (ent->comparison_code,
3993 reverse_condition (code))))
3994 && (rtx_equal_p (ent->comparison_const, folded_arg1)
3995 || (const_arg1
3996 && rtx_equal_p (ent->comparison_const,
3997 const_arg1))
3998 || (GET_CODE (folded_arg1) == REG
3999 && (REG_QTY (REGNO (folded_arg1)) == ent->comparison_qty))))
4000 return (comparison_dominates_p (ent->comparison_code, code)
4001 ? true_rtx : false_rtx);
4007 /* If we are comparing against zero, see if the first operand is
4008 equivalent to an IOR with a constant. If so, we may be able to
4009 determine the result of this comparison. */
4011 if (const_arg1 == const0_rtx)
4013 rtx y = lookup_as_function (folded_arg0, IOR);
4014 rtx inner_const;
4016 if (y != 0
4017 && (inner_const = equiv_constant (XEXP (y, 1))) != 0
4018 && GET_CODE (inner_const) == CONST_INT
4019 && INTVAL (inner_const) != 0)
4021 int sign_bitnum = GET_MODE_BITSIZE (mode_arg0) - 1;
4022 int has_sign = (HOST_BITS_PER_WIDE_INT >= sign_bitnum
4023 && (INTVAL (inner_const)
4024 & ((HOST_WIDE_INT) 1 << sign_bitnum)));
4025 rtx true_rtx = const_true_rtx, false_rtx = const0_rtx;
4027 #ifdef FLOAT_STORE_FLAG_VALUE
4028 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
4030 true_rtx = (CONST_DOUBLE_FROM_REAL_VALUE
4031 (FLOAT_STORE_FLAG_VALUE (mode), mode));
4032 false_rtx = CONST0_RTX (mode);
4034 #endif
4036 switch (code)
4038 case EQ:
4039 return false_rtx;
4040 case NE:
4041 return true_rtx;
4042 case LT: case LE:
4043 if (has_sign)
4044 return true_rtx;
4045 break;
4046 case GT: case GE:
4047 if (has_sign)
4048 return false_rtx;
4049 break;
4050 default:
4051 break;
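/* For example, if (reg 60) is known to be
   (ior (reg 61) (const_int -2147483648)) in SImode, its sign bit is
   set, so (lt (reg 60) (const_int 0)) folds to true and
   (ge (reg 60) (const_int 0)) folds to false; the EQ and NE cases
   need only the fact that some bit is known nonzero.  */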
4056 new = simplify_relational_operation (code,
4057 (mode_arg0 != VOIDmode
4058 ? mode_arg0
4059 : (GET_MODE (const_arg0
4060 ? const_arg0
4061 : folded_arg0)
4062 != VOIDmode)
4063 ? GET_MODE (const_arg0
4064 ? const_arg0
4065 : folded_arg0)
4066 : GET_MODE (const_arg1
4067 ? const_arg1
4068 : folded_arg1)),
4069 const_arg0 ? const_arg0 : folded_arg0,
4070 const_arg1 ? const_arg1 : folded_arg1);
4071 #ifdef FLOAT_STORE_FLAG_VALUE
4072 if (new != 0 && GET_MODE_CLASS (mode) == MODE_FLOAT)
4074 if (new == const0_rtx)
4075 new = CONST0_RTX (mode);
4076 else
4077 new = (CONST_DOUBLE_FROM_REAL_VALUE
4078 (FLOAT_STORE_FLAG_VALUE (mode), mode));
4080 #endif
4081 break;
4083 case '2':
4084 case 'c':
4085 switch (code)
4087 case PLUS:
4088 /* If the second operand is a LABEL_REF, see if the first is a MINUS
4089 with that LABEL_REF as its second operand. If so, the result is
4090 the first operand of that MINUS. This handles switches with an
4091 ADDR_DIFF_VEC table. */
4092 if (const_arg1 && GET_CODE (const_arg1) == LABEL_REF)
4094 rtx y
4095 = GET_CODE (folded_arg0) == MINUS ? folded_arg0
4096 : lookup_as_function (folded_arg0, MINUS);
4098 if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
4099 && XEXP (XEXP (y, 1), 0) == XEXP (const_arg1, 0))
4100 return XEXP (y, 0);
4102 /* Now try for a CONST of a MINUS like the above. */
4103 if ((y = (GET_CODE (folded_arg0) == CONST ? folded_arg0
4104 : lookup_as_function (folded_arg0, CONST))) != 0
4105 && GET_CODE (XEXP (y, 0)) == MINUS
4106 && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
4107 && XEXP (XEXP (XEXP (y, 0), 1), 0) == XEXP (const_arg1, 0))
4108 return XEXP (XEXP (y, 0), 0);
4111 /* Likewise if the operands are in the other order. */
4112 if (const_arg0 && GET_CODE (const_arg0) == LABEL_REF)
4114 rtx y
4115 = GET_CODE (folded_arg1) == MINUS ? folded_arg1
4116 : lookup_as_function (folded_arg1, MINUS);
4118 if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
4119 && XEXP (XEXP (y, 1), 0) == XEXP (const_arg0, 0))
4120 return XEXP (y, 0);
4122 /* Now try for a CONST of a MINUS like the above. */
4123 if ((y = (GET_CODE (folded_arg1) == CONST ? folded_arg1
4124 : lookup_as_function (folded_arg1, CONST))) != 0
4125 && GET_CODE (XEXP (y, 0)) == MINUS
4126 && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
4127 && XEXP (XEXP (XEXP (y, 0), 1), 0) == XEXP (const_arg0, 0))
4128 return XEXP (XEXP (y, 0), 0);
4131 /* If second operand is a register equivalent to a negative
4132 CONST_INT, see if we can find a register equivalent to the
4133 positive constant. Make a MINUS if so. Don't do this for
4134 a non-negative constant since we might then alternate between
4135 choosing positive and negative constants. Having the positive
4136 constant previously-used is the more common case. Be sure
4137 the resulting constant is non-negative; if const_arg1 were
4138 the smallest negative number this would overflow: depending
4139 on the mode, this would either just be the same value (and
4140 hence not save anything) or be incorrect. */
4141 if (const_arg1 != 0 && GET_CODE (const_arg1) == CONST_INT
4142 && INTVAL (const_arg1) < 0
4143 /* This used to test
4145 -INTVAL (const_arg1) >= 0
4147 But the Sun V5.0 compilers mis-compiled that test. So
4148 instead we test for the problematic value in a more direct
4149 manner and hope the Sun compilers get it correct. */
4150 && INTVAL (const_arg1) !=
4151 ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1))
4152 && GET_CODE (folded_arg1) == REG)
4154 rtx new_const = GEN_INT (-INTVAL (const_arg1));
4155 struct table_elt *p
4156 = lookup (new_const, safe_hash (new_const, mode) & HASH_MASK,
4157 mode);
4159 if (p)
4160 for (p = p->first_same_value; p; p = p->next_same_value)
4161 if (GET_CODE (p->exp) == REG)
4162 return simplify_gen_binary (MINUS, mode, folded_arg0,
4163 canon_reg (p->exp, NULL_RTX));
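/* E.g. given (plus (reg 60) (reg 61)) where (reg 61) is known to
   hold (const_int -4), if some register already holds (const_int 4)
   we rewrite the sum above as a MINUS of (reg 60) and that register,
   reusing the positive constant that is the more common form.  */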
4165 goto from_plus;
4167 case MINUS:
4168 /* If we have (MINUS Y C), see if Y is known to be (PLUS Z C2).
4169 If so, produce (PLUS Z C2-C). */
4170 if (const_arg1 != 0 && GET_CODE (const_arg1) == CONST_INT)
4172 rtx y = lookup_as_function (XEXP (x, 0), PLUS);
4173 if (y && GET_CODE (XEXP (y, 1)) == CONST_INT)
4174 return fold_rtx (plus_constant (copy_rtx (y),
4175 -INTVAL (const_arg1)),
4176 NULL_RTX);
4179 /* Fall through. */
4181 from_plus:
4182 case SMIN: case SMAX: case UMIN: case UMAX:
4183 case IOR: case AND: case XOR:
4184 case MULT: case DIV: case UDIV:
4185 case ASHIFT: case LSHIFTRT: case ASHIFTRT:
4186 /* If we have (<op> <reg> <const_int>) for an associative OP and REG
4187 is known to be of similar form, we may be able to replace the
4188 operation with a combined operation. This may eliminate the
4189 intermediate operation if every use is simplified in this way.
4190 Note that the similar optimization done by combine.c only works
4191 if the intermediate operation's result has only one reference. */
4193 if (GET_CODE (folded_arg0) == REG
4194 && const_arg1 && GET_CODE (const_arg1) == CONST_INT)
4196 int is_shift
4197 = (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT);
4198 rtx y = lookup_as_function (folded_arg0, code);
4199 rtx inner_const;
4200 enum rtx_code associate_code;
4201 rtx new_const;
4203 if (y == 0
4204 || 0 == (inner_const
4205 = equiv_constant (fold_rtx (XEXP (y, 1), 0)))
4206 || GET_CODE (inner_const) != CONST_INT
4207 /* If we have compiled a statement like
4208 "if (x == (x & mask1))", and now are looking at
4209 "x & mask2", we will have a case where the first operand
4210 of Y is the same as our first operand. Unless we detect
4211 this case, an infinite loop will result. */
4212 || XEXP (y, 0) == folded_arg0)
4213 break;
4215 /* Don't associate these operations if they are a PLUS with the
4216 same constant and it is a power of two. These might be doable
4217 with a pre- or post-increment. Similarly for two subtracts of
4218 identical powers of two with post decrement. */
4220 if (code == PLUS && INTVAL (const_arg1) == INTVAL (inner_const)
4221 && ((HAVE_PRE_INCREMENT
4222 && exact_log2 (INTVAL (const_arg1)) >= 0)
4223 || (HAVE_POST_INCREMENT
4224 && exact_log2 (INTVAL (const_arg1)) >= 0)
4225 || (HAVE_PRE_DECREMENT
4226 && exact_log2 (- INTVAL (const_arg1)) >= 0)
4227 || (HAVE_POST_DECREMENT
4228 && exact_log2 (- INTVAL (const_arg1)) >= 0)))
4229 break;
4231 /* Compute the code used to compose the constants. For example,
4232 A/C1/C2 is A/(C1 * C2), so if CODE == DIV, we want MULT. */
4234 associate_code
4235 = (code == MULT || code == DIV || code == UDIV ? MULT
4236 : is_shift || code == PLUS || code == MINUS ? PLUS : code);
4238 new_const = simplify_binary_operation (associate_code, mode,
4239 const_arg1, inner_const);
4241 if (new_const == 0)
4242 break;
4244 /* If we are associating shift operations, don't let this
4245 produce a shift of the size of the object or larger.
4246 This could occur when we follow a sign-extend by a right
4247 shift on a machine that does a sign-extend as a pair
4248 of shifts. */
4250 if (is_shift && GET_CODE (new_const) == CONST_INT
4251 && INTVAL (new_const) >= GET_MODE_BITSIZE (mode))
4253 /* As an exception, we can turn an ASHIFTRT of this
4254 form into a shift of the number of bits - 1. */
4255 if (code == ASHIFTRT)
4256 new_const = GEN_INT (GET_MODE_BITSIZE (mode) - 1);
4257 else
4258 break;
4261 y = copy_rtx (XEXP (y, 0));
4263 /* If Y contains our first operand (the most common way this
4264 can happen is if Y is a MEM), we would go into an infinite
4265 loop if we tried to fold it. So don't in that case. */
4267 if (! reg_mentioned_p (folded_arg0, y))
4268 y = fold_rtx (y, insn);
4270 return simplify_gen_binary (code, mode, y, new_const);
4272 break;
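/* The combination above turns, e.g., a shift by (const_int 3) of a
   register known to be (ashift (reg 60) (const_int 2)) into
   (ashift (reg 60) (const_int 5)), and an add of (const_int 12) to a
   register known to be (plus (reg 60) (const_int 4)) into
   (plus (reg 60) (const_int 16)).  */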
4274 default:
4275 break;
4278 new = simplify_binary_operation (code, mode,
4279 const_arg0 ? const_arg0 : folded_arg0,
4280 const_arg1 ? const_arg1 : folded_arg1);
4281 break;
4283 case 'o':
4284 /* (lo_sum (high X) X) is simply X. */
4285 if (code == LO_SUM && const_arg0 != 0
4286 && GET_CODE (const_arg0) == HIGH
4287 && rtx_equal_p (XEXP (const_arg0, 0), const_arg1))
4288 return const_arg1;
4289 break;
4291 case '3':
4292 case 'b':
4293 new = simplify_ternary_operation (code, mode, mode_arg0,
4294 const_arg0 ? const_arg0 : folded_arg0,
4295 const_arg1 ? const_arg1 : folded_arg1,
4296 const_arg2 ? const_arg2 : XEXP (x, 2));
4297 break;
4299 case 'x':
4300 /* Always eliminate CONSTANT_P_RTX at this stage. */
4301 if (code == CONSTANT_P_RTX)
4302 return (const_arg0 ? const1_rtx : const0_rtx);
4303 break;
4306 return new ? new : x;
4309 /* Return a constant value currently equivalent to X.
4310 Return 0 if we don't know one. */
4312 static rtx
4313 equiv_constant (x)
4314 rtx x;
4316 if (GET_CODE (x) == REG
4317 && REGNO_QTY_VALID_P (REGNO (x)))
4319 int x_q = REG_QTY (REGNO (x));
4320 struct qty_table_elem *x_ent = &qty_table[x_q];
4322 if (x_ent->const_rtx)
4323 x = gen_lowpart_if_possible (GET_MODE (x), x_ent->const_rtx);
4326 if (x == 0 || CONSTANT_P (x))
4327 return x;
4329 /* If X is a MEM, try to fold it outside the context of any insn to see if
4330 it might be equivalent to a constant. That handles the case where it
4331 is a constant-pool reference. Then try to look it up in the hash table
4332 in case it is something whose value we have seen before. */
4334 if (GET_CODE (x) == MEM)
4336 struct table_elt *elt;
4338 x = fold_rtx (x, NULL_RTX);
4339 if (CONSTANT_P (x))
4340 return x;
4342 elt = lookup (x, safe_hash (x, GET_MODE (x)) & HASH_MASK, GET_MODE (x));
4343 if (elt == 0)
4344 return 0;
4346 for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
4347 if (elt->is_const && CONSTANT_P (elt->exp))
4348 return elt->exp;
4351 return 0;
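/* For example, a pseudo whose quantity records a copy of
   (const_int 7) earlier in the block reports (const_int 7) here,
   while a MEM naming a constant-pool entry is folded by fold_rtx
   into its pool constant before the hash table is consulted.  */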
4354 /* Assuming that X is an rtx (e.g., MEM, REG or SUBREG) for a fixed-point
4355 number, return an rtx (MEM, SUBREG, or CONST_INT) that refers to the
4356 least-significant part of X.
4357 MODE specifies how big a part of X to return.
4359 If the requested operation cannot be done, 0 is returned.
4361 This is similar to gen_lowpart in emit-rtl.c. */
4363 rtx
4364 gen_lowpart_if_possible (mode, x)
4365 enum machine_mode mode;
4366 rtx x;
4368 rtx result = gen_lowpart_common (mode, x);
4370 if (result)
4371 return result;
4372 else if (GET_CODE (x) == MEM)
4374 /* This is the only other case we handle. */
4375 int offset = 0;
4376 rtx new;
4378 if (WORDS_BIG_ENDIAN)
4379 offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
4380 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
4381 if (BYTES_BIG_ENDIAN)
4382 /* Adjust the address so that the address-after-the-data is
4383 unchanged. */
4384 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
4385 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
4387 new = adjust_address_nv (x, mode, offset);
4388 if (! memory_address_p (mode, XEXP (new, 0)))
4389 return 0;
4391 return new;
4393 else
4394 return 0;
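/* The offset arithmetic keeps the low-order part of the MEM: e.g.
   taking the QImode low part of an SImode memory operand on a
   big-endian 32-bit target uses offset 3, so that the address just
   past the data is unchanged.  */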
4397 /* Given INSN, a jump insn, TAKEN indicates if we are following the "taken"
4398 branch. It will be zero if not.
4400 In certain cases, this can cause us to add an equivalence. For example,
4401 if we are following the taken case of
4402 if (i == 2)
4403 we can add the fact that `i' and `2' are now equivalent.
4405 In any case, we can record that this comparison was passed. If the same
4406 comparison is seen later, we will know its value. */
4408 static void
4409 record_jump_equiv (insn, taken)
4410 rtx insn;
4411 int taken;
4413 int cond_known_true;
4414 rtx op0, op1;
4415 rtx set;
4416 enum machine_mode mode, mode0, mode1;
4417 int reversed_nonequality = 0;
4418 enum rtx_code code;
4420 /* Ensure this is the right kind of insn. */
4421 if (! any_condjump_p (insn))
4422 return;
4423 set = pc_set (insn);
4425 /* See if this jump condition is known true or false. */
4426 if (taken)
4427 cond_known_true = (XEXP (SET_SRC (set), 2) == pc_rtx);
4428 else
4429 cond_known_true = (XEXP (SET_SRC (set), 1) == pc_rtx);
4431 /* Get the type of comparison being done and the operands being compared.
4432 If we had to reverse a non-equality condition, record that fact so we
4433 know that it isn't valid for floating-point. */
4434 code = GET_CODE (XEXP (SET_SRC (set), 0));
4435 op0 = fold_rtx (XEXP (XEXP (SET_SRC (set), 0), 0), insn);
4436 op1 = fold_rtx (XEXP (XEXP (SET_SRC (set), 0), 1), insn);
4438 code = find_comparison_args (code, &op0, &op1, &mode0, &mode1);
4439 if (! cond_known_true)
4441 code = reversed_comparison_code_parts (code, op0, op1, insn);
4443 /* Don't remember if we can't find the inverse. */
4444 if (code == UNKNOWN)
4445 return;
4448 /* The mode is the mode of the non-constant. */
4449 mode = mode0;
4450 if (mode1 != VOIDmode)
4451 mode = mode1;
4453 record_jump_cond (code, mode, op0, op1, reversed_nonequality);
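/* E.g. when following the fall-through arm of "if (i == 2)", the EQ
   is reversed to NE before being recorded; record_jump_cond then
   remembers only the comparison itself, since an inequality gives no
   equivalence to merge.  */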
4456 /* We know that comparison CODE applied to OP0 and OP1 in MODE is true.
4457 REVERSED_NONEQUALITY is nonzero if CODE had to be swapped.
4458 Make any useful entries we can with that information. Called from
4459 above function and called recursively. */
4461 static void
4462 record_jump_cond (code, mode, op0, op1, reversed_nonequality)
4463 enum rtx_code code;
4464 enum machine_mode mode;
4465 rtx op0, op1;
4466 int reversed_nonequality;
4468 unsigned op0_hash, op1_hash;
4469 int op0_in_memory, op1_in_memory;
4470 struct table_elt *op0_elt, *op1_elt;
4472 /* If OP0 and OP1 are known equal, and either is a paradoxical SUBREG,
4473 we know that they are also equal in the smaller mode (this is also
4474 true for all smaller modes whether or not there is a SUBREG, but
4475 is not worth testing for with no SUBREG). */
4477 /* Note that GET_MODE (op0) may not equal MODE. */
4478 if (code == EQ && GET_CODE (op0) == SUBREG
4479 && (GET_MODE_SIZE (GET_MODE (op0))
4480 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
4482 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
4483 rtx tem = gen_lowpart_if_possible (inner_mode, op1);
4485 record_jump_cond (code, mode, SUBREG_REG (op0),
4486 tem ? tem : gen_rtx_SUBREG (inner_mode, op1, 0),
4487 reversed_nonequality);
4490 if (code == EQ && GET_CODE (op1) == SUBREG
4491 && (GET_MODE_SIZE (GET_MODE (op1))
4492 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
4494 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
4495 rtx tem = gen_lowpart_if_possible (inner_mode, op0);
4497 record_jump_cond (code, mode, SUBREG_REG (op1),
4498 tem ? tem : gen_rtx_SUBREG (inner_mode, op0, 0),
4499 reversed_nonequality);
4502 /* Similarly, if this is an NE comparison, and either is a SUBREG
4503 making a smaller mode, we know the whole thing is also NE. */
4505 /* Note that GET_MODE (op0) may not equal MODE;
4506 if we test MODE instead, we can get an infinite recursion
4507 alternating between two modes each wider than MODE. */
4509 if (code == NE && GET_CODE (op0) == SUBREG
4510 && subreg_lowpart_p (op0)
4511 && (GET_MODE_SIZE (GET_MODE (op0))
4512 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
4514 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
4515 rtx tem = gen_lowpart_if_possible (inner_mode, op1);
4517 record_jump_cond (code, mode, SUBREG_REG (op0),
4518 tem ? tem : gen_rtx_SUBREG (inner_mode, op1, 0),
4519 reversed_nonequality);
4522 if (code == NE && GET_CODE (op1) == SUBREG
4523 && subreg_lowpart_p (op1)
4524 && (GET_MODE_SIZE (GET_MODE (op1))
4525 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
4527 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
4528 rtx tem = gen_lowpart_if_possible (inner_mode, op0);
4530 record_jump_cond (code, mode, SUBREG_REG (op1),
4531 tem ? tem : gen_rtx_SUBREG (inner_mode, op0, 0),
4532 reversed_nonequality);
4535 /* Hash both operands. */
4537 do_not_record = 0;
4538 hash_arg_in_memory = 0;
4539 op0_hash = HASH (op0, mode);
4540 op0_in_memory = hash_arg_in_memory;
4542 if (do_not_record)
4543 return;
4545 do_not_record = 0;
4546 hash_arg_in_memory = 0;
4547 op1_hash = HASH (op1, mode);
4548 op1_in_memory = hash_arg_in_memory;
4550 if (do_not_record)
4551 return;
4553 /* Look up both operands. */
4554 op0_elt = lookup (op0, op0_hash, mode);
4555 op1_elt = lookup (op1, op1_hash, mode);
4557 /* If both operands are already equivalent or if they are not in the
4558 table but are identical, do nothing. */
4559 if ((op0_elt != 0 && op1_elt != 0
4560 && op0_elt->first_same_value == op1_elt->first_same_value)
4561 || op0 == op1 || rtx_equal_p (op0, op1))
4562 return;
4564 /* If we aren't setting two things equal all we can do is save this
4565 comparison. Similarly if this is floating-point. In the latter
4566 case, OP1 might be zero and both -0.0 and 0.0 are equal to it.
4567 If we record the equality, we might inadvertently delete code
4568 whose intent was to change -0 to +0. */
4570 if (code != EQ || FLOAT_MODE_P (GET_MODE (op0)))
4572 struct qty_table_elem *ent;
4573 int qty;
4575 /* If we reversed a floating-point comparison, if OP0 is not a
4576 register, or if OP1 is neither a register nor a constant, we can't
4577 do anything. */
4579 if (GET_CODE (op1) != REG)
4580 op1 = equiv_constant (op1);
4582 if ((reversed_nonequality && FLOAT_MODE_P (mode))
4583 || GET_CODE (op0) != REG || op1 == 0)
4584 return;
4586 /* Put OP0 in the hash table if it isn't already. This gives it a
4587 new quantity number. */
4588 if (op0_elt == 0)
4590 if (insert_regs (op0, NULL, 0))
4592 rehash_using_reg (op0);
4593 op0_hash = HASH (op0, mode);
4595 /* If OP0 is contained in OP1, this changes its hash code
4596 as well. Faster to rehash than to check, except
4597 for the simple case of a constant. */
4598 if (! CONSTANT_P (op1))
4599 op1_hash = HASH (op1, mode);
4602 op0_elt = insert (op0, NULL, op0_hash, mode);
4603 op0_elt->in_memory = op0_in_memory;
4606 qty = REG_QTY (REGNO (op0));
4607 ent = &qty_table[qty];
4609 ent->comparison_code = code;
4610 if (GET_CODE (op1) == REG)
4612 /* Look it up again--in case op0 and op1 are the same. */
4613 op1_elt = lookup (op1, op1_hash, mode);
4615 /* Put OP1 in the hash table so it gets a new quantity number. */
4616 if (op1_elt == 0)
4618 if (insert_regs (op1, NULL, 0))
4620 rehash_using_reg (op1);
4621 op1_hash = HASH (op1, mode);
4624 op1_elt = insert (op1, NULL, op1_hash, mode);
4625 op1_elt->in_memory = op1_in_memory;
4628 ent->comparison_const = NULL_RTX;
4629 ent->comparison_qty = REG_QTY (REGNO (op1));
4631 else
4633 ent->comparison_const = op1;
4634 ent->comparison_qty = -1;
4637 return;
4640 /* If either side is still missing an equivalence, make it now,
4641 then merge the equivalences. */
4643 if (op0_elt == 0)
4645 if (insert_regs (op0, NULL, 0))
4647 rehash_using_reg (op0);
4648 op0_hash = HASH (op0, mode);
4651 op0_elt = insert (op0, NULL, op0_hash, mode);
4652 op0_elt->in_memory = op0_in_memory;
4655 if (op1_elt == 0)
4657 if (insert_regs (op1, NULL, 0))
4659 rehash_using_reg (op1);
4660 op1_hash = HASH (op1, mode);
4663 op1_elt = insert (op1, NULL, op1_hash, mode);
4664 op1_elt->in_memory = op1_in_memory;
4667 merge_equiv_classes (op0_elt, op1_elt);
4668 last_jump_equiv_class = op0_elt;
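/* As an example of the SUBREG handling at the top of this function:
   learning that the paradoxical (subreg:DI (reg:SI 60) 0) equals
   (reg:DI 61) under EQ also records that (reg:SI 60) equals the
   SImode low part of (reg:DI 61).  */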
4671 /* CSE processing for one instruction.
4672 First simplify sources and addresses of all assignments
4673 in the instruction, using previously-computed equivalent values.
4674 Then install the new sources and destinations in the table
4675 of available values.
4677 If LIBCALL_INSN is nonzero, don't record any equivalence made in
4678 the insn. It means that INSN is inside a libcall block. In this
4679 case LIBCALL_INSN is the corresponding insn with REG_LIBCALL. */
4681 /* Data on one SET contained in the instruction. */
4683 struct set
4685 /* The SET rtx itself. */
4686 rtx rtl;
4687 /* The SET_SRC of the rtx (the original value, if it is changing). */
4688 rtx src;
4689 /* The hash-table element for the SET_SRC of the SET. */
4690 struct table_elt *src_elt;
4691 /* Hash value for the SET_SRC. */
4692 unsigned src_hash;
4693 /* Hash value for the SET_DEST. */
4694 unsigned dest_hash;
4695 /* The SET_DEST, with SUBREG, etc., stripped. */
4696 rtx inner_dest;
4697 /* Nonzero if the SET_SRC is in memory. */
4698 char src_in_memory;
4699 /* Nonzero if the SET_SRC contains something
4700 whose value cannot be predicted and understood. */
4701 char src_volatile;
4702 /* Original machine mode, in case it becomes a CONST_INT. */
4703 enum machine_mode mode;
4704 /* A constant equivalent for SET_SRC, if any. */
4705 rtx src_const;
4706 /* Original SET_SRC value used for libcall notes. */
4707 rtx orig_src;
4708 /* Hash value of constant equivalent for SET_SRC. */
4709 unsigned src_const_hash;
4710 /* Table entry for constant equivalent for SET_SRC, if any. */
4711 struct table_elt *src_const_elt;
4714 static void
4715 cse_insn (insn, libcall_insn)
4716 rtx insn;
4717 rtx libcall_insn;
4719 rtx x = PATTERN (insn);
4720 int i;
4721 rtx tem;
4722 int n_sets = 0;
4724 #ifdef HAVE_cc0
4725 /* Records what this insn does to set CC0. */
4726 rtx this_insn_cc0 = 0;
4727 enum machine_mode this_insn_cc0_mode = VOIDmode;
4728 #endif
4730 rtx src_eqv = 0;
4731 struct table_elt *src_eqv_elt = 0;
4732 int src_eqv_volatile = 0;
4733 int src_eqv_in_memory = 0;
4734 unsigned src_eqv_hash = 0;
4736 struct set *sets = (struct set *) 0;
4738 this_insn = insn;
4740 /* Find all the SETs and CLOBBERs in this instruction.
4741 Record all the SETs in the array `set' and count them.
4742 Also determine whether there is a CLOBBER that invalidates
4743 all memory references, or all references at varying addresses. */
4745 if (GET_CODE (insn) == CALL_INSN)
4747 for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
4749 if (GET_CODE (XEXP (tem, 0)) == CLOBBER)
4750 invalidate (SET_DEST (XEXP (tem, 0)), VOIDmode);
4751 XEXP (tem, 0) = canon_reg (XEXP (tem, 0), insn);
4755 if (GET_CODE (x) == SET)
4757 sets = (struct set *) alloca (sizeof (struct set));
4758 sets[0].rtl = x;
4760 /* Ignore SETs that are unconditional jumps.
4761 They never need cse processing, so this does not hurt.
4762 The reason is not efficiency but rather
4763 so that we can test at the end for instructions
4764 that have been simplified to unconditional jumps
4765 and not be misled by unchanged instructions
4766 that were unconditional jumps to begin with. */
4767 if (SET_DEST (x) == pc_rtx
4768 && GET_CODE (SET_SRC (x)) == LABEL_REF)
4771 /* Don't count call-insns, (set (reg 0) (call ...)), as a set.
4772 The hard function value register is used only once, to copy to
4773 someplace else, so it isn't worth cse'ing (and on 80386 is unsafe)!
4774 Ensure we invalidate the destination register. On the 80386 no
4775 other code would invalidate it since it is a fixed_reg.
4776 We need not check the return of apply_change_group; see canon_reg. */
4778 else if (GET_CODE (SET_SRC (x)) == CALL)
4780 canon_reg (SET_SRC (x), insn);
4781 apply_change_group ();
4782 fold_rtx (SET_SRC (x), insn);
4783 invalidate (SET_DEST (x), VOIDmode);
4785 else
4786 n_sets = 1;
4788 else if (GET_CODE (x) == PARALLEL)
4790 int lim = XVECLEN (x, 0);
4792 sets = (struct set *) alloca (lim * sizeof (struct set));
4794 /* Find all regs explicitly clobbered in this insn,
4795 and ensure they are not replaced with any other regs
4796 elsewhere in this insn.
4797 When a reg that is clobbered is also used for input,
4798 we should presume that that is for a reason,
4799 and we should not substitute some other register
4800 which is not supposed to be clobbered.
4801 Therefore, this loop cannot be merged into the one below
4802 because a CALL may precede a CLOBBER and refer to the
4803 value clobbered. We must not let a canonicalization do
4804 anything in that case. */
4805 for (i = 0; i < lim; i++)
4807 rtx y = XVECEXP (x, 0, i);
4808 if (GET_CODE (y) == CLOBBER)
4810 rtx clobbered = XEXP (y, 0);
4812 if (GET_CODE (clobbered) == REG
4813 || GET_CODE (clobbered) == SUBREG)
4814 invalidate (clobbered, VOIDmode);
4815 else if (GET_CODE (clobbered) == STRICT_LOW_PART
4816 || GET_CODE (clobbered) == ZERO_EXTRACT)
4817 invalidate (XEXP (clobbered, 0), GET_MODE (clobbered));
4821 for (i = 0; i < lim; i++)
4823 rtx y = XVECEXP (x, 0, i);
4824 if (GET_CODE (y) == SET)
4826 /* As above, we ignore unconditional jumps and call-insns and
4827 ignore the result of apply_change_group. */
4828 if (GET_CODE (SET_SRC (y)) == CALL)
4830 canon_reg (SET_SRC (y), insn);
4831 apply_change_group ();
4832 fold_rtx (SET_SRC (y), insn);
4833 invalidate (SET_DEST (y), VOIDmode);
4835 else if (SET_DEST (y) == pc_rtx
4836 && GET_CODE (SET_SRC (y)) == LABEL_REF)
4838 else
4839 sets[n_sets++].rtl = y;
4841 else if (GET_CODE (y) == CLOBBER)
4843 /* If we clobber memory, canon the address.
4844 This does nothing when a register is clobbered
4845 because we have already invalidated the reg. */
4846 if (GET_CODE (XEXP (y, 0)) == MEM)
4847 canon_reg (XEXP (y, 0), NULL_RTX);
4849 else if (GET_CODE (y) == USE
4850 && ! (GET_CODE (XEXP (y, 0)) == REG
4851 && REGNO (XEXP (y, 0)) < FIRST_PSEUDO_REGISTER))
4852 canon_reg (y, NULL_RTX);
4853 else if (GET_CODE (y) == CALL)
4855 /* The result of apply_change_group can be ignored; see
4856 canon_reg. */
4857 canon_reg (y, insn);
4858 apply_change_group ();
4859 fold_rtx (y, insn);
4863 else if (GET_CODE (x) == CLOBBER)
4865 if (GET_CODE (XEXP (x, 0)) == MEM)
4866 canon_reg (XEXP (x, 0), NULL_RTX);
4869 /* Canonicalize a USE of a pseudo register or memory location. */
4870 else if (GET_CODE (x) == USE
4871 && ! (GET_CODE (XEXP (x, 0)) == REG
4872 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER))
4873 canon_reg (XEXP (x, 0), NULL_RTX);
4874 else if (GET_CODE (x) == CALL)
4876 /* The result of apply_change_group can be ignored; see canon_reg. */
4877 canon_reg (x, insn);
4878 apply_change_group ();
4879 fold_rtx (x, insn);
4882 /* Store the equivalent value in SRC_EQV, if different, or if the DEST
4883 is a STRICT_LOW_PART. The latter condition is necessary because SRC_EQV
4884 is handled specially for this case, and if it isn't set, then there will
4885 be no equivalence for the destination. */
4886 if (n_sets == 1 && REG_NOTES (insn) != 0
4887 && (tem = find_reg_note (insn, REG_EQUAL, NULL_RTX)) != 0
4888 && (! rtx_equal_p (XEXP (tem, 0), SET_SRC (sets[0].rtl))
4889 || GET_CODE (SET_DEST (sets[0].rtl)) == STRICT_LOW_PART))
4890 src_eqv = canon_reg (XEXP (tem, 0), NULL_RTX);
4892 /* Canonicalize sources and addresses of destinations.
4893 We do this in a separate pass to avoid problems when a MATCH_DUP is
4894 present in the insn pattern. In that case, we want to ensure that
4895 we don't break the duplicate nature of the pattern. So we will replace
4896 both operands at the same time. Otherwise, we would fail to find an
4897 equivalent substitution in the loop calling validate_change below.
4899 We used to suppress canonicalization of DEST if it appears in SRC,
4900 but we don't do this any more. */
4902 for (i = 0; i < n_sets; i++)
4904 rtx dest = SET_DEST (sets[i].rtl);
4905 rtx src = SET_SRC (sets[i].rtl);
4906 rtx new = canon_reg (src, insn);
4907 int insn_code;
4909 sets[i].orig_src = src;
4910 if ((GET_CODE (new) == REG && GET_CODE (src) == REG
4911 && ((REGNO (new) < FIRST_PSEUDO_REGISTER)
4912 != (REGNO (src) < FIRST_PSEUDO_REGISTER)))
4913 || (insn_code = recog_memoized (insn)) < 0
4914 || insn_data[insn_code].n_dups > 0)
4915 validate_change (insn, &SET_SRC (sets[i].rtl), new, 1);
4916 else
4917 SET_SRC (sets[i].rtl) = new;
4919 if (GET_CODE (dest) == ZERO_EXTRACT || GET_CODE (dest) == SIGN_EXTRACT)
4921 validate_change (insn, &XEXP (dest, 1),
4922 canon_reg (XEXP (dest, 1), insn), 1);
4923 validate_change (insn, &XEXP (dest, 2),
4924 canon_reg (XEXP (dest, 2), insn), 1);
4927 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
4928 || GET_CODE (dest) == ZERO_EXTRACT
4929 || GET_CODE (dest) == SIGN_EXTRACT)
4930 dest = XEXP (dest, 0);
4932 if (GET_CODE (dest) == MEM)
4933 canon_reg (dest, insn);
4936 /* Now that we have done all the replacements, we can apply the change
4937 group and see if they all work. Note that this will cause some
4938 canonicalizations that would have worked individually not to be applied
4939 because some other canonicalization didn't work, but this should not
4940 occur often.
4942 The result of apply_change_group can be ignored; see canon_reg. */
4944 apply_change_group ();
4946 /* Set sets[i].src_elt to the class each source belongs to.
4947 Detect assignments from or to volatile things
4948 and set set[i] to zero so they will be ignored
4949 in the rest of this function.
4951 Nothing in this loop changes the hash table or the register chains. */
4953 for (i = 0; i < n_sets; i++)
4955 rtx src, dest;
4956 rtx src_folded;
4957 struct table_elt *elt = 0, *p;
4958 enum machine_mode mode;
4959 rtx src_eqv_here;
4960 rtx src_const = 0;
4961 rtx src_related = 0;
4962 struct table_elt *src_const_elt = 0;
4963 int src_cost = MAX_COST;
4964 int src_eqv_cost = MAX_COST;
4965 int src_folded_cost = MAX_COST;
4966 int src_related_cost = MAX_COST;
4967 int src_elt_cost = MAX_COST;
4968 int src_regcost = MAX_COST;
4969 int src_eqv_regcost = MAX_COST;
4970 int src_folded_regcost = MAX_COST;
4971 int src_related_regcost = MAX_COST;
4972 int src_elt_regcost = MAX_COST;
4973 /* Set non-zero if we need to call force_const_mem on the
4974 contents of src_folded before using it. */
4975 int src_folded_force_flag = 0;
4977 dest = SET_DEST (sets[i].rtl);
4978 src = SET_SRC (sets[i].rtl);
4980 /* If SRC is a constant that has no machine mode,
4981 hash it with the destination's machine mode.
4982 This way we can keep different modes separate. */
4984 mode = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
4985 sets[i].mode = mode;
4987 if (src_eqv)
4989 enum machine_mode eqvmode = mode;
4990 if (GET_CODE (dest) == STRICT_LOW_PART)
4991 eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
4992 do_not_record = 0;
4993 hash_arg_in_memory = 0;
4994 src_eqv = fold_rtx (src_eqv, insn);
4995 src_eqv_hash = HASH (src_eqv, eqvmode);
4997 /* Find the equivalence class for the equivalent expression. */
4999 if (!do_not_record)
5000 src_eqv_elt = lookup (src_eqv, src_eqv_hash, eqvmode);
5002 src_eqv_volatile = do_not_record;
5003 src_eqv_in_memory = hash_arg_in_memory;
5006 /* If this is a STRICT_LOW_PART assignment, src_eqv corresponds to the
5007 value of the INNER register, not the destination. So it is not
5008 a valid substitution for the source. But save it for later. */
5009 if (GET_CODE (dest) == STRICT_LOW_PART)
5010 src_eqv_here = 0;
5011 else
5012 src_eqv_here = src_eqv;
5014 /* Simplify any foldable subexpressions in SRC. Then get the fully-
5015 simplified result, which may not necessarily be valid. */
5016 src_folded = fold_rtx (src, insn);
5018 #if 0
5019 /* ??? This caused bad code to be generated for the m68k port with -O2.
5020 Suppose src is (CONST_INT -1), and that after truncation src_folded
5021 is (CONST_INT 3). Suppose src_folded is then used for src_const.
5022 At the end we will add src and src_const to the same equivalence
5023 class. We now have 3 and -1 on the same equivalence class. This
5024 causes later instructions to be mis-optimized. */
5025 /* If storing a constant in a bitfield, pre-truncate the constant
5026 so we will be able to record it later. */
5027 if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
5028 || GET_CODE (SET_DEST (sets[i].rtl)) == SIGN_EXTRACT)
5030 rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
5032 if (GET_CODE (src) == CONST_INT
5033 && GET_CODE (width) == CONST_INT
5034 && INTVAL (width) < HOST_BITS_PER_WIDE_INT
5035 && (INTVAL (src) & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
5036 src_folded
5037 = GEN_INT (INTVAL (src) & (((HOST_WIDE_INT) 1
5038 << INTVAL (width)) - 1));
5040 #endif
5042 /* Compute SRC's hash code, and also notice if it
5043 should not be recorded at all. In that case,
5044 prevent any further processing of this assignment. */
5045 do_not_record = 0;
5046 hash_arg_in_memory = 0;
5048 sets[i].src = src;
5049 sets[i].src_hash = HASH (src, mode);
5050 sets[i].src_volatile = do_not_record;
5051 sets[i].src_in_memory = hash_arg_in_memory;
5053 /* If SRC is a MEM, there is a REG_EQUIV note for SRC, and DEST is
5054 a pseudo, do not record SRC. Using SRC as a replacement for
5055 anything else will be incorrect in that situation. Note that
5056 this usually occurs only for stack slots, in which case all the
5057 RTL would be referring to SRC, so we don't lose any optimization
5058 opportunities by not having SRC in the hash table. */
5060 if (GET_CODE (src) == MEM
5061 && find_reg_note (insn, REG_EQUIV, NULL_RTX) != 0
5062 && GET_CODE (dest) == REG
5063 && REGNO (dest) >= FIRST_PSEUDO_REGISTER)
5064 sets[i].src_volatile = 1;
5066 #if 0
5067 /* It is no longer clear why we used to do this, but it doesn't
5068 appear to still be needed. So let's try without it since this
5069 code hurts cse'ing widened ops. */
5070 /* If source is a perverse subreg (such as QI treated as an SI),
5071 treat it as volatile. It may do the work of an SI in one context
5072 where the extra bits are not being used, but cannot replace an SI
5073 in general. */
5074 if (GET_CODE (src) == SUBREG
5075 && (GET_MODE_SIZE (GET_MODE (src))
5076 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))))
5077 sets[i].src_volatile = 1;
5078 #endif
5080 /* Locate all possible equivalent forms for SRC. Try to replace
5081 SRC in the insn with each cheaper equivalent.
5083 We have the following types of equivalents: SRC itself, a folded
5084 version, a value given in a REG_EQUAL note, or a value related
5085 to a constant.
5087 Each of these equivalents may be part of an additional class
5088 of equivalents (if more than one is in the table, they must be in
5089 the same class; we check for this).
5091 If the source is volatile, we don't do any table lookups.
5093 We note any constant equivalent for possible later use in a
5094 REG_NOTE. */
5096 if (!sets[i].src_volatile)
5097 elt = lookup (src, sets[i].src_hash, mode);
5099 sets[i].src_elt = elt;
5101 if (elt && src_eqv_here && src_eqv_elt)
5103 if (elt->first_same_value != src_eqv_elt->first_same_value)
5105 /* The REG_EQUAL is indicating that two formerly distinct
5106 classes are now equivalent. So merge them. */
5107 merge_equiv_classes (elt, src_eqv_elt);
5108 src_eqv_hash = HASH (src_eqv, elt->mode);
5109 src_eqv_elt = lookup (src_eqv, src_eqv_hash, elt->mode);
5112 src_eqv_here = 0;
5115 else if (src_eqv_elt)
5116 elt = src_eqv_elt;
5118 /* Try to find a constant somewhere and record it in `src_const'.
5119 Record its table element, if any, in `src_const_elt'. Look in
5120 any known equivalences first. (If the constant is not in the
5121 table, also set `sets[i].src_const_hash'). */
5122 if (elt)
5123 for (p = elt->first_same_value; p; p = p->next_same_value)
5124 if (p->is_const)
5126 src_const = p->exp;
5127 src_const_elt = elt;
5128 break;
5131 if (src_const == 0
5132 && (CONSTANT_P (src_folded)
5133 /* Consider (minus (label_ref L1) (label_ref L2)) as
5134 "constant" here so we will record it. This allows us
5135 to fold switch statements when an ADDR_DIFF_VEC is used. */
5136 || (GET_CODE (src_folded) == MINUS
5137 && GET_CODE (XEXP (src_folded, 0)) == LABEL_REF
5138 && GET_CODE (XEXP (src_folded, 1)) == LABEL_REF)))
5139 src_const = src_folded, src_const_elt = elt;
5140 else if (src_const == 0 && src_eqv_here && CONSTANT_P (src_eqv_here))
5141 src_const = src_eqv_here, src_const_elt = src_eqv_elt;
5143 /* If we don't know if the constant is in the table, get its
5144 hash code and look it up. */
5145 if (src_const && src_const_elt == 0)
5147 sets[i].src_const_hash = HASH (src_const, mode);
5148 src_const_elt = lookup (src_const, sets[i].src_const_hash, mode);
5151 sets[i].src_const = src_const;
5152 sets[i].src_const_elt = src_const_elt;
5154 /* If the constant and our source are both in the table, mark them as
5155 equivalent. Otherwise, if a constant is in the table but the source
5156 isn't, set ELT to it. */
5157 if (src_const_elt && elt
5158 && src_const_elt->first_same_value != elt->first_same_value)
5159 merge_equiv_classes (elt, src_const_elt);
5160 else if (src_const_elt && elt == 0)
5161 elt = src_const_elt;
5163 /* See if there is a register linearly related to a constant
5164 equivalent of SRC. */
5165 if (src_const
5166 && (GET_CODE (src_const) == CONST
5167 || (src_const_elt && src_const_elt->related_value != 0)))
5169 src_related = use_related_value (src_const, src_const_elt);
5170 if (src_related)
5172 struct table_elt *src_related_elt
5173 = lookup (src_related, HASH (src_related, mode), mode);
5174 if (src_related_elt && elt)
5176 if (elt->first_same_value
5177 != src_related_elt->first_same_value)
5178 /* This can occur when we previously saw a CONST
5179 involving a SYMBOL_REF and then see the SYMBOL_REF
5180 twice. Merge the involved classes. */
5181 merge_equiv_classes (elt, src_related_elt);
5183 src_related = 0;
5184 src_related_elt = 0;
5186 else if (src_related_elt && elt == 0)
5187 elt = src_related_elt;
5191 /* See if we have a CONST_INT that is already in a register in a
5192 wider mode. */
5194 if (src_const && src_related == 0 && GET_CODE (src_const) == CONST_INT
5195 && GET_MODE_CLASS (mode) == MODE_INT
5196 && GET_MODE_BITSIZE (mode) < BITS_PER_WORD)
5198 enum machine_mode wider_mode;
5200 for (wider_mode = GET_MODE_WIDER_MODE (mode);
5201 GET_MODE_BITSIZE (wider_mode) <= BITS_PER_WORD
5202 && src_related == 0;
5203 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
5205 struct table_elt *const_elt
5206 = lookup (src_const, HASH (src_const, wider_mode), wider_mode);
5208 if (const_elt == 0)
5209 continue;
5211 for (const_elt = const_elt->first_same_value;
5212 const_elt; const_elt = const_elt->next_same_value)
5213 if (GET_CODE (const_elt->exp) == REG)
5215 src_related = gen_lowpart_if_possible (mode,
5216 const_elt->exp);
5217 break;
5222 /* Another possibility is that we have an AND with a constant in
5223 a mode narrower than a word. If so, it might have been generated
5224 as part of an "if" which would narrow the AND. If we already
5225 have done the AND in a wider mode, we can use a SUBREG of that
5226 value. */
5228 if (flag_expensive_optimizations && ! src_related
5229 && GET_CODE (src) == AND && GET_CODE (XEXP (src, 1)) == CONST_INT
5230 && GET_MODE_SIZE (mode) < UNITS_PER_WORD)
5232 enum machine_mode tmode;
5233 rtx new_and = gen_rtx_AND (VOIDmode, NULL_RTX, XEXP (src, 1));
5235 for (tmode = GET_MODE_WIDER_MODE (mode);
5236 GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
5237 tmode = GET_MODE_WIDER_MODE (tmode))
5239 rtx inner = gen_lowpart_if_possible (tmode, XEXP (src, 0));
5240 struct table_elt *larger_elt;
5242 if (inner)
5244 PUT_MODE (new_and, tmode);
5245 XEXP (new_and, 0) = inner;
5246 larger_elt = lookup (new_and, HASH (new_and, tmode), tmode);
5247 if (larger_elt == 0)
5248 continue;
5250 for (larger_elt = larger_elt->first_same_value;
5251 larger_elt; larger_elt = larger_elt->next_same_value)
5252 if (GET_CODE (larger_elt->exp) == REG)
5254 src_related
5255 = gen_lowpart_if_possible (mode, larger_elt->exp);
5256 break;
5259 if (src_related)
5260 break;
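/* For instance, once
   (set (reg:SI 70) (and:SI (reg:SI 60) (const_int 255)))
   has been seen, a later QImode AND of (reg 60) with (const_int 255)
   can draw on the QImode low part of (reg:SI 70) as a related source
   instead of redoing the mask.  */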
5265 #ifdef LOAD_EXTEND_OP
5266 /* See if a MEM has already been loaded with a widening operation;
5267 if it has, we can use a subreg of that. Many CISC machines
5268 also have such operations, but this is only likely to be
5269 beneficial on these machines. */
5271 if (flag_expensive_optimizations && src_related == 0
5272 && (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
5273 && GET_MODE_CLASS (mode) == MODE_INT
5274 && GET_CODE (src) == MEM && ! do_not_record
5275 && LOAD_EXTEND_OP (mode) != NIL)
5277 enum machine_mode tmode;
5279 /* Set what we are trying to extend and the operation it might
5280 have been extended with. */
5281 PUT_CODE (memory_extend_rtx, LOAD_EXTEND_OP (mode));
5282 XEXP (memory_extend_rtx, 0) = src;
5284 for (tmode = GET_MODE_WIDER_MODE (mode);
5285 GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
5286 tmode = GET_MODE_WIDER_MODE (tmode))
5288 struct table_elt *larger_elt;
5290 PUT_MODE (memory_extend_rtx, tmode);
5291 larger_elt = lookup (memory_extend_rtx,
5292 HASH (memory_extend_rtx, tmode), tmode);
5293 if (larger_elt == 0)
5294 continue;
5296 for (larger_elt = larger_elt->first_same_value;
5297 larger_elt; larger_elt = larger_elt->next_same_value)
5298 if (GET_CODE (larger_elt->exp) == REG)
5300 src_related = gen_lowpart_if_possible (mode,
5301 larger_elt->exp);
5302 break;
5305 if (src_related)
5306 break;
5309 #endif /* LOAD_EXTEND_OP */
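/* Similarly, on a target whose loads extend, if (reg:SI 70) was set
   by (zero_extend:SI (mem:QI ...)), a later QImode read of that same
   MEM can use the low part of (reg:SI 70) as a related source rather
   than loading from memory again.  */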
5311 if (src == src_folded)
5312 src_folded = 0;
5314 /* At this point, ELT, if non-zero, points to a class of expressions
5315 equivalent to the source of this SET; SRC, SRC_EQV, SRC_FOLDED,
5316 and SRC_RELATED, if non-zero, each contain additional equivalent
5317 expressions. Prune these latter expressions by deleting expressions
5318 already in the equivalence class.
5320 Check for an equivalent identical to the destination. If found,
5321 this is the preferred equivalent since it will likely lead to
5322 elimination of the insn. Indicate this by placing it in
5323 `src_related'. */
5325 if (elt)
5326 elt = elt->first_same_value;
5327 for (p = elt; p; p = p->next_same_value)
5329 enum rtx_code code = GET_CODE (p->exp);
5331 /* If the expression is not valid, ignore it. Then we do not
5332 have to check for validity below. In most cases, we can use
5333 `rtx_equal_p', since canonicalization has already been done. */
5334 if (code != REG && ! exp_equiv_p (p->exp, p->exp, 1, 0))
5335 continue;
5337 /* Also skip paradoxical subregs, unless that's what we're
5338 looking for. */
5339 if (code == SUBREG
5340 && (GET_MODE_SIZE (GET_MODE (p->exp))
5341 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))
5342 && ! (src != 0
5343 && GET_CODE (src) == SUBREG
5344 && GET_MODE (src) == GET_MODE (p->exp)
5345 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
5346 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))))
5347 continue;
5349 if (src && GET_CODE (src) == code && rtx_equal_p (src, p->exp))
5350 src = 0;
5351 else if (src_folded && GET_CODE (src_folded) == code
5352 && rtx_equal_p (src_folded, p->exp))
5353 src_folded = 0;
5354 else if (src_eqv_here && GET_CODE (src_eqv_here) == code
5355 && rtx_equal_p (src_eqv_here, p->exp))
5356 src_eqv_here = 0;
5357 else if (src_related && GET_CODE (src_related) == code
5358 && rtx_equal_p (src_related, p->exp))
5359 src_related = 0;
5361 /* If this is the same as the destination of the insn, we want
5362 to prefer it. Copy it to src_related. The code below will
5363 then give it a negative cost. */
5364 if (GET_CODE (dest) == code && rtx_equal_p (p->exp, dest))
5365 src_related = dest;
5368 /* Find the cheapest valid equivalent, trying all the available
5369 possibilities. Prefer items not in the hash table to ones
5370 that are when they are equal cost. Note that we can never
5371 worsen an insn as the current contents will also succeed.
5372 If we find an equivalent identical to the destination, use it as best,
5373 since this insn will probably be eliminated in that case. */
5374 if (src)
5376 if (rtx_equal_p (src, dest))
5377 src_cost = src_regcost = -1;
5378 else
5380 src_cost = COST (src);
5381 src_regcost = approx_reg_cost (src);
5385 if (src_eqv_here)
5387 if (rtx_equal_p (src_eqv_here, dest))
5388 src_eqv_cost = src_eqv_regcost = -1;
5389 else
5391 src_eqv_cost = COST (src_eqv_here);
5392 src_eqv_regcost = approx_reg_cost (src_eqv_here);
5396 if (src_folded)
5398 if (rtx_equal_p (src_folded, dest))
5399 src_folded_cost = src_folded_regcost = -1;
5400 else
5402 src_folded_cost = COST (src_folded);
5403 src_folded_regcost = approx_reg_cost (src_folded);
5407 if (src_related)
5409 if (rtx_equal_p (src_related, dest))
5410 src_related_cost = src_related_regcost = -1;
5411 else
5413 src_related_cost = COST (src_related);
5414 src_related_regcost = approx_reg_cost (src_related);
5418 /* If this was an indirect jump insn, a known label will really be
5419 cheaper even though it looks more expensive. */
5420 if (dest == pc_rtx && src_const && GET_CODE (src_const) == LABEL_REF)
5421 src_folded = src_const, src_folded_cost = src_folded_regcost = -1;
5423 /* Terminate loop when replacement made. This must terminate since
5424 the current contents will be tested and will always be valid. */
5425 while (1)
5427 rtx trial;
5429 /* Skip invalid entries. */
5430 while (elt && GET_CODE (elt->exp) != REG
5431 && ! exp_equiv_p (elt->exp, elt->exp, 1, 0))
5432 elt = elt->next_same_value;
5434 /* A paradoxical subreg would be bad here: it'll be the right
5435 size, but later may be adjusted so that the upper bits aren't
5436 what we want. So reject it. */
5437 if (elt != 0
5438 && GET_CODE (elt->exp) == SUBREG
5439 && (GET_MODE_SIZE (GET_MODE (elt->exp))
5440 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))
5441 /* It is okay, though, if the rtx we're trying to match
5442 will ignore any of the bits we can't predict. */
5443 && ! (src != 0
5444 && GET_CODE (src) == SUBREG
5445 && GET_MODE (src) == GET_MODE (elt->exp)
5446 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
5447 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))))
5449 elt = elt->next_same_value;
5450 continue;
5453 if (elt)
5455 src_elt_cost = elt->cost;
5456 src_elt_regcost = elt->regcost;
5459 /* Find cheapest and skip it for the next time. For items
5460 of equal cost, use this order:
5461 src_folded, src, src_eqv, src_related and hash table entry. */
5462 if (src_folded
5463 && preferrable (src_folded_cost, src_folded_regcost,
5464 src_cost, src_regcost) <= 0
5465 && preferrable (src_folded_cost, src_folded_regcost,
5466 src_eqv_cost, src_eqv_regcost) <= 0
5467 && preferrable (src_folded_cost, src_folded_regcost,
5468 src_related_cost, src_related_regcost) <= 0
5469 && preferrable (src_folded_cost, src_folded_regcost,
5470 src_elt_cost, src_elt_regcost) <= 0)
5472 trial = src_folded, src_folded_cost = MAX_COST;
5473 if (src_folded_force_flag)
5474 trial = force_const_mem (mode, trial);
5476 else if (src
5477 && preferrable (src_cost, src_regcost,
5478 src_eqv_cost, src_eqv_regcost) <= 0
5479 && preferrable (src_cost, src_regcost,
5480 src_related_cost, src_related_regcost) <= 0
5481 && preferrable (src_cost, src_regcost,
5482 src_elt_cost, src_elt_regcost) <= 0)
5483 trial = src, src_cost = MAX_COST;
5484 else if (src_eqv_here
5485 && preferrable (src_eqv_cost, src_eqv_regcost,
5486 src_related_cost, src_related_regcost) <= 0
5487 && preferrable (src_eqv_cost, src_eqv_regcost,
5488 src_elt_cost, src_elt_regcost) <= 0)
5489 trial = copy_rtx (src_eqv_here), src_eqv_cost = MAX_COST;
5490 else if (src_related
5491 && preferrable (src_related_cost, src_related_regcost,
5492 src_elt_cost, src_elt_regcost) <= 0)
5493 trial = copy_rtx (src_related), src_related_cost = MAX_COST;
5494 else
5496 trial = copy_rtx (elt->exp);
5497 elt = elt->next_same_value;
5498 src_elt_cost = MAX_COST;
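/* As an illustration of the ranking above: each candidate carries a
   (speed cost, register-pressure cost) pair, ties go to the earlier
   candidate in the order src_folded, src, src_eqv, src_related, hash
   table entry, and a chosen candidate is retired from later rounds by
   setting its cost to MAX_COST.  A minimal standalone sketch of such
   a two-component comparison follows; the names are invented, the
   real preferrable may break ties differently, and the block is not
   compiled into this file.  */
#if 0
/* Return a negative value if candidate A is strictly better than B,
   zero if they tie, positive if B is better.  Lower speed cost wins;
   register pressure only breaks speed-cost ties.  */
static int
sketch_preferrable (int cost_a, int regcost_a, int cost_b, int regcost_b)
{
  if (cost_a != cost_b)
    return cost_a - cost_b;
  return regcost_a - regcost_b;
}
#endif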
5501 /* We don't normally have an insn matching (set (pc) (pc)), so
5502 check for this separately here. We will delete such an
5503 insn below.
5505 For other cases such as a table jump or conditional jump
5506 where we know the ultimate target, go ahead and replace the
5507 operand. While that may not make a valid insn, we will
5508 reemit the jump below (and also insert any necessary
5509 barriers). */
5510 if (n_sets == 1 && dest == pc_rtx
5511 && (trial == pc_rtx
5512 || (GET_CODE (trial) == LABEL_REF
5513 && ! condjump_p (insn))))
5515 SET_SRC (sets[i].rtl) = trial;
5516 cse_jumps_altered = 1;
5517 break;
5520 /* Look for a substitution that makes a valid insn. */
5521 else if (validate_change (insn, &SET_SRC (sets[i].rtl), trial, 0))
5523 /* If we just made a substitution inside a libcall, then we
5524 need to make the same substitution in any notes attached
5525 to the RETVAL insn. */
5526 if (libcall_insn
5527 && (GET_CODE (sets[i].orig_src) == REG
5528 || GET_CODE (sets[i].orig_src) == SUBREG
5529 || GET_CODE (sets[i].orig_src) == MEM))
5530 replace_rtx (REG_NOTES (libcall_insn), sets[i].orig_src,
5531 canon_reg (SET_SRC (sets[i].rtl), insn));
5533 /* The result of apply_change_group can be ignored; see
5534 canon_reg. */
5536 validate_change (insn, &SET_SRC (sets[i].rtl),
5537 canon_reg (SET_SRC (sets[i].rtl), insn),
5539 apply_change_group ();
5540 break;
5543 /* If we previously found constant pool entries for
5544 constants and this is a constant, try making a
5545 pool entry. Put it in src_folded unless we have already done
5546 this, since that is where it likely came from. */
5548 else if (constant_pool_entries_cost
5549 && CONSTANT_P (trial)
5550 /* Reject cases that will abort in decode_rtx_const.
5551 On the alpha when simplifying a switch, we get
5552 (const (truncate (minus (label_ref) (label_ref)))). */
5553 && ! (GET_CODE (trial) == CONST
5554 && GET_CODE (XEXP (trial, 0)) == TRUNCATE)
5555 /* Likewise on IA-64, except without the truncate. */
5556 && ! (GET_CODE (trial) == CONST
5557 && GET_CODE (XEXP (trial, 0)) == MINUS
5558 && GET_CODE (XEXP (XEXP (trial, 0), 0)) == LABEL_REF
5559 && GET_CODE (XEXP (XEXP (trial, 0), 1)) == LABEL_REF)
5560 && (src_folded == 0
5561 || (GET_CODE (src_folded) != MEM
5562 && ! src_folded_force_flag))
5563 && GET_MODE_CLASS (mode) != MODE_CC
5564 && mode != VOIDmode)
5566 src_folded_force_flag = 1;
5567 src_folded = trial;
5568 src_folded_cost = constant_pool_entries_cost;
5572 src = SET_SRC (sets[i].rtl);
5574 /* In general, it is good to have a SET with SET_SRC == SET_DEST.
5575 However, there is an important exception: If both are registers
5576 that are not the head of their equivalence class, replace SET_SRC
5577 with the head of the class. If we do not do this, we will have
5578 both registers live over a portion of the basic block. This way,
5579 their lifetimes will likely abut instead of overlapping. */
5580 if (GET_CODE (dest) == REG
5581 && REGNO_QTY_VALID_P (REGNO (dest)))
5583 int dest_q = REG_QTY (REGNO (dest));
5584 struct qty_table_elem *dest_ent = &qty_table[dest_q];
5586 if (dest_ent->mode == GET_MODE (dest)
5587 && dest_ent->first_reg != REGNO (dest)
5588 && GET_CODE (src) == REG && REGNO (src) == REGNO (dest)
5589 /* Don't do this if the original insn had a hard reg as
5590 SET_SRC or SET_DEST. */
5591 && (GET_CODE (sets[i].src) != REG
5592 || REGNO (sets[i].src) >= FIRST_PSEUDO_REGISTER)
5593 && (GET_CODE (dest) != REG || REGNO (dest) >= FIRST_PSEUDO_REGISTER))
5594 /* We can't call canon_reg here because it won't do anything if
5595 SRC is a hard register. */
5597 int src_q = REG_QTY (REGNO (src));
5598 struct qty_table_elem *src_ent = &qty_table[src_q];
5599 int first = src_ent->first_reg;
5600 rtx new_src
5601 = (first >= FIRST_PSEUDO_REGISTER
5602 ? regno_reg_rtx[first] : gen_rtx_REG (GET_MODE (src), first));
5604 /* We must use validate_change even for this, because this
5605 might be a special no-op instruction, suitable only to
5606 tag notes onto. */
5607 if (validate_change (insn, &SET_SRC (sets[i].rtl), new_src, 0))
5609 src = new_src;
5610 /* If we had a constant that is cheaper than what we are now
5611 setting SRC to, use that constant. We ignored it when we
5612 thought we could make this into a no-op. */
5613 if (src_const && COST (src_const) < COST (src)
5614 && validate_change (insn, &SET_SRC (sets[i].rtl),
5615 src_const, 0))
5616 src = src_const;
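/* To make the class-head replacement above concrete: every register
   maps through REG_QTY to a quantity, and the quantity records the
   first register that acquired the value.  A minimal standalone
   sketch with invented names and sizes, not compiled into this
   file.  */
#if 0
#define SKETCH_NUM_REGS 1024

struct sketch_qty_entry { int first_reg; };

static struct sketch_qty_entry sketch_qty_table[2 * SKETCH_NUM_REGS];
static int sketch_reg_qty[SKETCH_NUM_REGS];

/* Return the canonical (first) register of REGNO's equivalence
   class, which is what SET_SRC is rewritten to above.  */
static int
sketch_class_head (int regno)
{
  return sketch_qty_table[sketch_reg_qty[regno]].first_reg;
}
#endif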
5621 /* If we made a change, recompute SRC values. */
5622 if (src != sets[i].src)
5624 cse_altered = 1;
5625 do_not_record = 0;
5626 hash_arg_in_memory = 0;
5627 sets[i].src = src;
5628 sets[i].src_hash = HASH (src, mode);
5629 sets[i].src_volatile = do_not_record;
5630 sets[i].src_in_memory = hash_arg_in_memory;
5631 sets[i].src_elt = lookup (src, sets[i].src_hash, mode);
5634 /* If this is a single SET, we are setting a register, and we have an
5635 equivalent constant, we want to add a REG_NOTE. We don't want
5636 to write a REG_EQUAL note for a constant pseudo since verifying that
5637 that pseudo hasn't been eliminated is a pain. Such a note also
5638 won't help anything.
5640 Avoid a REG_EQUAL note for (CONST (MINUS (LABEL_REF) (LABEL_REF)))
5641 which can be created for a reference to a compile time computable
5642 entry in a jump table. */
5644 if (n_sets == 1 && src_const && GET_CODE (dest) == REG
5645 && GET_CODE (src_const) != REG
5646 && ! (GET_CODE (src_const) == CONST
5647 && GET_CODE (XEXP (src_const, 0)) == MINUS
5648 && GET_CODE (XEXP (XEXP (src_const, 0), 0)) == LABEL_REF
5649 && GET_CODE (XEXP (XEXP (src_const, 0), 1)) == LABEL_REF))
5651 tem = find_reg_note (insn, REG_EQUAL, NULL_RTX);
5653 /* Make sure that the rtx is not shared with any other insn. */
5654 src_const = copy_rtx (src_const);
5656 /* Record the actual constant value in a REG_EQUAL note, making
5657 a new one if one does not already exist. */
5658 if (tem)
5659 XEXP (tem, 0) = src_const;
5660 else
5661 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_EQUAL,
5662 src_const, REG_NOTES (insn));
5664 /* If storing a constant value in a register that
5665 previously held the constant value 0,
5666 record this fact with a REG_WAS_0 note on this insn.
5668 Note that the *register* is required to have previously held 0,
5669 not just any register in the quantity and we must point to the
5670 insn that set that register to zero.
5672 Rather than track each register individually, we just see if
5673 the last set for this quantity was for this register. */
5675 if (REGNO_QTY_VALID_P (REGNO (dest)))
5677 int dest_q = REG_QTY (REGNO (dest));
5678 struct qty_table_elem *dest_ent = &qty_table[dest_q];
5680 if (dest_ent->const_rtx == const0_rtx)
5682 /* See if we previously had a REG_WAS_0 note. */
5683 rtx note = find_reg_note (insn, REG_WAS_0, NULL_RTX);
5684 rtx const_insn = dest_ent->const_insn;
5686 if ((tem = single_set (const_insn)) != 0
5687 && rtx_equal_p (SET_DEST (tem), dest))
5689 if (note)
5690 XEXP (note, 0) = const_insn;
5691 else
5692 REG_NOTES (insn)
5693 = gen_rtx_INSN_LIST (REG_WAS_0, const_insn,
5694 REG_NOTES (insn));
5700 /* Now deal with the destination. */
5701 do_not_record = 0;
5703 /* Look within any SIGN_EXTRACT or ZERO_EXTRACT
5704 to the MEM or REG within it. */
5705 while (GET_CODE (dest) == SIGN_EXTRACT
5706 || GET_CODE (dest) == ZERO_EXTRACT
5707 || GET_CODE (dest) == SUBREG
5708 || GET_CODE (dest) == STRICT_LOW_PART)
5709 dest = XEXP (dest, 0);
5711 sets[i].inner_dest = dest;
5713 if (GET_CODE (dest) == MEM)
5715 #ifdef PUSH_ROUNDING
5716 /* Stack pushes invalidate the stack pointer. */
5717 rtx addr = XEXP (dest, 0);
5718 if (GET_RTX_CLASS (GET_CODE (addr)) == 'a'
5719 && XEXP (addr, 0) == stack_pointer_rtx)
5720 invalidate (stack_pointer_rtx, Pmode);
5721 #endif
5722 dest = fold_rtx (dest, insn);
5725 /* Compute the hash code of the destination now,
5726 before the effects of this instruction are recorded,
5727 since the register values used in the address computation
5728 are those before this instruction. */
5729 sets[i].dest_hash = HASH (dest, mode);
5731 /* Don't enter a bit-field in the hash table
5732 because the value in it after the store
5733 may not equal what was stored, due to truncation. */
5735 if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
5736 || GET_CODE (SET_DEST (sets[i].rtl)) == SIGN_EXTRACT)
5738 rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
5740 if (src_const != 0 && GET_CODE (src_const) == CONST_INT
5741 && GET_CODE (width) == CONST_INT
5742 && INTVAL (width) < HOST_BITS_PER_WIDE_INT
5743 && ! (INTVAL (src_const)
5744 & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
5745 /* Exception: if the value is constant,
5746 and it won't be truncated, record it. */
5748 else
5750 /* This is chosen so that the destination will be invalidated
5751 but no new value will be recorded.
5752 We must invalidate because sometimes constant
5753 values can be recorded for bitfields. */
5754 sets[i].src_elt = 0;
5755 sets[i].src_volatile = 1;
5756 src_eqv = 0;
5757 src_eqv_elt = 0;
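/* The width test above amounts to: a constant survives a store into
   a WIDTH-bit field exactly when no bit at position WIDTH or above
   is set.  A standalone sketch of the same mask test follows; it
   mirrors the (-1 << width) idiom above but uses unsigned arithmetic
   so the shift is well defined, and it is not compiled into this
   file.  */
#if 0
/* Nonzero if VAL stored into a WIDTH-bit field reads back unchanged.
   Assumes 0 < WIDTH < 64, like the HOST_BITS_PER_WIDE_INT guard
   above.  */
static int
sketch_fits_bitfield (unsigned long long val, int width)
{
  return (val & (~0ULL << width)) == 0;
}
#endif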
5761 /* If only one set in a JUMP_INSN and it is now a no-op, we can delete
5762 the insn. */
5763 else if (n_sets == 1 && dest == pc_rtx && src == pc_rtx)
5765 /* One less use of the label this insn used to jump to. */
5766 delete_insn (insn);
5767 cse_jumps_altered = 1;
5768 /* No more processing for this set. */
5769 sets[i].rtl = 0;
5772 /* If this SET is now setting PC to a label, we know it used to
5773 be a conditional or computed branch. */
5774 else if (dest == pc_rtx && GET_CODE (src) == LABEL_REF)
5776 /* We reemit the jump in as many cases as possible just in
5777 case the form of an unconditional jump is significantly
5778 different from that of a computed or conditional jump.
5780 If this insn has multiple sets, then reemitting the
5781 jump is nontrivial. So instead we just force rerecognition
5782 and hope for the best. */
5783 if (n_sets == 1)
5785 rtx new = emit_jump_insn_before (gen_jump (XEXP (src, 0)), insn);
5786 JUMP_LABEL (new) = XEXP (src, 0);
5787 LABEL_NUSES (XEXP (src, 0))++;
5788 insn = new;
5790 else
5791 INSN_CODE (insn) = -1;
5793 never_reached_warning (insn);
5795 /* Now emit a BARRIER after the unconditional jump. Do not bother
5796 deleting any unreachable code, let jump/flow do that. */
5797 if (NEXT_INSN (insn) != 0
5798 && GET_CODE (NEXT_INSN (insn)) != BARRIER)
5799 emit_barrier_after (insn);
5801 cse_jumps_altered = 1;
5802 sets[i].rtl = 0;
5805 /* If destination is volatile, invalidate it and then do no further
5806 processing for this assignment. */
5808 else if (do_not_record)
5810 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG)
5811 invalidate (dest, VOIDmode);
5812 else if (GET_CODE (dest) == MEM)
5814 /* Outgoing arguments for a libcall don't
5815 affect any recorded expressions. */
5816 if (! libcall_insn || insn == libcall_insn)
5817 invalidate (dest, VOIDmode);
5819 else if (GET_CODE (dest) == STRICT_LOW_PART
5820 || GET_CODE (dest) == ZERO_EXTRACT)
5821 invalidate (XEXP (dest, 0), GET_MODE (dest));
5822 sets[i].rtl = 0;
5825 if (sets[i].rtl != 0 && dest != SET_DEST (sets[i].rtl))
5826 sets[i].dest_hash = HASH (SET_DEST (sets[i].rtl), mode);
5828 #ifdef HAVE_cc0
5829 /* If setting CC0, record what it was set to, or a constant, if it
5830 is equivalent to a constant. If it is being set to a floating-point
5831 value, make a COMPARE with the appropriate constant of 0. If we
5832 don't do this, later code can interpret this as a test against
5833 const0_rtx, which can cause problems if we try to put it into an
5834 insn as a floating-point operand. */
5835 if (dest == cc0_rtx)
5837 this_insn_cc0 = src_const && mode != VOIDmode ? src_const : src;
5838 this_insn_cc0_mode = mode;
5839 if (FLOAT_MODE_P (mode))
5840 this_insn_cc0 = gen_rtx_COMPARE (VOIDmode, this_insn_cc0,
5841 CONST0_RTX (mode));
5843 #endif
5846 /* Now enter all non-volatile source expressions in the hash table
5847 if they are not already present.
5848 Record their equivalence classes in src_elt.
5849 This way we can insert the corresponding destinations into
5850 the same classes even if the actual sources are no longer in them
5851 (having been invalidated). */
5853 if (src_eqv && src_eqv_elt == 0 && sets[0].rtl != 0 && ! src_eqv_volatile
5854 && ! rtx_equal_p (src_eqv, SET_DEST (sets[0].rtl)))
5856 struct table_elt *elt;
5857 struct table_elt *classp = sets[0].src_elt;
5858 rtx dest = SET_DEST (sets[0].rtl);
5859 enum machine_mode eqvmode = GET_MODE (dest);
5861 if (GET_CODE (dest) == STRICT_LOW_PART)
5863 eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
5864 classp = 0;
5866 if (insert_regs (src_eqv, classp, 0))
5868 rehash_using_reg (src_eqv);
5869 src_eqv_hash = HASH (src_eqv, eqvmode);
5871 elt = insert (src_eqv, classp, src_eqv_hash, eqvmode);
5872 elt->in_memory = src_eqv_in_memory;
5873 src_eqv_elt = elt;
5875 /* Check to see if src_eqv_elt is the same as a set source which
5876 does not yet have an elt, and if so set the elt of the set source
5877 to src_eqv_elt. */
5878 for (i = 0; i < n_sets; i++)
5879 if (sets[i].rtl && sets[i].src_elt == 0
5880 && rtx_equal_p (SET_SRC (sets[i].rtl), src_eqv))
5881 sets[i].src_elt = src_eqv_elt;
5884 for (i = 0; i < n_sets; i++)
5885 if (sets[i].rtl && ! sets[i].src_volatile
5886 && ! rtx_equal_p (SET_SRC (sets[i].rtl), SET_DEST (sets[i].rtl)))
5888 if (GET_CODE (SET_DEST (sets[i].rtl)) == STRICT_LOW_PART)
5890 /* REG_EQUAL in setting a STRICT_LOW_PART
5891 gives an equivalent for the entire destination register,
5892 not just for the subreg being stored in now.
5893 This is a more interesting equivalence, so we arrange later
5894 to treat the entire reg as the destination. */
5895 sets[i].src_elt = src_eqv_elt;
5896 sets[i].src_hash = src_eqv_hash;
5898 else
5900 /* Insert source and constant equivalent into hash table, if not
5901 already present. */
5902 struct table_elt *classp = src_eqv_elt;
5903 rtx src = sets[i].src;
5904 rtx dest = SET_DEST (sets[i].rtl);
5905 enum machine_mode mode
5906 = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
5908 if (sets[i].src_elt == 0)
5910 /* Don't put a hard register source into the table if this is
5911 the last insn of a libcall. In this case, we only need
5912 to put src_eqv_elt in src_elt. */
5913 if (GET_CODE (src) != REG
5914 || REGNO (src) >= FIRST_PSEUDO_REGISTER
5915 || ! find_reg_note (insn, REG_RETVAL, NULL_RTX))
5917 struct table_elt *elt;
5919 /* Note that these insert_regs calls cannot remove
5920 any of the src_elt's, because they would have failed to
5921 match if not still valid. */
5922 if (insert_regs (src, classp, 0))
5924 rehash_using_reg (src);
5925 sets[i].src_hash = HASH (src, mode);
5927 elt = insert (src, classp, sets[i].src_hash, mode);
5928 elt->in_memory = sets[i].src_in_memory;
5929 sets[i].src_elt = classp = elt;
5931 else
5932 sets[i].src_elt = classp;
5934 if (sets[i].src_const && sets[i].src_const_elt == 0
5935 && src != sets[i].src_const
5936 && ! rtx_equal_p (sets[i].src_const, src))
5937 sets[i].src_elt = insert (sets[i].src_const, classp,
5938 sets[i].src_const_hash, mode);
5941 else if (sets[i].src_elt == 0)
5942 /* If we did not insert the source into the hash table (e.g., it was
5943 volatile), note the equivalence class for the REG_EQUAL value, if any,
5944 so that the destination goes into that class. */
5945 sets[i].src_elt = src_eqv_elt;
5947 invalidate_from_clobbers (x);
5949 /* Some registers are invalidated by subroutine calls. Memory is
5950 invalidated by non-constant calls. */
5952 if (GET_CODE (insn) == CALL_INSN)
5954 if (! CONST_OR_PURE_CALL_P (insn))
5955 invalidate_memory ();
5956 invalidate_for_call ();
5959 /* Now invalidate everything set by this instruction.
5960 If a SUBREG or other funny destination is being set,
5961 sets[i].rtl is still nonzero, so here we invalidate the reg
5962 a part of which is being set. */
5964 for (i = 0; i < n_sets; i++)
5965 if (sets[i].rtl)
5967 /* We can't use the inner dest, because the mode associated with
5968 a ZERO_EXTRACT is significant. */
5969 rtx dest = SET_DEST (sets[i].rtl);
5971 /* Needed for registers to remove the register from its
5972 previous quantity's chain.
5973 Needed for memory if this is a nonvarying address, unless
5974 we have just done an invalidate_memory that covers even those. */
5975 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG)
5976 invalidate (dest, VOIDmode);
5977 else if (GET_CODE (dest) == MEM)
5979 /* Outgoing arguments for a libcall don't
5980 affect any recorded expressions. */
5981 if (! libcall_insn || insn == libcall_insn)
5982 invalidate (dest, VOIDmode);
5984 else if (GET_CODE (dest) == STRICT_LOW_PART
5985 || GET_CODE (dest) == ZERO_EXTRACT)
5986 invalidate (XEXP (dest, 0), GET_MODE (dest));
5989 /* A volatile ASM invalidates everything. */
5990 if (GET_CODE (insn) == INSN
5991 && GET_CODE (PATTERN (insn)) == ASM_OPERANDS
5992 && MEM_VOLATILE_P (PATTERN (insn)))
5993 flush_hash_table ();
5995 /* Make sure registers mentioned in destinations
5996 are safe for use in an expression to be inserted.
5997 This removes from the hash table
5998 any invalid entry that refers to one of these registers.
6000 We don't care about the return value from mention_regs because
6001 we are going to hash the SET_DEST values unconditionally. */
6003 for (i = 0; i < n_sets; i++)
6005 if (sets[i].rtl)
6007 rtx x = SET_DEST (sets[i].rtl);
6009 if (GET_CODE (x) != REG)
6010 mention_regs (x);
6011 else
6013 /* We used to rely on all references to a register becoming
6014 inaccessible when a register changes to a new quantity,
6015 since that changes the hash code. However, that is not
6016 safe, since after HASH_SIZE new quantities we get a
6017 hash 'collision' of a register with its own invalid
6018 entries. And since SUBREGs have been changed not to
6019 change their hash code with the hash code of the register,
6020 it wouldn't work any longer at all. So we have to check
6021 for any invalid references lying around now.
6022 This code is similar to the REG case in mention_regs,
6023 but it knows that reg_tick has been incremented, and
6024 it leaves reg_in_table as -1. */
6025 unsigned int regno = REGNO (x);
6026 unsigned int endregno
6027 = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
6028 : HARD_REGNO_NREGS (regno, GET_MODE (x)));
6029 unsigned int i;
6031 for (i = regno; i < endregno; i++)
6033 if (REG_IN_TABLE (i) >= 0)
6035 remove_invalid_refs (i);
6036 REG_IN_TABLE (i) = -1;
6043 /* We may have just removed some of the src_elt's from the hash table.
6044 So replace each one with the current head of the same class. */
6046 for (i = 0; i < n_sets; i++)
6047 if (sets[i].rtl)
6049 if (sets[i].src_elt && sets[i].src_elt->first_same_value == 0)
6050 /* If elt was removed, find current head of same class,
6051 or 0 if nothing remains of that class. */
6053 struct table_elt *elt = sets[i].src_elt;
6055 while (elt && elt->prev_same_value)
6056 elt = elt->prev_same_value;
6058 while (elt && elt->first_same_value == 0)
6059 elt = elt->next_same_value;
6060 sets[i].src_elt = elt ? elt->first_same_value : 0;
6064 /* Now insert the destinations into their equivalence classes. */
6066 for (i = 0; i < n_sets; i++)
6067 if (sets[i].rtl)
6069 rtx dest = SET_DEST (sets[i].rtl);
6070 rtx inner_dest = sets[i].inner_dest;
6071 struct table_elt *elt;
6073 /* Don't record value if we are not supposed to risk allocating
6074 floating-point values in registers that might be wider than
6075 memory. */
6076 if ((flag_float_store
6077 && GET_CODE (dest) == MEM
6078 && FLOAT_MODE_P (GET_MODE (dest)))
6079 /* Don't record BLKmode values, because we don't know the
6080 size of it, and can't be sure that other BLKmode values
6081 have the same or smaller size. */
6082 || GET_MODE (dest) == BLKmode
6083 /* Don't record values of destinations set inside a libcall block
6084 since we might delete the libcall. Things should have been set
6085 up so we won't want to reuse such a value, but we play it safe
6086 here. */
6087 || libcall_insn
6088 /* If we didn't put a REG_EQUAL value or a source into the hash
6089 table, there is no point in recording DEST.
6090 || sets[i].src_elt == 0
6091 /* If DEST is a paradoxical SUBREG and SRC is a ZERO_EXTEND
6092 or SIGN_EXTEND, don't record DEST since it can cause
6093 some tracking to be wrong.
6095 ??? Think about this more later. */
6096 || (GET_CODE (dest) == SUBREG
6097 && (GET_MODE_SIZE (GET_MODE (dest))
6098 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
6099 && (GET_CODE (sets[i].src) == SIGN_EXTEND
6100 || GET_CODE (sets[i].src) == ZERO_EXTEND)))
6101 continue;
6103 /* STRICT_LOW_PART isn't part of the value BEING set,
6104 and neither is the SUBREG inside it.
6105 Note that in this case SETS[I].SRC_ELT is really SRC_EQV_ELT. */
6106 if (GET_CODE (dest) == STRICT_LOW_PART)
6107 dest = SUBREG_REG (XEXP (dest, 0));
6109 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG)
6110 /* Registers must also be inserted into chains for quantities. */
6111 if (insert_regs (dest, sets[i].src_elt, 1))
6113 /* If `insert_regs' changes something, the hash code must be
6114 recalculated. */
6115 rehash_using_reg (dest);
6116 sets[i].dest_hash = HASH (dest, GET_MODE (dest));
6119 if (GET_CODE (inner_dest) == MEM
6120 && GET_CODE (XEXP (inner_dest, 0)) == ADDRESSOF)
6121 /* Given (SET (MEM (ADDRESSOF (X))) Y) we don't want to say
6122 that (MEM (ADDRESSOF (X))) is equivalent to Y.
6123 Consider the case in which the address of the MEM is
6124 passed to a function, which alters the MEM. Then, if we
6125 later use Y instead of the MEM we'll miss the update. */
6126 elt = insert (dest, 0, sets[i].dest_hash, GET_MODE (dest));
6127 else
6128 elt = insert (dest, sets[i].src_elt,
6129 sets[i].dest_hash, GET_MODE (dest));
6131 elt->in_memory = (GET_CODE (sets[i].inner_dest) == MEM
6132 && (! RTX_UNCHANGING_P (sets[i].inner_dest)
6133 || FIXED_BASE_PLUS_P (XEXP (sets[i].inner_dest,
6134 0))));
6136 /* If we have (set (subreg:m1 (reg:m2 foo) 0) (bar:m1)), M1 is no
6137 narrower than M2, and both M1 and M2 are the same number of words,
6138 we are also doing (set (reg:m2 foo) (subreg:m2 (bar:m1) 0)) so
6139 make that equivalence as well.
6141 However, BAR may have equivalences for which gen_lowpart_if_possible
6142 will produce a simpler value than it would when applied directly to
6143 BAR (e.g., if BAR was ZERO_EXTENDed from M2), so we will scan all
6144 BAR's equivalences. If we don't get a simplified form, make
6145 the SUBREG. It will not be used in an equivalence, but will
6146 cause two similar assignments to be detected.
6148 Note the loop below will find SUBREG_REG (DEST) since we have
6149 already entered SRC and DEST of the SET in the table. */
6151 if (GET_CODE (dest) == SUBREG
6152 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) - 1)
6153 / UNITS_PER_WORD)
6154 == (GET_MODE_SIZE (GET_MODE (dest)) - 1) / UNITS_PER_WORD)
6155 && (GET_MODE_SIZE (GET_MODE (dest))
6156 >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
6157 && sets[i].src_elt != 0)
6159 enum machine_mode new_mode = GET_MODE (SUBREG_REG (dest));
6160 struct table_elt *elt, *classp = 0;
6162 for (elt = sets[i].src_elt->first_same_value; elt;
6163 elt = elt->next_same_value)
6165 rtx new_src = 0;
6166 unsigned src_hash;
6167 struct table_elt *src_elt;
6169 /* Ignore invalid entries. */
6170 if (GET_CODE (elt->exp) != REG
6171 && ! exp_equiv_p (elt->exp, elt->exp, 1, 0))
6172 continue;
6174 new_src = gen_lowpart_if_possible (new_mode, elt->exp);
6175 if (new_src == 0)
6176 new_src = gen_rtx_SUBREG (new_mode, elt->exp, 0);
6178 src_hash = HASH (new_src, new_mode);
6179 src_elt = lookup (new_src, src_hash, new_mode);
6181 /* Put the new source in the hash table if it isn't
6182 already. */
6183 if (src_elt == 0)
6185 if (insert_regs (new_src, classp, 0))
6187 rehash_using_reg (new_src);
6188 src_hash = HASH (new_src, new_mode);
6190 src_elt = insert (new_src, classp, src_hash, new_mode);
6191 src_elt->in_memory = elt->in_memory;
6193 else if (classp && classp != src_elt->first_same_value)
6194 /* Show that two things that we've seen before are
6195 actually the same. */
6196 merge_equiv_classes (src_elt, classp);
6198 classp = src_elt->first_same_value;
6199 /* Ignore invalid entries. */
6200 while (classp
6201 && GET_CODE (classp->exp) != REG
6202 && ! exp_equiv_p (classp->exp, classp->exp, 1, 0))
6203 classp = classp->next_same_value;
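/* For instance, with 64-bit words, after
   (set (subreg:DI (reg:SI 80) 0) (reg:DI 81))
   the loop above also records the equivalence
   (reg:SI 80) ~ (subreg:SI (reg:DI 81) 0),
   using a simpler lowpart form whenever gen_lowpart_if_possible
   finds one.  */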
6208 /* Special handling for (set REG0 REG1) where REG0 is the
6209 "cheapest", cheaper than REG1. After cse, REG1 will probably not
6210 be used in the sequel, so (if easily done) change this insn to
6211 (set REG1 REG0) and replace REG1 with REG0 in the previous insn
6212 that computed their value. Then REG1 will become a dead store
6213 and won't cloud the situation for later optimizations.
6215 Do not make this change if REG1 is a hard register, because it will
6216 then be used in the sequel and we may be changing a two-operand insn
6217 into a three-operand insn.
6219 Also do not do this if we are operating on a copy of INSN.
6221 Also don't do this if INSN ends a libcall; this would cause an unrelated
6222 register to be set in the middle of a libcall, and we then get bad code
6223 if the libcall is deleted. */
6225 if (n_sets == 1 && sets[0].rtl && GET_CODE (SET_DEST (sets[0].rtl)) == REG
6226 && NEXT_INSN (PREV_INSN (insn)) == insn
6227 && GET_CODE (SET_SRC (sets[0].rtl)) == REG
6228 && REGNO (SET_SRC (sets[0].rtl)) >= FIRST_PSEUDO_REGISTER
6229 && REGNO_QTY_VALID_P (REGNO (SET_SRC (sets[0].rtl))))
6231 int src_q = REG_QTY (REGNO (SET_SRC (sets[0].rtl)));
6232 struct qty_table_elem *src_ent = &qty_table[src_q];
6234 if ((src_ent->first_reg == REGNO (SET_DEST (sets[0].rtl)))
6235 && ! find_reg_note (insn, REG_RETVAL, NULL_RTX))
6237 rtx prev = prev_nonnote_insn (insn);
6239 /* Do not swap the registers around if the previous instruction
6240 attaches a REG_EQUIV note to REG1.
6242 ??? It's not entirely clear whether we can transfer a REG_EQUIV
6243 from the pseudo that originally shadowed an incoming argument
6244 to another register. Some uses of REG_EQUIV might rely on it
6245 being attached to REG1 rather than REG2.
6247 This section previously turned the REG_EQUIV into a REG_EQUAL
6248 note. We cannot do that because REG_EQUIV may provide an
6249 uninitialised stack slot when REG_PARM_STACK_SPACE is used. */
6251 if (prev != 0 && GET_CODE (prev) == INSN
6252 && GET_CODE (PATTERN (prev)) == SET
6253 && SET_DEST (PATTERN (prev)) == SET_SRC (sets[0].rtl)
6254 && ! find_reg_note (prev, REG_EQUIV, NULL_RTX))
6256 rtx dest = SET_DEST (sets[0].rtl);
6257 rtx src = SET_SRC (sets[0].rtl);
6258 rtx note;
6260 validate_change (prev, &SET_DEST (PATTERN (prev)), dest, 1);
6261 validate_change (insn, &SET_DEST (sets[0].rtl), src, 1);
6262 validate_change (insn, &SET_SRC (sets[0].rtl), dest, 1);
6263 apply_change_group ();
6265 /* If there was a REG_WAS_0 note on PREV, remove it. Move
6266 any REG_WAS_0 note on INSN to PREV. */
6267 note = find_reg_note (prev, REG_WAS_0, NULL_RTX);
6268 if (note)
6269 remove_note (prev, note);
6271 note = find_reg_note (insn, REG_WAS_0, NULL_RTX);
6272 if (note)
6274 remove_note (insn, note);
6275 XEXP (note, 1) = REG_NOTES (prev);
6276 REG_NOTES (prev) = note;
6279 /* If INSN has a REG_EQUAL note, and this note mentions
6280 REG0, then we must delete it, because the value in
6281 REG0 has changed. If the note's value is REG1, we must
6282 also delete it because that is now this insn's dest. */
6283 note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
6284 if (note != 0
6285 && (reg_mentioned_p (dest, XEXP (note, 0))
6286 || rtx_equal_p (src, XEXP (note, 0))))
6287 remove_note (insn, note);
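/* Schematically, the swap just made turns

   (set (reg 105) (plus ...))   ; prev computed REG1
   (set (reg 104) (reg 105))    ; insn; REG0 = reg 104 heads the class

   into

   (set (reg 104) (plus ...))   ; prev now computes REG0 directly
   (set (reg 105) (reg 104))    ; likely a dead store, removable later.  */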
6292 /* If this is a conditional jump insn, record any known equivalences due to
6293 the condition being tested. */
6295 last_jump_equiv_class = 0;
6296 if (GET_CODE (insn) == JUMP_INSN
6297 && n_sets == 1 && GET_CODE (x) == SET
6298 && GET_CODE (SET_SRC (x)) == IF_THEN_ELSE)
6299 record_jump_equiv (insn, 0);
6301 #ifdef HAVE_cc0
6302 /* If the previous insn set CC0 and this insn no longer references CC0,
6303 delete the previous insn. Here we use the fact that nothing expects CC0
6304 to be valid over an insn, which is true until the final pass. */
6305 if (prev_insn && GET_CODE (prev_insn) == INSN
6306 && (tem = single_set (prev_insn)) != 0
6307 && SET_DEST (tem) == cc0_rtx
6308 && ! reg_mentioned_p (cc0_rtx, x))
6309 delete_insn (prev_insn);
6311 prev_insn_cc0 = this_insn_cc0;
6312 prev_insn_cc0_mode = this_insn_cc0_mode;
6313 #endif
6315 prev_insn = insn;
6318 /* Remove from the hash table all expressions that reference memory. */
6320 static void
6321 invalidate_memory ()
6323 int i;
6324 struct table_elt *p, *next;
6326 for (i = 0; i < HASH_SIZE; i++)
6327 for (p = table[i]; p; p = next)
6329 next = p->next_same_hash;
6330 if (p->in_memory)
6331 remove_from_table (p, i);
6335 /* If ADDR is an address that implicitly affects the stack pointer, return
6336 1 and update the register tables to show the effect. Else, return 0. */
6338 static int
6339 addr_affects_sp_p (addr)
6340 rtx addr;
6342 if (GET_RTX_CLASS (GET_CODE (addr)) == 'a'
6343 && GET_CODE (XEXP (addr, 0)) == REG
6344 && REGNO (XEXP (addr, 0)) == STACK_POINTER_REGNUM)
6346 if (REG_TICK (STACK_POINTER_REGNUM) >= 0)
6347 REG_TICK (STACK_POINTER_REGNUM)++;
6349 /* This should be *very* rare. */
6350 if (TEST_HARD_REG_BIT (hard_regs_in_table, STACK_POINTER_REGNUM))
6351 invalidate (stack_pointer_rtx, VOIDmode);
6353 return 1;
6356 return 0;
6359 /* Perform invalidation on the basis of everything about an insn
6360 except for invalidating the actual places that are SET in it.
6361 This includes the places CLOBBERed, and anything that might
6362 alias with something that is SET or CLOBBERed.
6364 X is the pattern of the insn. */
6366 static void
6367 invalidate_from_clobbers (x)
6368 rtx x;
6370 if (GET_CODE (x) == CLOBBER)
6372 rtx ref = XEXP (x, 0);
6373 if (ref)
6375 if (GET_CODE (ref) == REG || GET_CODE (ref) == SUBREG
6376 || GET_CODE (ref) == MEM)
6377 invalidate (ref, VOIDmode);
6378 else if (GET_CODE (ref) == STRICT_LOW_PART
6379 || GET_CODE (ref) == ZERO_EXTRACT)
6380 invalidate (XEXP (ref, 0), GET_MODE (ref));
6383 else if (GET_CODE (x) == PARALLEL)
6385 int i;
6386 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
6388 rtx y = XVECEXP (x, 0, i);
6389 if (GET_CODE (y) == CLOBBER)
6391 rtx ref = XEXP (y, 0);
6392 if (GET_CODE (ref) == REG || GET_CODE (ref) == SUBREG
6393 || GET_CODE (ref) == MEM)
6394 invalidate (ref, VOIDmode);
6395 else if (GET_CODE (ref) == STRICT_LOW_PART
6396 || GET_CODE (ref) == ZERO_EXTRACT)
6397 invalidate (XEXP (ref, 0), GET_MODE (ref));
6403 /* Process X, part of the REG_NOTES of an insn. Look at any REG_EQUAL notes
6404 and replace any registers in them with either an equivalent constant
6405 or the canonical form of the register. If we are inside an address,
6406 only do this if the address remains valid.
6408 OBJECT is 0 except when within a MEM in which case it is the MEM.
6410 Return the replacement for X. */
6412 static rtx
6413 cse_process_notes (x, object)
6414 rtx x;
6415 rtx object;
6417 enum rtx_code code = GET_CODE (x);
6418 const char *fmt = GET_RTX_FORMAT (code);
6419 int i;
6421 switch (code)
6423 case CONST_INT:
6424 case CONST:
6425 case SYMBOL_REF:
6426 case LABEL_REF:
6427 case CONST_DOUBLE:
6428 case PC:
6429 case CC0:
6430 case LO_SUM:
6431 return x;
6433 case MEM:
6434 validate_change (x, &XEXP (x, 0),
6435 cse_process_notes (XEXP (x, 0), x), 0);
6436 return x;
6438 case EXPR_LIST:
6439 case INSN_LIST:
6440 if (REG_NOTE_KIND (x) == REG_EQUAL)
6441 XEXP (x, 0) = cse_process_notes (XEXP (x, 0), NULL_RTX);
6442 if (XEXP (x, 1))
6443 XEXP (x, 1) = cse_process_notes (XEXP (x, 1), NULL_RTX);
6444 return x;
6446 case SIGN_EXTEND:
6447 case ZERO_EXTEND:
6448 case SUBREG:
6450 rtx new = cse_process_notes (XEXP (x, 0), object);
6451 /* We don't substitute VOIDmode constants into these rtx,
6452 since they would impede folding. */
6453 if (GET_MODE (new) != VOIDmode)
6454 validate_change (object, &XEXP (x, 0), new, 0);
6455 return x;
6458 case REG:
6459 i = REG_QTY (REGNO (x));
6461 /* Return a constant or a constant register. */
6462 if (REGNO_QTY_VALID_P (REGNO (x)))
6464 struct qty_table_elem *ent = &qty_table[i];
6466 if (ent->const_rtx != NULL_RTX
6467 && (CONSTANT_P (ent->const_rtx)
6468 || GET_CODE (ent->const_rtx) == REG))
6470 rtx new = gen_lowpart_if_possible (GET_MODE (x), ent->const_rtx);
6471 if (new)
6472 return new;
6476 /* Otherwise, canonicalize this register. */
6477 return canon_reg (x, NULL_RTX);
6479 default:
6480 break;
6483 for (i = 0; i < GET_RTX_LENGTH (code); i++)
6484 if (fmt[i] == 'e')
6485 validate_change (object, &XEXP (x, i),
6486 cse_process_notes (XEXP (x, i), object), 0);
6488 return x;
6491 /* Find common subexpressions between the end test of a loop and the beginning
6492 of the loop. LOOP_START is the CODE_LABEL at the start of a loop.
6494 Often we have a loop where an expression in the exit test is used
6495 in the body of the loop. For example "while (*p) *q++ = *p++;".
6496 Because of the way we duplicate the loop exit test in front of the loop,
6497 however, we don't detect that common subexpression. This will be caught
6498 when global cse is implemented, but this is a quite common case.
6500 This function handles the most common cases of these common expressions.
6501 It is called after we have processed the basic block ending with the
6502 NOTE_INSN_LOOP_END note that ends a loop, when the previous JUMP_INSN
6503 jumps to a label used only once. */
6505 static void
6506 cse_around_loop (loop_start)
6507 rtx loop_start;
6509 rtx insn;
6510 int i;
6511 struct table_elt *p;
6513 /* If the jump at the end of the loop doesn't go to the start, we don't
6514 do anything. */
6515 for (insn = PREV_INSN (loop_start);
6516 insn && (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) >= 0);
6517 insn = PREV_INSN (insn))
6520 if (insn == 0
6521 || GET_CODE (insn) != NOTE
6522 || NOTE_LINE_NUMBER (insn) != NOTE_INSN_LOOP_BEG)
6523 return;
6525 /* If the last insn of the loop (the end test) was an NE comparison,
6526 we will interpret it as an EQ comparison, since we fell through
6527 the loop. Any equivalences resulting from that comparison are
6528 therefore not valid and must be invalidated. */
6529 if (last_jump_equiv_class)
6530 for (p = last_jump_equiv_class->first_same_value; p;
6531 p = p->next_same_value)
6533 if (GET_CODE (p->exp) == MEM || GET_CODE (p->exp) == REG
6534 || (GET_CODE (p->exp) == SUBREG
6535 && GET_CODE (SUBREG_REG (p->exp)) == REG))
6536 invalidate (p->exp, VOIDmode);
6537 else if (GET_CODE (p->exp) == STRICT_LOW_PART
6538 || GET_CODE (p->exp) == ZERO_EXTRACT)
6539 invalidate (XEXP (p->exp, 0), GET_MODE (p->exp));
6542 /* Process insns starting after LOOP_START until we hit a CALL_INSN or
6543 a CODE_LABEL (we could handle a CALL_INSN, but it isn't worth it).
6545 The only thing we do with SET_DEST is invalidate entries, so we
6546 can safely process each SET in order. It is slightly less efficient
6547 to do so, but we only want to handle the most common cases.
6549 The gen_move_insn call in cse_set_around_loop may create new pseudos.
6550 These pseudos won't have valid entries in any of the tables indexed
6551 by register number, such as reg_qty. We avoid out-of-range array
6552 accesses by not processing any instructions created after cse started. */
6554 for (insn = NEXT_INSN (loop_start);
6555 GET_CODE (insn) != CALL_INSN && GET_CODE (insn) != CODE_LABEL
6556 && INSN_UID (insn) < max_insn_uid
6557 && ! (GET_CODE (insn) == NOTE
6558 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END);
6559 insn = NEXT_INSN (insn))
6561 if (INSN_P (insn)
6562 && (GET_CODE (PATTERN (insn)) == SET
6563 || GET_CODE (PATTERN (insn)) == CLOBBER))
6564 cse_set_around_loop (PATTERN (insn), insn, loop_start);
6565 else if (INSN_P (insn) && GET_CODE (PATTERN (insn)) == PARALLEL)
6566 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
6567 if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET
6568 || GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == CLOBBER)
6569 cse_set_around_loop (XVECEXP (PATTERN (insn), 0, i), insn,
6570 loop_start);
6574 /* Process one SET of an insn that was skipped. We ignore CLOBBERs
6575 since they are done elsewhere. This function is called via note_stores. */
6577 static void
6578 invalidate_skipped_set (dest, set, data)
6579 rtx dest;
6580 rtx set;
6581 void *data ATTRIBUTE_UNUSED;
6583 enum rtx_code code = GET_CODE (dest);
6585 if (code == MEM
6586 && ! addr_affects_sp_p (dest) /* If this is not a stack push ... */
6587 /* There are times when an address can appear varying and be a PLUS
6588 during this scan when it would be a fixed address were we to know
6589 the proper equivalences. So invalidate all memory if there is
6590 a BLKmode or nonscalar memory reference or a reference to a
6591 variable address. */
6592 && (MEM_IN_STRUCT_P (dest) || GET_MODE (dest) == BLKmode
6593 || cse_rtx_varies_p (XEXP (dest, 0), 0)))
6595 invalidate_memory ();
6596 return;
6599 if (GET_CODE (set) == CLOBBER
6600 #ifdef HAVE_cc0
6601 || dest == cc0_rtx
6602 #endif
6603 || dest == pc_rtx)
6604 return;
6606 if (code == STRICT_LOW_PART || code == ZERO_EXTRACT)
6607 invalidate (XEXP (dest, 0), GET_MODE (dest));
6608 else if (code == REG || code == SUBREG || code == MEM)
6609 invalidate (dest, VOIDmode);
6612 /* Invalidate all insns from START up to the end of the function or the
6613 next label. This is called when we wish to CSE around a block that is
6614 conditionally executed. */
6616 static void
6617 invalidate_skipped_block (start)
6618 rtx start;
6620 rtx insn;
6622 for (insn = start; insn && GET_CODE (insn) != CODE_LABEL;
6623 insn = NEXT_INSN (insn))
6625 if (! INSN_P (insn))
6626 continue;
6628 if (GET_CODE (insn) == CALL_INSN)
6630 if (! CONST_OR_PURE_CALL_P (insn))
6631 invalidate_memory ();
6632 invalidate_for_call ();
6635 invalidate_from_clobbers (PATTERN (insn));
6636 note_stores (PATTERN (insn), invalidate_skipped_set, NULL);
6640 /* If modifying X will modify the value in *DATA (which is really an
6641 `rtx *'), indicate that fact by setting the pointed to value to
6642 NULL_RTX. */
6644 static void
6645 cse_check_loop_start (x, set, data)
6646 rtx x;
6647 rtx set ATTRIBUTE_UNUSED;
6648 void *data;
6650 rtx *cse_check_loop_start_value = (rtx *) data;
6652 if (*cse_check_loop_start_value == NULL_RTX
6653 || GET_CODE (x) == CC0 || GET_CODE (x) == PC)
6654 return;
6656 if ((GET_CODE (x) == MEM && GET_CODE (*cse_check_loop_start_value) == MEM)
6657 || reg_overlap_mentioned_p (x, *cse_check_loop_start_value))
6658 *cse_check_loop_start_value = NULL_RTX;
6661 /* X is a SET or CLOBBER contained in INSN that was found near the start of
6662 a loop that starts with the label at LOOP_START.
6664 If X is a SET, we see if its SET_SRC is currently in our hash table.
6665 If so, we see if it has a value equal to some register used only in the
6666 loop exit code (as marked by jump.c).
6668 If those two conditions are true, we search backwards from the start of
6669 the loop to see if that same value was loaded into a register that still
6670 retains its value at the start of the loop.
6672 If so, we insert an insn after the load to copy the destination of that
6673 load into the equivalent register and (try to) replace our SET_SRC with that
6674 register.
6676 In any event, we invalidate whatever this SET or CLOBBER modifies. */
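/* Schematically: if the loop body starts with (set (reg D) E), E is
   known equal to a cheaper register R used only in the exit test,
   and an insn P before the loop already did (set (reg C) E), then we
   emit (set R C) after P and rewrite the body SET to
   (set (reg D) R).  */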
6678 static void
6679 cse_set_around_loop (x, insn, loop_start)
6680 rtx x;
6681 rtx insn;
6682 rtx loop_start;
6684 struct table_elt *src_elt;
6686 /* If this is a SET, see if we can replace SET_SRC, but ignore SETs that
6687 are setting PC or CC0 or whose SET_SRC is already a register. */
6688 if (GET_CODE (x) == SET
6689 && GET_CODE (SET_DEST (x)) != PC && GET_CODE (SET_DEST (x)) != CC0
6690 && GET_CODE (SET_SRC (x)) != REG)
6692 src_elt = lookup (SET_SRC (x),
6693 HASH (SET_SRC (x), GET_MODE (SET_DEST (x))),
6694 GET_MODE (SET_DEST (x)));
6696 if (src_elt)
6697 for (src_elt = src_elt->first_same_value; src_elt;
6698 src_elt = src_elt->next_same_value)
6699 if (GET_CODE (src_elt->exp) == REG && REG_LOOP_TEST_P (src_elt->exp)
6700 && COST (src_elt->exp) < COST (SET_SRC (x)))
6702 rtx p, set;
6704 /* Look for an insn in front of LOOP_START that sets
6705 something in the desired mode to SET_SRC (x) before we hit
6706 a label or CALL_INSN. */
6708 for (p = prev_nonnote_insn (loop_start);
6709 p && GET_CODE (p) != CALL_INSN
6710 && GET_CODE (p) != CODE_LABEL;
6711 p = prev_nonnote_insn (p))
6712 if ((set = single_set (p)) != 0
6713 && GET_CODE (SET_DEST (set)) == REG
6714 && GET_MODE (SET_DEST (set)) == src_elt->mode
6715 && rtx_equal_p (SET_SRC (set), SET_SRC (x)))
6717 /* We now have to ensure that nothing between P
6718 and LOOP_START modified anything referenced in
6719 SET_SRC (x). We know that nothing within the loop
6720 can modify it, or we would have invalidated it in
6721 the hash table. */
6722 rtx q;
6723 rtx cse_check_loop_start_value = SET_SRC (x);
6724 for (q = p; q != loop_start; q = NEXT_INSN (q))
6725 if (INSN_P (q))
6726 note_stores (PATTERN (q),
6727 cse_check_loop_start,
6728 &cse_check_loop_start_value);
6730 /* If nothing was changed and we can replace our
6731 SET_SRC, add an insn after P to copy its destination
6732 to what we will be replacing SET_SRC with. */
6733 if (cse_check_loop_start_value
6734 && validate_change (insn, &SET_SRC (x),
6735 src_elt->exp, 0))
6737 /* If this creates new pseudos, this is unsafe,
6738 because the regno of new pseudo is unsuitable
6739 to index into reg_qty when cse_insn processes
6740 the new insn. Therefore, if a new pseudo was
6741 created, discard this optimization. */
6742 int nregs = max_reg_num ();
6743 rtx move
6744 = gen_move_insn (src_elt->exp, SET_DEST (set));
6745 if (nregs != max_reg_num ())
6747 if (! validate_change (insn, &SET_SRC (x),
6748 SET_SRC (set), 0))
6749 abort ();
6751 else
6752 emit_insn_after (move, p);
6754 break;
6759 /* Deal with the destination of X affecting the stack pointer. */
6760 addr_affects_sp_p (SET_DEST (x));
6762 /* See comment on similar code in cse_insn for explanation of these
6763 tests. */
6764 if (GET_CODE (SET_DEST (x)) == REG || GET_CODE (SET_DEST (x)) == SUBREG
6765 || GET_CODE (SET_DEST (x)) == MEM)
6766 invalidate (SET_DEST (x), VOIDmode);
6767 else if (GET_CODE (SET_DEST (x)) == STRICT_LOW_PART
6768 || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
6769 invalidate (XEXP (SET_DEST (x), 0), GET_MODE (SET_DEST (x)));
6772 /* Find the end of INSN's basic block and return its range,
6773 the total number of SETs in all the insns of the block, the last insn of the
6774 block, and the branch path.
6776 The branch path indicates which branches should be followed. If a non-zero
6777 path size is specified, the block should be rescanned and a different set
6778 of branches will be taken. The branch path is only used if
6779 FLAG_CSE_FOLLOW_JUMPS or FLAG_CSE_SKIP_BLOCKS is non-zero.
6781 DATA is a pointer to a struct cse_basic_block_data, defined below, that is
6782 used to describe the block. It is filled in with the information about
6783 the current block. The incoming structure's branch path, if any, is used
6784 to construct the output branch path. */
6786 void
6787 cse_end_of_basic_block (insn, data, follow_jumps, after_loop, skip_blocks)
6788 rtx insn;
6789 struct cse_basic_block_data *data;
6790 int follow_jumps;
6791 int after_loop;
6792 int skip_blocks;
6794 rtx p = insn, q;
6795 int nsets = 0;
6796 int low_cuid = INSN_CUID (insn), high_cuid = INSN_CUID (insn);
6797 rtx next = INSN_P (insn) ? insn : next_real_insn (insn);
6798 int path_size = data->path_size;
6799 int path_entry = 0;
6800 int i;
6802 /* Update the previous branch path, if any. If the last branch was
6803 previously TAKEN, mark it NOT_TAKEN. If it was previously NOT_TAKEN,
6804 shorten the path by one and look at the previous branch. We know that
6805 at least one branch must have been taken if PATH_SIZE is non-zero. */
6806 while (path_size > 0)
6808 if (data->path[path_size - 1].status != NOT_TAKEN)
6810 data->path[path_size - 1].status = NOT_TAKEN;
6811 break;
6813 else
6814 path_size--;
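/* A standalone sketch of that backtracking walk, with invented names
   and the statuses reduced to taken/not-taken; illustrative only and
   not compiled into this file.  */
#if 0
enum sketch_taken { SKETCH_TAKEN, SKETCH_NOT_TAKEN };

/* Retreat PATH of SIZE entries one step: flip the deepest entry that
   is still taken, popping exhausted entries.  Return the new size,
   which is 0 once every combination has been tried.  */
static int
sketch_backtrack (enum sketch_taken *path, int size)
{
  while (size > 0)
    {
      if (path[size - 1] != SKETCH_NOT_TAKEN)
        {
          path[size - 1] = SKETCH_NOT_TAKEN;
          break;
        }
      size--;
    }
  return size;
}
#endif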
6817 /* If the first instruction is marked with QImode, that means we've
6818 already processed this block. Our caller will look at DATA->LAST
6819 to figure out where to go next. We want to return the next block
6820 in the instruction stream, not some branched-to block somewhere
6821 else. We accomplish this by pretending our caller forbade us to
6822 follow jumps or skip blocks.
6823 if (GET_MODE (insn) == QImode)
6824 follow_jumps = skip_blocks = 0;
6826 /* Scan to end of this basic block. */
6827 while (p && GET_CODE (p) != CODE_LABEL)
6829 /* Don't cse out the end of a loop. This makes a difference
6830 only for the unusual loops that always execute at least once;
6831 all other loops have labels there so we will stop in any case.
6832 Cse'ing out the end of the loop is dangerous because it
6833 might cause an invariant expression inside the loop
6834 to be reused after the end of the loop. This would make it
6835 hard to move the expression out of the loop in loop.c,
6836 especially if it is one of several equivalent expressions
6837 and loop.c would like to eliminate it.
6839 If we are running after loop.c has finished, we can ignore
6840 the NOTE_INSN_LOOP_END. */
6842 if (! after_loop && GET_CODE (p) == NOTE
6843 && NOTE_LINE_NUMBER (p) == NOTE_INSN_LOOP_END)
6844 break;
6846 /* Don't cse over a call to setjmp; on some machines (eg VAX)
6847 the regs restored by the longjmp come from
6848 a later time than the setjmp. */
6849 if (PREV_INSN (p) && GET_CODE (PREV_INSN (p)) == CALL_INSN
6850 && find_reg_note (PREV_INSN (p), REG_SETJMP, NULL))
6851 break;
6853 /* A PARALLEL can have lots of SETs in it,
6854 especially if it is really an ASM_OPERANDS. */
6855 if (INSN_P (p) && GET_CODE (PATTERN (p)) == PARALLEL)
6856 nsets += XVECLEN (PATTERN (p), 0);
6857 else if (GET_CODE (p) != NOTE)
6858 nsets += 1;
6860 /* Ignore insns made by CSE; they cannot affect the boundaries of
6861 the basic block. */
6863 if (INSN_UID (p) <= max_uid && INSN_CUID (p) > high_cuid)
6864 high_cuid = INSN_CUID (p);
6865 if (INSN_UID (p) <= max_uid && INSN_CUID (p) < low_cuid)
6866 low_cuid = INSN_CUID (p);
6868 /* See if this insn is in our branch path. If it is and we are to
6869 take it, do so. */
6870 if (path_entry < path_size && data->path[path_entry].branch == p)
6872 if (data->path[path_entry].status != NOT_TAKEN)
6873 p = JUMP_LABEL (p);
6875 /* Point to next entry in path, if any. */
6876 path_entry++;
6879 /* If this is a conditional jump, we can follow it if -fcse-follow-jumps
6880 was specified, we haven't reached our maximum path length, there are
6881 insns following the target of the jump, this is the only use of the
6882 jump label, and the target label is preceded by a BARRIER.
6884 Alternatively, we can follow the jump if it branches around a
6885 block of code and there are no other branches into the block.
6886 In this case invalidate_skipped_block will be called to invalidate any
6887 registers set in the block when following the jump. */
6889 else if ((follow_jumps || skip_blocks) && path_size < PATHLENGTH - 1
6890 && GET_CODE (p) == JUMP_INSN
6891 && GET_CODE (PATTERN (p)) == SET
6892 && GET_CODE (SET_SRC (PATTERN (p))) == IF_THEN_ELSE
6893 && JUMP_LABEL (p) != 0
6894 && LABEL_NUSES (JUMP_LABEL (p)) == 1
6895 && NEXT_INSN (JUMP_LABEL (p)) != 0)
6897 for (q = PREV_INSN (JUMP_LABEL (p)); q; q = PREV_INSN (q))
6898 if ((GET_CODE (q) != NOTE
6899 || NOTE_LINE_NUMBER (q) == NOTE_INSN_LOOP_END
6900 || (PREV_INSN (q) && GET_CODE (PREV_INSN (q)) == CALL_INSN
6901 && find_reg_note (PREV_INSN (q), REG_SETJMP, NULL)))
6902 && (GET_CODE (q) != CODE_LABEL || LABEL_NUSES (q) != 0))
6903 break;
6905 /* If we ran into a BARRIER, this code is an extension of the
6906 basic block when the branch is taken. */
6907 if (follow_jumps && q != 0 && GET_CODE (q) == BARRIER)
6909 /* Don't allow ourselves to keep walking around an
6910 always-executed loop. */
6911 if (next_real_insn (q) == next)
6913 p = NEXT_INSN (p);
6914 continue;
6917 /* Similarly, don't put a branch in our path more than once. */
6918 for (i = 0; i < path_entry; i++)
6919 if (data->path[i].branch == p)
6920 break;
6922 if (i != path_entry)
6923 break;
6925 data->path[path_entry].branch = p;
6926 data->path[path_entry++].status = TAKEN;
6928 /* This branch now ends our path. It was possible that we
6929 didn't see this branch the last time around (when the
6930 insn in front of the target was a JUMP_INSN that was
6931 turned into a no-op). */
6932 path_size = path_entry;
6934 p = JUMP_LABEL (p);
6935 /* Mark block so we won't scan it again later. */
6936 PUT_MODE (NEXT_INSN (p), QImode);
6938 /* Detect a branch around a block of code. */
6939 else if (skip_blocks && q != 0 && GET_CODE (q) != CODE_LABEL)
6941 rtx tmp;
6943 if (next_real_insn (q) == next)
6945 p = NEXT_INSN (p);
6946 continue;
6949 for (i = 0; i < path_entry; i++)
6950 if (data->path[i].branch == p)
6951 break;
6953 if (i != path_entry)
6954 break;
6956 /* This is no_labels_between_p (p, q) with an added check for
6957 reaching the end of a function (in case Q precedes P). */
6958 for (tmp = NEXT_INSN (p); tmp && tmp != q; tmp = NEXT_INSN (tmp))
6959 if (GET_CODE (tmp) == CODE_LABEL)
6960 break;
6962 if (tmp == q)
6964 data->path[path_entry].branch = p;
6965 data->path[path_entry++].status = AROUND;
6967 path_size = path_entry;
6969 p = JUMP_LABEL (p);
6970 /* Mark block so we won't scan it again later. */
6971 PUT_MODE (NEXT_INSN (p), QImode);
6975 p = NEXT_INSN (p);
6978 data->low_cuid = low_cuid;
6979 data->high_cuid = high_cuid;
6980 data->nsets = nsets;
6981 data->last = p;
6983 /* If all jumps in the path are not taken, set our path length to zero
6984 so a rescan won't be done. */
6985 for (i = path_size - 1; i >= 0; i--)
6986 if (data->path[i].status != NOT_TAKEN)
6987 break;
6989 if (i == -1)
6990 data->path_size = 0;
6991 else
6992 data->path_size = path_size;
6994 /* End the current branch path. */
6995 data->path[path_size].branch = 0;
6998 /* Perform cse on the instructions of a function.
6999 F is the first instruction.
7000 NREGS is one plus the highest pseudo-reg number used in the function.
7002 AFTER_LOOP is 1 if this is the cse call done after loop optimization
7003 (only if -frerun-cse-after-loop).
7005 Returns 1 if jump_optimize should be redone due to simplifications
7006 in conditional jump instructions. */
7008 int
7009 cse_main (f, nregs, after_loop, file)
7010 rtx f;
7011 int nregs;
7012 int after_loop;
7013 FILE *file;
7015 struct cse_basic_block_data val;
7016 rtx insn = f;
7017 int i;
7019 cse_jumps_altered = 0;
7020 recorded_label_ref = 0;
7021 constant_pool_entries_cost = 0;
7022 val.path_size = 0;
7024 init_recog ();
7025 init_alias_analysis ();
7027 max_reg = nregs;
7029 max_insn_uid = get_max_uid ();
7031 reg_eqv_table = (struct reg_eqv_elem *)
7032 xmalloc (nregs * sizeof (struct reg_eqv_elem));
7034 #ifdef LOAD_EXTEND_OP
7036 /* Allocate scratch rtl here. cse_insn will fill in the memory reference
7037 and change the code and mode as appropriate. */
7038 memory_extend_rtx = gen_rtx_ZERO_EXTEND (VOIDmode, NULL_RTX);
7039 #endif
7041 /* Reset the counter indicating how many elements have been made
7042 thus far. */
7043 n_elements_made = 0;
7045 /* Find the largest uid. */
7047 max_uid = get_max_uid ();
7048 uid_cuid = (int *) xcalloc (max_uid + 1, sizeof (int));
7050 /* Compute the mapping from uids to cuids.
7051 CUIDs are numbers assigned to insns, like uids,
7052 except that cuids increase monotonically through the code.
7053 Don't assign cuids to line-number NOTEs, so that the distance in cuids
7054 between two insns is not affected by -g. */
7056 for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
7058 if (GET_CODE (insn) != NOTE
7059 || NOTE_LINE_NUMBER (insn) < 0)
7060 INSN_CUID (insn) = ++i;
7061 else
7062 /* Give a line-number note the same cuid as the preceding insn. */
7063 INSN_CUID (insn) = i;
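/* Concretely: insns with uids 1, 2, 3, 4, where 3 is a line-number
   note, get cuids 1, 2, 2, 3; the note shares its predecessor's
   cuid, so cuid distances are the same with and without -g.  */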
7066 ggc_push_context ();
7068 /* Loop over basic blocks.
7069 Compute the maximum number of qty's needed for each basic block
7070 (which is 2 for each SET). */
7071 insn = f;
7072 while (insn)
7074 cse_altered = 0;
7075 cse_end_of_basic_block (insn, &val, flag_cse_follow_jumps, after_loop,
7076 flag_cse_skip_blocks);
7078 /* If this basic block was already processed or has no sets, skip it. */
7079 if (val.nsets == 0 || GET_MODE (insn) == QImode)
7081 PUT_MODE (insn, VOIDmode);
7082 insn = (val.last ? NEXT_INSN (val.last) : 0);
7083 val.path_size = 0;
7084 continue;
7087 cse_basic_block_start = val.low_cuid;
7088 cse_basic_block_end = val.high_cuid;
7089 max_qty = val.nsets * 2;
7091 if (file)
7092 fnotice (file, ";; Processing block from %d to %d, %d sets.\n",
7093 INSN_UID (insn), val.last ? INSN_UID (val.last) : 0,
7094 val.nsets);
7096 /* Make MAX_QTY bigger to give us room to optimize
7097 past the end of this basic block, if that should prove useful. */
7098 if (max_qty < 500)
7099 max_qty = 500;
7101 max_qty += max_reg;
7103 /* If this basic block is being extended by following certain jumps,
7104 (see `cse_end_of_basic_block'), we reprocess the code from the start.
7105 Otherwise, we start after this basic block. */
7106 if (val.path_size > 0)
7107 cse_basic_block (insn, val.last, val.path, 0);
7108 else
7110 int old_cse_jumps_altered = cse_jumps_altered;
7111 rtx temp;
7113 /* When cse changes a conditional jump to an unconditional
7114 jump, we want to reprocess the block, since it will give
7115 us a new branch path to investigate. */
7116 cse_jumps_altered = 0;
7117 temp = cse_basic_block (insn, val.last, val.path, ! after_loop);
7118 if (cse_jumps_altered == 0
7119 || (flag_cse_follow_jumps == 0 && flag_cse_skip_blocks == 0))
7120 insn = temp;
7122 cse_jumps_altered |= old_cse_jumps_altered;
7125 if (cse_altered)
7126 ggc_collect ();
7128 #ifdef USE_C_ALLOCA
7129 alloca (0);
7130 #endif
7133 ggc_pop_context ();
7135 if (max_elements_made < n_elements_made)
7136 max_elements_made = n_elements_made;
7138 /* Clean up. */
7139 end_alias_analysis ();
7140 free (uid_cuid);
7141 free (reg_eqv_table);
7143 return cse_jumps_altered || recorded_label_ref;
7146 /* Process a single basic block. FROM and TO are the limits of the basic
7147 block. NEXT_BRANCH points to the branch path when following jumps or
7148 a null path when not following jumps.
7150 AROUND_LOOP is non-zero if we are to try to cse around to the start of a
7151 loop. This is true when we are being called for the last time on a
7152 block and this CSE pass is before loop.c. */
7154 static rtx
7155 cse_basic_block (from, to, next_branch, around_loop)
7156 rtx from, to;
7157 struct branch_path *next_branch;
7158 int around_loop;
7160 rtx insn;
7161 int to_usage = 0;
7162 rtx libcall_insn = NULL_RTX;
7163 int num_insns = 0;
7165 /* This array is undefined before max_reg, so only allocate
7166 the space actually needed and adjust the start. */
7168 qty_table
7169 = (struct qty_table_elem *) xmalloc ((max_qty - max_reg)
7170 * sizeof (struct qty_table_elem));
7171 qty_table -= max_reg;
7173 new_basic_block ();
7175 /* TO might be a label. If so, protect it from being deleted. */
7176 if (to != 0 && GET_CODE (to) == CODE_LABEL)
7177 ++LABEL_NUSES (to);
7179 for (insn = from; insn != to; insn = NEXT_INSN (insn))
7181 enum rtx_code code = GET_CODE (insn);
7183 /* If we have processed 1,000 insns, flush the hash table to
7184 avoid extreme quadratic behavior. We must not include NOTEs
7185 in the count since there may be more of them when generating
7186 debugging information. If we clear the table at different
7187 times, code generated with -g -O might be different from code
7188 generated with -O but not -g.
7190 ??? This is a real kludge and needs to be done some other way.
7191 Perhaps for 2.9. */
7192 if (code != NOTE && num_insns++ > 1000)
7194 flush_hash_table ();
7195 num_insns = 0;
7198 /* See if this is a branch that is part of the path. If so, and it is
7199 to be taken, do so. */
7200 if (next_branch->branch == insn)
7202 enum taken status = next_branch++->status;
7203 if (status != NOT_TAKEN)
7205 if (status == TAKEN)
7206 record_jump_equiv (insn, 1);
7207 else
7208 invalidate_skipped_block (NEXT_INSN (insn));
7210 /* Set the last insn as the jump insn; it doesn't affect cc0.
7211 Then follow this branch. */
7212 #ifdef HAVE_cc0
7213 prev_insn_cc0 = 0;
7214 #endif
7215 prev_insn = insn;
7216 insn = JUMP_LABEL (insn);
7217 continue;
7221 if (GET_MODE (insn) == QImode)
7222 PUT_MODE (insn, VOIDmode);
7224 if (GET_RTX_CLASS (code) == 'i')
7226 rtx p;
7228 /* Process notes first so we have all notes in canonical forms when
7229 looking for duplicate operations. */
7231 if (REG_NOTES (insn))
7232 REG_NOTES (insn) = cse_process_notes (REG_NOTES (insn), NULL_RTX);
7234 /* Track when we are inside in LIBCALL block. Inside such a block,
7235 we do not want to record destinations. The last insn of a
7236 LIBCALL block is not considered to be part of the block, since
7237 its destination is the result of the block and hence should be
7238 recorded. */
7240 if (REG_NOTES (insn) != 0)
7242 if ((p = find_reg_note (insn, REG_LIBCALL, NULL_RTX)))
7243 libcall_insn = XEXP (p, 0);
7244 else if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
7245 libcall_insn = 0;
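
	  /* An illustrative sketch (register numbers are made up): a
	     LIBCALL block looks like

		(insn i1 ... [REG_LIBCALL -> i3])	first insn of block
		(insn i2 ...)				body of the libcall
		(insn i3 (set (reg 100) (reg 99))
			 [REG_RETVAL -> i1] ...)	result copy

	     so LIBCALL_INSN is nonzero while processing i1 and i2 but is
	     cleared again before i3, and only i3's destination gets
	     recorded.  */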

	  cse_insn (insn, libcall_insn);

	  /* If we haven't already found an insn where we added a LABEL_REF,
	     check this one.  */
	  if (GET_CODE (insn) == INSN && ! recorded_label_ref
	      && for_each_rtx (&PATTERN (insn), check_for_label_ref,
			       (void *) insn))
	    recorded_label_ref = 1;
	}

      /* If INSN is now an unconditional jump, skip to the end of our
	 basic block by pretending that we just did the last insn in the
	 basic block.  If we are jumping to the end of our block, show
	 that we can have one usage of TO.  */

      if (any_uncondjump_p (insn))
	{
	  if (to == 0)
	    {
	      free (qty_table + max_reg);
	      return 0;
	    }

	  if (JUMP_LABEL (insn) == to)
	    to_usage = 1;

	  /* Maybe TO was deleted because the jump is unconditional.
	     If so, there is nothing left in this basic block.  */
	  /* ??? Perhaps it would be smarter to set TO
	     to whatever follows this insn,
	     and pretend the basic block had always ended here.  */
	  if (INSN_DELETED_P (to))
	    break;

	  insn = PREV_INSN (to);
	}

      /* See if it is ok to keep on going past the label
	 which used to end our basic block.  Remember that we incremented
	 the count of that label, so we decrement it here.  If we made
	 a jump unconditional, TO_USAGE will be one; in that case, we don't
	 want to count the use in that jump.  */

      if (to != 0 && NEXT_INSN (insn) == to
	  && GET_CODE (to) == CODE_LABEL && --LABEL_NUSES (to) == to_usage)
	{
	  struct cse_basic_block_data val;
	  rtx prev;

	  insn = NEXT_INSN (to);

	  /* If TO was the last insn in the function, we are done.  */
	  if (insn == 0)
	    {
	      free (qty_table + max_reg);
	      return 0;
	    }

	  /* If TO was preceded by a BARRIER we are done with this block
	     because it has no continuation.  */
	  prev = prev_nonnote_insn (to);
	  if (prev && GET_CODE (prev) == BARRIER)
	    {
	      free (qty_table + max_reg);
	      return insn;
	    }

	  /* Find the end of the following block.  Note that we won't be
	     following branches in this case.  */
	  to_usage = 0;
	  val.path_size = 0;
	  cse_end_of_basic_block (insn, &val, 0, 0, 0);

	  /* If the tables we allocated have enough space left
	     to handle all the SETs in the next basic block,
	     continue through it.  Otherwise, return,
	     and that block will be scanned individually.  */
	  if (val.nsets * 2 + next_qty > max_qty)
	    break;

	  cse_basic_block_start = val.low_cuid;
	  cse_basic_block_end = val.high_cuid;
	  to = val.last;

	  /* Prevent TO from being deleted if it is a label.  */
	  if (to != 0 && GET_CODE (to) == CODE_LABEL)
	    ++LABEL_NUSES (to);

	  /* Back up so we process the first insn in the extension.  */
	  insn = PREV_INSN (insn);
	}
    }

  if (next_qty > max_qty)
    abort ();

  /* If we are running before loop.c, we stopped on a NOTE_INSN_LOOP_END, and
     the previous insn is the only insn that branches to the head of a loop,
     we can cse into the loop.  Don't do this if we changed the jump
     structure of a loop unless we aren't going to be following jumps.  */

  insn = prev_nonnote_insn (to);
  if ((cse_jumps_altered == 0
       || (flag_cse_follow_jumps == 0 && flag_cse_skip_blocks == 0))
      && around_loop && to != 0
      && GET_CODE (to) == NOTE && NOTE_LINE_NUMBER (to) == NOTE_INSN_LOOP_END
      && GET_CODE (insn) == JUMP_INSN
      && JUMP_LABEL (insn) != 0
      && LABEL_NUSES (JUMP_LABEL (insn)) == 1)
    cse_around_loop (JUMP_LABEL (insn));

  free (qty_table + max_reg);

  return to ? NEXT_INSN (to) : 0;
}
/* Called via for_each_rtx to see if an insn is using a LABEL_REF for which
   there isn't a REG_LABEL note.  Return one if so.  DATA is the insn.  */

static int
check_for_label_ref (rtl, data)
     rtx *rtl;
     void *data;
{
  rtx insn = (rtx) data;

  /* If this insn uses a LABEL_REF and there isn't a REG_LABEL note for it,
     we must rerun jump since it needs to place the note.  If this is a
     LABEL_REF for a CODE_LABEL that isn't in the insn chain, don't do this
     since no REG_LABEL will be added.  */
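
  /* For example (illustrative): if cse rewrites an insn so that its
     pattern now contains (label_ref 23) but the insn carries no matching
     REG_LABEL note, we return 1 here; the caller then sets
     recorded_label_ref so that jump is rerun to add the note.  */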
  return (GET_CODE (*rtl) == LABEL_REF
	  && LABEL_P (XEXP (*rtl, 0))
	  && INSN_UID (XEXP (*rtl, 0)) != 0
	  && ! find_reg_note (insn, REG_LABEL, XEXP (*rtl, 0)));
}

/* Count the number of times registers are used (not set) in X.
   COUNTS is an array in which we accumulate the count, INCR is how much
   we count each register usage.

   Don't count a usage of DEST, which is the SET_DEST of a SET which
   contains X in its SET_SRC.  This is because such a SET does not
   modify the liveness of DEST.  */
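
/* For example (an illustrative sketch): for the insn

	(set (reg 5) (plus (reg 5) (reg 6)))

   a call with X == the PLUS, DEST == (reg 5) and INCR == 1 increments
   only counts[6]; the use of (reg 5) is skipped because deleting the
   whole SET would remove that use along with the definition.  */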

static void
count_reg_usage (x, counts, dest, incr)
     rtx x;
     int *counts;
     rtx dest;
     int incr;
{
  enum rtx_code code;
  const char *fmt;
  int i, j;

  if (x == 0)
    return;

  switch (code = GET_CODE (x))
    {
    case REG:
      if (x != dest)
	counts[REGNO (x)] += incr;
      return;

    case PC:
    case CC0:
    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case SYMBOL_REF:
    case LABEL_REF:
      return;

    case CLOBBER:
      /* If we are clobbering a MEM, mark any registers inside the address
	 as being used.  */
      if (GET_CODE (XEXP (x, 0)) == MEM)
	count_reg_usage (XEXP (XEXP (x, 0), 0), counts, NULL_RTX, incr);
      return;

    case SET:
      /* Unless we are setting a REG, count everything in SET_DEST.  */
      if (GET_CODE (SET_DEST (x)) != REG)
	count_reg_usage (SET_DEST (x), counts, NULL_RTX, incr);

      /* If SRC has side-effects, then we can't delete this insn, so the
	 usage of SET_DEST inside SRC counts.

	 ??? Strictly-speaking, we might be preserving this insn
	 because some other SET has side-effects, but that's hard
	 to do and can't happen now.  */
      count_reg_usage (SET_SRC (x), counts,
		       side_effects_p (SET_SRC (x)) ? NULL_RTX : SET_DEST (x),
		       incr);
      return;

    case CALL_INSN:
      count_reg_usage (CALL_INSN_FUNCTION_USAGE (x), counts, NULL_RTX, incr);
      /* Fall through.  */

    case INSN:
    case JUMP_INSN:
      count_reg_usage (PATTERN (x), counts, NULL_RTX, incr);

      /* Things used in a REG_EQUAL note aren't dead since loop may try to
	 use them.  */

      count_reg_usage (REG_NOTES (x), counts, NULL_RTX, incr);
      return;

    case EXPR_LIST:
    case INSN_LIST:
      if (REG_NOTE_KIND (x) == REG_EQUAL
	  || (REG_NOTE_KIND (x) != REG_NONNEG
	      && GET_CODE (XEXP (x, 0)) == USE))
	count_reg_usage (XEXP (x, 0), counts, NULL_RTX, incr);
      count_reg_usage (XEXP (x, 1), counts, NULL_RTX, incr);
      return;

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
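  /* Recurse on the operands: in the RTX format string, 'e' marks a
     subexpression and 'E' a vector of subexpressions; other operand
     kinds are not scanned for register uses.  */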
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	count_reg_usage (XEXP (x, i), counts, dest, incr);
      else if (fmt[i] == 'E')
	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	  count_reg_usage (XVECEXP (x, i, j), counts, dest, incr);
    }
}

/* Return true if SET is live, i.e. if deleting it would change the
   behavior of the program.  */

static bool
set_live_p (set, insn, counts)
     rtx set;
     rtx insn ATTRIBUTE_UNUSED;	/* Only used with HAVE_cc0.  */
     int *counts;
{
#ifdef HAVE_cc0
  rtx tem;
#endif

  if (set_noop_p (set))
    ;

#ifdef HAVE_cc0
  else if (GET_CODE (SET_DEST (set)) == CC0
	   && !side_effects_p (SET_SRC (set))
	   && ((tem = next_nonnote_insn (insn)) == 0
	       || !INSN_P (tem)
	       || !reg_referenced_p (cc0_rtx, PATTERN (tem))))
    return false;
#endif
  else if (GET_CODE (SET_DEST (set)) != REG
	   || REGNO (SET_DEST (set)) < FIRST_PSEUDO_REGISTER
	   || counts[REGNO (SET_DEST (set))] != 0
	   || side_effects_p (SET_SRC (set))
	   /* An ADDRESSOF expression can turn into a use of the
	      internal arg pointer, so always consider the
	      internal arg pointer live.  If it is truly dead,
	      flow will delete the initializing insn.  */
	   || (SET_DEST (set) == current_function_internal_arg_pointer))
    return true;

  return false;
}
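
/* For instance (illustrative): (set (reg 120) (const_int 0)) is dead
   when reg 120 is a pseudo with counts[120] == 0, while a set whose
   destination is a MEM or a hard register is kept (unless it is a
   no-op move).  */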

/* Return true if INSN is live.  */

static bool
insn_live_p (insn, counts)
     rtx insn;
     int *counts;
{
  int i;

  if (GET_CODE (PATTERN (insn)) == SET)
    return set_live_p (PATTERN (insn), insn, counts);
  else if (GET_CODE (PATTERN (insn)) == PARALLEL)
    {
      for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
	{
	  rtx elt = XVECEXP (PATTERN (insn), 0, i);

	  if (GET_CODE (elt) == SET)
	    {
	      if (set_live_p (elt, insn, counts))
		return true;
	    }
	  else if (GET_CODE (elt) != CLOBBER && GET_CODE (elt) != USE)
	    return true;
	}
      return false;
    }
  else
    return true;
}

/* Return true if the libcall ending at INSN is dead as a whole.  */

static bool
dead_libcall_p (insn)
     rtx insn;
{
  rtx note;

  /* See if there's a REG_EQUAL note on this insn and try to
     replace the source with the REG_EQUAL expression.

     We assume that insns with REG_RETVALs can only be reg->reg
     copies at this point.  */
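
  /* An illustrative sketch: for a libcall whose last insn is

	(set (reg 100) (reg 99))
	    [REG_RETVAL ...] [REG_EQUAL (mult (reg 90) (reg 91))]

     we try to rewrite the source to (mult (reg 90) (reg 91)).  If that
     makes a valid insn, the REG_RETVAL note is removed, the insn no
     longer depends on the rest of the block, and the whole libcall can
     be deleted.  */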
  note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
  if (note)
    {
      rtx set = single_set (insn);
      rtx new = simplify_rtx (XEXP (note, 0));

      if (!new)
	new = XEXP (note, 0);

      if (set && validate_change (insn, &SET_SRC (set), new, 0))
	{
	  remove_note (insn, find_reg_note (insn, REG_RETVAL, NULL_RTX));
	  return true;
	}
    }

  return false;
}

/* Scan all the insns and delete any that are dead; i.e., they store a register
   that is never used or they copy a register to itself.

   This is used to remove insns made obviously dead by cse, loop or other
   optimizations.  It improves the heuristics in loop since it won't try to
   move dead invariants out of loops or make givs for dead quantities.  The
   remaining passes of the compilation are also sped up.  */
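
/* For example (illustrative): once cse has replaced every use of reg 99
   with its equivalent, an insn such as

	(insn 42 ... (set (reg 99) (reg 98)))

   is left with counts[99] == 0 and no side effects, so it is deleted
   here; a self-copy like (set (reg 98) (reg 98)) is a no-op set and is
   deleted as well.  */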

void
delete_trivially_dead_insns (insns, nreg, preserve_basic_blocks)
     rtx insns;
     int nreg;
     int preserve_basic_blocks;
{
  int *counts;
  rtx insn, prev;
  int i;
  int in_libcall = 0, dead_libcall = 0;
  basic_block bb;

  /* First count the number of times each register is used.  */
  counts = (int *) xcalloc (nreg, sizeof (int));
  for (insn = next_real_insn (insns); insn; insn = next_real_insn (insn))
    count_reg_usage (insn, counts, NULL_RTX, 1);

  /* Go from the last insn to the first and delete insns that only set unused
     registers or copy a register to itself.  As we delete an insn, remove
     usage counts for registers it uses.

     The first jump optimization pass may leave a real insn as the last
     insn in the function.  We must not skip that insn or we may end
     up deleting code that is not really dead.  */
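
  /* Because we scan backwards and decrement the counts as we delete,
     a chain of insns that only feed one another is removed in a single
     pass: deleting the last insn in the chain zeroes the counts of its
     inputs, which makes the insns that computed them dead by the time
     we reach them.  */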
  insn = get_last_insn ();
  if (! INSN_P (insn))
    insn = prev_real_insn (insn);

  if (!preserve_basic_blocks)
    for (; insn; insn = prev)
      {
	int live_insn = 0;

	prev = prev_real_insn (insn);

	/* Don't delete any insns that are part of a libcall block unless
	   we can delete the whole libcall block.

	   Flow or loop might get confused if we did that.  Remember
	   that we are scanning backwards.  */
	if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
	  {
	    in_libcall = 1;
	    live_insn = 1;
	    dead_libcall = dead_libcall_p (insn);
	  }
	else if (in_libcall)
	  live_insn = ! dead_libcall;
	else
	  live_insn = insn_live_p (insn, counts);

	/* If this is a dead insn, delete it and show registers in it aren't
	   being used.  */

	if (! live_insn)
	  {
	    count_reg_usage (insn, counts, NULL_RTX, -1);
	    delete_related_insns (insn);
	  }

	if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
	  {
	    in_libcall = 0;
	    dead_libcall = 0;
	  }
      }
  else
    for (i = 0; i < n_basic_blocks; i++)
      for (bb = BASIC_BLOCK (i), insn = bb->end; insn != bb->head; insn = prev)
	{
	  int live_insn = 0;

	  prev = PREV_INSN (insn);
	  if (!INSN_P (insn))
	    continue;

	  /* Don't delete any insns that are part of a libcall block unless
	     we can delete the whole libcall block.

	     Flow or loop might get confused if we did that.  Remember
	     that we are scanning backwards.  */
	  if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
	    {
	      in_libcall = 1;
	      live_insn = 1;
	      dead_libcall = dead_libcall_p (insn);
	    }
	  else if (in_libcall)
	    live_insn = ! dead_libcall;
	  else
	    live_insn = insn_live_p (insn, counts);

	  /* If this is a dead insn, delete it and show registers in it aren't
	     being used.  */

	  if (! live_insn)
	    {
	      count_reg_usage (insn, counts, NULL_RTX, -1);
	      delete_insn (insn);
	    }

	  if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
	    {
	      in_libcall = 0;
	      dead_libcall = 0;
	    }
	}

  /* Clean up.  */
  free (counts);
}