1 /* Common subexpression elimination for GNU compiler.
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998
3 1999, 2000, 2001, 2002 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
22 #include "config.h"
23 /* stdio.h must precede rtl.h for FFS. */
24 #include "system.h"
26 #include "rtl.h"
27 #include "tm_p.h"
28 #include "regs.h"
29 #include "hard-reg-set.h"
30 #include "basic-block.h"
31 #include "flags.h"
32 #include "real.h"
33 #include "insn-config.h"
34 #include "recog.h"
35 #include "function.h"
36 #include "expr.h"
37 #include "toplev.h"
38 #include "output.h"
39 #include "ggc.h"
40 #include "timevar.h"
42 /* The basic idea of common subexpression elimination is to go
43 through the code, keeping a record of expressions that would
44 have the same value at the current scan point, and replacing
45 expressions encountered with the cheapest equivalent expression.
47 It is too complicated to keep track of the different possibilities
48 when control paths merge in this code; so, at each label, we forget all
49 that is known and start fresh. This can be described as processing each
50 extended basic block separately. We have a separate pass to perform
51 global CSE.
53 Note CSE can turn a conditional or computed jump into a nop or
54 an unconditional jump. When this occurs we arrange to run the jump
55 optimizer after CSE to delete the unreachable code.
57 We use two data structures to record the equivalent expressions:
58 a hash table for most expressions, and a vector of "quantity
59 numbers" to record equivalent (pseudo) registers.
61 The use of the special data structure for registers is desirable
62 because it is faster. It is possible because register references
63 contain a fairly small number, the register number, taken from
64 a contiguously allocated series, and two register references are
65 identical if they have the same number. General expressions
66 do not have any such thing, so the only way to retrieve the
67 information recorded on an expression other than a register
68 is to keep it in a hash table.
70 Registers and "quantity numbers":
72 At the start of each basic block, all of the (hardware and pseudo)
73 registers used in the function are given distinct quantity
74 numbers to indicate their contents. During scan, when the code
75 copies one register into another, we copy the quantity number.
76 When a register is loaded in any other way, we allocate a new
77 quantity number to describe the value generated by this operation.
78 `reg_qty' records what quantity a register is currently thought
79 of as containing.
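   As an informal example (the register numbers are made up), scanning a
   copy such as (set (reg:SI 66) (reg:SI 65)) amounts, in essence, to

	REG_QTY (66) = REG_QTY (65);

   using the REG_QTY accessor defined later in this file (see
   `make_regs_eqv'), whereas storing anything else into (reg:SI 66)
   would give it a fresh quantity via `make_new_qty'.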
81 All real quantity numbers are greater than or equal to `max_reg'.
82 If register N has not been assigned a quantity, reg_qty[N] will equal N.
84 Quantity numbers below `max_reg' do not exist and none of the `qty_table'
85 entries should be referenced with an index below `max_reg'.
87 We also maintain a bidirectional chain of registers for each
88 quantity number. The `qty_table` members `first_reg' and `last_reg',
89 and `reg_eqv_table' members `next' and `prev' hold these chains.
91 The first register in a chain is the one whose lifespan is least local.
92 Among equals, it is the one that was seen first.
93 We replace any equivalent register with that one.
95 If two registers have the same quantity number, then REG expressions
96 with the qty_table `mode' must be in the hash table for both
97 registers and must be in the same class.
99 The converse is not true. Since hard registers may be referenced in
100 any mode, two REG expressions might be equivalent in the hash table
101 but not have the same quantity number if the quantity of one
102 of the registers does not have the same mode as those expressions.
104 Constants and quantity numbers:
106 When a quantity has a known constant value, that value is stored
107 in the appropriate qty_table `const_rtx'. This is in addition to
108 putting the constant in the hash table as is usual for non-regs.
110 Whether a reg or a constant is preferred is determined by the configuration
111 macro CONST_COSTS and will often depend on the constant value. In any
112 event, expressions containing constants can be simplified by fold_rtx.
114 When a quantity has a known nearly constant value (such as an address
115 of a stack slot), that value is stored in the appropriate qty_table
116 `const_rtx'.
118 Integer constants don't have a machine mode. However, cse
119 determines the intended machine mode from the destination
120 of the instruction that moves the constant. The machine mode
121 is recorded in the hash table along with the actual RTL
122 constant expression so that different modes are kept separate.
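   For example, (const_int 4) itself has VOIDmode; if it is loaded by an
   insn such as (set (reg:SI 66) (const_int 4)) it is entered in the hash
   table with mode SImode, while a DImode use of the same (const_int 4)
   gets a separate entry recorded with DImode.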
124 Other expressions:
126 To record known equivalences among expressions in general
127 we use a hash table called `table'. It has a fixed number of buckets
128 that contain chains of `struct table_elt' elements for expressions.
129 These chains connect the elements whose expressions have the same
130 hash codes.
132 Other chains through the same elements connect the elements which
133 currently have equivalent values.
135 Register references in an expression are canonicalized before hashing
136 the expression. This is done using `reg_qty' and qty_table `first_reg'.
137 The hash code of a register reference is computed using the quantity
138 number, not the register number.
140 When the value of an expression changes, it is necessary to remove from the
141 hash table not just that expression but all expressions whose values
142 could be different as a result.
144 1. If the value changing is in memory, except in special cases
145 ANYTHING referring to memory could be changed. That is because
146 nobody knows where a pointer does not point.
147 The function `invalidate_memory' removes what is necessary.
149 The special cases are when the address is constant or is
150 a constant plus a fixed register such as the frame pointer
151 or a static chain pointer. When such addresses are stored in,
152 we can tell exactly which other such addresses must be invalidated
153 due to overlap. `invalidate' does this.
154 All expressions that refer to non-constant
155 memory addresses are also invalidated. `invalidate_memory' does this.
157 2. If the value changing is a register, all expressions
158 containing references to that register, and only those,
159 must be removed.
161 Because searching the entire hash table for expressions that contain
162 a register is very slow, we try to figure out when it isn't necessary.
163 Precisely, this is necessary only when expressions have been
164 entered in the hash table using this register, and then the value has
165 changed, and then another expression wants to be added to refer to
166 the register's new value. This sequence of circumstances is rare
167 within any one basic block.
169 The vectors `reg_tick' and `reg_in_table' are used to detect this case.
170 reg_tick[i] is incremented whenever a value is stored in register i.
171 reg_in_table[i] holds -1 if no references to register i have been
172 entered in the table; otherwise, it contains the value reg_tick[i] had
173 when the references were entered. If we want to enter a reference
174 and reg_in_table[i] != reg_tick[i], we must scan and remove old references.
175 Until we want to enter a new entry, the mere fact that the two vectors
176 don't match causes the entries to be ignored if anyone tries to match them.
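   As a minimal sketch of the check just described (using the REG_TICK,
   REG_IN_TABLE and `remove_invalid_refs' names defined later in this
   file), entering a new reference to register i does essentially

	if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
	  remove_invalid_refs (i);
	REG_IN_TABLE (i) = REG_TICK (i);

   which is what `mention_regs' does below.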
178 Registers themselves are entered in the hash table as well as in
179 the equivalent-register chains. However, the vectors `reg_tick'
180 and `reg_in_table' do not apply to expressions which are simple
181 register references. These expressions are removed from the table
182 immediately when they become invalid, and this can be done even if
183 we do not immediately search for all the expressions that refer to
184 the register.
186 A CLOBBER rtx in an instruction invalidates its operand for further
187 reuse. A CLOBBER or SET rtx whose operand is a MEM:BLK
188 invalidates everything that resides in memory.
190 Related expressions:
192 Constant expressions that differ only by an additive integer
193 are called related. When a constant expression is put in
194 the table, the related expression with no constant term
195 is also entered. These are made to point at each other
196 so that it is possible to find out if there exists any
197 register equivalent to an expression related to a given expression. */
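/* As an illustration of the related-value mechanism (the rtx below is a
   made-up example): when (const (plus (symbol_ref "x") (const_int 8))) is
   put in the table, (symbol_ref "x") is entered as well and the two
   elements are linked through their `related_value' fields.  If some
   register is later known to hold the plain symbol_ref, `use_related_value'
   can find it, and the offsetted constant can be recomputed from that
   register plus 8.  */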
199 /* One plus largest register number used in this function. */
201 static int max_reg;
203 /* One plus largest instruction UID used in this function at time of
204 cse_main call. */
206 static int max_insn_uid;
208 /* Length of qty_table vector. We know in advance we will not need
209 a quantity number this big. */
211 static int max_qty;
213 /* Next quantity number to be allocated.
214 This is 1 + the largest number needed so far. */
216 static int next_qty;
218 /* Per-qty information tracking.
220 `first_reg' and `last_reg' track the head and tail of the
221 chain of registers which currently contain this quantity.
223 `mode' contains the machine mode of this quantity.
225 `const_rtx' holds the rtx of the constant value of this
226 quantity, if known. A sum of the frame/arg pointer
227 and a constant can also be entered here. When this holds
228 a known value, `const_insn' is the insn which stored the
229 constant value.
231 `comparison_{code,const,qty}' are used to track when a
232 comparison between a quantity and some constant or register has
233 been passed. In such a case, we know the results of the comparison
234 in case we see it again. These members record a comparison that
235 is known to be true. `comparison_code' holds the rtx code of such
236 a comparison, else it is set to UNKNOWN and the other two
237 comparison members are undefined. `comparison_const' holds
238 the constant being compared against, or zero if the comparison
239 is not against a constant. `comparison_qty' holds the quantity
240 being compared against when the result is known. If the comparison
241 is not with a register, `comparison_qty' is -1. */
243 struct qty_table_elem
244 {
245 rtx const_rtx;
246 rtx const_insn;
247 rtx comparison_const;
248 int comparison_qty;
249 unsigned int first_reg, last_reg;
250 enum machine_mode mode;
251 enum rtx_code comparison_code;
252 };
254 /* The table of all qtys, indexed by qty number. */
255 static struct qty_table_elem *qty_table;
257 #ifdef HAVE_cc0
258 /* For machines that have a CC0, we do not record its value in the hash
259 table since its use is guaranteed to be the insn immediately following
260 its definition and any other insn is presumed to invalidate it.
262 Instead, we store below the value last assigned to CC0. If it should
263 happen to be a constant, it is stored in preference to the actual
264 assigned value. In case it is a constant, we store the mode in which
265 the constant should be interpreted. */
267 static rtx prev_insn_cc0;
268 static enum machine_mode prev_insn_cc0_mode;
269 #endif
271 /* Previous actual insn. 0 if at first insn of basic block. */
273 static rtx prev_insn;
275 /* Insn being scanned. */
277 static rtx this_insn;
279 /* Indexed by register number; gives the number of the next (or
280 previous) register in the chain of registers sharing the same
281 value.
283 Or -1 if this register is at the end of the chain.
285 If reg_qty[N] == N, reg_eqv_table[N].next is undefined. */
287 /* Per-register equivalence chain. */
288 struct reg_eqv_elem
289 {
290 int next, prev;
291 };
293 /* The table of all register equivalence chains. */
294 static struct reg_eqv_elem *reg_eqv_table;
296 struct cse_reg_info
297 {
298 /* Next in hash chain. */
299 struct cse_reg_info *hash_next;
301 /* The next cse_reg_info structure in the free or used list. */
302 struct cse_reg_info *next;
304 /* Search key */
305 unsigned int regno;
307 /* The quantity number of the register's current contents. */
308 int reg_qty;
310 /* The number of times the register has been altered in the current
311 basic block. */
312 int reg_tick;
314 /* The REG_TICK value at which rtx's containing this register are
315 valid in the hash table. If this does not equal the current
316 reg_tick value, such expressions existing in the hash table are
317 invalid. */
318 int reg_in_table;
319 };
321 /* A free list of cse_reg_info entries. */
322 static struct cse_reg_info *cse_reg_info_free_list;
324 /* A used list of cse_reg_info entries. */
325 static struct cse_reg_info *cse_reg_info_used_list;
326 static struct cse_reg_info *cse_reg_info_used_list_end;
328 /* A mapping from registers to cse_reg_info data structures. */
329 #define REGHASH_SHIFT 7
330 #define REGHASH_SIZE (1 << REGHASH_SHIFT)
331 #define REGHASH_MASK (REGHASH_SIZE - 1)
332 static struct cse_reg_info *reg_hash[REGHASH_SIZE];
334 #define REGHASH_FN(REGNO) \
335 (((REGNO) ^ ((REGNO) >> REGHASH_SHIFT)) & REGHASH_MASK)
337 /* The last lookup we did into the cse_reg_info hash table (`reg_hash'
338 above). This allows us to cache repeated lookups. */
339 static unsigned int cached_regno;
340 static struct cse_reg_info *cached_cse_reg_info;
342 /* A HARD_REG_SET containing all the hard registers for which there is
343 currently a REG expression in the hash table. Note the difference
344 from the above variables, which indicate if the REG is mentioned in some
345 expression in the table. */
347 static HARD_REG_SET hard_regs_in_table;
349 /* CUID of insn that starts the basic block currently being cse-processed. */
351 static int cse_basic_block_start;
353 /* CUID of insn that ends the basic block currently being cse-processed. */
355 static int cse_basic_block_end;
357 /* Vector mapping INSN_UIDs to cuids.
358 The cuids are like uids but always increase monotonically.
359 We use them to see whether a reg is used outside a given basic block. */
361 static int *uid_cuid;
363 /* Highest UID in UID_CUID. */
364 static int max_uid;
366 /* Get the cuid of an insn. */
368 #define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])
370 /* Nonzero if this pass has made changes, and therefore it's
371 worthwhile to run the garbage collector. */
373 static int cse_altered;
375 /* Nonzero if cse has altered conditional jump insns
376 in such a way that jump optimization should be redone. */
378 static int cse_jumps_altered;
380 /* Nonzero if we put a LABEL_REF into the hash table for an INSN without a
381 REG_LABEL note; in that case we have to rerun jump after CSE to put in the note. */
382 static int recorded_label_ref;
384 /* canon_hash stores 1 in do_not_record
385 if it notices a reference to CC0, PC, or some other volatile
386 subexpression. */
388 static int do_not_record;
390 #ifdef LOAD_EXTEND_OP
392 /* Scratch rtl used when looking for load-extended copy of a MEM. */
393 static rtx memory_extend_rtx;
394 #endif
396 /* canon_hash stores 1 in hash_arg_in_memory
397 if it notices a reference to memory within the expression being hashed. */
399 static int hash_arg_in_memory;
401 /* The hash table contains buckets which are chains of `struct table_elt's,
402 each recording one expression's information.
403 That expression is in the `exp' field.
405 The canon_exp field contains a canonical (from the point of view of
406 alias analysis) version of the `exp' field.
408 Those elements with the same hash code are chained in both directions
409 through the `next_same_hash' and `prev_same_hash' fields.
411 Each set of expressions with equivalent values
412 is on a two-way chain through the `next_same_value'
413 and `prev_same_value' fields, and all point with
414 the `first_same_value' field at the first element in
415 that chain. The chain is in order of increasing cost.
416 Each element's cost value is in its `cost' field.
418 The `in_memory' field is nonzero for elements that
419 involve any reference to memory. These elements are removed
420 whenever a write is done to an unidentified location in memory.
421 To be safe, we assume that a memory address is unidentified unless
422 the address is either a symbol constant or a constant plus
423 the frame pointer or argument pointer.
425 The `related_value' field is used to connect related expressions
426 (that differ by adding an integer).
427 The related expressions are chained in a circular fashion.
428 `related_value' is zero for expressions for which this
429 chain is not useful.
431 The `cost' field stores the cost of this element's expression.
432 The `regcost' field stores the value returned by approx_reg_cost for
433 this element's expression.
435 The `is_const' flag is set if the element is a constant (including
436 a fixed address).
438 The `flag' field is used as a temporary during some search routines.
440 The `mode' field is usually the same as GET_MODE (`exp'), but
441 if `exp' is a CONST_INT and has no machine mode then the `mode'
442 field is the mode it was being used as. Each constant is
443 recorded separately for each mode it is used with. */
445 struct table_elt
446 {
447 rtx exp;
448 rtx canon_exp;
449 struct table_elt *next_same_hash;
450 struct table_elt *prev_same_hash;
451 struct table_elt *next_same_value;
452 struct table_elt *prev_same_value;
453 struct table_elt *first_same_value;
454 struct table_elt *related_value;
455 int cost;
456 int regcost;
457 enum machine_mode mode;
458 char in_memory;
459 char is_const;
460 char flag;
461 };
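/* A purely illustrative walk, not code used by this pass: given any
   element ELT, all expressions currently known to share its value can be
   visited, cheapest first, with

	struct table_elt *p;
	for (p = elt->first_same_value; p; p = p->next_same_value)
	  use (p->exp, p->cost);

   where `use' is a placeholder for whatever the caller wants to do.  */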
463 /* We don't want a lot of buckets, because we rarely have very many
464 things stored in the hash table, and a lot of buckets slows
465 down a lot of loops that happen frequently. */
466 #define HASH_SHIFT 5
467 #define HASH_SIZE (1 << HASH_SHIFT)
468 #define HASH_MASK (HASH_SIZE - 1)
470 /* Compute hash code of X in mode M. Special-case the case where X is a pseudo
471 register (hard registers may require `do_not_record' to be set). */
473 #define HASH(X, M) \
474 ((GET_CODE (X) == REG && REGNO (X) >= FIRST_PSEUDO_REGISTER \
475 ? (((unsigned) REG << 7) + (unsigned) REG_QTY (REGNO (X))) \
476 : canon_hash (X, M)) & HASH_MASK)
478 /* Determine whether register number N is considered a fixed register for the
479 purpose of approximating register costs.
480 It is desirable to replace other regs with fixed regs, to reduce need for
481 non-fixed hard regs.
482 A reg wins if it is either the frame pointer or designated as fixed. */
483 #define FIXED_REGNO_P(N) \
484 ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
485 || fixed_regs[N] || global_regs[N])
487 /* Compute cost of X, as stored in the `cost' field of a table_elt. Fixed
488 hard registers and pointers into the frame are the cheapest with a cost
489 of 0. Next come pseudos with a cost of one and other hard registers with
490 a cost of 2. Aside from these special cases, call `rtx_cost'. */
492 #define CHEAP_REGNO(N) \
493 ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
494 || (N) == STACK_POINTER_REGNUM || (N) == ARG_POINTER_REGNUM \
495 || ((N) >= FIRST_VIRTUAL_REGISTER && (N) <= LAST_VIRTUAL_REGISTER) \
496 || ((N) < FIRST_PSEUDO_REGISTER \
497 && FIXED_REGNO_P (N) && REGNO_REG_CLASS (N) != NO_REGS))
499 #define COST(X) (GET_CODE (X) == REG ? 0 : notreg_cost (X, SET))
500 #define COST_IN(X,OUTER) (GET_CODE (X) == REG ? 0 : notreg_cost (X, OUTER))
502 /* Get the info associated with register N. */
504 #define GET_CSE_REG_INFO(N) \
505 (((N) == cached_regno && cached_cse_reg_info) \
506 ? cached_cse_reg_info : get_cse_reg_info ((N)))
508 /* Get the number of times this register has been updated in this
509 basic block. */
511 #define REG_TICK(N) ((GET_CSE_REG_INFO (N))->reg_tick)
513 /* Get the point at which REG was recorded in the table. */
515 #define REG_IN_TABLE(N) ((GET_CSE_REG_INFO (N))->reg_in_table)
517 /* Get the quantity number for REG. */
519 #define REG_QTY(N) ((GET_CSE_REG_INFO (N))->reg_qty)
521 /* Determine if the quantity number for register N represents a valid index
522 into the qty_table. */
524 #define REGNO_QTY_VALID_P(N) (REG_QTY (N) != (int) (N))
526 static struct table_elt *table[HASH_SIZE];
528 /* Chain of `struct table_elt's made so far for this function
529 but currently removed from the table. */
531 static struct table_elt *free_element_chain;
533 /* Number of `struct table_elt' structures made so far for this function. */
535 static int n_elements_made;
537 /* Maximum value `n_elements_made' has had so far in this compilation
538 for functions previously processed. */
540 static int max_elements_made;
542 /* Surviving equivalence class when two equivalence classes are merged
543 by recording the effects of a jump in the last insn. Zero if the
544 last insn was not a conditional jump. */
546 static struct table_elt *last_jump_equiv_class;
548 /* Set to the cost of a constant pool reference if one was found for a
549 symbolic constant. If this was found, it means we should try to
550 convert constants into constant pool entries if they don't fit in
551 the insn. */
553 static int constant_pool_entries_cost;
555 /* Define maximum length of a branch path. */
557 #define PATHLENGTH 10
559 /* This data describes a block that will be processed by cse_basic_block. */
561 struct cse_basic_block_data
562 {
563 /* Lowest CUID value of insns in block. */
564 int low_cuid;
565 /* Highest CUID value of insns in block. */
566 int high_cuid;
567 /* Total number of SETs in block. */
568 int nsets;
569 /* Last insn in the block. */
570 rtx last;
571 /* Size of current branch path, if any. */
572 int path_size;
573 /* Current branch path, indicating which branches will be taken. */
574 struct branch_path
575 {
576 /* The branch insn. */
577 rtx branch;
578 /* Whether it should be taken or not. AROUND is the same as taken
579 except that it is used when the destination label is not preceded
580 by a BARRIER. */
581 enum taken {TAKEN, NOT_TAKEN, AROUND} status;
582 } path[PATHLENGTH];
583 };
585 /* Nonzero if X has the form (PLUS frame-pointer integer). We check for
586 virtual regs here because the simplify_*_operation routines are called
587 by integrate.c, which is called before virtual register instantiation.
589 ?!? FIXED_BASE_PLUS_P and NONZERO_BASE_PLUS_P need to move into
590 a header file so that their definitions can be shared with the
591 simplification routines in simplify-rtx.c. Until then, do not
592 change these macros without also changing the copy in simplify-rtx.c. */
594 #define FIXED_BASE_PLUS_P(X) \
595 ((X) == frame_pointer_rtx || (X) == hard_frame_pointer_rtx \
596 || ((X) == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])\
597 || (X) == virtual_stack_vars_rtx \
598 || (X) == virtual_incoming_args_rtx \
599 || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
600 && (XEXP (X, 0) == frame_pointer_rtx \
601 || XEXP (X, 0) == hard_frame_pointer_rtx \
602 || ((X) == arg_pointer_rtx \
603 && fixed_regs[ARG_POINTER_REGNUM]) \
604 || XEXP (X, 0) == virtual_stack_vars_rtx \
605 || XEXP (X, 0) == virtual_incoming_args_rtx)) \
606 || GET_CODE (X) == ADDRESSOF)
608 /* Similar, but also allows reference to the stack pointer.
610 This used to include FIXED_BASE_PLUS_P; however, we can't assume that
611 arg_pointer_rtx by itself is nonzero, because on at least one machine,
612 the i960, the arg pointer is zero when it is unused. */
614 #define NONZERO_BASE_PLUS_P(X) \
615 ((X) == frame_pointer_rtx || (X) == hard_frame_pointer_rtx \
616 || (X) == virtual_stack_vars_rtx \
617 || (X) == virtual_incoming_args_rtx \
618 || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
619 && (XEXP (X, 0) == frame_pointer_rtx \
620 || XEXP (X, 0) == hard_frame_pointer_rtx \
621 || ((X) == arg_pointer_rtx \
622 && fixed_regs[ARG_POINTER_REGNUM]) \
623 || XEXP (X, 0) == virtual_stack_vars_rtx \
624 || XEXP (X, 0) == virtual_incoming_args_rtx)) \
625 || (X) == stack_pointer_rtx \
626 || (X) == virtual_stack_dynamic_rtx \
627 || (X) == virtual_outgoing_args_rtx \
628 || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
629 && (XEXP (X, 0) == stack_pointer_rtx \
630 || XEXP (X, 0) == virtual_stack_dynamic_rtx \
631 || XEXP (X, 0) == virtual_outgoing_args_rtx)) \
632 || GET_CODE (X) == ADDRESSOF)
634 static int notreg_cost PARAMS ((rtx, enum rtx_code));
635 static int approx_reg_cost_1 PARAMS ((rtx *, void *));
636 static int approx_reg_cost PARAMS ((rtx));
637 static int preferrable PARAMS ((int, int, int, int));
638 static void new_basic_block PARAMS ((void));
639 static void make_new_qty PARAMS ((unsigned int, enum machine_mode));
640 static void make_regs_eqv PARAMS ((unsigned int, unsigned int));
641 static void delete_reg_equiv PARAMS ((unsigned int));
642 static int mention_regs PARAMS ((rtx));
643 static int insert_regs PARAMS ((rtx, struct table_elt *, int));
644 static void remove_from_table PARAMS ((struct table_elt *, unsigned));
645 static struct table_elt *lookup PARAMS ((rtx, unsigned, enum machine_mode)),
646 *lookup_for_remove PARAMS ((rtx, unsigned, enum machine_mode));
647 static rtx lookup_as_function PARAMS ((rtx, enum rtx_code));
648 static struct table_elt *insert PARAMS ((rtx, struct table_elt *, unsigned,
649 enum machine_mode));
650 static void merge_equiv_classes PARAMS ((struct table_elt *,
651 struct table_elt *));
652 static void invalidate PARAMS ((rtx, enum machine_mode));
653 static int cse_rtx_varies_p PARAMS ((rtx, int));
654 static void remove_invalid_refs PARAMS ((unsigned int));
655 static void remove_invalid_subreg_refs PARAMS ((unsigned int, unsigned int,
656 enum machine_mode));
657 static void rehash_using_reg PARAMS ((rtx));
658 static void invalidate_memory PARAMS ((void));
659 static void invalidate_for_call PARAMS ((void));
660 static rtx use_related_value PARAMS ((rtx, struct table_elt *));
661 static unsigned canon_hash PARAMS ((rtx, enum machine_mode));
662 static unsigned canon_hash_string PARAMS ((const char *));
663 static unsigned safe_hash PARAMS ((rtx, enum machine_mode));
664 static int exp_equiv_p PARAMS ((rtx, rtx, int, int));
665 static rtx canon_reg PARAMS ((rtx, rtx));
666 static void find_best_addr PARAMS ((rtx, rtx *, enum machine_mode));
667 static enum rtx_code find_comparison_args PARAMS ((enum rtx_code, rtx *, rtx *,
668 enum machine_mode *,
669 enum machine_mode *));
670 static rtx fold_rtx PARAMS ((rtx, rtx));
671 static rtx equiv_constant PARAMS ((rtx));
672 static void record_jump_equiv PARAMS ((rtx, int));
673 static void record_jump_cond PARAMS ((enum rtx_code, enum machine_mode,
674 rtx, rtx, int));
675 static void cse_insn PARAMS ((rtx, rtx));
676 static int addr_affects_sp_p PARAMS ((rtx));
677 static void invalidate_from_clobbers PARAMS ((rtx));
678 static rtx cse_process_notes PARAMS ((rtx, rtx));
679 static void cse_around_loop PARAMS ((rtx));
680 static void invalidate_skipped_set PARAMS ((rtx, rtx, void *));
681 static void invalidate_skipped_block PARAMS ((rtx));
682 static void cse_check_loop_start PARAMS ((rtx, rtx, void *));
683 static void cse_set_around_loop PARAMS ((rtx, rtx, rtx));
684 static rtx cse_basic_block PARAMS ((rtx, rtx, struct branch_path *, int));
685 static void count_reg_usage PARAMS ((rtx, int *, rtx, int));
686 static int check_for_label_ref PARAMS ((rtx *, void *));
687 extern void dump_class PARAMS ((struct table_elt*));
688 static struct cse_reg_info * get_cse_reg_info PARAMS ((unsigned int));
689 static int check_dependence PARAMS ((rtx *, void *));
691 static void flush_hash_table PARAMS ((void));
692 static bool insn_live_p PARAMS ((rtx, int *));
693 static bool set_live_p PARAMS ((rtx, rtx, int *));
694 static bool dead_libcall_p PARAMS ((rtx, int *));
696 /* Dump the expressions in the equivalence class indicated by CLASSP.
697 This function is used only for debugging. */
698 void
699 dump_class (classp)
700 struct table_elt *classp;
702 struct table_elt *elt;
704 fprintf (stderr, "Equivalence chain for ");
705 print_rtl (stderr, classp->exp);
706 fprintf (stderr, ": \n");
708 for (elt = classp->first_same_value; elt; elt = elt->next_same_value)
710 print_rtl (stderr, elt->exp);
711 fprintf (stderr, "\n");
715 /* Subroutine of approx_reg_cost; called through for_each_rtx. */
717 static int
718 approx_reg_cost_1 (xp, data)
719 rtx *xp;
720 void *data;
722 rtx x = *xp;
723 int *cost_p = data;
725 if (x && GET_CODE (x) == REG)
727 unsigned int regno = REGNO (x);
729 if (! CHEAP_REGNO (regno))
731 if (regno < FIRST_PSEUDO_REGISTER)
733 if (SMALL_REGISTER_CLASSES)
734 return 1;
735 *cost_p += 2;
737 else
738 *cost_p += 1;
742 return 0;
745 /* Return an estimate of the cost of the registers used in an rtx.
746 This is mostly the number of different REG expressions in the rtx;
747 however for some exceptions like fixed registers we use a cost of
748 0. If any other hard register reference occurs, return MAX_COST. */
750 static int
751 approx_reg_cost (x)
752 rtx x;
754 int cost = 0;
756 if (for_each_rtx (&x, approx_reg_cost_1, (void *) &cost))
757 return MAX_COST;
759 return cost;
762 /* Return a negative value if an rtx A, whose costs are given by COST_A
763 and REGCOST_A, is more desirable than an rtx B.
764 Return a positive value if A is less desirable, or 0 if the two are
765 equally good. */
766 static int
767 preferrable (cost_a, regcost_a, cost_b, regcost_b)
768 int cost_a, regcost_a, cost_b, regcost_b;
770 /* First, get rid of cases involving expressions that are entirely
771 unwanted. */
772 if (cost_a != cost_b)
774 if (cost_a == MAX_COST)
775 return 1;
776 if (cost_b == MAX_COST)
777 return -1;
780 /* Avoid extending lifetimes of hardregs. */
781 if (regcost_a != regcost_b)
783 if (regcost_a == MAX_COST)
784 return 1;
785 if (regcost_b == MAX_COST)
786 return -1;
789 /* Normal operation costs take precedence. */
790 if (cost_a != cost_b)
791 return cost_a - cost_b;
792 /* Only if these are identical consider effects on register pressure. */
793 if (regcost_a != regcost_b)
794 return regcost_a - regcost_b;
795 return 0;
798 /* Internal function, to compute cost when X is not a register; called
799 from COST macro to keep it simple. */
801 static int
802 notreg_cost (x, outer)
803 rtx x;
804 enum rtx_code outer;
806 return ((GET_CODE (x) == SUBREG
807 && GET_CODE (SUBREG_REG (x)) == REG
808 && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
809 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (x))) == MODE_INT
810 && (GET_MODE_SIZE (GET_MODE (x))
811 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
812 && subreg_lowpart_p (x)
813 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (GET_MODE (x)),
814 GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))))
816 : rtx_cost (x, outer) * 2);
819 /* Return an estimate of the cost of computing rtx X.
820 One use is in cse, to decide which expression to keep in the hash table.
821 Another is in rtl generation, to pick the cheapest way to multiply.
822 Other uses like the latter are expected in the future. */
824 int
825 rtx_cost (x, outer_code)
826 rtx x;
827 enum rtx_code outer_code ATTRIBUTE_UNUSED;
829 int i, j;
830 enum rtx_code code;
831 const char *fmt;
832 int total;
834 if (x == 0)
835 return 0;
837 /* Compute the default costs of certain things.
838 Note that RTX_COSTS can override the defaults. */
840 code = GET_CODE (x);
841 switch (code)
843 case MULT:
844 total = COSTS_N_INSNS (5);
845 break;
846 case DIV:
847 case UDIV:
848 case MOD:
849 case UMOD:
850 total = COSTS_N_INSNS (7);
851 break;
852 case USE:
853 /* Used in loop.c and combine.c as a marker. */
854 total = 0;
855 break;
856 default:
857 total = COSTS_N_INSNS (1);
860 switch (code)
862 case REG:
863 return 0;
865 case SUBREG:
866 /* If we can't tie these modes, make this expensive. The larger
867 the mode, the more expensive it is. */
868 if (! MODES_TIEABLE_P (GET_MODE (x), GET_MODE (SUBREG_REG (x))))
869 return COSTS_N_INSNS (2
870 + GET_MODE_SIZE (GET_MODE (x)) / UNITS_PER_WORD);
871 break;
873 #ifdef RTX_COSTS
874 RTX_COSTS (x, code, outer_code);
875 #endif
876 #ifdef CONST_COSTS
877 CONST_COSTS (x, code, outer_code);
878 #endif
880 default:
881 #ifdef DEFAULT_RTX_COSTS
882 DEFAULT_RTX_COSTS (x, code, outer_code);
883 #endif
884 break;
887 /* Sum the costs of the sub-rtx's, plus cost of this operation,
888 which is already in total. */
890 fmt = GET_RTX_FORMAT (code);
891 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
892 if (fmt[i] == 'e')
893 total += rtx_cost (XEXP (x, i), code);
894 else if (fmt[i] == 'E')
895 for (j = 0; j < XVECLEN (x, i); j++)
896 total += rtx_cost (XVECEXP (x, i, j), code);
898 return total;
901 /* Return cost of address expression X.
902 Expect that X is a properly formed address reference. */
904 int
905 address_cost (x, mode)
906 rtx x;
907 enum machine_mode mode;
909 /* The ADDRESS_COST macro does not deal with ADDRESSOF nodes. But,
910 during CSE, such nodes are present. Using an ADDRESSOF node which
911 refers to the address of a REG is a good thing because we can then
912 turn (MEM (ADDRESSOF (REG))) into just plain REG. */
914 if (GET_CODE (x) == ADDRESSOF && REG_P (XEXP ((x), 0)))
915 return -1;
917 /* We may be asked for the cost of various unusual addresses, such as the
918 operands of a push instruction. It is not worthwhile to complicate the
919 ADDRESS_COST macro to handle such cases. */
921 if (!memory_address_p (mode, x))
922 return 1000;
923 #ifdef ADDRESS_COST
924 return ADDRESS_COST (x);
925 #else
926 return rtx_cost (x, MEM);
927 #endif
931 static struct cse_reg_info *
932 get_cse_reg_info (regno)
933 unsigned int regno;
935 struct cse_reg_info **hash_head = &reg_hash[REGHASH_FN (regno)];
936 struct cse_reg_info *p;
938 for (p = *hash_head; p != NULL; p = p->hash_next)
939 if (p->regno == regno)
940 break;
942 if (p == NULL)
944 /* Get a new cse_reg_info structure. */
945 if (cse_reg_info_free_list)
947 p = cse_reg_info_free_list;
948 cse_reg_info_free_list = p->next;
950 else
951 p = (struct cse_reg_info *) xmalloc (sizeof (struct cse_reg_info));
953 /* Insert into hash table. */
954 p->hash_next = *hash_head;
955 *hash_head = p;
957 /* Initialize it. */
958 p->reg_tick = 1;
959 p->reg_in_table = -1;
960 p->reg_qty = regno;
961 p->regno = regno;
962 p->next = cse_reg_info_used_list;
963 cse_reg_info_used_list = p;
964 if (!cse_reg_info_used_list_end)
965 cse_reg_info_used_list_end = p;
968 /* Cache this lookup; we tend to be looking up information about the
969 same register several times in a row. */
970 cached_regno = regno;
971 cached_cse_reg_info = p;
973 return p;
976 /* Clear the hash table and initialize each register with its own quantity,
977 for a new basic block. */
979 static void
980 new_basic_block ()
982 int i;
984 next_qty = max_reg;
986 /* Clear out hash table state for this pass. */
988 memset ((char *) reg_hash, 0, sizeof reg_hash);
990 if (cse_reg_info_used_list)
992 cse_reg_info_used_list_end->next = cse_reg_info_free_list;
993 cse_reg_info_free_list = cse_reg_info_used_list;
994 cse_reg_info_used_list = cse_reg_info_used_list_end = 0;
996 cached_cse_reg_info = 0;
998 CLEAR_HARD_REG_SET (hard_regs_in_table);
1000 /* The per-quantity values used to be initialized here, but it is
1001 much faster to initialize each as it is made in `make_new_qty'. */
1003 for (i = 0; i < HASH_SIZE; i++)
1005 struct table_elt *first;
1007 first = table[i];
1008 if (first != NULL)
1010 struct table_elt *last = first;
1012 table[i] = NULL;
1014 while (last->next_same_hash != NULL)
1015 last = last->next_same_hash;
1017 /* Now relink this entire hash chain into
1018 the free element list. */
1020 last->next_same_hash = free_element_chain;
1021 free_element_chain = first;
1025 prev_insn = 0;
1027 #ifdef HAVE_cc0
1028 prev_insn_cc0 = 0;
1029 #endif
1032 /* Say that register REG contains a quantity in mode MODE that was not
1033 contained in any register before, and initialize that quantity. */
1035 static void
1036 make_new_qty (reg, mode)
1037 unsigned int reg;
1038 enum machine_mode mode;
1040 int q;
1041 struct qty_table_elem *ent;
1042 struct reg_eqv_elem *eqv;
1044 if (next_qty >= max_qty)
1045 abort ();
1047 q = REG_QTY (reg) = next_qty++;
1048 ent = &qty_table[q];
1049 ent->first_reg = reg;
1050 ent->last_reg = reg;
1051 ent->mode = mode;
1052 ent->const_rtx = ent->const_insn = NULL_RTX;
1053 ent->comparison_code = UNKNOWN;
1055 eqv = &reg_eqv_table[reg];
1056 eqv->next = eqv->prev = -1;
1059 /* Make reg NEW equivalent to reg OLD.
1060 OLD is not changing; NEW is. */
1062 static void
1063 make_regs_eqv (new, old)
1064 unsigned int new, old;
1066 unsigned int lastr, firstr;
1067 int q = REG_QTY (old);
1068 struct qty_table_elem *ent;
1070 ent = &qty_table[q];
1072 /* Nothing should become eqv until it has a "non-invalid" qty number. */
1073 if (! REGNO_QTY_VALID_P (old))
1074 abort ();
1076 REG_QTY (new) = q;
1077 firstr = ent->first_reg;
1078 lastr = ent->last_reg;
1080 /* Prefer fixed hard registers to anything. Prefer pseudo regs to other
1081 hard regs. Among pseudos, if NEW will live longer than any other reg
1082 of the same qty, and that is beyond the current basic block,
1083 make it the new canonical replacement for this qty. */
1084 if (! (firstr < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (firstr))
1085 /* Certain fixed registers might be of the class NO_REGS. This means
1086 that not only can they not be allocated by the compiler, but
1087 they cannot be used in substitutions or canonicalizations
1088 either. */
1089 && (new >= FIRST_PSEUDO_REGISTER || REGNO_REG_CLASS (new) != NO_REGS)
1090 && ((new < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (new))
1091 || (new >= FIRST_PSEUDO_REGISTER
1092 && (firstr < FIRST_PSEUDO_REGISTER
1093 || ((uid_cuid[REGNO_LAST_UID (new)] > cse_basic_block_end
1094 || (uid_cuid[REGNO_FIRST_UID (new)]
1095 < cse_basic_block_start))
1096 && (uid_cuid[REGNO_LAST_UID (new)]
1097 > uid_cuid[REGNO_LAST_UID (firstr)]))))))
1099 reg_eqv_table[firstr].prev = new;
1100 reg_eqv_table[new].next = firstr;
1101 reg_eqv_table[new].prev = -1;
1102 ent->first_reg = new;
1104 else
1106 /* If NEW is a hard reg (known to be non-fixed), insert at end.
1107 Otherwise, insert before any non-fixed hard regs that are at the
1108 end. Registers of class NO_REGS cannot be used as an
1109 equivalent for anything. */
1110 while (lastr < FIRST_PSEUDO_REGISTER && reg_eqv_table[lastr].prev >= 0
1111 && (REGNO_REG_CLASS (lastr) == NO_REGS || ! FIXED_REGNO_P (lastr))
1112 && new >= FIRST_PSEUDO_REGISTER)
1113 lastr = reg_eqv_table[lastr].prev;
1114 reg_eqv_table[new].next = reg_eqv_table[lastr].next;
1115 if (reg_eqv_table[lastr].next >= 0)
1116 reg_eqv_table[reg_eqv_table[lastr].next].prev = new;
1117 else
1118 qty_table[q].last_reg = new;
1119 reg_eqv_table[lastr].next = new;
1120 reg_eqv_table[new].prev = lastr;
1124 /* Remove REG from its equivalence class. */
1126 static void
1127 delete_reg_equiv (reg)
1128 unsigned int reg;
1130 struct qty_table_elem *ent;
1131 int q = REG_QTY (reg);
1132 int p, n;
1134 /* If invalid, do nothing. */
1135 if (q == (int) reg)
1136 return;
1138 ent = &qty_table[q];
1140 p = reg_eqv_table[reg].prev;
1141 n = reg_eqv_table[reg].next;
1143 if (n != -1)
1144 reg_eqv_table[n].prev = p;
1145 else
1146 ent->last_reg = p;
1147 if (p != -1)
1148 reg_eqv_table[p].next = n;
1149 else
1150 ent->first_reg = n;
1152 REG_QTY (reg) = reg;
1155 /* Remove any invalid expressions from the hash table
1156 that refer to any of the registers contained in expression X.
1158 Make sure that newly inserted references to those registers
1159 as subexpressions will be considered valid.
1161 mention_regs is not called when a register itself
1162 is being stored in the table.
1164 Return 1 if we have done something that may have changed the hash code
1165 of X. */
1167 static int
1168 mention_regs (x)
1169 rtx x;
1171 enum rtx_code code;
1172 int i, j;
1173 const char *fmt;
1174 int changed = 0;
1176 if (x == 0)
1177 return 0;
1179 code = GET_CODE (x);
1180 if (code == REG)
1182 unsigned int regno = REGNO (x);
1183 unsigned int endregno
1184 = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
1185 : HARD_REGNO_NREGS (regno, GET_MODE (x)));
1186 unsigned int i;
1188 for (i = regno; i < endregno; i++)
1190 if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
1191 remove_invalid_refs (i);
1193 REG_IN_TABLE (i) = REG_TICK (i);
1196 return 0;
1199 /* If this is a SUBREG, we don't want to discard other SUBREGs of the same
1200 pseudo if they don't use overlapping words. We handle only pseudos
1201 here for simplicity. */
1202 if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == REG
1203 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER)
1205 unsigned int i = REGNO (SUBREG_REG (x));
1207 if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
1209 /* If reg_tick has been incremented more than once since
1210 reg_in_table was last set, that means that the entire
1211 register has been set before, so discard anything memorized
1212 for the entire register, including all SUBREG expressions. */
1213 if (REG_IN_TABLE (i) != REG_TICK (i) - 1)
1214 remove_invalid_refs (i);
1215 else
1216 remove_invalid_subreg_refs (i, SUBREG_BYTE (x), GET_MODE (x));
1219 REG_IN_TABLE (i) = REG_TICK (i);
1220 return 0;
1223 /* If X is a comparison or a COMPARE and either operand is a register
1224 that does not have a quantity, give it one. This is so that a later
1225 call to record_jump_equiv won't cause X to be assigned a different
1226 hash code and not found in the table after that call.
1228 It is not necessary to do this here, since rehash_using_reg can
1229 fix up the table later, but doing this here eliminates the need to
1230 call that expensive function in the most common case where the only
1231 use of the register is in the comparison. */
1233 if (code == COMPARE || GET_RTX_CLASS (code) == '<')
1235 if (GET_CODE (XEXP (x, 0)) == REG
1236 && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
1237 if (insert_regs (XEXP (x, 0), NULL, 0))
1239 rehash_using_reg (XEXP (x, 0));
1240 changed = 1;
1243 if (GET_CODE (XEXP (x, 1)) == REG
1244 && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
1245 if (insert_regs (XEXP (x, 1), NULL, 0))
1247 rehash_using_reg (XEXP (x, 1));
1248 changed = 1;
1252 fmt = GET_RTX_FORMAT (code);
1253 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1254 if (fmt[i] == 'e')
1255 changed |= mention_regs (XEXP (x, i));
1256 else if (fmt[i] == 'E')
1257 for (j = 0; j < XVECLEN (x, i); j++)
1258 changed |= mention_regs (XVECEXP (x, i, j));
1260 return changed;
1263 /* Update the register quantities for inserting X into the hash table
1264 with a value equivalent to CLASSP.
1265 (If the class does not contain a REG, it is irrelevant.)
1266 If MODIFIED is nonzero, X is a destination; it is being modified.
1267 Note that delete_reg_equiv should be called on a register
1268 before insert_regs is done on that register with MODIFIED != 0.
1270 Nonzero value means that elements of reg_qty have changed
1271 so X's hash code may be different. */
1273 static int
1274 insert_regs (x, classp, modified)
1275 rtx x;
1276 struct table_elt *classp;
1277 int modified;
1279 if (GET_CODE (x) == REG)
1281 unsigned int regno = REGNO (x);
1282 int qty_valid;
1284 /* If REGNO is in the equivalence table already but is of the
1285 wrong mode for that equivalence, don't do anything here. */
1287 qty_valid = REGNO_QTY_VALID_P (regno);
1288 if (qty_valid)
1290 struct qty_table_elem *ent = &qty_table[REG_QTY (regno)];
1292 if (ent->mode != GET_MODE (x))
1293 return 0;
1296 if (modified || ! qty_valid)
1298 if (classp)
1299 for (classp = classp->first_same_value;
1300 classp != 0;
1301 classp = classp->next_same_value)
1302 if (GET_CODE (classp->exp) == REG
1303 && GET_MODE (classp->exp) == GET_MODE (x))
1305 make_regs_eqv (regno, REGNO (classp->exp));
1306 return 1;
1309 /* Mention_regs for a SUBREG checks if REG_TICK is exactly one larger
1310 than REG_IN_TABLE to find out if there was only a single preceding
1311 invalidation - for the SUBREG - or another one, which would be
1312 for the full register. However, if we find here that REG_TICK
1313 indicates that the register is invalid, it means that it has
1314 been invalidated in a separate operation. The SUBREG might be used
1315 now (then this is a recursive call), or we might use the full REG
1316 now and a SUBREG of it later. So bump up REG_TICK so that
1317 mention_regs will do the right thing. */
1318 if (! modified
1319 && REG_IN_TABLE (regno) >= 0
1320 && REG_TICK (regno) == REG_IN_TABLE (regno) + 1)
1321 REG_TICK (regno)++;
1322 make_new_qty (regno, GET_MODE (x));
1323 return 1;
1326 return 0;
1329 /* If X is a SUBREG, we will likely be inserting the inner register in the
1330 table. If that register doesn't have an assigned quantity number at
1331 this point but does later, the insertion that we will be doing now will
1332 not be accessible because its hash code will have changed. So assign
1333 a quantity number now. */
1335 else if (GET_CODE (x) == SUBREG && GET_CODE (SUBREG_REG (x)) == REG
1336 && ! REGNO_QTY_VALID_P (REGNO (SUBREG_REG (x))))
1338 insert_regs (SUBREG_REG (x), NULL, 0);
1339 mention_regs (x);
1340 return 1;
1342 else
1343 return mention_regs (x);
1346 /* Look in or update the hash table. */
1348 /* Remove table element ELT from use in the table.
1349 HASH is its hash code, made using the HASH macro.
1350 It's an argument because often that is known in advance
1351 and we save much time not recomputing it. */
1353 static void
1354 remove_from_table (elt, hash)
1355 struct table_elt *elt;
1356 unsigned hash;
1358 if (elt == 0)
1359 return;
1361 /* Mark this element as removed. See cse_insn. */
1362 elt->first_same_value = 0;
1364 /* Remove the table element from its equivalence class. */
1367 struct table_elt *prev = elt->prev_same_value;
1368 struct table_elt *next = elt->next_same_value;
1370 if (next)
1371 next->prev_same_value = prev;
1373 if (prev)
1374 prev->next_same_value = next;
1375 else
1377 struct table_elt *newfirst = next;
1378 while (next)
1380 next->first_same_value = newfirst;
1381 next = next->next_same_value;
1386 /* Remove the table element from its hash bucket. */
1389 struct table_elt *prev = elt->prev_same_hash;
1390 struct table_elt *next = elt->next_same_hash;
1392 if (next)
1393 next->prev_same_hash = prev;
1395 if (prev)
1396 prev->next_same_hash = next;
1397 else if (table[hash] == elt)
1398 table[hash] = next;
1399 else
1401 /* This entry is not in the proper hash bucket. This can happen
1402 when two classes were merged by `merge_equiv_classes'. Search
1403 for the hash bucket that it heads. This happens only very
1404 rarely, so the cost is acceptable. */
1405 for (hash = 0; hash < HASH_SIZE; hash++)
1406 if (table[hash] == elt)
1407 table[hash] = next;
1411 /* Remove the table element from its related-value circular chain. */
1413 if (elt->related_value != 0 && elt->related_value != elt)
1415 struct table_elt *p = elt->related_value;
1417 while (p->related_value != elt)
1418 p = p->related_value;
1419 p->related_value = elt->related_value;
1420 if (p->related_value == p)
1421 p->related_value = 0;
1424 /* Now add it to the free element chain. */
1425 elt->next_same_hash = free_element_chain;
1426 free_element_chain = elt;
1429 /* Look up X in the hash table and return its table element,
1430 or 0 if X is not in the table.
1432 MODE is the machine-mode of X, or if X is an integer constant
1433 with VOIDmode then MODE is the mode with which X will be used.
1435 Here we are satisfied to find an expression whose tree structure
1436 looks like X. */
1438 static struct table_elt *
1439 lookup (x, hash, mode)
1440 rtx x;
1441 unsigned hash;
1442 enum machine_mode mode;
1444 struct table_elt *p;
1446 for (p = table[hash]; p; p = p->next_same_hash)
1447 if (mode == p->mode && ((x == p->exp && GET_CODE (x) == REG)
1448 || exp_equiv_p (x, p->exp, GET_CODE (x) != REG, 0)))
1449 return p;
1451 return 0;
1454 /* Like `lookup' but don't care whether the table element uses invalid regs.
1455 Also ignore discrepancies in the machine mode of a register. */
1457 static struct table_elt *
1458 lookup_for_remove (x, hash, mode)
1459 rtx x;
1460 unsigned hash;
1461 enum machine_mode mode;
1463 struct table_elt *p;
1465 if (GET_CODE (x) == REG)
1467 unsigned int regno = REGNO (x);
1469 /* Don't check the machine mode when comparing registers;
1470 invalidating (REG:SI 0) also invalidates (REG:DF 0). */
1471 for (p = table[hash]; p; p = p->next_same_hash)
1472 if (GET_CODE (p->exp) == REG
1473 && REGNO (p->exp) == regno)
1474 return p;
1476 else
1478 for (p = table[hash]; p; p = p->next_same_hash)
1479 if (mode == p->mode && (x == p->exp || exp_equiv_p (x, p->exp, 0, 0)))
1480 return p;
1483 return 0;
1486 /* Look for an expression equivalent to X and with code CODE.
1487 If one is found, return that expression. */
1489 static rtx
1490 lookup_as_function (x, code)
1491 rtx x;
1492 enum rtx_code code;
1494 struct table_elt *p
1495 = lookup (x, safe_hash (x, VOIDmode) & HASH_MASK, GET_MODE (x));
1497 /* If we are looking for a CONST_INT, the mode doesn't really matter, as
1498 long as we are narrowing. So if we looked in vain for a mode narrower
1499 than word_mode before, look for word_mode now. */
1500 if (p == 0 && code == CONST_INT
1501 && GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (word_mode))
1503 x = copy_rtx (x);
1504 PUT_MODE (x, word_mode);
1505 p = lookup (x, safe_hash (x, VOIDmode) & HASH_MASK, word_mode);
1508 if (p == 0)
1509 return 0;
1511 for (p = p->first_same_value; p; p = p->next_same_value)
1512 if (GET_CODE (p->exp) == code
1513 /* Make sure this is a valid entry in the table. */
1514 && exp_equiv_p (p->exp, p->exp, 1, 0))
1515 return p->exp;
1517 return 0;
1520 /* Insert X in the hash table, assuming HASH is its hash code
1521 and CLASSP is an element of the class it should go in
1522 (or 0 if a new class should be made).
1523 It is inserted at the proper position to keep the class in
1524 the order cheapest first.
1526 MODE is the machine-mode of X, or if X is an integer constant
1527 with VOIDmode then MODE is the mode with which X will be used.
1529 For elements of equal cheapness, the most recent one
1530 goes in front, except that the first element in the list
1531 remains first unless a cheaper element is added. The order of
1532 pseudo-registers does not matter, as canon_reg will be called to
1533 find the cheapest when a register is retrieved from the table.
1535 The in_memory field in the hash table element is set to 0.
1536 The caller must set it nonzero if appropriate.
1538 You should call insert_regs (X, CLASSP, MODIFY) before calling here,
1539 and if insert_regs returns a nonzero value
1540 you must then recompute its hash code before calling here.
1542 If necessary, update table showing constant values of quantities. */
1544 #define CHEAPER(X, Y) \
1545 (preferrable ((X)->cost, (X)->regcost, (Y)->cost, (Y)->regcost) < 0)
1547 static struct table_elt *
1548 insert (x, classp, hash, mode)
1549 rtx x;
1550 struct table_elt *classp;
1551 unsigned hash;
1552 enum machine_mode mode;
1554 struct table_elt *elt;
1556 /* If X is a register and we haven't made a quantity for it,
1557 something is wrong. */
1558 if (GET_CODE (x) == REG && ! REGNO_QTY_VALID_P (REGNO (x)))
1559 abort ();
1561 /* If X is a hard register, show it is being put in the table. */
1562 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
1564 unsigned int regno = REGNO (x);
1565 unsigned int endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
1566 unsigned int i;
1568 for (i = regno; i < endregno; i++)
1569 SET_HARD_REG_BIT (hard_regs_in_table, i);
1572 /* Put an element for X into the right hash bucket. */
1574 elt = free_element_chain;
1575 if (elt)
1576 free_element_chain = elt->next_same_hash;
1577 else
1579 n_elements_made++;
1580 elt = (struct table_elt *) xmalloc (sizeof (struct table_elt));
1583 elt->exp = x;
1584 elt->canon_exp = NULL_RTX;
1585 elt->cost = COST (x);
1586 elt->regcost = approx_reg_cost (x);
1587 elt->next_same_value = 0;
1588 elt->prev_same_value = 0;
1589 elt->next_same_hash = table[hash];
1590 elt->prev_same_hash = 0;
1591 elt->related_value = 0;
1592 elt->in_memory = 0;
1593 elt->mode = mode;
1594 elt->is_const = (CONSTANT_P (x)
1595 /* GNU C++ takes advantage of this for `this'
1596 (and other const values). */
1597 || (GET_CODE (x) == REG
1598 && RTX_UNCHANGING_P (x)
1599 && REGNO (x) >= FIRST_PSEUDO_REGISTER)
1600 || FIXED_BASE_PLUS_P (x));
1602 if (table[hash])
1603 table[hash]->prev_same_hash = elt;
1604 table[hash] = elt;
1606 /* Put it into the proper value-class. */
1607 if (classp)
1609 classp = classp->first_same_value;
1610 if (CHEAPER (elt, classp))
1611 /* Insert at the head of the class */
1613 struct table_elt *p;
1614 elt->next_same_value = classp;
1615 classp->prev_same_value = elt;
1616 elt->first_same_value = elt;
1618 for (p = classp; p; p = p->next_same_value)
1619 p->first_same_value = elt;
1621 else
1623 /* Insert not at head of the class. */
1624 /* Put it after the last element cheaper than X. */
1625 struct table_elt *p, *next;
1627 for (p = classp; (next = p->next_same_value) && CHEAPER (next, elt);
1628 p = next);
1630 /* Put it after P and before NEXT. */
1631 elt->next_same_value = next;
1632 if (next)
1633 next->prev_same_value = elt;
1635 elt->prev_same_value = p;
1636 p->next_same_value = elt;
1637 elt->first_same_value = classp;
1640 else
1641 elt->first_same_value = elt;
1643 /* If this is a constant being set equivalent to a register or a register
1644 being set equivalent to a constant, note the constant equivalence.
1646 If this is a constant, it cannot be equivalent to a different constant,
1647 and a constant is the only thing that can be cheaper than a register. So
1648 we know the register is the head of the class (before the constant was
1649 inserted).
1651 If this is a register that is not already known equivalent to a
1652 constant, we must check the entire class.
1654 If this is a register that is already known equivalent to an insn,
1655 update the qtys `const_insn' to show that `this_insn' is the latest
1656 insn making that quantity equivalent to the constant. */
1658 if (elt->is_const && classp && GET_CODE (classp->exp) == REG
1659 && GET_CODE (x) != REG)
1661 int exp_q = REG_QTY (REGNO (classp->exp));
1662 struct qty_table_elem *exp_ent = &qty_table[exp_q];
1664 exp_ent->const_rtx = gen_lowpart_if_possible (exp_ent->mode, x);
1665 exp_ent->const_insn = this_insn;
1668 else if (GET_CODE (x) == REG
1669 && classp
1670 && ! qty_table[REG_QTY (REGNO (x))].const_rtx
1671 && ! elt->is_const)
1673 struct table_elt *p;
1675 for (p = classp; p != 0; p = p->next_same_value)
1677 if (p->is_const && GET_CODE (p->exp) != REG)
1679 int x_q = REG_QTY (REGNO (x));
1680 struct qty_table_elem *x_ent = &qty_table[x_q];
1682 x_ent->const_rtx
1683 = gen_lowpart_if_possible (GET_MODE (x), p->exp);
1684 x_ent->const_insn = this_insn;
1685 break;
1690 else if (GET_CODE (x) == REG
1691 && qty_table[REG_QTY (REGNO (x))].const_rtx
1692 && GET_MODE (x) == qty_table[REG_QTY (REGNO (x))].mode)
1693 qty_table[REG_QTY (REGNO (x))].const_insn = this_insn;
1695 /* If this is a constant with symbolic value,
1696 and it has a term with an explicit integer value,
1697 link it up with related expressions. */
1698 if (GET_CODE (x) == CONST)
1700 rtx subexp = get_related_value (x);
1701 unsigned subhash;
1702 struct table_elt *subelt, *subelt_prev;
1704 if (subexp != 0)
1706 /* Get the integer-free subexpression in the hash table. */
1707 subhash = safe_hash (subexp, mode) & HASH_MASK;
1708 subelt = lookup (subexp, subhash, mode);
1709 if (subelt == 0)
1710 subelt = insert (subexp, NULL, subhash, mode);
1711 /* Initialize SUBELT's circular chain if it has none. */
1712 if (subelt->related_value == 0)
1713 subelt->related_value = subelt;
1714 /* Find the element in the circular chain that precedes SUBELT. */
1715 subelt_prev = subelt;
1716 while (subelt_prev->related_value != subelt)
1717 subelt_prev = subelt_prev->related_value;
1718 /* Put new ELT into SUBELT's circular chain just before SUBELT.
1719 This way the element that follows SUBELT is the oldest one. */
1720 elt->related_value = subelt_prev->related_value;
1721 subelt_prev->related_value = elt;
1725 return elt;
1728 /* Given two equivalence classes, CLASS1 and CLASS2, put all the entries from
1729 CLASS2 into CLASS1. This is done when we have reached an insn which makes
1730 the two classes equivalent.
1732 CLASS1 will be the surviving class; CLASS2 should not be used after this
1733 call.
1735 Any invalid entries in CLASS2 will not be copied. */
1737 static void
1738 merge_equiv_classes (class1, class2)
1739 struct table_elt *class1, *class2;
1741 struct table_elt *elt, *next, *new;
1743 /* Ensure we start with the head of the classes. */
1744 class1 = class1->first_same_value;
1745 class2 = class2->first_same_value;
1747 /* If they were already equal, forget it. */
1748 if (class1 == class2)
1749 return;
1751 for (elt = class2; elt; elt = next)
1753 unsigned int hash;
1754 rtx exp = elt->exp;
1755 enum machine_mode mode = elt->mode;
1757 next = elt->next_same_value;
1759 /* Remove old entry, make a new one in CLASS1's class.
1760 Don't do this for invalid entries as we cannot find their
1761 hash code (it also isn't necessary). */
1762 if (GET_CODE (exp) == REG || exp_equiv_p (exp, exp, 1, 0))
1764 hash_arg_in_memory = 0;
1765 hash = HASH (exp, mode);
1767 if (GET_CODE (exp) == REG)
1768 delete_reg_equiv (REGNO (exp));
1770 remove_from_table (elt, hash);
1772 if (insert_regs (exp, class1, 0))
1774 rehash_using_reg (exp);
1775 hash = HASH (exp, mode);
1777 new = insert (exp, class1, hash, mode);
1778 new->in_memory = hash_arg_in_memory;
1783 /* Flush the entire hash table. */
1785 static void
1786 flush_hash_table ()
1788 int i;
1789 struct table_elt *p;
1791 for (i = 0; i < HASH_SIZE; i++)
1792 for (p = table[i]; p; p = table[i])
1794 /* Note that invalidate can remove elements
1795 after P in the current hash chain. */
1796 if (GET_CODE (p->exp) == REG)
1797 invalidate (p->exp, p->mode);
1798 else
1799 remove_from_table (p, i);
1803 /* Function called for each rtx to check whether a true dependence exists. */
1804 struct check_dependence_data
1806 enum machine_mode mode;
1807 rtx exp;
1810 static int
1811 check_dependence (x, data)
1812 rtx *x;
1813 void *data;
1815 struct check_dependence_data *d = (struct check_dependence_data *) data;
1816 if (*x && GET_CODE (*x) == MEM)
1817 return true_dependence (d->exp, d->mode, *x, cse_rtx_varies_p);
1818 else
1819 return 0;
1822 /* Remove from the hash table, or mark as invalid, all expressions whose
1823 values could be altered by storing in X. X is a register, a subreg, or
1824 a memory reference with nonvarying address (because, when a memory
1825 reference with a varying address is stored in, all memory references are
1826 removed by invalidate_memory so specific invalidation is superfluous).
1827 FULL_MODE, if not VOIDmode, indicates that this much should be
1828 invalidated instead of just the amount indicated by the mode of X. This
1829 is only used for bitfield stores into memory.
1831 A nonvarying address may be just a register or just a symbol reference,
1832 or it may be either of those plus a numeric offset. */
1834 static void
1835 invalidate (x, full_mode)
1836 rtx x;
1837 enum machine_mode full_mode;
1839 int i;
1840 struct table_elt *p;
1842 switch (GET_CODE (x))
1844 case REG:
1846 /* If X is a register, dependencies on its contents are recorded
1847 through the qty number mechanism. Just change the qty number of
1848 the register, mark it as invalid for expressions that refer to it,
1849 and remove it itself. */
1850 unsigned int regno = REGNO (x);
1851 unsigned int hash = HASH (x, GET_MODE (x));
1853 /* Remove REGNO from any quantity list it might be on and indicate
1854 that its value might have changed. If it is a pseudo, remove its
1855 entry from the hash table.
1857 For a hard register, we do the first two actions above for any
1858 additional hard registers corresponding to X. Then, if any of these
1859 registers are in the table, we must remove any REG entries that
1860 overlap these registers. */
1862 delete_reg_equiv (regno);
1863 REG_TICK (regno)++;
1865 if (regno >= FIRST_PSEUDO_REGISTER)
1867 /* Because a register can be referenced in more than one mode,
1868 we might have to remove more than one table entry. */
1869 struct table_elt *elt;
1871 while ((elt = lookup_for_remove (x, hash, GET_MODE (x))))
1872 remove_from_table (elt, hash);
1874 else
1876 HOST_WIDE_INT in_table
1877 = TEST_HARD_REG_BIT (hard_regs_in_table, regno);
1878 unsigned int endregno
1879 = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
1880 unsigned int tregno, tendregno, rn;
1881 struct table_elt *p, *next;
1883 CLEAR_HARD_REG_BIT (hard_regs_in_table, regno);
1885 for (rn = regno + 1; rn < endregno; rn++)
1887 in_table |= TEST_HARD_REG_BIT (hard_regs_in_table, rn);
1888 CLEAR_HARD_REG_BIT (hard_regs_in_table, rn);
1889 delete_reg_equiv (rn);
1890 REG_TICK (rn)++;
1893 if (in_table)
1894 for (hash = 0; hash < HASH_SIZE; hash++)
1895 for (p = table[hash]; p; p = next)
1897 next = p->next_same_hash;
1899 if (GET_CODE (p->exp) != REG
1900 || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
1901 continue;
1903 tregno = REGNO (p->exp);
1904 tendregno
1905 = tregno + HARD_REGNO_NREGS (tregno, GET_MODE (p->exp));
1906 if (tendregno > regno && tregno < endregno)
1907 remove_from_table (p, hash);
1911 return;
1913 case SUBREG:
1914 invalidate (SUBREG_REG (x), VOIDmode);
1915 return;
1917 case PARALLEL:
1918 for (i = XVECLEN (x, 0) - 1; i >= 0; --i)
1919 invalidate (XVECEXP (x, 0, i), VOIDmode);
1920 return;
1922 case EXPR_LIST:
1923 /* This is part of a disjoint return value; extract the location in
1924 question ignoring the offset. */
1925 invalidate (XEXP (x, 0), VOIDmode);
1926 return;
1928 case MEM:
1929 /* Calculate the canonical version of X here so that
1930 true_dependence doesn't generate new RTL for X on each call. */
1931 x = canon_rtx (x);
1933 /* Remove all hash table elements that refer to overlapping pieces of
1934 memory. */
1935 if (full_mode == VOIDmode)
1936 full_mode = GET_MODE (x);
1938 for (i = 0; i < HASH_SIZE; i++)
1940 struct table_elt *next;
1942 for (p = table[i]; p; p = next)
1944 next = p->next_same_hash;
1945 if (p->in_memory)
1947 struct check_dependence_data d;
1949 /* Just canonicalize the expression once;
1950 otherwise each time we call invalidate
1951 true_dependence will canonicalize the
1952 expression again. */
1953 if (!p->canon_exp)
1954 p->canon_exp = canon_rtx (p->exp);
1955 d.exp = x;
1956 d.mode = full_mode;
1957 if (for_each_rtx (&p->canon_exp, check_dependence, &d))
1958 remove_from_table (p, i);
1962 return;
1964 default:
1965 abort ();
1969 /* Remove all expressions that refer to register REGNO,
1970 since they are already invalid, and we are about to
1971 mark that register valid again and don't want the old
1972 expressions to reappear as valid. */
1974 static void
1975 remove_invalid_refs (regno)
1976 unsigned int regno;
1978 unsigned int i;
1979 struct table_elt *p, *next;
1981 for (i = 0; i < HASH_SIZE; i++)
1982 for (p = table[i]; p; p = next)
1984 next = p->next_same_hash;
1985 if (GET_CODE (p->exp) != REG
1986 && refers_to_regno_p (regno, regno + 1, p->exp, (rtx *) 0))
1987 remove_from_table (p, i);
1991 /* Likewise for a subreg with subreg_reg REGNO, subreg_byte OFFSET,
1992 and mode MODE. */
1993 static void
1994 remove_invalid_subreg_refs (regno, offset, mode)
1995 unsigned int regno;
1996 unsigned int offset;
1997 enum machine_mode mode;
1999 unsigned int i;
2000 struct table_elt *p, *next;
2001 unsigned int end = offset + (GET_MODE_SIZE (mode) - 1);
2003 for (i = 0; i < HASH_SIZE; i++)
2004 for (p = table[i]; p; p = next)
2006 rtx exp = p->exp;
2007 next = p->next_same_hash;
2009 if (GET_CODE (exp) != REG
2010 && (GET_CODE (exp) != SUBREG
2011 || GET_CODE (SUBREG_REG (exp)) != REG
2012 || REGNO (SUBREG_REG (exp)) != regno
2013 || (((SUBREG_BYTE (exp)
2014 + (GET_MODE_SIZE (GET_MODE (exp)) - 1)) >= offset)
2015 && SUBREG_BYTE (exp) <= end))
2016 && refers_to_regno_p (regno, regno + 1, p->exp, (rtx *) 0))
2017 remove_from_table (p, i);
2021 /* Recompute the hash codes of any valid entries in the hash table that
2022 reference X, if X is a register, or SUBREG_REG (X) if X is a SUBREG.
2024 This is called when we make a jump equivalence. */
2026 static void
2027 rehash_using_reg (x)
2028 rtx x;
2030 unsigned int i;
2031 struct table_elt *p, *next;
2032 unsigned hash;
2034 if (GET_CODE (x) == SUBREG)
2035 x = SUBREG_REG (x);
2037 /* If X is not a register or if the register is known not to be in any
2038 valid entries in the table, we have no work to do. */
2040 if (GET_CODE (x) != REG
2041 || REG_IN_TABLE (REGNO (x)) < 0
2042 || REG_IN_TABLE (REGNO (x)) != REG_TICK (REGNO (x)))
2043 return;
2045 /* Scan all hash chains looking for valid entries that mention X.
2046 If we find one and it is in the wrong hash chain, move it. We can skip
2047 objects that are registers, since they are handled specially. */
2049 for (i = 0; i < HASH_SIZE; i++)
2050 for (p = table[i]; p; p = next)
2052 next = p->next_same_hash;
2053 if (GET_CODE (p->exp) != REG && reg_mentioned_p (x, p->exp)
2054 && exp_equiv_p (p->exp, p->exp, 1, 0)
2055 && i != (hash = safe_hash (p->exp, p->mode) & HASH_MASK))
2057 if (p->next_same_hash)
2058 p->next_same_hash->prev_same_hash = p->prev_same_hash;
2060 if (p->prev_same_hash)
2061 p->prev_same_hash->next_same_hash = p->next_same_hash;
2062 else
2063 table[i] = p->next_same_hash;
2065 p->next_same_hash = table[hash];
2066 p->prev_same_hash = 0;
2067 if (table[hash])
2068 table[hash]->prev_same_hash = p;
2069 table[hash] = p;
2074 /* Remove from the hash table any expression that is a call-clobbered
2075 register. Also update their TICK values. */
2077 static void
2078 invalidate_for_call ()
2080 unsigned int regno, endregno;
2081 unsigned int i;
2082 unsigned hash;
2083 struct table_elt *p, *next;
2084 int in_table = 0;
2086 /* Go through all the hard registers. For each that is clobbered in
2087 a CALL_INSN, remove the register from quantity chains and update
2088 reg_tick if defined. Also see if any of these registers is currently
2089 in the table. */
2091 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2092 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
2094 delete_reg_equiv (regno);
2095 if (REG_TICK (regno) >= 0)
2096 REG_TICK (regno)++;
2098 in_table |= (TEST_HARD_REG_BIT (hard_regs_in_table, regno) != 0);
2101 /* In the case where we have no call-clobbered hard registers in the
2102 table, we are done. Otherwise, scan the table and remove any
2103 entry that overlaps a call-clobbered register. */
2105 if (in_table)
2106 for (hash = 0; hash < HASH_SIZE; hash++)
2107 for (p = table[hash]; p; p = next)
2109 next = p->next_same_hash;
2111 if (GET_CODE (p->exp) != REG
2112 || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
2113 continue;
2115 regno = REGNO (p->exp);
2116 endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (p->exp));
2118 for (i = regno; i < endregno; i++)
2119 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
2121 remove_from_table (p, hash);
2122 break;
2127 /* Given an expression X of type CONST,
2128 and ELT which is its table entry (or 0 if it
2129 is not in the hash table),
2130 return an alternate expression for X as a register plus integer.
2131 If none can be found, return 0. */
2133 static rtx
2134 use_related_value (x, elt)
2135 rtx x;
2136 struct table_elt *elt;
2138 struct table_elt *relt = 0;
2139 struct table_elt *p, *q;
2140 HOST_WIDE_INT offset;
2142 /* First, is there anything related known?
2143 If we have a table element, we can tell from that.
2144 Otherwise, must look it up. */
2146 if (elt != 0 && elt->related_value != 0)
2147 relt = elt;
2148 else if (elt == 0 && GET_CODE (x) == CONST)
2150 rtx subexp = get_related_value (x);
2151 if (subexp != 0)
2152 relt = lookup (subexp,
2153 safe_hash (subexp, GET_MODE (subexp)) & HASH_MASK,
2154 GET_MODE (subexp));
2157 if (relt == 0)
2158 return 0;
2160 /* Search all related table entries for one that has an
2161 equivalent register. */
2163 p = relt;
2164 while (1)
2166 /* This loop is strange in that it is executed in two different cases.
2167 The first is when X is already in the table. Then it is searching
2168 the RELATED_VALUE list of X's class (RELT). The second case is when
2169 X is not in the table. Then RELT points to a class for the related
2170 value.
2172 Ensure that, whatever case we are in, we ignore classes that have
2173 the same value as X. */
2175 if (rtx_equal_p (x, p->exp))
2176 q = 0;
2177 else
2178 for (q = p->first_same_value; q; q = q->next_same_value)
2179 if (GET_CODE (q->exp) == REG)
2180 break;
2182 if (q)
2183 break;
2185 p = p->related_value;
2187 /* We went all the way around, so there is nothing to be found.
2188 Alternatively, perhaps RELT was in the table for some other reason
2189 and it has no related values recorded. */
2190 if (p == relt || p == 0)
2191 break;
2194 if (q == 0)
2195 return 0;
2197 offset = (get_integer_term (x) - get_integer_term (p->exp));
2198 /* Note: OFFSET may be 0 if P->exp and X are related by commutativity. */
2199 return plus_constant (q->exp, offset);
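/* For example (with a hypothetical symbol and register), if

	X = (const (plus (symbol_ref "s") (const_int 8)))

   and the table already knows that

	(const (plus (symbol_ref "s") (const_int 4)))

   is equivalent to (reg 100), then OFFSET is 8 - 4 = 4 and the value
   returned above is (plus (reg 100) (const_int 4)).  */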
2202 /* Hash a string. Just add its bytes up. */
2203 static inline unsigned
2204 canon_hash_string (ps)
2205 const char *ps;
2207 unsigned hash = 0;
2208 const unsigned char *p = (const unsigned char *) ps;
2210 if (p)
2211 while (*p)
2212 hash += *p++;
2214 return hash;
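/* A quick check of the scheme above, assuming ASCII:
   canon_hash_string ("ab") == 'a' + 'b' == 97 + 98 == 195, and
   canon_hash_string (NULL) == 0.  Collisions such as "ab" vs "ba" are
   acceptable, since this value only helps choose a hash bucket.  */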
2217 /* Hash an rtx. We are careful to make sure the value is never negative.
2218 Equivalent registers hash identically.
2219 MODE is used in hashing for CONST_INTs only;
2220 otherwise the mode of X is used.
2222 Store 1 in do_not_record if any subexpression is volatile.
2224 Store 1 in hash_arg_in_memory if X contains a MEM rtx
2225 which does not have the RTX_UNCHANGING_P bit set.
2227 Note that cse_insn knows that the hash code of a MEM expression
2228 is just (int) MEM plus the hash code of the address. */
2230 static unsigned
2231 canon_hash (x, mode)
2232 rtx x;
2233 enum machine_mode mode;
2235 int i, j;
2236 unsigned hash = 0;
2237 enum rtx_code code;
2238 const char *fmt;
2240 /* repeat is used to turn tail-recursion into iteration. */
2241 repeat:
2242 if (x == 0)
2243 return hash;
2245 code = GET_CODE (x);
2246 switch (code)
2248 case REG:
2250 unsigned int regno = REGNO (x);
2251 bool record;
2253 /* On some machines, we can't record any non-fixed hard register,
2254 because extending its life will cause reload problems. We
2255 consider ap, fp, sp, gp to be fixed for this purpose.
2257 We also consider CCmode registers to be fixed for this purpose;
2258 failure to do so leads to failure to simplify 0 < 100 type
2259 conditionals.
2261 On all machines, we can't record any global registers.
2262 Nor should we record any register that is in a small
2263 class, as defined by CLASS_LIKELY_SPILLED_P. */
2265 if (regno >= FIRST_PSEUDO_REGISTER)
2266 record = true;
2267 else if (x == frame_pointer_rtx
2268 || x == hard_frame_pointer_rtx
2269 || x == arg_pointer_rtx
2270 || x == stack_pointer_rtx
2271 || x == pic_offset_table_rtx)
2272 record = true;
2273 else if (global_regs[regno])
2274 record = false;
2275 else if (fixed_regs[regno])
2276 record = true;
2277 else if (GET_MODE_CLASS (GET_MODE (x)) == MODE_CC)
2278 record = true;
2279 else if (SMALL_REGISTER_CLASSES)
2280 record = false;
2281 else if (CLASS_LIKELY_SPILLED_P (REGNO_REG_CLASS (regno)))
2282 record = false;
2283 else
2284 record = true;
2286 if (!record)
2288 do_not_record = 1;
2289 return 0;
2292 hash += ((unsigned) REG << 7) + (unsigned) REG_QTY (regno);
2293 return hash;
2296 /* We handle SUBREG of a REG specially because the underlying
2297 reg changes its hash value with every value change; we don't
2298 want to have to forget unrelated subregs when one subreg changes. */
2299 case SUBREG:
2301 if (GET_CODE (SUBREG_REG (x)) == REG)
2303 hash += (((unsigned) SUBREG << 7)
2304 + REGNO (SUBREG_REG (x))
2305 + (SUBREG_BYTE (x) / UNITS_PER_WORD));
2306 return hash;
2308 break;
2311 case CONST_INT:
2313 unsigned HOST_WIDE_INT tem = INTVAL (x);
2314 hash += ((unsigned) CONST_INT << 7) + (unsigned) mode + tem;
2315 return hash;
2318 case CONST_DOUBLE:
2319 /* This is like the general case, except that it only counts
2320 the integers representing the constant. */
2321 hash += (unsigned) code + (unsigned) GET_MODE (x);
2322 if (GET_MODE (x) != VOIDmode)
2323 for (i = 2; i < GET_RTX_LENGTH (CONST_DOUBLE); i++)
2325 unsigned HOST_WIDE_INT tem = XWINT (x, i);
2326 hash += tem;
2328 else
2329 hash += ((unsigned) CONST_DOUBLE_LOW (x)
2330 + (unsigned) CONST_DOUBLE_HIGH (x));
2331 return hash;
2333 case CONST_VECTOR:
2335 int units;
2336 rtx elt;
2338 units = CONST_VECTOR_NUNITS (x);
2340 for (i = 0; i < units; ++i)
2342 elt = CONST_VECTOR_ELT (x, i);
2343 hash += canon_hash (elt, GET_MODE (elt));
2346 return hash;
2349 /* Assume there is only one rtx object for any given label. */
2350 case LABEL_REF:
2351 hash += ((unsigned) LABEL_REF << 7) + (unsigned long) XEXP (x, 0);
2352 return hash;
2354 case SYMBOL_REF:
2355 hash += ((unsigned) SYMBOL_REF << 7) + (unsigned long) XSTR (x, 0);
2356 return hash;
2358 case MEM:
2359 /* We don't record if marked volatile or if BLKmode since we don't
2360 know the size of the move. */
2361 if (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode)
2363 do_not_record = 1;
2364 return 0;
2366 if (! RTX_UNCHANGING_P (x) || FIXED_BASE_PLUS_P (XEXP (x, 0)))
2368 hash_arg_in_memory = 1;
2370 /* Now that we have already found this special case,
2371 might as well speed it up as much as possible. */
2372 hash += (unsigned) MEM;
2373 x = XEXP (x, 0);
2374 goto repeat;
2376 case USE:
2377 /* A USE that mentions non-volatile memory needs special
2378 handling since the MEM may be BLKmode which normally
2379 prevents an entry from being made. Pure calls are
2380 marked by a USE which mentions BLKmode memory. */
2381 if (GET_CODE (XEXP (x, 0)) == MEM
2382 && ! MEM_VOLATILE_P (XEXP (x, 0)))
2384 hash += (unsigned) USE;
2385 x = XEXP (x, 0);
2387 if (! RTX_UNCHANGING_P (x) || FIXED_BASE_PLUS_P (XEXP (x, 0)))
2388 hash_arg_in_memory = 1;
2390 /* Now that we have already found this special case,
2391 might as well speed it up as much as possible. */
2392 hash += (unsigned) MEM;
2393 x = XEXP (x, 0);
2394 goto repeat;
2396 break;
2398 case PRE_DEC:
2399 case PRE_INC:
2400 case POST_DEC:
2401 case POST_INC:
2402 case PRE_MODIFY:
2403 case POST_MODIFY:
2404 case PC:
2405 case CC0:
2406 case CALL:
2407 case UNSPEC_VOLATILE:
2408 do_not_record = 1;
2409 return 0;
2411 case ASM_OPERANDS:
2412 if (MEM_VOLATILE_P (x))
2414 do_not_record = 1;
2415 return 0;
2417 else
2419 /* We don't want to take the filename and line into account. */
2420 hash += (unsigned) code + (unsigned) GET_MODE (x)
2421 + canon_hash_string (ASM_OPERANDS_TEMPLATE (x))
2422 + canon_hash_string (ASM_OPERANDS_OUTPUT_CONSTRAINT (x))
2423 + (unsigned) ASM_OPERANDS_OUTPUT_IDX (x);
2425 if (ASM_OPERANDS_INPUT_LENGTH (x))
2427 for (i = 1; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
2429 hash += (canon_hash (ASM_OPERANDS_INPUT (x, i),
2430 GET_MODE (ASM_OPERANDS_INPUT (x, i)))
2431 + canon_hash_string (ASM_OPERANDS_INPUT_CONSTRAINT
2432 (x, i)));
2435 hash += canon_hash_string (ASM_OPERANDS_INPUT_CONSTRAINT (x, 0));
2436 x = ASM_OPERANDS_INPUT (x, 0);
2437 mode = GET_MODE (x);
2438 goto repeat;
2441 return hash;
2443 break;
2445 default:
2446 break;
2449 i = GET_RTX_LENGTH (code) - 1;
2450 hash += (unsigned) code + (unsigned) GET_MODE (x);
2451 fmt = GET_RTX_FORMAT (code);
2452 for (; i >= 0; i--)
2454 if (fmt[i] == 'e')
2456 rtx tem = XEXP (x, i);
2458 /* If we are about to do the last recursive call
2459 needed at this level, change it into iteration.
2460 This function is called enough to be worth it. */
2461 if (i == 0)
2463 x = tem;
2464 goto repeat;
2466 hash += canon_hash (tem, 0);
2468 else if (fmt[i] == 'E')
2469 for (j = 0; j < XVECLEN (x, i); j++)
2470 hash += canon_hash (XVECEXP (x, i, j), 0);
2471 else if (fmt[i] == 's')
2472 hash += canon_hash_string (XSTR (x, i));
2473 else if (fmt[i] == 'i')
2475 unsigned tem = XINT (x, i);
2476 hash += tem;
2478 else if (fmt[i] == '0' || fmt[i] == 't')
2479 /* Unused. */
2481 else
2482 abort ();
2484 return hash;
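/* Note how the REG case above makes equivalent registers hash identically:
   the hash mixes in REG_QTY (REGNO (x)) rather than the register number
   itself.  For instance (hypothetical numbers), if pseudos 100 and 105
   currently share quantity 7, both hash to

	((unsigned) REG << 7) + 7

   so an expression mentioning either register lands in the same bucket and
   can be recognized as equivalent.  */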
2487 /* Like canon_hash but with no side effects. */
2489 static unsigned
2490 safe_hash (x, mode)
2491 rtx x;
2492 enum machine_mode mode;
2494 int save_do_not_record = do_not_record;
2495 int save_hash_arg_in_memory = hash_arg_in_memory;
2496 unsigned hash = canon_hash (x, mode);
2497 hash_arg_in_memory = save_hash_arg_in_memory;
2498 do_not_record = save_do_not_record;
2499 return hash;
2502 /* Return 1 iff X and Y would canonicalize into the same thing,
2503 without actually constructing the canonicalization of either one.
2504 If VALIDATE is nonzero,
2505 we assume X is an expression being processed from the rtl
2506 and Y was found in the hash table. We check register refs
2507 in Y for being marked as valid.
2509 If EQUAL_VALUES is nonzero, we allow a register to match a constant value
2510 that is known to be in the register. Ordinarily, we don't allow them
2511 to match, because letting them match would cause unpredictable results
2512 in all the places that search a hash table chain for an equivalent
2513 for a given value. A possible equivalent that has different structure
2514 has its hash code computed from different data. Whether the hash code
2515 is the same as that of the given value is pure luck. */
2517 static int
2518 exp_equiv_p (x, y, validate, equal_values)
2519 rtx x, y;
2520 int validate;
2521 int equal_values;
2523 int i, j;
2524 enum rtx_code code;
2525 const char *fmt;
2527 /* Note: it is incorrect to assume an expression is equivalent to itself
2528 if VALIDATE is nonzero. */
2529 if (x == y && !validate)
2530 return 1;
2531 if (x == 0 || y == 0)
2532 return x == y;
2534 code = GET_CODE (x);
2535 if (code != GET_CODE (y))
2537 if (!equal_values)
2538 return 0;
2540 /* If X is a constant and Y is a register or vice versa, they may be
2541 equivalent. We only have to validate if Y is a register. */
2542 if (CONSTANT_P (x) && GET_CODE (y) == REG
2543 && REGNO_QTY_VALID_P (REGNO (y)))
2545 int y_q = REG_QTY (REGNO (y));
2546 struct qty_table_elem *y_ent = &qty_table[y_q];
2548 if (GET_MODE (y) == y_ent->mode
2549 && rtx_equal_p (x, y_ent->const_rtx)
2550 && (! validate || REG_IN_TABLE (REGNO (y)) == REG_TICK (REGNO (y))))
2551 return 1;
2554 if (CONSTANT_P (y) && code == REG
2555 && REGNO_QTY_VALID_P (REGNO (x)))
2557 int x_q = REG_QTY (REGNO (x));
2558 struct qty_table_elem *x_ent = &qty_table[x_q];
2560 if (GET_MODE (x) == x_ent->mode
2561 && rtx_equal_p (y, x_ent->const_rtx))
2562 return 1;
2565 return 0;
2568 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
2569 if (GET_MODE (x) != GET_MODE (y))
2570 return 0;
2572 switch (code)
2574 case PC:
2575 case CC0:
2576 case CONST_INT:
2577 return x == y;
2579 case LABEL_REF:
2580 return XEXP (x, 0) == XEXP (y, 0);
2582 case SYMBOL_REF:
2583 return XSTR (x, 0) == XSTR (y, 0);
2585 case REG:
2587 unsigned int regno = REGNO (y);
2588 unsigned int endregno
2589 = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
2590 : HARD_REGNO_NREGS (regno, GET_MODE (y)));
2591 unsigned int i;
2593 /* If the quantities are not the same, the expressions are not
2594 equivalent. If they are and we are not to validate, they
2595 are equivalent. Otherwise, ensure all regs are up-to-date. */
2597 if (REG_QTY (REGNO (x)) != REG_QTY (regno))
2598 return 0;
2600 if (! validate)
2601 return 1;
2603 for (i = regno; i < endregno; i++)
2604 if (REG_IN_TABLE (i) != REG_TICK (i))
2605 return 0;
2607 return 1;
2610 /* For commutative operations, check both orders. */
2611 case PLUS:
2612 case MULT:
2613 case AND:
2614 case IOR:
2615 case XOR:
2616 case NE:
2617 case EQ:
2618 return ((exp_equiv_p (XEXP (x, 0), XEXP (y, 0), validate, equal_values)
2619 && exp_equiv_p (XEXP (x, 1), XEXP (y, 1),
2620 validate, equal_values))
2621 || (exp_equiv_p (XEXP (x, 0), XEXP (y, 1),
2622 validate, equal_values)
2623 && exp_equiv_p (XEXP (x, 1), XEXP (y, 0),
2624 validate, equal_values)));
2626 case ASM_OPERANDS:
2627 /* We don't use the generic code below because we want to
2628 disregard filename and line numbers. */
2630 /* A volatile asm isn't equivalent to any other. */
2631 if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
2632 return 0;
2634 if (GET_MODE (x) != GET_MODE (y)
2635 || strcmp (ASM_OPERANDS_TEMPLATE (x), ASM_OPERANDS_TEMPLATE (y))
2636 || strcmp (ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
2637 ASM_OPERANDS_OUTPUT_CONSTRAINT (y))
2638 || ASM_OPERANDS_OUTPUT_IDX (x) != ASM_OPERANDS_OUTPUT_IDX (y)
2639 || ASM_OPERANDS_INPUT_LENGTH (x) != ASM_OPERANDS_INPUT_LENGTH (y))
2640 return 0;
2642 if (ASM_OPERANDS_INPUT_LENGTH (x))
2644 for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
2645 if (! exp_equiv_p (ASM_OPERANDS_INPUT (x, i),
2646 ASM_OPERANDS_INPUT (y, i),
2647 validate, equal_values)
2648 || strcmp (ASM_OPERANDS_INPUT_CONSTRAINT (x, i),
2649 ASM_OPERANDS_INPUT_CONSTRAINT (y, i)))
2650 return 0;
2653 return 1;
2655 default:
2656 break;
2659 /* Compare the elements. If any pair of corresponding elements
2660 fails to match, return 0 for the whole thing. */
2662 fmt = GET_RTX_FORMAT (code);
2663 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2665 switch (fmt[i])
2667 case 'e':
2668 if (! exp_equiv_p (XEXP (x, i), XEXP (y, i), validate, equal_values))
2669 return 0;
2670 break;
2672 case 'E':
2673 if (XVECLEN (x, i) != XVECLEN (y, i))
2674 return 0;
2675 for (j = 0; j < XVECLEN (x, i); j++)
2676 if (! exp_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j),
2677 validate, equal_values))
2678 return 0;
2679 break;
2681 case 's':
2682 if (strcmp (XSTR (x, i), XSTR (y, i)))
2683 return 0;
2684 break;
2686 case 'i':
2687 if (XINT (x, i) != XINT (y, i))
2688 return 0;
2689 break;
2691 case 'w':
2692 if (XWINT (x, i) != XWINT (y, i))
2693 return 0;
2694 break;
2696 case '0':
2697 case 't':
2698 break;
2700 default:
2701 abort ();
2705 return 1;
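/* Two consequences of the rules above, using hypothetical pseudos:
   (plus:SI (reg 100) (reg 101)) and (plus:SI (reg 101) (reg 100)) are
   considered equivalent, because both operand orders are tried for
   commutative codes; (mult:SI (reg 100) (reg 101)) and
   (mult:HI (reg 100) (reg 101)) are not, because their modes differ.  */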
2708 /* Return 1 if X has a value that can vary even between two
2709 executions of the program. 0 means X can be compared reliably
2710 against certain constants or near-constants. */
2712 static int
2713 cse_rtx_varies_p (x, from_alias)
2714 rtx x;
2715 int from_alias;
2717 /* We need not check for X and the equivalence class being of the same
2718 mode because if X is equivalent to a constant in some mode, it
2719 doesn't vary in any mode. */
2721 if (GET_CODE (x) == REG
2722 && REGNO_QTY_VALID_P (REGNO (x)))
2724 int x_q = REG_QTY (REGNO (x));
2725 struct qty_table_elem *x_ent = &qty_table[x_q];
2727 if (GET_MODE (x) == x_ent->mode
2728 && x_ent->const_rtx != NULL_RTX)
2729 return 0;
2732 if (GET_CODE (x) == PLUS
2733 && GET_CODE (XEXP (x, 1)) == CONST_INT
2734 && GET_CODE (XEXP (x, 0)) == REG
2735 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
2737 int x0_q = REG_QTY (REGNO (XEXP (x, 0)));
2738 struct qty_table_elem *x0_ent = &qty_table[x0_q];
2740 if ((GET_MODE (XEXP (x, 0)) == x0_ent->mode)
2741 && x0_ent->const_rtx != NULL_RTX)
2742 return 0;
2745 /* This can happen as the result of virtual register instantiation, if
2746 the initial constant is too large to be a valid address. This gives
2747 us a three instruction sequence, load large offset into a register,
2748 load fp minus a constant into a register, then a MEM which is the
2749 sum of the two `constant' registers. */
2750 if (GET_CODE (x) == PLUS
2751 && GET_CODE (XEXP (x, 0)) == REG
2752 && GET_CODE (XEXP (x, 1)) == REG
2753 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0)))
2754 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
2756 int x0_q = REG_QTY (REGNO (XEXP (x, 0)));
2757 int x1_q = REG_QTY (REGNO (XEXP (x, 1)));
2758 struct qty_table_elem *x0_ent = &qty_table[x0_q];
2759 struct qty_table_elem *x1_ent = &qty_table[x1_q];
2761 if ((GET_MODE (XEXP (x, 0)) == x0_ent->mode)
2762 && x0_ent->const_rtx != NULL_RTX
2763 && (GET_MODE (XEXP (x, 1)) == x1_ent->mode)
2764 && x1_ent->const_rtx != NULL_RTX)
2765 return 0;
2768 return rtx_varies_p (x, from_alias);
2771 /* Canonicalize an expression:
2772 replace each register reference inside it
2773 with the "oldest" equivalent register.
2775 If INSN is non-zero and we are replacing a pseudo with a hard register
2776 or vice versa, validate_change is used to ensure that INSN remains valid
2777 after we make our substitution. The calls are made with IN_GROUP non-zero
2778 so apply_change_group must be called upon the outermost return from this
2779 function (unless INSN is zero). The result of apply_change_group can
2780 generally be discarded since the changes we are making are optional. */
2782 static rtx
2783 canon_reg (x, insn)
2784 rtx x;
2785 rtx insn;
2787 int i;
2788 enum rtx_code code;
2789 const char *fmt;
2791 if (x == 0)
2792 return x;
2794 code = GET_CODE (x);
2795 switch (code)
2797 case PC:
2798 case CC0:
2799 case CONST:
2800 case CONST_INT:
2801 case CONST_DOUBLE:
2802 case CONST_VECTOR:
2803 case SYMBOL_REF:
2804 case LABEL_REF:
2805 case ADDR_VEC:
2806 case ADDR_DIFF_VEC:
2807 return x;
2809 case REG:
2811 int first;
2812 int q;
2813 struct qty_table_elem *ent;
2815 /* Never replace a hard reg, because hard regs can appear
2816 in more than one machine mode, and we must preserve the mode
2817 of each occurrence. Also, some hard regs appear in
2818 MEMs that are shared and mustn't be altered. Don't try to
2819 replace any reg that maps to a reg of class NO_REGS. */
2820 if (REGNO (x) < FIRST_PSEUDO_REGISTER
2821 || ! REGNO_QTY_VALID_P (REGNO (x)))
2822 return x;
2824 q = REG_QTY (REGNO (x));
2825 ent = &qty_table[q];
2826 first = ent->first_reg;
2827 return (first >= FIRST_PSEUDO_REGISTER ? regno_reg_rtx[first]
2828 : REGNO_REG_CLASS (first) == NO_REGS ? x
2829 : gen_rtx_REG (ent->mode, first));
2832 default:
2833 break;
2836 fmt = GET_RTX_FORMAT (code);
2837 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2839 int j;
2841 if (fmt[i] == 'e')
2843 rtx new = canon_reg (XEXP (x, i), insn);
2844 int insn_code;
2846 /* If replacing pseudo with hard reg or vice versa, ensure the
2847 insn remains valid. Likewise if the insn has MATCH_DUPs. */
2848 if (insn != 0 && new != 0
2849 && GET_CODE (new) == REG && GET_CODE (XEXP (x, i)) == REG
2850 && (((REGNO (new) < FIRST_PSEUDO_REGISTER)
2851 != (REGNO (XEXP (x, i)) < FIRST_PSEUDO_REGISTER))
2852 || (insn_code = recog_memoized (insn)) < 0
2853 || insn_data[insn_code].n_dups > 0))
2854 validate_change (insn, &XEXP (x, i), new, 1);
2855 else
2856 XEXP (x, i) = new;
2858 else if (fmt[i] == 'E')
2859 for (j = 0; j < XVECLEN (x, i); j++)
2860 XVECEXP (x, i, j) = canon_reg (XVECEXP (x, i, j), insn);
2863 return x;
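/* For example (hypothetical registers), if pseudos 100 and 105 belong to the
   same quantity and 100 is the quantity's first_reg, canon_reg rewrites a use
   of (reg:SI 105) as (reg:SI 100).  Hard registers and registers whose class
   is NO_REGS are returned unchanged, and when INSN is nonzero a replacement
   of a pseudo by a hard register is funneled through validate_change so the
   insn stays recognizable.  */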
2866 /* LOC is a location within INSN that is an operand address (the contents of
2867 a MEM). Find the best equivalent address to use that is valid for this
2868 insn.
2870 On most CISC machines, complicated address modes are costly, and rtx_cost
2871 is a good approximation for that cost. However, most RISC machines have
2872 only a few (usually only one) memory reference formats. If an address is
2873 valid at all, it is often just as cheap as any other address. Hence, for
2874 RISC machines, we use the configuration macro `ADDRESS_COST' to compare the
2875 costs of various addresses. For two addresses of equal cost, choose the one
2876 with the highest `rtx_cost' value as that has the potential of eliminating
2877 the most insns. For equal costs, we choose the first in the equivalence
2878 class. Note that we ignore the fact that pseudo registers are cheaper
2879 than hard registers here because we would also prefer the pseudo registers.
2882 static void
2883 find_best_addr (insn, loc, mode)
2884 rtx insn;
2885 rtx *loc;
2886 enum machine_mode mode;
2888 struct table_elt *elt;
2889 rtx addr = *loc;
2890 #ifdef ADDRESS_COST
2891 struct table_elt *p;
2892 int found_better = 1;
2893 #endif
2894 int save_do_not_record = do_not_record;
2895 int save_hash_arg_in_memory = hash_arg_in_memory;
2896 int addr_volatile;
2897 int regno;
2898 unsigned hash;
2900 /* Do not try to replace constant addresses or addresses of local and
2901 argument slots. These MEM expressions are made only once and inserted
2902 in many instructions, as well as being used to control symbol table
2903 output. It is not safe to clobber them.
2905 There are some uncommon cases where the address is already in a register
2906 for some reason, but we cannot take advantage of that because we have
2907 no easy way to unshare the MEM. In addition, looking up all stack
2908 addresses is costly. */
2909 if ((GET_CODE (addr) == PLUS
2910 && GET_CODE (XEXP (addr, 0)) == REG
2911 && GET_CODE (XEXP (addr, 1)) == CONST_INT
2912 && (regno = REGNO (XEXP (addr, 0)),
2913 regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM
2914 || regno == ARG_POINTER_REGNUM))
2915 || (GET_CODE (addr) == REG
2916 && (regno = REGNO (addr), regno == FRAME_POINTER_REGNUM
2917 || regno == HARD_FRAME_POINTER_REGNUM
2918 || regno == ARG_POINTER_REGNUM))
2919 || GET_CODE (addr) == ADDRESSOF
2920 || CONSTANT_ADDRESS_P (addr))
2921 return;
2923 /* If this address is not simply a register, try to fold it. This will
2924 sometimes simplify the expression. Many simplifications
2925 will not be valid, but some, usually applying the associative rule, will
2926 be valid and produce better code. */
2927 if (GET_CODE (addr) != REG)
2929 rtx folded = fold_rtx (copy_rtx (addr), NULL_RTX);
2930 int addr_folded_cost = address_cost (folded, mode);
2931 int addr_cost = address_cost (addr, mode);
2933 if ((addr_folded_cost < addr_cost
2934 || (addr_folded_cost == addr_cost
2935 /* ??? The rtx_cost comparison is left over from an older
2936 version of this code. It is probably no longer helpful. */
2937 && (rtx_cost (folded, MEM) > rtx_cost (addr, MEM)
2938 || approx_reg_cost (folded) < approx_reg_cost (addr))))
2939 && validate_change (insn, loc, folded, 0))
2940 addr = folded;
2943 /* If this address is not in the hash table, we can't look for equivalences
2944 of the whole address. Also, ignore if volatile. */
2946 do_not_record = 0;
2947 hash = HASH (addr, Pmode);
2948 addr_volatile = do_not_record;
2949 do_not_record = save_do_not_record;
2950 hash_arg_in_memory = save_hash_arg_in_memory;
2952 if (addr_volatile)
2953 return;
2955 elt = lookup (addr, hash, Pmode);
2957 #ifndef ADDRESS_COST
2958 if (elt)
2960 int our_cost = elt->cost;
2962 /* Find the lowest cost below ours that works. */
2963 for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
2964 if (elt->cost < our_cost
2965 && (GET_CODE (elt->exp) == REG
2966 || exp_equiv_p (elt->exp, elt->exp, 1, 0))
2967 && validate_change (insn, loc,
2968 canon_reg (copy_rtx (elt->exp), NULL_RTX), 0))
2969 return;
2971 #else
2973 if (elt)
2975 /* We need to find the best (under the criteria documented above) entry
2976 in the class that is valid. We use the `flag' field to indicate
2977 choices that were invalid and iterate until we can't find a better
2978 one that hasn't already been tried. */
2980 for (p = elt->first_same_value; p; p = p->next_same_value)
2981 p->flag = 0;
2983 while (found_better)
2985 int best_addr_cost = address_cost (*loc, mode);
2986 int best_rtx_cost = (elt->cost + 1) >> 1;
2987 int exp_cost;
2988 struct table_elt *best_elt = elt;
2990 found_better = 0;
2991 for (p = elt->first_same_value; p; p = p->next_same_value)
2992 if (! p->flag)
2994 if ((GET_CODE (p->exp) == REG
2995 || exp_equiv_p (p->exp, p->exp, 1, 0))
2996 && ((exp_cost = address_cost (p->exp, mode)) < best_addr_cost
2997 || (exp_cost == best_addr_cost
2998 && ((p->cost + 1) >> 1) > best_rtx_cost)))
3000 found_better = 1;
3001 best_addr_cost = exp_cost;
3002 best_rtx_cost = (p->cost + 1) >> 1;
3003 best_elt = p;
3007 if (found_better)
3009 if (validate_change (insn, loc,
3010 canon_reg (copy_rtx (best_elt->exp),
3011 NULL_RTX), 0))
3012 return;
3013 else
3014 best_elt->flag = 1;
3019 /* If the address is a binary operation with the first operand a register
3020 and the second a constant, do the same as above, but looking for
3021 equivalences of the register. Then try to simplify before checking for
3022 the best address to use. This catches a few cases: First is when we
3023 have REG+const and the register is another REG+const. We can often merge
3024 the constants and eliminate one insn and one register. It may also be
3025 that a machine has a cheap REG+REG+const. Finally, this improves the
3026 code on the Alpha for unaligned byte stores. */
3028 if (flag_expensive_optimizations
3029 && (GET_RTX_CLASS (GET_CODE (*loc)) == '2'
3030 || GET_RTX_CLASS (GET_CODE (*loc)) == 'c')
3031 && GET_CODE (XEXP (*loc, 0)) == REG
3032 && GET_CODE (XEXP (*loc, 1)) == CONST_INT)
3034 rtx c = XEXP (*loc, 1);
3036 do_not_record = 0;
3037 hash = HASH (XEXP (*loc, 0), Pmode);
3038 do_not_record = save_do_not_record;
3039 hash_arg_in_memory = save_hash_arg_in_memory;
3041 elt = lookup (XEXP (*loc, 0), hash, Pmode);
3042 if (elt == 0)
3043 return;
3045 /* We need to find the best (under the criteria documented above) entry
3046 in the class that is valid. We use the `flag' field to indicate
3047 choices that were invalid and iterate until we can't find a better
3048 one that hasn't already been tried. */
3050 for (p = elt->first_same_value; p; p = p->next_same_value)
3051 p->flag = 0;
3053 while (found_better)
3055 int best_addr_cost = address_cost (*loc, mode);
3056 int best_rtx_cost = (COST (*loc) + 1) >> 1;
3057 struct table_elt *best_elt = elt;
3058 rtx best_rtx = *loc;
3059 int count;
3061 /* This is at worst case an O(n^2) algorithm, so limit our search
3062 to the first 32 elements on the list. This avoids trouble
3063 compiling code with very long basic blocks that can easily
3064 call simplify_gen_binary so many times that we run out of
3065 memory. */
3067 found_better = 0;
3068 for (p = elt->first_same_value, count = 0;
3069 p && count < 32;
3070 p = p->next_same_value, count++)
3071 if (! p->flag
3072 && (GET_CODE (p->exp) == REG
3073 || exp_equiv_p (p->exp, p->exp, 1, 0)))
3075 rtx new = simplify_gen_binary (GET_CODE (*loc), Pmode,
3076 p->exp, c);
3077 int new_cost;
3078 new_cost = address_cost (new, mode);
3080 if (new_cost < best_addr_cost
3081 || (new_cost == best_addr_cost
3082 && (COST (new) + 1) >> 1 > best_rtx_cost))
3084 found_better = 1;
3085 best_addr_cost = new_cost;
3086 best_rtx_cost = (COST (new) + 1) >> 1;
3087 best_elt = p;
3088 best_rtx = new;
3092 if (found_better)
3094 if (validate_change (insn, loc,
3095 canon_reg (copy_rtx (best_rtx),
3096 NULL_RTX), 0))
3097 return;
3098 else
3099 best_elt->flag = 1;
3103 #endif
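/* As an example of the REG+const case above (hypothetical registers),
   suppose *LOC is (plus (reg 100) (const_int 8)) and the table knows that
   (reg 100) is equivalent to (plus (reg fp) (const_int 16)).
   simplify_gen_binary can then yield (plus (reg fp) (const_int 24)), which
   is tried as a replacement address if it turns out cheaper under
   ADDRESS_COST.  */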
3106 /* Given an operation (CODE, *PARG1, *PARG2), where code is a comparison
3107 operation (EQ, NE, GT, etc.), follow it back through the hash table to find
3108 what values are actually being compared.
3110 *PARG1 and *PARG2 are updated to contain the rtx representing the values
3111 actually being compared. For example, if *PARG1 was (cc0) and *PARG2
3112 was (const_int 0), *PARG1 and *PARG2 will be set to the objects that were
3113 compared to produce cc0.
3115 The return value is the comparison operator and is either the code of
3116 A or the code corresponding to the inverse of the comparison. */
3118 static enum rtx_code
3119 find_comparison_args (code, parg1, parg2, pmode1, pmode2)
3120 enum rtx_code code;
3121 rtx *parg1, *parg2;
3122 enum machine_mode *pmode1, *pmode2;
3124 rtx arg1, arg2;
3126 arg1 = *parg1, arg2 = *parg2;
3128 /* If ARG2 is const0_rtx, see what ARG1 is equivalent to. */
3130 while (arg2 == CONST0_RTX (GET_MODE (arg1)))
3132 /* Set non-zero when we find something of interest. */
3133 rtx x = 0;
3134 int reverse_code = 0;
3135 struct table_elt *p = 0;
3137 /* If arg1 is a COMPARE, extract the comparison arguments from it.
3138 On machines with CC0, this is the only case that can occur, since
3139 fold_rtx will return the COMPARE or item being compared with zero
3140 when given CC0. */
3142 if (GET_CODE (arg1) == COMPARE && arg2 == const0_rtx)
3143 x = arg1;
3145 /* If ARG1 is a comparison operator and CODE is testing for
3146 STORE_FLAG_VALUE, get the inner arguments. */
3148 else if (GET_RTX_CLASS (GET_CODE (arg1)) == '<')
3150 if (code == NE
3151 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
3152 && code == LT && STORE_FLAG_VALUE == -1)
3153 #ifdef FLOAT_STORE_FLAG_VALUE
3154 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_FLOAT
3155 && (REAL_VALUE_NEGATIVE
3156 (FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)))))
3157 #endif
3159 x = arg1;
3160 else if (code == EQ
3161 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
3162 && code == GE && STORE_FLAG_VALUE == -1)
3163 #ifdef FLOAT_STORE_FLAG_VALUE
3164 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_FLOAT
3165 && (REAL_VALUE_NEGATIVE
3166 (FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)))))
3167 #endif
3169 x = arg1, reverse_code = 1;
3172 /* ??? We could also check for
3174 (ne (and (eq (...) (const_int 1))) (const_int 0))
3176 and related forms, but let's wait until we see them occurring. */
3178 if (x == 0)
3179 /* Look up ARG1 in the hash table and see if it has an equivalence
3180 that lets us see what is being compared. */
3181 p = lookup (arg1, safe_hash (arg1, GET_MODE (arg1)) & HASH_MASK,
3182 GET_MODE (arg1));
3183 if (p)
3185 p = p->first_same_value;
3187 /* If what we compare is already known to be constant, that is as
3188 good as it gets.
3189 We need to break the loop in this case, because otherwise we
3190 can have an infinite loop when looking at a reg that is known
3191 to be a constant which is the same as a comparison of a reg
3192 against zero which appears later in the insn stream, which in
3193 turn is constant and the same as the comparison of the first reg
3194 against zero... */
3195 if (p->is_const)
3196 break;
3199 for (; p; p = p->next_same_value)
3201 enum machine_mode inner_mode = GET_MODE (p->exp);
3203 /* If the entry isn't valid, skip it. */
3204 if (! exp_equiv_p (p->exp, p->exp, 1, 0))
3205 continue;
3207 if (GET_CODE (p->exp) == COMPARE
3208 /* Another possibility is that this machine has a compare insn
3209 that includes the comparison code. In that case, ARG1 would
3210 be equivalent to a comparison operation that would set ARG1 to
3211 either STORE_FLAG_VALUE or zero. If this is an NE operation,
3212 ORIG_CODE is the actual comparison being done; if it is an EQ,
3213 we must reverse ORIG_CODE. On machine with a negative value
3214 for STORE_FLAG_VALUE, also look at LT and GE operations. */
3215 || ((code == NE
3216 || (code == LT
3217 && GET_MODE_CLASS (inner_mode) == MODE_INT
3218 && (GET_MODE_BITSIZE (inner_mode)
3219 <= HOST_BITS_PER_WIDE_INT)
3220 && (STORE_FLAG_VALUE
3221 & ((HOST_WIDE_INT) 1
3222 << (GET_MODE_BITSIZE (inner_mode) - 1))))
3223 #ifdef FLOAT_STORE_FLAG_VALUE
3224 || (code == LT
3225 && GET_MODE_CLASS (inner_mode) == MODE_FLOAT
3226 && (REAL_VALUE_NEGATIVE
3227 (FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)))))
3228 #endif
3230 && GET_RTX_CLASS (GET_CODE (p->exp)) == '<'))
3232 x = p->exp;
3233 break;
3235 else if ((code == EQ
3236 || (code == GE
3237 && GET_MODE_CLASS (inner_mode) == MODE_INT
3238 && (GET_MODE_BITSIZE (inner_mode)
3239 <= HOST_BITS_PER_WIDE_INT)
3240 && (STORE_FLAG_VALUE
3241 & ((HOST_WIDE_INT) 1
3242 << (GET_MODE_BITSIZE (inner_mode) - 1))))
3243 #ifdef FLOAT_STORE_FLAG_VALUE
3244 || (code == GE
3245 && GET_MODE_CLASS (inner_mode) == MODE_FLOAT
3246 && (REAL_VALUE_NEGATIVE
3247 (FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)))))
3248 #endif
3250 && GET_RTX_CLASS (GET_CODE (p->exp)) == '<')
3252 reverse_code = 1;
3253 x = p->exp;
3254 break;
3257 /* If this is fp + constant, the equivalent is a better operand since
3258 it may let us predict the value of the comparison. */
3259 else if (NONZERO_BASE_PLUS_P (p->exp))
3261 arg1 = p->exp;
3262 continue;
3266 /* If we didn't find a useful equivalence for ARG1, we are done.
3267 Otherwise, set up for the next iteration. */
3268 if (x == 0)
3269 break;
3271 /* If we need to reverse the comparison, make sure that that is
3272 possible -- we can't necessarily infer the value of GE from LT
3273 with floating-point operands. */
3274 if (reverse_code)
3276 enum rtx_code reversed = reversed_comparison_code (x, NULL_RTX);
3277 if (reversed == UNKNOWN)
3278 break;
3279 else
3280 code = reversed;
3282 else if (GET_RTX_CLASS (GET_CODE (x)) == '<')
3283 code = GET_CODE (x);
3284 arg1 = XEXP (x, 0), arg2 = XEXP (x, 1);
3287 /* Return our results. Return the modes from before fold_rtx
3288 because fold_rtx might produce const_int, and then it's too late. */
3289 *pmode1 = GET_MODE (arg1), *pmode2 = GET_MODE (arg2);
3290 *parg1 = fold_rtx (arg1, 0), *parg2 = fold_rtx (arg2, 0);
3292 return code;
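/* A typical walk through the loop above, with hypothetical registers: called
   with CODE == EQ, *PARG1 == (compare (reg 100) (reg 101)) and
   *PARG2 == (const_int 0), the first iteration strips the COMPARE, so ARG1
   becomes (reg 100) and ARG2 becomes (reg 101); the loop then stops because
   ARG2 is no longer zero, and EQ is returned along with the two registers
   and their original modes.  */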
3295 /* If X is a nontrivial arithmetic operation on an argument
3296 for which a constant value can be determined, return
3297 the result of operating on that value, as a constant.
3298 Otherwise, return X, possibly with one or more operands
3299 modified by recursive calls to this function.
3301 If X is a register whose contents are known, we do NOT
3302 return those contents here. equiv_constant is called to
3303 perform that task.
3305 INSN is the insn that we may be modifying. If it is 0, make a copy
3306 of X before modifying it. */
3308 static rtx
3309 fold_rtx (x, insn)
3310 rtx x;
3311 rtx insn;
3313 enum rtx_code code;
3314 enum machine_mode mode;
3315 const char *fmt;
3316 int i;
3317 rtx new = 0;
3318 int copied = 0;
3319 int must_swap = 0;
3321 /* Folded equivalents of first two operands of X. */
3322 rtx folded_arg0;
3323 rtx folded_arg1;
3325 /* Constant equivalents of first three operands of X;
3326 0 when no such equivalent is known. */
3327 rtx const_arg0;
3328 rtx const_arg1;
3329 rtx const_arg2;
3331 /* The mode of the first operand of X. We need this for sign and zero
3332 extends. */
3333 enum machine_mode mode_arg0;
3335 if (x == 0)
3336 return x;
3338 mode = GET_MODE (x);
3339 code = GET_CODE (x);
3340 switch (code)
3342 case CONST:
3343 case CONST_INT:
3344 case CONST_DOUBLE:
3345 case CONST_VECTOR:
3346 case SYMBOL_REF:
3347 case LABEL_REF:
3348 case REG:
3349 /* No use simplifying an EXPR_LIST
3350 since they are used only for lists of args
3351 in a function call's REG_EQUAL note. */
3352 case EXPR_LIST:
3353 /* Changing anything inside an ADDRESSOF is incorrect; we don't
3354 want to (e.g.,) make (addressof (const_int 0)) just because
3355 the location is known to be zero. */
3356 case ADDRESSOF:
3357 return x;
3359 #ifdef HAVE_cc0
3360 case CC0:
3361 return prev_insn_cc0;
3362 #endif
3364 case PC:
3365 /* If the next insn is a CODE_LABEL followed by a jump table,
3366 PC's value is a LABEL_REF pointing to that label. That
3367 lets us fold switch statements on the VAX. */
3368 if (insn && GET_CODE (insn) == JUMP_INSN)
3370 rtx next = next_nonnote_insn (insn);
3372 if (next && GET_CODE (next) == CODE_LABEL
3373 && NEXT_INSN (next) != 0
3374 && GET_CODE (NEXT_INSN (next)) == JUMP_INSN
3375 && (GET_CODE (PATTERN (NEXT_INSN (next))) == ADDR_VEC
3376 || GET_CODE (PATTERN (NEXT_INSN (next))) == ADDR_DIFF_VEC))
3377 return gen_rtx_LABEL_REF (Pmode, next);
3379 break;
3381 case SUBREG:
3382 /* See if we previously assigned a constant value to this SUBREG. */
3383 if ((new = lookup_as_function (x, CONST_INT)) != 0
3384 || (new = lookup_as_function (x, CONST_DOUBLE)) != 0)
3385 return new;
3387 /* If this is a paradoxical SUBREG, we have no idea what value the
3388 extra bits would have. However, if the operand is equivalent
3389 to a SUBREG whose operand is the same as our mode, and all the
3390 modes are within a word, we can just use the inner operand
3391 because these SUBREGs just say how to treat the register.
3393 Similarly if we find an integer constant. */
3395 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3397 enum machine_mode imode = GET_MODE (SUBREG_REG (x));
3398 struct table_elt *elt;
3400 if (GET_MODE_SIZE (mode) <= UNITS_PER_WORD
3401 && GET_MODE_SIZE (imode) <= UNITS_PER_WORD
3402 && (elt = lookup (SUBREG_REG (x), HASH (SUBREG_REG (x), imode),
3403 imode)) != 0)
3404 for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
3406 if (CONSTANT_P (elt->exp)
3407 && GET_MODE (elt->exp) == VOIDmode)
3408 return elt->exp;
3410 if (GET_CODE (elt->exp) == SUBREG
3411 && GET_MODE (SUBREG_REG (elt->exp)) == mode
3412 && exp_equiv_p (elt->exp, elt->exp, 1, 0))
3413 return copy_rtx (SUBREG_REG (elt->exp));
3416 return x;
3419 /* Fold SUBREG_REG. If it changed, see if we can simplify the SUBREG.
3420 We might be able to if the SUBREG is extracting a single word in an
3421 integral mode or extracting the low part. */
3423 folded_arg0 = fold_rtx (SUBREG_REG (x), insn);
3424 const_arg0 = equiv_constant (folded_arg0);
3425 if (const_arg0)
3426 folded_arg0 = const_arg0;
3428 if (folded_arg0 != SUBREG_REG (x))
3430 new = simplify_subreg (mode, folded_arg0,
3431 GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
3432 if (new)
3433 return new;
3436 /* If this is a narrowing SUBREG and our operand is a REG, see if
3437 we can find an equivalence for REG that is an arithmetic operation
3438 in a wider mode where both operands are paradoxical SUBREGs
3439 from objects of our result mode. In that case, we couldn't report
3440 an equivalent value for that operation, since we don't know what the
3441 extra bits will be. But we can find an equivalence for this SUBREG
3442 by folding that operation in the narrow mode. This allows us to
3443 fold arithmetic in narrow modes when the machine only supports
3444 word-sized arithmetic.
3446 Also look for a case where we have a SUBREG whose operand is the
3447 same as our result. If both modes are smaller than a word, we
3448 are simply interpreting a register in different modes and we
3449 can use the inner value. */
3451 if (GET_CODE (folded_arg0) == REG
3452 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (folded_arg0))
3453 && subreg_lowpart_p (x))
3455 struct table_elt *elt;
3457 /* We can use HASH here since we know that canon_hash won't be
3458 called. */
3459 elt = lookup (folded_arg0,
3460 HASH (folded_arg0, GET_MODE (folded_arg0)),
3461 GET_MODE (folded_arg0));
3463 if (elt)
3464 elt = elt->first_same_value;
3466 for (; elt; elt = elt->next_same_value)
3468 enum rtx_code eltcode = GET_CODE (elt->exp);
3470 /* Just check for unary and binary operations. */
3471 if (GET_RTX_CLASS (GET_CODE (elt->exp)) == '1'
3472 && GET_CODE (elt->exp) != SIGN_EXTEND
3473 && GET_CODE (elt->exp) != ZERO_EXTEND
3474 && GET_CODE (XEXP (elt->exp, 0)) == SUBREG
3475 && GET_MODE (SUBREG_REG (XEXP (elt->exp, 0))) == mode
3476 && (GET_MODE_CLASS (mode)
3477 == GET_MODE_CLASS (GET_MODE (XEXP (elt->exp, 0)))))
3479 rtx op0 = SUBREG_REG (XEXP (elt->exp, 0));
3481 if (GET_CODE (op0) != REG && ! CONSTANT_P (op0))
3482 op0 = fold_rtx (op0, NULL_RTX);
3484 op0 = equiv_constant (op0);
3485 if (op0)
3486 new = simplify_unary_operation (GET_CODE (elt->exp), mode,
3487 op0, mode);
3489 else if ((GET_RTX_CLASS (GET_CODE (elt->exp)) == '2'
3490 || GET_RTX_CLASS (GET_CODE (elt->exp)) == 'c')
3491 && eltcode != DIV && eltcode != MOD
3492 && eltcode != UDIV && eltcode != UMOD
3493 && eltcode != ASHIFTRT && eltcode != LSHIFTRT
3494 && eltcode != ROTATE && eltcode != ROTATERT
3495 && ((GET_CODE (XEXP (elt->exp, 0)) == SUBREG
3496 && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 0)))
3497 == mode))
3498 || CONSTANT_P (XEXP (elt->exp, 0)))
3499 && ((GET_CODE (XEXP (elt->exp, 1)) == SUBREG
3500 && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 1)))
3501 == mode))
3502 || CONSTANT_P (XEXP (elt->exp, 1))))
3504 rtx op0 = gen_lowpart_common (mode, XEXP (elt->exp, 0));
3505 rtx op1 = gen_lowpart_common (mode, XEXP (elt->exp, 1));
3507 if (op0 && GET_CODE (op0) != REG && ! CONSTANT_P (op0))
3508 op0 = fold_rtx (op0, NULL_RTX);
3510 if (op0)
3511 op0 = equiv_constant (op0);
3513 if (op1 && GET_CODE (op1) != REG && ! CONSTANT_P (op1))
3514 op1 = fold_rtx (op1, NULL_RTX);
3516 if (op1)
3517 op1 = equiv_constant (op1);
3519 /* If we are looking for the low SImode part of
3520 (ashift:DI c (const_int 32)), it doesn't work
3521 to compute that in SImode, because a 32-bit shift
3522 in SImode is unpredictable. We know the value is 0. */
3523 if (op0 && op1
3524 && GET_CODE (elt->exp) == ASHIFT
3525 && GET_CODE (op1) == CONST_INT
3526 && INTVAL (op1) >= GET_MODE_BITSIZE (mode))
3528 if (INTVAL (op1) < GET_MODE_BITSIZE (GET_MODE (elt->exp)))
3530 /* If the count fits in the inner mode's width,
3531 but exceeds the outer mode's width,
3532 the value will get truncated to 0
3533 by the subreg. */
3534 new = const0_rtx;
3535 else
3536 /* If the count exceeds even the inner mode's width,
3537 don't fold this expression. */
3538 new = 0;
3540 else if (op0 && op1)
3541 new = simplify_binary_operation (GET_CODE (elt->exp), mode,
3542 op0, op1);
3545 else if (GET_CODE (elt->exp) == SUBREG
3546 && GET_MODE (SUBREG_REG (elt->exp)) == mode
3547 && (GET_MODE_SIZE (GET_MODE (folded_arg0))
3548 <= UNITS_PER_WORD)
3549 && exp_equiv_p (elt->exp, elt->exp, 1, 0))
3550 new = copy_rtx (SUBREG_REG (elt->exp));
3552 if (new)
3553 return new;
3557 return x;
3559 case NOT:
3560 case NEG:
3561 /* If we have (NOT Y), see if Y is known to be (NOT Z).
3562 If so, (NOT Y) simplifies to Z. Similarly for NEG. */
3563 new = lookup_as_function (XEXP (x, 0), code);
3564 if (new)
3565 return fold_rtx (copy_rtx (XEXP (new, 0)), insn);
3566 break;
3568 case MEM:
3569 /* If we are not actually processing an insn, don't try to find the
3570 best address. Not only don't we care, but we could modify the
3571 MEM in an invalid way since we have no insn to validate against. */
3572 if (insn != 0)
3573 find_best_addr (insn, &XEXP (x, 0), GET_MODE (x));
3576 /* Even if we don't fold in the insn itself,
3577 we can safely do so here, in hopes of getting a constant. */
3578 rtx addr = fold_rtx (XEXP (x, 0), NULL_RTX);
3579 rtx base = 0;
3580 HOST_WIDE_INT offset = 0;
3582 if (GET_CODE (addr) == REG
3583 && REGNO_QTY_VALID_P (REGNO (addr)))
3585 int addr_q = REG_QTY (REGNO (addr));
3586 struct qty_table_elem *addr_ent = &qty_table[addr_q];
3588 if (GET_MODE (addr) == addr_ent->mode
3589 && addr_ent->const_rtx != NULL_RTX)
3590 addr = addr_ent->const_rtx;
3593 /* If address is constant, split it into a base and integer offset. */
3594 if (GET_CODE (addr) == SYMBOL_REF || GET_CODE (addr) == LABEL_REF)
3595 base = addr;
3596 else if (GET_CODE (addr) == CONST && GET_CODE (XEXP (addr, 0)) == PLUS
3597 && GET_CODE (XEXP (XEXP (addr, 0), 1)) == CONST_INT)
3599 base = XEXP (XEXP (addr, 0), 0);
3600 offset = INTVAL (XEXP (XEXP (addr, 0), 1));
3602 else if (GET_CODE (addr) == LO_SUM
3603 && GET_CODE (XEXP (addr, 1)) == SYMBOL_REF)
3604 base = XEXP (addr, 1);
3605 else if (GET_CODE (addr) == ADDRESSOF)
3606 return change_address (x, VOIDmode, addr);
3608 /* If this is a constant pool reference, we can fold it into its
3609 constant to allow better value tracking. */
3610 if (base && GET_CODE (base) == SYMBOL_REF
3611 && CONSTANT_POOL_ADDRESS_P (base))
3613 rtx constant = get_pool_constant (base);
3614 enum machine_mode const_mode = get_pool_mode (base);
3615 rtx new;
3617 if (CONSTANT_P (constant) && GET_CODE (constant) != CONST_INT)
3618 constant_pool_entries_cost = COST (constant);
3620 /* If we are loading the full constant, we have an equivalence. */
3621 if (offset == 0 && mode == const_mode)
3622 return constant;
3624 /* If this actually isn't a constant (weird!), we can't do
3625 anything. Otherwise, handle the two most common cases:
3626 extracting a word from a multi-word constant, and extracting
3627 the low-order bits. Other cases don't seem common enough to
3628 worry about. */
3629 if (! CONSTANT_P (constant))
3630 return x;
3632 if (GET_MODE_CLASS (mode) == MODE_INT
3633 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
3634 && offset % UNITS_PER_WORD == 0
3635 && (new = operand_subword (constant,
3636 offset / UNITS_PER_WORD,
3637 0, const_mode)) != 0)
3638 return new;
3640 if (((BYTES_BIG_ENDIAN
3641 && offset == GET_MODE_SIZE (GET_MODE (constant)) - 1)
3642 || (! BYTES_BIG_ENDIAN && offset == 0))
3643 && (new = gen_lowpart_if_possible (mode, constant)) != 0)
3644 return new;
3647 /* If this is a reference to a label at a known position in a jump
3648 table, we also know its value. */
3649 if (base && GET_CODE (base) == LABEL_REF)
3651 rtx label = XEXP (base, 0);
3652 rtx table_insn = NEXT_INSN (label);
3654 if (table_insn && GET_CODE (table_insn) == JUMP_INSN
3655 && GET_CODE (PATTERN (table_insn)) == ADDR_VEC)
3657 rtx table = PATTERN (table_insn);
3659 if (offset >= 0
3660 && (offset / GET_MODE_SIZE (GET_MODE (table))
3661 < XVECLEN (table, 0)))
3662 return XVECEXP (table, 0,
3663 offset / GET_MODE_SIZE (GET_MODE (table)));
3665 if (table_insn && GET_CODE (table_insn) == JUMP_INSN
3666 && GET_CODE (PATTERN (table_insn)) == ADDR_DIFF_VEC)
3668 rtx table = PATTERN (table_insn);
3670 if (offset >= 0
3671 && (offset / GET_MODE_SIZE (GET_MODE (table))
3672 < XVECLEN (table, 1)))
3674 offset /= GET_MODE_SIZE (GET_MODE (table));
3675 new = gen_rtx_MINUS (Pmode, XVECEXP (table, 1, offset),
3676 XEXP (table, 0));
3678 if (GET_MODE (table) != Pmode)
3679 new = gen_rtx_TRUNCATE (GET_MODE (table), new);
3681 /* Indicate this is a constant. This isn't a
3682 valid form of CONST, but it will only be used
3683 to fold the next insns and then discarded, so
3684 it should be safe.
3686 Note this expression must be explicitly discarded,
3687 by cse_insn, else it may end up in a REG_EQUAL note
3688 and "escape" to cause problems elsewhere. */
3689 return gen_rtx_CONST (GET_MODE (new), new);
3694 return x;
3697 #ifdef NO_FUNCTION_CSE
3698 case CALL:
3699 if (CONSTANT_P (XEXP (XEXP (x, 0), 0)))
3700 return x;
3701 break;
3702 #endif
3704 case ASM_OPERANDS:
3705 for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
3706 validate_change (insn, &ASM_OPERANDS_INPUT (x, i),
3707 fold_rtx (ASM_OPERANDS_INPUT (x, i), insn), 0);
3708 break;
3710 default:
3711 break;
3714 const_arg0 = 0;
3715 const_arg1 = 0;
3716 const_arg2 = 0;
3717 mode_arg0 = VOIDmode;
3719 /* Try folding our operands.
3720 Then see which ones have constant values known. */
3722 fmt = GET_RTX_FORMAT (code);
3723 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3724 if (fmt[i] == 'e')
3726 rtx arg = XEXP (x, i);
3727 rtx folded_arg = arg, const_arg = 0;
3728 enum machine_mode mode_arg = GET_MODE (arg);
3729 rtx cheap_arg, expensive_arg;
3730 rtx replacements[2];
3731 int j;
3733 /* Most arguments are cheap, so handle them specially. */
3734 switch (GET_CODE (arg))
3736 case REG:
3737 /* This is the same as calling equiv_constant; it is duplicated
3738 here for speed. */
3739 if (REGNO_QTY_VALID_P (REGNO (arg)))
3741 int arg_q = REG_QTY (REGNO (arg));
3742 struct qty_table_elem *arg_ent = &qty_table[arg_q];
3744 if (arg_ent->const_rtx != NULL_RTX
3745 && GET_CODE (arg_ent->const_rtx) != REG
3746 && GET_CODE (arg_ent->const_rtx) != PLUS)
3747 const_arg
3748 = gen_lowpart_if_possible (GET_MODE (arg),
3749 arg_ent->const_rtx);
3751 break;
3753 case CONST:
3754 case CONST_INT:
3755 case SYMBOL_REF:
3756 case LABEL_REF:
3757 case CONST_DOUBLE:
3758 case CONST_VECTOR:
3759 const_arg = arg;
3760 break;
3762 #ifdef HAVE_cc0
3763 case CC0:
3764 folded_arg = prev_insn_cc0;
3765 mode_arg = prev_insn_cc0_mode;
3766 const_arg = equiv_constant (folded_arg);
3767 break;
3768 #endif
3770 default:
3771 folded_arg = fold_rtx (arg, insn);
3772 const_arg = equiv_constant (folded_arg);
3775 /* For the first three operands, see if the operand
3776 is constant or equivalent to a constant. */
3777 switch (i)
3779 case 0:
3780 folded_arg0 = folded_arg;
3781 const_arg0 = const_arg;
3782 mode_arg0 = mode_arg;
3783 break;
3784 case 1:
3785 folded_arg1 = folded_arg;
3786 const_arg1 = const_arg;
3787 break;
3788 case 2:
3789 const_arg2 = const_arg;
3790 break;
3793 /* Pick the least expensive of the folded argument and an
3794 equivalent constant argument. */
3795 if (const_arg == 0 || const_arg == folded_arg
3796 || COST_IN (const_arg, code) > COST_IN (folded_arg, code))
3797 cheap_arg = folded_arg, expensive_arg = const_arg;
3798 else
3799 cheap_arg = const_arg, expensive_arg = folded_arg;
3801 /* Try to replace the operand with the cheapest of the two
3802 possibilities. If it doesn't work and this is either of the first
3803 two operands of a commutative operation, try swapping them.
3804 If THAT fails, try the more expensive one, provided it is cheaper
3805 than what is already there. */
3807 if (cheap_arg == XEXP (x, i))
3808 continue;
3810 if (insn == 0 && ! copied)
3812 x = copy_rtx (x);
3813 copied = 1;
3816 /* Order the replacements from cheapest to most expensive. */
3817 replacements[0] = cheap_arg;
3818 replacements[1] = expensive_arg;
3820 for (j = 0; j < 2 && replacements[j]; j++)
3822 int old_cost = COST_IN (XEXP (x, i), code);
3823 int new_cost = COST_IN (replacements[j], code);
3825 /* Stop if what existed before was cheaper. Prefer constants
3826 in the case of a tie. */
3827 if (new_cost > old_cost
3828 || (new_cost == old_cost && CONSTANT_P (XEXP (x, i))))
3829 break;
3831 if (validate_change (insn, &XEXP (x, i), replacements[j], 0))
3832 break;
3834 if (code == NE || code == EQ || GET_RTX_CLASS (code) == 'c'
3835 || code == LTGT || code == UNEQ || code == ORDERED
3836 || code == UNORDERED)
3838 validate_change (insn, &XEXP (x, i), XEXP (x, 1 - i), 1);
3839 validate_change (insn, &XEXP (x, 1 - i), replacements[j], 1);
3841 if (apply_change_group ())
3843 /* Swap them back to be invalid so that this loop can
3844 continue and flag them to be swapped back later. */
3845 rtx tem;
3847 tem = XEXP (x, 0); XEXP (x, 0) = XEXP (x, 1);
3848 XEXP (x, 1) = tem;
3849 must_swap = 1;
3850 break;
3856 else
3858 if (fmt[i] == 'E')
3859 /* Don't try to fold inside of a vector of expressions.
3860 Doing nothing is harmless. */
3864 /* If a commutative operation, place a constant integer as the second
3865 operand unless the first operand is also a constant integer. Otherwise,
3866 place any constant second unless the first operand is also a constant. */
3868 if (code == EQ || code == NE || GET_RTX_CLASS (code) == 'c'
3869 || code == LTGT || code == UNEQ || code == ORDERED
3870 || code == UNORDERED)
3872 if (must_swap || (const_arg0
3873 && (const_arg1 == 0
3874 || (GET_CODE (const_arg0) == CONST_INT
3875 && GET_CODE (const_arg1) != CONST_INT))))
3877 rtx tem = XEXP (x, 0);
3879 if (insn == 0 && ! copied)
3881 x = copy_rtx (x);
3882 copied = 1;
3885 validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
3886 validate_change (insn, &XEXP (x, 1), tem, 1);
3887 if (apply_change_group ())
3889 tem = const_arg0, const_arg0 = const_arg1, const_arg1 = tem;
3890 tem = folded_arg0, folded_arg0 = folded_arg1, folded_arg1 = tem;
3895 /* If X is an arithmetic operation, see if we can simplify it. */
3897 switch (GET_RTX_CLASS (code))
3899 case '1':
3901 int is_const = 0;
3903 /* We can't simplify extension ops unless we know the
3904 original mode. */
3905 if ((code == ZERO_EXTEND || code == SIGN_EXTEND)
3906 && mode_arg0 == VOIDmode)
3907 break;
3909 /* If we had a CONST, strip it off and put it back later if we
3910 fold. */
3911 if (const_arg0 != 0 && GET_CODE (const_arg0) == CONST)
3912 is_const = 1, const_arg0 = XEXP (const_arg0, 0);
3914 new = simplify_unary_operation (code, mode,
3915 const_arg0 ? const_arg0 : folded_arg0,
3916 mode_arg0);
3917 if (new != 0 && is_const)
3918 new = gen_rtx_CONST (mode, new);
3920 break;
3922 case '<':
3923 /* See what items are actually being compared and set FOLDED_ARG[01]
3924 to those values and CODE to the actual comparison code. If any are
3925 constant, set CONST_ARG0 and CONST_ARG1 appropriately. We needn't
3926 do anything if both operands are already known to be constant. */
3928 if (const_arg0 == 0 || const_arg1 == 0)
3930 struct table_elt *p0, *p1;
3931 rtx true_rtx = const_true_rtx, false_rtx = const0_rtx;
3932 enum machine_mode mode_arg1;
3934 #ifdef FLOAT_STORE_FLAG_VALUE
3935 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
3937 true_rtx = (CONST_DOUBLE_FROM_REAL_VALUE
3938 (FLOAT_STORE_FLAG_VALUE (mode), mode));
3939 false_rtx = CONST0_RTX (mode);
3941 #endif
3943 code = find_comparison_args (code, &folded_arg0, &folded_arg1,
3944 &mode_arg0, &mode_arg1);
3945 const_arg0 = equiv_constant (folded_arg0);
3946 const_arg1 = equiv_constant (folded_arg1);
3948 /* If the mode is VOIDmode or a MODE_CC mode, we don't know
3949 what kinds of things are being compared, so we can't do
3950 anything with this comparison. */
3952 if (mode_arg0 == VOIDmode || GET_MODE_CLASS (mode_arg0) == MODE_CC)
3953 break;
3955 /* If we do not now have two constants being compared, see
3956 if we can nevertheless deduce some things about the
3957 comparison. */
3958 if (const_arg0 == 0 || const_arg1 == 0)
3960 /* Is FOLDED_ARG0 frame-pointer plus a constant? Or
3961 non-explicit constant? These aren't zero, but we
3962 don't know their sign. */
3963 if (const_arg1 == const0_rtx
3964 && (NONZERO_BASE_PLUS_P (folded_arg0)
3965 #if 0 /* Sad to say, on sysvr4, #pragma weak can make a symbol address
3966 come out as 0. */
3967 || GET_CODE (folded_arg0) == SYMBOL_REF
3968 #endif
3969 || GET_CODE (folded_arg0) == LABEL_REF
3970 || GET_CODE (folded_arg0) == CONST))
3972 if (code == EQ)
3973 return false_rtx;
3974 else if (code == NE)
3975 return true_rtx;
3978 /* See if the two operands are the same. */
3980 if (folded_arg0 == folded_arg1
3981 || (GET_CODE (folded_arg0) == REG
3982 && GET_CODE (folded_arg1) == REG
3983 && (REG_QTY (REGNO (folded_arg0))
3984 == REG_QTY (REGNO (folded_arg1))))
3985 || ((p0 = lookup (folded_arg0,
3986 (safe_hash (folded_arg0, mode_arg0)
3987 & HASH_MASK), mode_arg0))
3988 && (p1 = lookup (folded_arg1,
3989 (safe_hash (folded_arg1, mode_arg0)
3990 & HASH_MASK), mode_arg0))
3991 && p0->first_same_value == p1->first_same_value))
3993 /* Sadly two equal NaNs are not equivalent. */
3994 if (!HONOR_NANS (mode_arg0))
3995 return ((code == EQ || code == LE || code == GE
3996 || code == LEU || code == GEU || code == UNEQ
3997 || code == UNLE || code == UNGE
3998 || code == ORDERED)
3999 ? true_rtx : false_rtx);
4000 /* Take care for the FP compares we can resolve. */
4001 if (code == UNEQ || code == UNLE || code == UNGE)
4002 return true_rtx;
4003 if (code == LTGT || code == LT || code == GT)
4004 return false_rtx;
4007 /* If FOLDED_ARG0 is a register, see if the comparison we are
4008 doing now is either the same as we did before or the reverse
4009 (we only check the reverse if not floating-point). */
4010 else if (GET_CODE (folded_arg0) == REG)
4012 int qty = REG_QTY (REGNO (folded_arg0));
4014 if (REGNO_QTY_VALID_P (REGNO (folded_arg0)))
4016 struct qty_table_elem *ent = &qty_table[qty];
4018 if ((comparison_dominates_p (ent->comparison_code, code)
4019 || (! FLOAT_MODE_P (mode_arg0)
4020 && comparison_dominates_p (ent->comparison_code,
4021 reverse_condition (code))))
4022 && (rtx_equal_p (ent->comparison_const, folded_arg1)
4023 || (const_arg1
4024 && rtx_equal_p (ent->comparison_const,
4025 const_arg1))
4026 || (GET_CODE (folded_arg1) == REG
4027 && (REG_QTY (REGNO (folded_arg1)) == ent->comparison_qty))))
4028 return (comparison_dominates_p (ent->comparison_code, code)
4029 ? true_rtx : false_rtx);
4035 /* If we are comparing against zero, see if the first operand is
4036 equivalent to an IOR with a constant. If so, we may be able to
4037 determine the result of this comparison. */
4039 if (const_arg1 == const0_rtx)
4041 rtx y = lookup_as_function (folded_arg0, IOR);
4042 rtx inner_const;
4044 if (y != 0
4045 && (inner_const = equiv_constant (XEXP (y, 1))) != 0
4046 && GET_CODE (inner_const) == CONST_INT
4047 && INTVAL (inner_const) != 0)
4049 int sign_bitnum = GET_MODE_BITSIZE (mode_arg0) - 1;
4050 int has_sign = (HOST_BITS_PER_WIDE_INT >= sign_bitnum
4051 && (INTVAL (inner_const)
4052 & ((HOST_WIDE_INT) 1 << sign_bitnum)));
4053 rtx true_rtx = const_true_rtx, false_rtx = const0_rtx;
4055 #ifdef FLOAT_STORE_FLAG_VALUE
4056 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
4058 true_rtx = (CONST_DOUBLE_FROM_REAL_VALUE
4059 (FLOAT_STORE_FLAG_VALUE (mode), mode));
4060 false_rtx = CONST0_RTX (mode);
4062 #endif
4064 switch (code)
4066 case EQ:
4067 return false_rtx;
4068 case NE:
4069 return true_rtx;
4070 case LT: case LE:
4071 if (has_sign)
4072 return true_rtx;
4073 break;
4074 case GT: case GE:
4075 if (has_sign)
4076 return false_rtx;
4077 break;
4078 default:
4079 break;
4084 new = simplify_relational_operation (code,
4085 (mode_arg0 != VOIDmode
4086 ? mode_arg0
4087 : (GET_MODE (const_arg0
4088 ? const_arg0
4089 : folded_arg0)
4090 != VOIDmode)
4091 ? GET_MODE (const_arg0
4092 ? const_arg0
4093 : folded_arg0)
4094 : GET_MODE (const_arg1
4095 ? const_arg1
4096 : folded_arg1)),
4097 const_arg0 ? const_arg0 : folded_arg0,
4098 const_arg1 ? const_arg1 : folded_arg1);
4099 #ifdef FLOAT_STORE_FLAG_VALUE
4100 if (new != 0 && GET_MODE_CLASS (mode) == MODE_FLOAT)
4102 if (new == const0_rtx)
4103 new = CONST0_RTX (mode);
4104 else
4105 new = (CONST_DOUBLE_FROM_REAL_VALUE
4106 (FLOAT_STORE_FLAG_VALUE (mode), mode));
4108 #endif
4109 break;
4111 case '2':
4112 case 'c':
4113 switch (code)
4115 case PLUS:
4116 /* If the second operand is a LABEL_REF, see if the first is a MINUS
4117 with that LABEL_REF as its second operand. If so, the result is
4118 the first operand of that MINUS. This handles switches with an
4119 ADDR_DIFF_VEC table. */
4120 if (const_arg1 && GET_CODE (const_arg1) == LABEL_REF)
4122 rtx y
4123 = GET_CODE (folded_arg0) == MINUS ? folded_arg0
4124 : lookup_as_function (folded_arg0, MINUS);
4126 if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
4127 && XEXP (XEXP (y, 1), 0) == XEXP (const_arg1, 0))
4128 return XEXP (y, 0);
4130 /* Now try for a CONST of a MINUS like the above. */
4131 if ((y = (GET_CODE (folded_arg0) == CONST ? folded_arg0
4132 : lookup_as_function (folded_arg0, CONST))) != 0
4133 && GET_CODE (XEXP (y, 0)) == MINUS
4134 && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
4135 && XEXP (XEXP (XEXP (y, 0), 1), 0) == XEXP (const_arg1, 0))
4136 return XEXP (XEXP (y, 0), 0);
4139 /* Likewise if the operands are in the other order. */
4140 if (const_arg0 && GET_CODE (const_arg0) == LABEL_REF)
4142 rtx y
4143 = GET_CODE (folded_arg1) == MINUS ? folded_arg1
4144 : lookup_as_function (folded_arg1, MINUS);
4146 if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
4147 && XEXP (XEXP (y, 1), 0) == XEXP (const_arg0, 0))
4148 return XEXP (y, 0);
4150 /* Now try for a CONST of a MINUS like the above. */
4151 if ((y = (GET_CODE (folded_arg1) == CONST ? folded_arg1
4152 : lookup_as_function (folded_arg1, CONST))) != 0
4153 && GET_CODE (XEXP (y, 0)) == MINUS
4154 && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
4155 && XEXP (XEXP (XEXP (y, 0), 1), 0) == XEXP (const_arg0, 0))
4156 return XEXP (XEXP (y, 0), 0);
4159 /* If second operand is a register equivalent to a negative
4160 CONST_INT, see if we can find a register equivalent to the
4161 positive constant. Make a MINUS if so. Don't do this for
4162 a non-negative constant since we might then alternate between
4163 choosing positive and negative constants. Having the positive
4164 constant previously-used is the more common case. Be sure
4165 the resulting constant is non-negative; if const_arg1 were
4166 the smallest negative number this would overflow: depending
4167 on the mode, this would either just be the same value (and
4168 hence not save anything) or be incorrect. */
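 /* Illustrative example, not in the original source: with const_arg1 ==
    (const_int -4), if some register -- say a hypothetical (reg 70) --
    is already recorded as holding (const_int 4), the lookup below finds
    it and the PLUS is rewritten as (minus folded_arg0 (reg 70)),
    reusing the existing positive constant instead of materializing
    -4.  */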
4169 if (const_arg1 != 0 && GET_CODE (const_arg1) == CONST_INT
4170 && INTVAL (const_arg1) < 0
4171 /* This used to test
4173 -INTVAL (const_arg1) >= 0
4175 But the Sun V5.0 compilers mis-compiled that test. So
4176 instead we test for the problematic value in a more direct
4177 manner and hope the Sun compilers get it correct. */
4178 && INTVAL (const_arg1) !=
4179 ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1))
4180 && GET_CODE (folded_arg1) == REG)
4182 rtx new_const = GEN_INT (-INTVAL (const_arg1));
4183 struct table_elt *p
4184 = lookup (new_const, safe_hash (new_const, mode) & HASH_MASK,
4185 mode);
4187 if (p)
4188 for (p = p->first_same_value; p; p = p->next_same_value)
4189 if (GET_CODE (p->exp) == REG)
4190 return simplify_gen_binary (MINUS, mode, folded_arg0,
4191 canon_reg (p->exp, NULL_RTX));
4193 goto from_plus;
4195 case MINUS:
4196 /* If we have (MINUS Y C), see if Y is known to be (PLUS Z C2).
4197 If so, produce (PLUS Z C2-C). */
4198 if (const_arg1 != 0 && GET_CODE (const_arg1) == CONST_INT)
4200 rtx y = lookup_as_function (XEXP (x, 0), PLUS);
4201 if (y && GET_CODE (XEXP (y, 1)) == CONST_INT)
4202 return fold_rtx (plus_constant (copy_rtx (y),
4203 -INTVAL (const_arg1)),
4204 NULL_RTX);
4207 /* Fall through. */
4209 from_plus:
4210 case SMIN: case SMAX: case UMIN: case UMAX:
4211 case IOR: case AND: case XOR:
4212 case MULT: case DIV: case UDIV:
4213 case ASHIFT: case LSHIFTRT: case ASHIFTRT:
4214 /* If we have (<op> <reg> <const_int>) for an associative OP and REG
4215 is known to be of similar form, we may be able to replace the
4216 operation with a combined operation. This may eliminate the
4217 intermediate operation if every use is simplified in this way.
4218 Note that the similar optimization done by combine.c only works
4219 if the intermediate operation's result has only one reference. */
4221 if (GET_CODE (folded_arg0) == REG
4222 && const_arg1 && GET_CODE (const_arg1) == CONST_INT)
4224 int is_shift
4225 = (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT);
4226 rtx y = lookup_as_function (folded_arg0, code);
4227 rtx inner_const;
4228 enum rtx_code associate_code;
4229 rtx new_const;
4231 if (y == 0
4232 || 0 == (inner_const
4233 = equiv_constant (fold_rtx (XEXP (y, 1), 0)))
4234 || GET_CODE (inner_const) != CONST_INT
4235 /* If we have compiled a statement like
4236 "if (x == (x & mask1))", and now are looking at
4237 "x & mask2", we will have a case where the first operand
4238 of Y is the same as our first operand. Unless we detect
4239 this case, an infinite loop will result. */
4240 || XEXP (y, 0) == folded_arg0)
4241 break;
4243 /* Don't associate these operations if they are a PLUS with the
4244 same constant and it is a power of two. These might be doable
4245 with a pre- or post-increment. Similarly for two subtracts of
4246 identical powers of two with post decrement. */
4248 if (code == PLUS && INTVAL (const_arg1) == INTVAL (inner_const)
4249 && ((HAVE_PRE_INCREMENT
4250 && exact_log2 (INTVAL (const_arg1)) >= 0)
4251 || (HAVE_POST_INCREMENT
4252 && exact_log2 (INTVAL (const_arg1)) >= 0)
4253 || (HAVE_PRE_DECREMENT
4254 && exact_log2 (- INTVAL (const_arg1)) >= 0)
4255 || (HAVE_POST_DECREMENT
4256 && exact_log2 (- INTVAL (const_arg1)) >= 0)))
4257 break;
4259 /* Compute the code used to compose the constants. For example,
4260 A/C1/C2 is A/(C1 * C2), so if CODE == DIV, we want MULT. */
4262 associate_code
4263 = (code == MULT || code == DIV || code == UDIV ? MULT
4264 : is_shift || code == PLUS || code == MINUS ? PLUS : code);
4266 new_const = simplify_binary_operation (associate_code, mode,
4267 const_arg1, inner_const);
4269 if (new_const == 0)
4270 break;
4272 /* If we are associating shift operations, don't let this
4273 produce a shift of the size of the object or larger.
4274 This could occur when we follow a sign-extend by a right
4275 shift on a machine that does a sign-extend as a pair
4276 of shifts. */
4278 if (is_shift && GET_CODE (new_const) == CONST_INT
4279 && INTVAL (new_const) >= GET_MODE_BITSIZE (mode))
4281 /* As an exception, we can turn an ASHIFTRT of this
4282 form into a shift of the number of bits - 1. */
4283 if (code == ASHIFTRT)
4284 new_const = GEN_INT (GET_MODE_BITSIZE (mode) - 1);
4285 else
4286 break;
4289 y = copy_rtx (XEXP (y, 0));
4291 /* If Y contains our first operand (the most common way this
4292 can happen is if Y is a MEM), we would go into an infinite
4293 loop if we tried to fold it. So don't in that case. */
4295 if (! reg_mentioned_p (folded_arg0, y))
4296 y = fold_rtx (y, insn);
4298 return simplify_gen_binary (code, mode, y, new_const);
4300 break;
4302 default:
4303 break;
4306 new = simplify_binary_operation (code, mode,
4307 const_arg0 ? const_arg0 : folded_arg0,
4308 const_arg1 ? const_arg1 : folded_arg1);
4309 break;
4311 case 'o':
4312 /* (lo_sum (high X) X) is simply X. */
4313 if (code == LO_SUM && const_arg0 != 0
4314 && GET_CODE (const_arg0) == HIGH
4315 && rtx_equal_p (XEXP (const_arg0, 0), const_arg1))
4316 return const_arg1;
4317 break;
4319 case '3':
4320 case 'b':
4321 new = simplify_ternary_operation (code, mode, mode_arg0,
4322 const_arg0 ? const_arg0 : folded_arg0,
4323 const_arg1 ? const_arg1 : folded_arg1,
4324 const_arg2 ? const_arg2 : XEXP (x, 2));
4325 break;
4327 case 'x':
4328 /* Always eliminate CONSTANT_P_RTX at this stage. */
4329 if (code == CONSTANT_P_RTX)
4330 return (const_arg0 ? const1_rtx : const0_rtx);
4331 break;
4334 return new ? new : x;
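/* Purely illustrative, hypothetical sketch -- not part of the original
   source and not used anywhere in the compiler.  It shows, at the
   source level, the effect of the `from_plus' association above:
   chained operations whose second operands are constants compose into
   a single operation, and a combined shift count is accepted only
   while it remains smaller than the width of the mode.  */
static int
cse_fold_sketch (x)
     int x;
{
  int a = (x + 4) + 8;          /* composes to x + 12 */
  int b = (x << 2) << 3;        /* composes to x << 5 */
  return a + b;
}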
4337 /* Return a constant value currently equivalent to X.
4338 Return 0 if we don't know one. */
4340 static rtx
4341 equiv_constant (x)
4342 rtx x;
4344 if (GET_CODE (x) == REG
4345 && REGNO_QTY_VALID_P (REGNO (x)))
4347 int x_q = REG_QTY (REGNO (x));
4348 struct qty_table_elem *x_ent = &qty_table[x_q];
4350 if (x_ent->const_rtx)
4351 x = gen_lowpart_if_possible (GET_MODE (x), x_ent->const_rtx);
4354 if (x == 0 || CONSTANT_P (x))
4355 return x;
4357 /* If X is a MEM, try to fold it outside the context of any insn to see if
4358 it might be equivalent to a constant. That handles the case where it
4359 is a constant-pool reference. Then try to look it up in the hash table
4360 in case it is something whose value we have seen before. */
4362 if (GET_CODE (x) == MEM)
4364 struct table_elt *elt;
4366 x = fold_rtx (x, NULL_RTX);
4367 if (CONSTANT_P (x))
4368 return x;
4370 elt = lookup (x, safe_hash (x, GET_MODE (x)) & HASH_MASK, GET_MODE (x));
4371 if (elt == 0)
4372 return 0;
4374 for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
4375 if (elt->is_const && CONSTANT_P (elt->exp))
4376 return elt->exp;
4379 return 0;
4382 /* Assuming that X is an rtx (e.g., MEM, REG or SUBREG) for a fixed-point
4383 number, return an rtx (MEM, SUBREG, or CONST_INT) that refers to the
4384 least-significant part of X.
4385 MODE specifies how big a part of X to return.
4387 If the requested operation cannot be done, 0 is returned.
4389 This is similar to gen_lowpart in emit-rtl.c. */
4392 gen_lowpart_if_possible (mode, x)
4393 enum machine_mode mode;
4394 rtx x;
4396 rtx result = gen_lowpart_common (mode, x);
4398 if (result)
4399 return result;
4400 else if (GET_CODE (x) == MEM)
4402 /* This is the only other case we handle. */
4403 int offset = 0;
4404 rtx new;
4406 if (WORDS_BIG_ENDIAN)
4407 offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
4408 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
4409 if (BYTES_BIG_ENDIAN)
4410 /* Adjust the address so that the address-after-the-data is
4411 unchanged. */
4412 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
4413 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
4415 new = adjust_address_nv (x, mode, offset);
4416 if (! memory_address_p (mode, XEXP (new, 0)))
4417 return 0;
4419 return new;
4421 else
4422 return 0;
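/* Worked example, illustrative only: asking for the QImode low part of
   a SImode MEM on a hypothetical 32-bit target (UNITS_PER_WORD == 4)
   leaves offset at 0 when neither WORDS_BIG_ENDIAN nor BYTES_BIG_ENDIAN
   is set.  With BYTES_BIG_ENDIAN set, the adjustment above gives
   offset -= MIN (4, 1) - MIN (4, 4), i.e. offset == 3, so the new
   address points at the last byte of the word and the
   address-after-the-data is unchanged.  */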
4425 /* Given INSN, a jump insn, TAKEN indicates if we are following the "taken"
4426 branch. It will be zero if not.
4428 In certain cases, this can cause us to add an equivalence. For example,
4429 if we are following the taken case of
4430 if (i == 2)
4431 we can add the fact that `i' and '2' are now equivalent.
4433 In any case, we can record that this comparison was passed. If the same
4434 comparison is seen later, we will know its value. */
4436 static void
4437 record_jump_equiv (insn, taken)
4438 rtx insn;
4439 int taken;
4441 int cond_known_true;
4442 rtx op0, op1;
4443 rtx set;
4444 enum machine_mode mode, mode0, mode1;
4445 int reversed_nonequality = 0;
4446 enum rtx_code code;
4448 /* Ensure this is the right kind of insn. */
4449 if (! any_condjump_p (insn))
4450 return;
4451 set = pc_set (insn);
4453 /* See if this jump condition is known true or false. */
4454 if (taken)
4455 cond_known_true = (XEXP (SET_SRC (set), 2) == pc_rtx);
4456 else
4457 cond_known_true = (XEXP (SET_SRC (set), 1) == pc_rtx);
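 /* Illustrative note, not in the original: for a typical conditional
    branch the SET_SRC is
    (if_then_else (eq (reg) (const_int 2)) (label_ref L) (pc)).
    Operand 2 is pc_rtx, so on the taken edge the condition is known
    true; on the fall-through edge operand 1 is the LABEL_REF rather
    than pc_rtx, so the condition is known false and will be reversed
    below.  The label name is hypothetical.  */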
4459 /* Get the type of comparison being done and the operands being compared.
4460 If we had to reverse a non-equality condition, record that fact so we
4461 know that it isn't valid for floating-point. */
4462 code = GET_CODE (XEXP (SET_SRC (set), 0));
4463 op0 = fold_rtx (XEXP (XEXP (SET_SRC (set), 0), 0), insn);
4464 op1 = fold_rtx (XEXP (XEXP (SET_SRC (set), 0), 1), insn);
4466 code = find_comparison_args (code, &op0, &op1, &mode0, &mode1);
4467 if (! cond_known_true)
4469 code = reversed_comparison_code_parts (code, op0, op1, insn);
4471 /* Don't remember if we can't find the inverse. */
4472 if (code == UNKNOWN)
4473 return;
4476 /* The mode is the mode of the non-constant. */
4477 mode = mode0;
4478 if (mode1 != VOIDmode)
4479 mode = mode1;
4481 record_jump_cond (code, mode, op0, op1, reversed_nonequality);
4484 /* We know that comparison CODE applied to OP0 and OP1 in MODE is true.
4485 REVERSED_NONEQUALITY is nonzero if CODE had to be swapped.
4486 Make any useful entries we can with that information. Called from
4487 above function and called recursively. */
4489 static void
4490 record_jump_cond (code, mode, op0, op1, reversed_nonequality)
4491 enum rtx_code code;
4492 enum machine_mode mode;
4493 rtx op0, op1;
4494 int reversed_nonequality;
4496 unsigned op0_hash, op1_hash;
4497 int op0_in_memory, op1_in_memory;
4498 struct table_elt *op0_elt, *op1_elt;
4500 /* If OP0 and OP1 are known equal, and either is a paradoxical SUBREG,
4501 we know that they are also equal in the smaller mode (this is also
4502 true for all smaller modes whether or not there is a SUBREG, but
4503 is not worth testing for with no SUBREG). */
4505 /* Note that GET_MODE (op0) may not equal MODE. */
4506 if (code == EQ && GET_CODE (op0) == SUBREG
4507 && (GET_MODE_SIZE (GET_MODE (op0))
4508 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
4510 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
4511 rtx tem = gen_lowpart_if_possible (inner_mode, op1);
4513 record_jump_cond (code, mode, SUBREG_REG (op0),
4514 tem ? tem : gen_rtx_SUBREG (inner_mode, op1, 0),
4515 reversed_nonequality);
4518 if (code == EQ && GET_CODE (op1) == SUBREG
4519 && (GET_MODE_SIZE (GET_MODE (op1))
4520 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
4522 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
4523 rtx tem = gen_lowpart_if_possible (inner_mode, op0);
4525 record_jump_cond (code, mode, SUBREG_REG (op1),
4526 tem ? tem : gen_rtx_SUBREG (inner_mode, op0, 0),
4527 reversed_nonequality);
4530 /* Similarly, if this is an NE comparison, and either is a SUBREG
4531 making a smaller mode, we know the whole thing is also NE. */
4533 /* Note that GET_MODE (op0) may not equal MODE;
4534 if we test MODE instead, we can get an infinite recursion
4535 alternating between two modes each wider than MODE. */
4537 if (code == NE && GET_CODE (op0) == SUBREG
4538 && subreg_lowpart_p (op0)
4539 && (GET_MODE_SIZE (GET_MODE (op0))
4540 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
4542 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
4543 rtx tem = gen_lowpart_if_possible (inner_mode, op1);
4545 record_jump_cond (code, mode, SUBREG_REG (op0),
4546 tem ? tem : gen_rtx_SUBREG (inner_mode, op1, 0),
4547 reversed_nonequality);
4550 if (code == NE && GET_CODE (op1) == SUBREG
4551 && subreg_lowpart_p (op1)
4552 && (GET_MODE_SIZE (GET_MODE (op1))
4553 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
4555 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
4556 rtx tem = gen_lowpart_if_possible (inner_mode, op0);
4558 record_jump_cond (code, mode, SUBREG_REG (op1),
4559 tem ? tem : gen_rtx_SUBREG (inner_mode, op0, 0),
4560 reversed_nonequality);
4563 /* Hash both operands. */
4565 do_not_record = 0;
4566 hash_arg_in_memory = 0;
4567 op0_hash = HASH (op0, mode);
4568 op0_in_memory = hash_arg_in_memory;
4570 if (do_not_record)
4571 return;
4573 do_not_record = 0;
4574 hash_arg_in_memory = 0;
4575 op1_hash = HASH (op1, mode);
4576 op1_in_memory = hash_arg_in_memory;
4578 if (do_not_record)
4579 return;
4581 /* Look up both operands. */
4582 op0_elt = lookup (op0, op0_hash, mode);
4583 op1_elt = lookup (op1, op1_hash, mode);
4585 /* If both operands are already equivalent or if they are not in the
4586 table but are identical, do nothing. */
4587 if ((op0_elt != 0 && op1_elt != 0
4588 && op0_elt->first_same_value == op1_elt->first_same_value)
4589 || op0 == op1 || rtx_equal_p (op0, op1))
4590 return;
4592 /* If we aren't setting two things equal all we can do is save this
4593 comparison. Similarly if this is floating-point. In the latter
4594 case, OP1 might be zero and both -0.0 and 0.0 are equal to it.
4595 If we record the equality, we might inadvertently delete code
4596 whose intent was to change -0 to +0. */
4598 if (code != EQ || FLOAT_MODE_P (GET_MODE (op0)))
4600 struct qty_table_elem *ent;
4601 int qty;
4603 /* If we reversed a floating-point comparison, if OP0 is not a
4604 register, or if OP1 is neither a register nor a constant, we can't
4605 do anything. */
4607 if (GET_CODE (op1) != REG)
4608 op1 = equiv_constant (op1);
4610 if ((reversed_nonequality && FLOAT_MODE_P (mode))
4611 || GET_CODE (op0) != REG || op1 == 0)
4612 return;
4614 /* Put OP0 in the hash table if it isn't already. This gives it a
4615 new quantity number. */
4616 if (op0_elt == 0)
4618 if (insert_regs (op0, NULL, 0))
4620 rehash_using_reg (op0);
4621 op0_hash = HASH (op0, mode);
4623 /* If OP0 is contained in OP1, this changes its hash code
4624 as well. Faster to rehash than to check, except
4625 for the simple case of a constant. */
4626 if (! CONSTANT_P (op1))
4627 op1_hash = HASH (op1, mode);
4630 op0_elt = insert (op0, NULL, op0_hash, mode);
4631 op0_elt->in_memory = op0_in_memory;
4634 qty = REG_QTY (REGNO (op0));
4635 ent = &qty_table[qty];
4637 ent->comparison_code = code;
4638 if (GET_CODE (op1) == REG)
4640 /* Look it up again--in case op0 and op1 are the same. */
4641 op1_elt = lookup (op1, op1_hash, mode);
4643 /* Put OP1 in the hash table so it gets a new quantity number. */
4644 if (op1_elt == 0)
4646 if (insert_regs (op1, NULL, 0))
4648 rehash_using_reg (op1);
4649 op1_hash = HASH (op1, mode);
4652 op1_elt = insert (op1, NULL, op1_hash, mode);
4653 op1_elt->in_memory = op1_in_memory;
4656 ent->comparison_const = NULL_RTX;
4657 ent->comparison_qty = REG_QTY (REGNO (op1));
4659 else
4661 ent->comparison_const = op1;
4662 ent->comparison_qty = -1;
4665 return;
4668 /* If either side is still missing an equivalence, make it now,
4669 then merge the equivalences. */
4671 if (op0_elt == 0)
4673 if (insert_regs (op0, NULL, 0))
4675 rehash_using_reg (op0);
4676 op0_hash = HASH (op0, mode);
4679 op0_elt = insert (op0, NULL, op0_hash, mode);
4680 op0_elt->in_memory = op0_in_memory;
4683 if (op1_elt == 0)
4685 if (insert_regs (op1, NULL, 0))
4687 rehash_using_reg (op1);
4688 op1_hash = HASH (op1, mode);
4691 op1_elt = insert (op1, NULL, op1_hash, mode);
4692 op1_elt->in_memory = op1_in_memory;
4695 merge_equiv_classes (op0_elt, op1_elt);
4696 last_jump_equiv_class = op0_elt;
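/* Hypothetical source-level illustration, not part of the original
   file: within a single extended basic block, once the branch for the
   first test below has been followed, the equivalence of `i' and 2 is
   recorded here, so the second comparison can be resolved by fold_rtx
   without re-testing.
       if (i == 2)
         {
           ...
           if (i == 2)    (known true on this path)
             ...
         }
   */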
4699 /* CSE processing for one instruction.
4700 First simplify sources and addresses of all assignments
4701 in the instruction, using previously-computed equivalent values.
4702 Then install the new sources and destinations in the table
4703 of available values.
4705 If LIBCALL_INSN is nonzero, don't record any equivalence made in
4706 the insn. It means that INSN is inside a libcall block. In this
4707 case LIBCALL_INSN is the corresponding insn with REG_LIBCALL. */
4709 /* Data on one SET contained in the instruction. */
4711 struct set
4713 /* The SET rtx itself. */
4714 rtx rtl;
4715 /* The SET_SRC of the rtx (the original value, if it is changing). */
4716 rtx src;
4717 /* The hash-table element for the SET_SRC of the SET. */
4718 struct table_elt *src_elt;
4719 /* Hash value for the SET_SRC. */
4720 unsigned src_hash;
4721 /* Hash value for the SET_DEST. */
4722 unsigned dest_hash;
4723 /* The SET_DEST, with SUBREG, etc., stripped. */
4724 rtx inner_dest;
4725 /* Nonzero if the SET_SRC is in memory. */
4726 char src_in_memory;
4727 /* Nonzero if the SET_SRC contains something
4728 whose value cannot be predicted and understood. */
4729 char src_volatile;
4730 /* Original machine mode, in case it becomes a CONST_INT. */
4731 enum machine_mode mode;
4732 /* A constant equivalent for SET_SRC, if any. */
4733 rtx src_const;
4734 /* Original SET_SRC value used for libcall notes. */
4735 rtx orig_src;
4736 /* Hash value of constant equivalent for SET_SRC. */
4737 unsigned src_const_hash;
4738 /* Table entry for constant equivalent for SET_SRC, if any. */
4739 struct table_elt *src_const_elt;
4742 static void
4743 cse_insn (insn, libcall_insn)
4744 rtx insn;
4745 rtx libcall_insn;
4747 rtx x = PATTERN (insn);
4748 int i;
4749 rtx tem;
4750 int n_sets = 0;
4752 #ifdef HAVE_cc0
4753 /* Records what this insn does to set CC0. */
4754 rtx this_insn_cc0 = 0;
4755 enum machine_mode this_insn_cc0_mode = VOIDmode;
4756 #endif
4758 rtx src_eqv = 0;
4759 struct table_elt *src_eqv_elt = 0;
4760 int src_eqv_volatile = 0;
4761 int src_eqv_in_memory = 0;
4762 unsigned src_eqv_hash = 0;
4764 struct set *sets = (struct set *) 0;
4766 this_insn = insn;
4768 /* Find all the SETs and CLOBBERs in this instruction.
4769 Record all the SETs in the array `set' and count them.
4770 Also determine whether there is a CLOBBER that invalidates
4771 all memory references, or all references at varying addresses. */
4773 if (GET_CODE (insn) == CALL_INSN)
4775 for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
4777 if (GET_CODE (XEXP (tem, 0)) == CLOBBER)
4778 invalidate (SET_DEST (XEXP (tem, 0)), VOIDmode);
4779 XEXP (tem, 0) = canon_reg (XEXP (tem, 0), insn);
4783 if (GET_CODE (x) == SET)
4785 sets = (struct set *) alloca (sizeof (struct set));
4786 sets[0].rtl = x;
4788 /* Ignore SETs that are unconditional jumps.
4789 They never need cse processing, so this does not hurt.
4790 The reason is not efficiency but rather
4791 so that we can test at the end for instructions
4792 that have been simplified to unconditional jumps
4793 and not be misled by unchanged instructions
4794 that were unconditional jumps to begin with. */
4795 if (SET_DEST (x) == pc_rtx
4796 && GET_CODE (SET_SRC (x)) == LABEL_REF)
4799 /* Don't count call-insns, (set (reg 0) (call ...)), as a set.
4800 The hard function value register is used only once, to copy to
4801 someplace else, so it isn't worth cse'ing (and on 80386 is unsafe)!
4802 Ensure we invalidate the destination register. On the 80386 no
4803 other code would invalidate it since it is a fixed_reg.
4804 We need not check the return of apply_change_group; see canon_reg. */
4806 else if (GET_CODE (SET_SRC (x)) == CALL)
4808 canon_reg (SET_SRC (x), insn);
4809 apply_change_group ();
4810 fold_rtx (SET_SRC (x), insn);
4811 invalidate (SET_DEST (x), VOIDmode);
4813 else
4814 n_sets = 1;
4816 else if (GET_CODE (x) == PARALLEL)
4818 int lim = XVECLEN (x, 0);
4820 sets = (struct set *) alloca (lim * sizeof (struct set));
4822 /* Find all regs explicitly clobbered in this insn,
4823 and ensure they are not replaced with any other regs
4824 elsewhere in this insn.
4825 When a reg that is clobbered is also used for input,
4826 we should presume that that is for a reason,
4827 and we should not substitute some other register
4828 which is not supposed to be clobbered.
4829 Therefore, this loop cannot be merged into the one below
4830 because a CALL may precede a CLOBBER and refer to the
4831 value clobbered. We must not let a canonicalization do
4832 anything in that case. */
4833 for (i = 0; i < lim; i++)
4835 rtx y = XVECEXP (x, 0, i);
4836 if (GET_CODE (y) == CLOBBER)
4838 rtx clobbered = XEXP (y, 0);
4840 if (GET_CODE (clobbered) == REG
4841 || GET_CODE (clobbered) == SUBREG)
4842 invalidate (clobbered, VOIDmode);
4843 else if (GET_CODE (clobbered) == STRICT_LOW_PART
4844 || GET_CODE (clobbered) == ZERO_EXTRACT)
4845 invalidate (XEXP (clobbered, 0), GET_MODE (clobbered));
4849 for (i = 0; i < lim; i++)
4851 rtx y = XVECEXP (x, 0, i);
4852 if (GET_CODE (y) == SET)
4854 /* As above, we ignore unconditional jumps and call-insns and
4855 ignore the result of apply_change_group. */
4856 if (GET_CODE (SET_SRC (y)) == CALL)
4858 canon_reg (SET_SRC (y), insn);
4859 apply_change_group ();
4860 fold_rtx (SET_SRC (y), insn);
4861 invalidate (SET_DEST (y), VOIDmode);
4863 else if (SET_DEST (y) == pc_rtx
4864 && GET_CODE (SET_SRC (y)) == LABEL_REF)
4866 else
4867 sets[n_sets++].rtl = y;
4869 else if (GET_CODE (y) == CLOBBER)
4871 /* If we clobber memory, canon the address.
4872 This does nothing when a register is clobbered
4873 because we have already invalidated the reg. */
4874 if (GET_CODE (XEXP (y, 0)) == MEM)
4875 canon_reg (XEXP (y, 0), NULL_RTX);
4877 else if (GET_CODE (y) == USE
4878 && ! (GET_CODE (XEXP (y, 0)) == REG
4879 && REGNO (XEXP (y, 0)) < FIRST_PSEUDO_REGISTER))
4880 canon_reg (y, NULL_RTX);
4881 else if (GET_CODE (y) == CALL)
4883 /* The result of apply_change_group can be ignored; see
4884 canon_reg. */
4885 canon_reg (y, insn);
4886 apply_change_group ();
4887 fold_rtx (y, insn);
4891 else if (GET_CODE (x) == CLOBBER)
4893 if (GET_CODE (XEXP (x, 0)) == MEM)
4894 canon_reg (XEXP (x, 0), NULL_RTX);
4897 /* Canonicalize a USE of a pseudo register or memory location. */
4898 else if (GET_CODE (x) == USE
4899 && ! (GET_CODE (XEXP (x, 0)) == REG
4900 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER))
4901 canon_reg (XEXP (x, 0), NULL_RTX);
4902 else if (GET_CODE (x) == CALL)
4904 /* The result of apply_change_group can be ignored; see canon_reg. */
4905 canon_reg (x, insn);
4906 apply_change_group ();
4907 fold_rtx (x, insn);
4910 /* Store the equivalent value in SRC_EQV, if different, or if the DEST
4911 is a STRICT_LOW_PART. The latter condition is necessary because SRC_EQV
4912 is handled specially for this case, and if it isn't set, then there will
4913 be no equivalence for the destination. */
4914 if (n_sets == 1 && REG_NOTES (insn) != 0
4915 && (tem = find_reg_note (insn, REG_EQUAL, NULL_RTX)) != 0
4916 && (! rtx_equal_p (XEXP (tem, 0), SET_SRC (sets[0].rtl))
4917 || GET_CODE (SET_DEST (sets[0].rtl)) == STRICT_LOW_PART))
4919 src_eqv = fold_rtx (canon_reg (XEXP (tem, 0), NULL_RTX), insn);
4920 XEXP (tem, 0) = src_eqv;
4923 /* Canonicalize sources and addresses of destinations.
4924 We do this in a separate pass to avoid problems when a MATCH_DUP is
4925 present in the insn pattern. In that case, we want to ensure that
4926 we don't break the duplicate nature of the pattern. So we will replace
4927 both operands at the same time. Otherwise, we would fail to find an
4928 equivalent substitution in the loop calling validate_change below.
4930 We used to suppress canonicalization of DEST if it appears in SRC,
4931 but we don't do this any more. */
4933 for (i = 0; i < n_sets; i++)
4935 rtx dest = SET_DEST (sets[i].rtl);
4936 rtx src = SET_SRC (sets[i].rtl);
4937 rtx new = canon_reg (src, insn);
4938 int insn_code;
4940 sets[i].orig_src = src;
4941 if ((GET_CODE (new) == REG && GET_CODE (src) == REG
4942 && ((REGNO (new) < FIRST_PSEUDO_REGISTER)
4943 != (REGNO (src) < FIRST_PSEUDO_REGISTER)))
4944 || (insn_code = recog_memoized (insn)) < 0
4945 || insn_data[insn_code].n_dups > 0)
4946 validate_change (insn, &SET_SRC (sets[i].rtl), new, 1);
4947 else
4948 SET_SRC (sets[i].rtl) = new;
4950 if (GET_CODE (dest) == ZERO_EXTRACT || GET_CODE (dest) == SIGN_EXTRACT)
4952 validate_change (insn, &XEXP (dest, 1),
4953 canon_reg (XEXP (dest, 1), insn), 1);
4954 validate_change (insn, &XEXP (dest, 2),
4955 canon_reg (XEXP (dest, 2), insn), 1);
4958 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
4959 || GET_CODE (dest) == ZERO_EXTRACT
4960 || GET_CODE (dest) == SIGN_EXTRACT)
4961 dest = XEXP (dest, 0);
4963 if (GET_CODE (dest) == MEM)
4964 canon_reg (dest, insn);
4967 /* Now that we have done all the replacements, we can apply the change
4968 group and see if they all work. Note that this will cause some
4969 canonicalizations that would have worked individually not to be applied
4970 because some other canonicalization didn't work, but this should not
4971 occur often.
4973 The result of apply_change_group can be ignored; see canon_reg. */
4975 apply_change_group ();
4977 /* Set sets[i].src_elt to the class each source belongs to.
4978 Detect assignments from or to volatile things
4979 and set set[i] to zero so they will be ignored
4980 in the rest of this function.
4982 Nothing in this loop changes the hash table or the register chains. */
4984 for (i = 0; i < n_sets; i++)
4986 rtx src, dest;
4987 rtx src_folded;
4988 struct table_elt *elt = 0, *p;
4989 enum machine_mode mode;
4990 rtx src_eqv_here;
4991 rtx src_const = 0;
4992 rtx src_related = 0;
4993 struct table_elt *src_const_elt = 0;
4994 int src_cost = MAX_COST;
4995 int src_eqv_cost = MAX_COST;
4996 int src_folded_cost = MAX_COST;
4997 int src_related_cost = MAX_COST;
4998 int src_elt_cost = MAX_COST;
4999 int src_regcost = MAX_COST;
5000 int src_eqv_regcost = MAX_COST;
5001 int src_folded_regcost = MAX_COST;
5002 int src_related_regcost = MAX_COST;
5003 int src_elt_regcost = MAX_COST;
5004 /* Set nonzero if we need to call force_const_mem on the
5005 contents of src_folded before using it. */
5006 int src_folded_force_flag = 0;
5008 dest = SET_DEST (sets[i].rtl);
5009 src = SET_SRC (sets[i].rtl);
5011 /* If SRC is a constant that has no machine mode,
5012 hash it with the destination's machine mode.
5013 This way we can keep different modes separate. */
5015 mode = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
5016 sets[i].mode = mode;
5018 if (src_eqv)
5020 enum machine_mode eqvmode = mode;
5021 if (GET_CODE (dest) == STRICT_LOW_PART)
5022 eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
5023 do_not_record = 0;
5024 hash_arg_in_memory = 0;
5025 src_eqv_hash = HASH (src_eqv, eqvmode);
5027 /* Find the equivalence class for the equivalent expression. */
5029 if (!do_not_record)
5030 src_eqv_elt = lookup (src_eqv, src_eqv_hash, eqvmode);
5032 src_eqv_volatile = do_not_record;
5033 src_eqv_in_memory = hash_arg_in_memory;
5036 /* If this is a STRICT_LOW_PART assignment, src_eqv corresponds to the
5037 value of the INNER register, not the destination. So it is not
5038 a valid substitution for the source. But save it for later. */
5039 if (GET_CODE (dest) == STRICT_LOW_PART)
5040 src_eqv_here = 0;
5041 else
5042 src_eqv_here = src_eqv;
5044 /* Simplify any foldable subexpressions in SRC. Then get the fully-
5045 simplified result, which may not necessarily be valid. */
5046 src_folded = fold_rtx (src, insn);
5048 #if 0
5049 /* ??? This caused bad code to be generated for the m68k port with -O2.
5050 Suppose src is (CONST_INT -1), and that after truncation src_folded
5051 is (CONST_INT 3). Suppose src_folded is then used for src_const.
5052 At the end we will add src and src_const to the same equivalence
5053 class. We now have 3 and -1 on the same equivalence class. This
5054 causes later instructions to be mis-optimized. */
5055 /* If storing a constant in a bitfield, pre-truncate the constant
5056 so we will be able to record it later. */
5057 if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
5058 || GET_CODE (SET_DEST (sets[i].rtl)) == SIGN_EXTRACT)
5060 rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
5062 if (GET_CODE (src) == CONST_INT
5063 && GET_CODE (width) == CONST_INT
5064 && INTVAL (width) < HOST_BITS_PER_WIDE_INT
5065 && (INTVAL (src) & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
5066 src_folded
5067 = GEN_INT (INTVAL (src) & (((HOST_WIDE_INT) 1
5068 << INTVAL (width)) - 1));
5070 #endif
5072 /* Compute SRC's hash code, and also notice if it
5073 should not be recorded at all. In that case,
5074 prevent any further processing of this assignment. */
5075 do_not_record = 0;
5076 hash_arg_in_memory = 0;
5078 sets[i].src = src;
5079 sets[i].src_hash = HASH (src, mode);
5080 sets[i].src_volatile = do_not_record;
5081 sets[i].src_in_memory = hash_arg_in_memory;
5083 /* If SRC is a MEM, there is a REG_EQUIV note for SRC, and DEST is
5084 a pseudo, do not record SRC. Using SRC as a replacement for
5085 anything else will be incorrect in that situation. Note that
5086 this usually occurs only for stack slots, in which case all the
5087 RTL would be referring to SRC, so we don't lose any optimization
5088 opportunities by not having SRC in the hash table. */
5090 if (GET_CODE (src) == MEM
5091 && find_reg_note (insn, REG_EQUIV, NULL_RTX) != 0
5092 && GET_CODE (dest) == REG
5093 && REGNO (dest) >= FIRST_PSEUDO_REGISTER)
5094 sets[i].src_volatile = 1;
5096 #if 0
5097 /* It is no longer clear why we used to do this, but it doesn't
5098 appear to still be needed. So let's try without it since this
5099 code hurts cse'ing widened ops. */
5100 /* If source is a perverse subreg (such as QI treated as an SI),
5101 treat it as volatile. It may do the work of an SI in one context
5102 where the extra bits are not being used, but cannot replace an SI
5103 in general. */
5104 if (GET_CODE (src) == SUBREG
5105 && (GET_MODE_SIZE (GET_MODE (src))
5106 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))))
5107 sets[i].src_volatile = 1;
5108 #endif
5110 /* Locate all possible equivalent forms for SRC. Try to replace
5111 SRC in the insn with each cheaper equivalent.
5113 We have the following types of equivalents: SRC itself, a folded
5114 version, a value given in a REG_EQUAL note, or a value related
5115 to a constant.
5117 Each of these equivalents may be part of an additional class
5118 of equivalents (if more than one is in the table, they must be in
5119 the same class; we check for this).
5121 If the source is volatile, we don't do any table lookups.
5123 We note any constant equivalent for possible later use in a
5124 REG_NOTE. */
5126 if (!sets[i].src_volatile)
5127 elt = lookup (src, sets[i].src_hash, mode);
5129 sets[i].src_elt = elt;
5131 if (elt && src_eqv_here && src_eqv_elt)
5133 if (elt->first_same_value != src_eqv_elt->first_same_value)
5135 /* The REG_EQUAL is indicating that two formerly distinct
5136 classes are now equivalent. So merge them. */
5137 merge_equiv_classes (elt, src_eqv_elt);
5138 src_eqv_hash = HASH (src_eqv, elt->mode);
5139 src_eqv_elt = lookup (src_eqv, src_eqv_hash, elt->mode);
5142 src_eqv_here = 0;
5145 else if (src_eqv_elt)
5146 elt = src_eqv_elt;
5148 /* Try to find a constant somewhere and record it in `src_const'.
5149 Record its table element, if any, in `src_const_elt'. Look in
5150 any known equivalences first. (If the constant is not in the
5151 table, also set `sets[i].src_const_hash'). */
5152 if (elt)
5153 for (p = elt->first_same_value; p; p = p->next_same_value)
5154 if (p->is_const)
5156 src_const = p->exp;
5157 src_const_elt = elt;
5158 break;
5161 if (src_const == 0
5162 && (CONSTANT_P (src_folded)
5163 /* Consider (minus (label_ref L1) (label_ref L2)) as
5164 "constant" here so we will record it. This allows us
5165 to fold switch statements when an ADDR_DIFF_VEC is used. */
5166 || (GET_CODE (src_folded) == MINUS
5167 && GET_CODE (XEXP (src_folded, 0)) == LABEL_REF
5168 && GET_CODE (XEXP (src_folded, 1)) == LABEL_REF)))
5169 src_const = src_folded, src_const_elt = elt;
5170 else if (src_const == 0 && src_eqv_here && CONSTANT_P (src_eqv_here))
5171 src_const = src_eqv_here, src_const_elt = src_eqv_elt;
5173 /* If we don't know if the constant is in the table, get its
5174 hash code and look it up. */
5175 if (src_const && src_const_elt == 0)
5177 sets[i].src_const_hash = HASH (src_const, mode);
5178 src_const_elt = lookup (src_const, sets[i].src_const_hash, mode);
5181 sets[i].src_const = src_const;
5182 sets[i].src_const_elt = src_const_elt;
5184 /* If the constant and our source are both in the table, mark them as
5185 equivalent. Otherwise, if a constant is in the table but the source
5186 isn't, set ELT to it. */
5187 if (src_const_elt && elt
5188 && src_const_elt->first_same_value != elt->first_same_value)
5189 merge_equiv_classes (elt, src_const_elt);
5190 else if (src_const_elt && elt == 0)
5191 elt = src_const_elt;
5193 /* See if there is a register linearly related to a constant
5194 equivalent of SRC. */
5195 if (src_const
5196 && (GET_CODE (src_const) == CONST
5197 || (src_const_elt && src_const_elt->related_value != 0)))
5199 src_related = use_related_value (src_const, src_const_elt);
5200 if (src_related)
5202 struct table_elt *src_related_elt
5203 = lookup (src_related, HASH (src_related, mode), mode);
5204 if (src_related_elt && elt)
5206 if (elt->first_same_value
5207 != src_related_elt->first_same_value)
5208 /* This can occur when we previously saw a CONST
5209 involving a SYMBOL_REF and then see the SYMBOL_REF
5210 twice. Merge the involved classes. */
5211 merge_equiv_classes (elt, src_related_elt);
5213 src_related = 0;
5214 src_related_elt = 0;
5216 else if (src_related_elt && elt == 0)
5217 elt = src_related_elt;
5221 /* See if we have a CONST_INT that is already in a register in a
5222 wider mode. */
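 /* Illustrative example, not in the original: if SRC_CONST is
    (const_int 7), MODE is QImode, and the table already records a
    hypothetical (reg:SI 70) as holding (const_int 7) in SImode, the
    loop below sets SRC_RELATED to the QImode low part of that
    register.  */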
5224 if (src_const && src_related == 0 && GET_CODE (src_const) == CONST_INT
5225 && GET_MODE_CLASS (mode) == MODE_INT
5226 && GET_MODE_BITSIZE (mode) < BITS_PER_WORD)
5228 enum machine_mode wider_mode;
5230 for (wider_mode = GET_MODE_WIDER_MODE (mode);
5231 GET_MODE_BITSIZE (wider_mode) <= BITS_PER_WORD
5232 && src_related == 0;
5233 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
5235 struct table_elt *const_elt
5236 = lookup (src_const, HASH (src_const, wider_mode), wider_mode);
5238 if (const_elt == 0)
5239 continue;
5241 for (const_elt = const_elt->first_same_value;
5242 const_elt; const_elt = const_elt->next_same_value)
5243 if (GET_CODE (const_elt->exp) == REG)
5245 src_related = gen_lowpart_if_possible (mode,
5246 const_elt->exp);
5247 break;
5252 /* Another possibility is that we have an AND with a constant in
5253 a mode narrower than a word. If so, it might have been generated
5254 as part of an "if" which would narrow the AND. If we already
5255 have done the AND in a wider mode, we can use a SUBREG of that
5256 value. */
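 /* Illustrative example, not in the original: if SRC is
    (and:QI (subreg:QI (reg:SI 70) 0) (const_int 15)) and the table
    already records some register as holding
    (and:SI (reg:SI 70) (const_int 15)), the loop below finds that
    register and uses its QImode low part as SRC_RELATED.  The register
    number is hypothetical.  */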
5258 if (flag_expensive_optimizations && ! src_related
5259 && GET_CODE (src) == AND && GET_CODE (XEXP (src, 1)) == CONST_INT
5260 && GET_MODE_SIZE (mode) < UNITS_PER_WORD)
5262 enum machine_mode tmode;
5263 rtx new_and = gen_rtx_AND (VOIDmode, NULL_RTX, XEXP (src, 1));
5265 for (tmode = GET_MODE_WIDER_MODE (mode);
5266 GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
5267 tmode = GET_MODE_WIDER_MODE (tmode))
5269 rtx inner = gen_lowpart_if_possible (tmode, XEXP (src, 0));
5270 struct table_elt *larger_elt;
5272 if (inner)
5274 PUT_MODE (new_and, tmode);
5275 XEXP (new_and, 0) = inner;
5276 larger_elt = lookup (new_and, HASH (new_and, tmode), tmode);
5277 if (larger_elt == 0)
5278 continue;
5280 for (larger_elt = larger_elt->first_same_value;
5281 larger_elt; larger_elt = larger_elt->next_same_value)
5282 if (GET_CODE (larger_elt->exp) == REG)
5284 src_related
5285 = gen_lowpart_if_possible (mode, larger_elt->exp);
5286 break;
5289 if (src_related)
5290 break;
5295 #ifdef LOAD_EXTEND_OP
5296 /* See if a MEM has already been loaded with a widening operation;
5297 if it has, we can use a subreg of that. Many CISC machines
5298 also have such operations, but this is only likely to be
5299 beneficial on these machines. */
5301 if (flag_expensive_optimizations && src_related == 0
5302 && (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
5303 && GET_MODE_CLASS (mode) == MODE_INT
5304 && GET_CODE (src) == MEM && ! do_not_record
5305 && LOAD_EXTEND_OP (mode) != NIL)
5307 enum machine_mode tmode;
5309 /* Set what we are trying to extend and the operation it might
5310 have been extended with. */
5311 PUT_CODE (memory_extend_rtx, LOAD_EXTEND_OP (mode));
5312 XEXP (memory_extend_rtx, 0) = src;
5314 for (tmode = GET_MODE_WIDER_MODE (mode);
5315 GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
5316 tmode = GET_MODE_WIDER_MODE (tmode))
5318 struct table_elt *larger_elt;
5320 PUT_MODE (memory_extend_rtx, tmode);
5321 larger_elt = lookup (memory_extend_rtx,
5322 HASH (memory_extend_rtx, tmode), tmode);
5323 if (larger_elt == 0)
5324 continue;
5326 for (larger_elt = larger_elt->first_same_value;
5327 larger_elt; larger_elt = larger_elt->next_same_value)
5328 if (GET_CODE (larger_elt->exp) == REG)
5330 src_related = gen_lowpart_if_possible (mode,
5331 larger_elt->exp);
5332 break;
5335 if (src_related)
5336 break;
5339 #endif /* LOAD_EXTEND_OP */
5341 if (src == src_folded)
5342 src_folded = 0;
5344 /* At this point, ELT, if non-zero, points to a class of expressions
5345 equivalent to the source of this SET and SRC, SRC_EQV, SRC_FOLDED,
5346 and SRC_RELATED, if non-zero, each contain additional equivalent
5347 expressions. Prune these latter expressions by deleting expressions
5348 already in the equivalence class.
5350 Check for an equivalent identical to the destination. If found,
5351 this is the preferred equivalent since it will likely lead to
5352 elimination of the insn. Indicate this by placing it in
5353 `src_related'. */
5355 if (elt)
5356 elt = elt->first_same_value;
5357 for (p = elt; p; p = p->next_same_value)
5359 enum rtx_code code = GET_CODE (p->exp);
5361 /* If the expression is not valid, ignore it. Then we do not
5362 have to check for validity below. In most cases, we can use
5363 `rtx_equal_p', since canonicalization has already been done. */
5364 if (code != REG && ! exp_equiv_p (p->exp, p->exp, 1, 0))
5365 continue;
5367 /* Also skip paradoxical subregs, unless that's what we're
5368 looking for. */
5369 if (code == SUBREG
5370 && (GET_MODE_SIZE (GET_MODE (p->exp))
5371 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))
5372 && ! (src != 0
5373 && GET_CODE (src) == SUBREG
5374 && GET_MODE (src) == GET_MODE (p->exp)
5375 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
5376 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))))
5377 continue;
5379 if (src && GET_CODE (src) == code && rtx_equal_p (src, p->exp))
5380 src = 0;
5381 else if (src_folded && GET_CODE (src_folded) == code
5382 && rtx_equal_p (src_folded, p->exp))
5383 src_folded = 0;
5384 else if (src_eqv_here && GET_CODE (src_eqv_here) == code
5385 && rtx_equal_p (src_eqv_here, p->exp))
5386 src_eqv_here = 0;
5387 else if (src_related && GET_CODE (src_related) == code
5388 && rtx_equal_p (src_related, p->exp))
5389 src_related = 0;
5391 /* If this is the same as the destination of the insn, we want
5392 to prefer it. Copy it to src_related. The code below will
5393 then give it a negative cost. */
5394 if (GET_CODE (dest) == code && rtx_equal_p (p->exp, dest))
5395 src_related = dest;
5398 /* Find the cheapest valid equivalent, trying all the available
5399 possibilities. Prefer items not in the hash table to ones
5400 that are when they are equal cost. Note that we can never
5401 worsen an insn as the current contents will also succeed.
5402 If we find an equivalent identical to the destination, use it as best,
5403 since this insn will probably be eliminated in that case. */
5404 if (src)
5406 if (rtx_equal_p (src, dest))
5407 src_cost = src_regcost = -1;
5408 else
5410 src_cost = COST (src);
5411 src_regcost = approx_reg_cost (src);
5415 if (src_eqv_here)
5417 if (rtx_equal_p (src_eqv_here, dest))
5418 src_eqv_cost = src_eqv_regcost = -1;
5419 else
5421 src_eqv_cost = COST (src_eqv_here);
5422 src_eqv_regcost = approx_reg_cost (src_eqv_here);
5426 if (src_folded)
5428 if (rtx_equal_p (src_folded, dest))
5429 src_folded_cost = src_folded_regcost = -1;
5430 else
5432 src_folded_cost = COST (src_folded);
5433 src_folded_regcost = approx_reg_cost (src_folded);
5437 if (src_related)
5439 if (rtx_equal_p (src_related, dest))
5440 src_related_cost = src_related_regcost = -1;
5441 else
5443 src_related_cost = COST (src_related);
5444 src_related_regcost = approx_reg_cost (src_related);
5448 /* If this was an indirect jump insn, a known label will really be
5449 cheaper even though it looks more expensive. */
5450 if (dest == pc_rtx && src_const && GET_CODE (src_const) == LABEL_REF)
5451 src_folded = src_const, src_folded_cost = src_folded_regcost = -1;
5453 /* Terminate loop when replacement made. This must terminate since
5454 the current contents will be tested and will always be valid. */
5455 while (1)
5457 rtx trial;
5459 /* Skip invalid entries. */
5460 while (elt && GET_CODE (elt->exp) != REG
5461 && ! exp_equiv_p (elt->exp, elt->exp, 1, 0))
5462 elt = elt->next_same_value;
5464 /* A paradoxical subreg would be bad here: it'll be the right
5465 size, but later may be adjusted so that the upper bits aren't
5466 what we want. So reject it. */
5467 if (elt != 0
5468 && GET_CODE (elt->exp) == SUBREG
5469 && (GET_MODE_SIZE (GET_MODE (elt->exp))
5470 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))
5471 /* It is okay, though, if the rtx we're trying to match
5472 will ignore any of the bits we can't predict. */
5473 && ! (src != 0
5474 && GET_CODE (src) == SUBREG
5475 && GET_MODE (src) == GET_MODE (elt->exp)
5476 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
5477 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))))
5479 elt = elt->next_same_value;
5480 continue;
5483 if (elt)
5485 src_elt_cost = elt->cost;
5486 src_elt_regcost = elt->regcost;
5489 /* Find cheapest and skip it for the next time. For items
5490 of equal cost, use this order:
5491 src_folded, src, src_eqv, src_related and hash table entry. */
5492 if (src_folded
5493 && preferrable (src_folded_cost, src_folded_regcost,
5494 src_cost, src_regcost) <= 0
5495 && preferrable (src_folded_cost, src_folded_regcost,
5496 src_eqv_cost, src_eqv_regcost) <= 0
5497 && preferrable (src_folded_cost, src_folded_regcost,
5498 src_related_cost, src_related_regcost) <= 0
5499 && preferrable (src_folded_cost, src_folded_regcost,
5500 src_elt_cost, src_elt_regcost) <= 0)
5502 trial = src_folded, src_folded_cost = MAX_COST;
5503 if (src_folded_force_flag)
5504 trial = force_const_mem (mode, trial);
5506 else if (src
5507 && preferrable (src_cost, src_regcost,
5508 src_eqv_cost, src_eqv_regcost) <= 0
5509 && preferrable (src_cost, src_regcost,
5510 src_related_cost, src_related_regcost) <= 0
5511 && preferrable (src_cost, src_regcost,
5512 src_elt_cost, src_elt_regcost) <= 0)
5513 trial = src, src_cost = MAX_COST;
5514 else if (src_eqv_here
5515 && preferrable (src_eqv_cost, src_eqv_regcost,
5516 src_related_cost, src_related_regcost) <= 0
5517 && preferrable (src_eqv_cost, src_eqv_regcost,
5518 src_elt_cost, src_elt_regcost) <= 0)
5519 trial = copy_rtx (src_eqv_here), src_eqv_cost = MAX_COST;
5520 else if (src_related
5521 && preferrable (src_related_cost, src_related_regcost,
5522 src_elt_cost, src_elt_regcost) <= 0)
5523 trial = copy_rtx (src_related), src_related_cost = MAX_COST;
5524 else
5526 trial = copy_rtx (elt->exp);
5527 elt = elt->next_same_value;
5528 src_elt_cost = MAX_COST;
5531 /* We don't normally have an insn matching (set (pc) (pc)), so
5532 check for this separately here. We will delete such an
5533 insn below.
5535 For other cases such as a table jump or conditional jump
5536 where we know the ultimate target, go ahead and replace the
5537 operand. While that may not make a valid insn, we will
5538 reemit the jump below (and also insert any necessary
5539 barriers). */
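/* As a concrete (hypothetical) illustration: a conditional branch whose
   condition has been proven always false folds its SET_SRC to (pc),
   giving (set (pc) (pc)), which is deleted as a no-op further down; one
   whose source folds to (label_ref L) is accepted here and then re-emitted
   below as a plain unconditional jump to L, followed by a barrier.  */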
5540 if (n_sets == 1 && dest == pc_rtx
5541 && (trial == pc_rtx
5542 || (GET_CODE (trial) == LABEL_REF
5543 && ! condjump_p (insn))))
5545 SET_SRC (sets[i].rtl) = trial;
5546 cse_jumps_altered = 1;
5547 break;
5550 /* Look for a substitution that makes a valid insn. */
5551 else if (validate_change (insn, &SET_SRC (sets[i].rtl), trial, 0))
5553 /* If we just made a substitution inside a libcall, then we
5554 need to make the same substitution in any notes attached
5555 to the RETVAL insn. */
5556 if (libcall_insn
5557 && (GET_CODE (sets[i].orig_src) == REG
5558 || GET_CODE (sets[i].orig_src) == SUBREG
5559 || GET_CODE (sets[i].orig_src) == MEM))
5560 replace_rtx (REG_NOTES (libcall_insn), sets[i].orig_src,
5561 canon_reg (SET_SRC (sets[i].rtl), insn));
5563 /* The result of apply_change_group can be ignored; see
5564 canon_reg. */
5566 validate_change (insn, &SET_SRC (sets[i].rtl),
5567 canon_reg (SET_SRC (sets[i].rtl), insn),
5569 apply_change_group ();
5570 break;
5573 /* If we previously found constant pool entries for
5574 constants and this is a constant, try making a
5575	 pool entry.  Put it in src_folded unless we have already done
5576	 this, since that is where it likely came from.  */
5578 else if (constant_pool_entries_cost
5579 && CONSTANT_P (trial)
5580 /* Reject cases that will abort in decode_rtx_const.
5581 On the alpha when simplifying a switch, we get
5582 (const (truncate (minus (label_ref) (label_ref)))). */
5583 && ! (GET_CODE (trial) == CONST
5584 && GET_CODE (XEXP (trial, 0)) == TRUNCATE)
5585 /* Likewise on IA-64, except without the truncate. */
5586 && ! (GET_CODE (trial) == CONST
5587 && GET_CODE (XEXP (trial, 0)) == MINUS
5588 && GET_CODE (XEXP (XEXP (trial, 0), 0)) == LABEL_REF
5589 && GET_CODE (XEXP (XEXP (trial, 0), 1)) == LABEL_REF)
5590 && (src_folded == 0
5591 || (GET_CODE (src_folded) != MEM
5592 && ! src_folded_force_flag))
5593 && GET_MODE_CLASS (mode) != MODE_CC
5594 && mode != VOIDmode)
5596 src_folded_force_flag = 1;
5597 src_folded = trial;
5598 src_folded_cost = constant_pool_entries_cost;
5602 src = SET_SRC (sets[i].rtl);
5604 /* In general, it is good to have a SET with SET_SRC == SET_DEST.
5605 However, there is an important exception: If both are registers
5606 that are not the head of their equivalence class, replace SET_SRC
5607 with the head of the class. If we do not do this, we will have
5608 both registers live over a portion of the basic block. This way,
5609 their lifetimes will likely abut instead of overlapping. */
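/* For instance (pseudo-register numbers invented): if pseudos 105 and 106
   share a quantity whose first_reg is 105, the no-op copy
   (set (reg 106) (reg 106)) is rewritten below to
   (set (reg 106) (reg 105)), so the two registers need not stay live
   side by side after this point.  */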
5610 if (GET_CODE (dest) == REG
5611 && REGNO_QTY_VALID_P (REGNO (dest)))
5613 int dest_q = REG_QTY (REGNO (dest));
5614 struct qty_table_elem *dest_ent = &qty_table[dest_q];
5616 if (dest_ent->mode == GET_MODE (dest)
5617 && dest_ent->first_reg != REGNO (dest)
5618 && GET_CODE (src) == REG && REGNO (src) == REGNO (dest)
5619 /* Don't do this if the original insn had a hard reg as
5620 SET_SRC or SET_DEST. */
5621 && (GET_CODE (sets[i].src) != REG
5622 || REGNO (sets[i].src) >= FIRST_PSEUDO_REGISTER)
5623 && (GET_CODE (dest) != REG || REGNO (dest) >= FIRST_PSEUDO_REGISTER))
5624 /* We can't call canon_reg here because it won't do anything if
5625 SRC is a hard register. */
5627 int src_q = REG_QTY (REGNO (src));
5628 struct qty_table_elem *src_ent = &qty_table[src_q];
5629 int first = src_ent->first_reg;
5630 rtx new_src
5631 = (first >= FIRST_PSEUDO_REGISTER
5632 ? regno_reg_rtx[first] : gen_rtx_REG (GET_MODE (src), first));
5634 /* We must use validate-change even for this, because this
5635 might be a special no-op instruction, suitable only to
5636 tag notes onto. */
5637 if (validate_change (insn, &SET_SRC (sets[i].rtl), new_src, 0))
5639 src = new_src;
5640 /* If we had a constant that is cheaper than what we are now
5641 setting SRC to, use that constant. We ignored it when we
5642 thought we could make this into a no-op. */
5643 if (src_const && COST (src_const) < COST (src)
5644 && validate_change (insn, &SET_SRC (sets[i].rtl),
5645 src_const, 0))
5646 src = src_const;
5651 /* If we made a change, recompute SRC values. */
5652 if (src != sets[i].src)
5654 cse_altered = 1;
5655 do_not_record = 0;
5656 hash_arg_in_memory = 0;
5657 sets[i].src = src;
5658 sets[i].src_hash = HASH (src, mode);
5659 sets[i].src_volatile = do_not_record;
5660 sets[i].src_in_memory = hash_arg_in_memory;
5661 sets[i].src_elt = lookup (src, sets[i].src_hash, mode);
5664 /* If this is a single SET, we are setting a register, and we have an
5665 equivalent constant, we want to add a REG_NOTE. We don't want
5666 to write a REG_EQUAL note for a constant pseudo since verifying that
5667 that pseudo hasn't been eliminated is a pain. Such a note also
5668 won't help anything.
5670 Avoid a REG_EQUAL note for (CONST (MINUS (LABEL_REF) (LABEL_REF)))
5671 which can be created for a reference to a compile time computable
5672 entry in a jump table. */
5674 if (n_sets == 1 && src_const && GET_CODE (dest) == REG
5675 && GET_CODE (src_const) != REG
5676 && ! (GET_CODE (src_const) == CONST
5677 && GET_CODE (XEXP (src_const, 0)) == MINUS
5678 && GET_CODE (XEXP (XEXP (src_const, 0), 0)) == LABEL_REF
5679 && GET_CODE (XEXP (XEXP (src_const, 0), 1)) == LABEL_REF))
5681 /* Make sure that the rtx is not shared with any other insn. */
5682 src_const = copy_rtx (src_const);
5684 /* Record the actual constant value in a REG_EQUAL note, making
5685 a new one if one does not already exist. */
5686 set_unique_reg_note (insn, REG_EQUAL, src_const);
5688 /* If storing a constant value in a register that
5689 previously held the constant value 0,
5690 record this fact with a REG_WAS_0 note on this insn.
5692 Note that the *register* is required to have previously held 0,
5693 not just any register in the quantity and we must point to the
5694 insn that set that register to zero.
5696 Rather than track each register individually, we just see if
5697 the last set for this quantity was for this register. */
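/* A made-up example: if insn I1 was (set (reg 42) (const_int 0)) and the
   current insn now stores (const_int 7) in reg 42, the current insn gets a
   REG_WAS_0 note pointing at I1 -- provided the last set recorded for
   reg 42's quantity was indeed a set of reg 42 itself.  */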
5699 if (REGNO_QTY_VALID_P (REGNO (dest)))
5701 int dest_q = REG_QTY (REGNO (dest));
5702 struct qty_table_elem *dest_ent = &qty_table[dest_q];
5704 if (dest_ent->const_rtx == const0_rtx)
5706 /* See if we previously had a REG_WAS_0 note. */
5707 rtx note = find_reg_note (insn, REG_WAS_0, NULL_RTX);
5708 rtx const_insn = dest_ent->const_insn;
5710 if ((tem = single_set (const_insn)) != 0
5711 && rtx_equal_p (SET_DEST (tem), dest))
5713 if (note)
5714 XEXP (note, 0) = const_insn;
5715 else
5716 REG_NOTES (insn)
5717 = gen_rtx_INSN_LIST (REG_WAS_0, const_insn,
5718 REG_NOTES (insn));
5724 /* Now deal with the destination. */
5725 do_not_record = 0;
5727 /* Look within any SIGN_EXTRACT or ZERO_EXTRACT
5728 to the MEM or REG within it. */
5729 while (GET_CODE (dest) == SIGN_EXTRACT
5730 || GET_CODE (dest) == ZERO_EXTRACT
5731 || GET_CODE (dest) == SUBREG
5732 || GET_CODE (dest) == STRICT_LOW_PART)
5733 dest = XEXP (dest, 0);
5735 sets[i].inner_dest = dest;
5737 if (GET_CODE (dest) == MEM)
5739 #ifdef PUSH_ROUNDING
5740 /* Stack pushes invalidate the stack pointer. */
5741 rtx addr = XEXP (dest, 0);
5742 if (GET_RTX_CLASS (GET_CODE (addr)) == 'a'
5743 && XEXP (addr, 0) == stack_pointer_rtx)
5744 invalidate (stack_pointer_rtx, Pmode);
5745 #endif
5746 dest = fold_rtx (dest, insn);
5749 /* Compute the hash code of the destination now,
5750 before the effects of this instruction are recorded,
5751 since the register values used in the address computation
5752 are those before this instruction. */
5753 sets[i].dest_hash = HASH (dest, mode);
5755 /* Don't enter a bit-field in the hash table
5756 because the value in it after the store
5757 may not equal what was stored, due to truncation. */
5759 if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
5760 || GET_CODE (SET_DEST (sets[i].rtl)) == SIGN_EXTRACT)
5762 rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
5764 if (src_const != 0 && GET_CODE (src_const) == CONST_INT
5765 && GET_CODE (width) == CONST_INT
5766 && INTVAL (width) < HOST_BITS_PER_WIDE_INT
5767 && ! (INTVAL (src_const)
5768 & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
5769 /* Exception: if the value is constant,
5770 and it won't be truncated, record it. */
5772 else
5774 /* This is chosen so that the destination will be invalidated
5775 but no new value will be recorded.
5776 We must invalidate because sometimes constant
5777 values can be recorded for bitfields. */
5778 sets[i].src_elt = 0;
5779 sets[i].src_volatile = 1;
5780 src_eqv = 0;
5781 src_eqv_elt = 0;
5785 /* If only one set in a JUMP_INSN and it is now a no-op, we can delete
5786 the insn. */
5787 else if (n_sets == 1 && dest == pc_rtx && src == pc_rtx)
5789 /* One less use of the label this insn used to jump to. */
5790 delete_insn (insn);
5791 cse_jumps_altered = 1;
5792 /* No more processing for this set. */
5793 sets[i].rtl = 0;
5796 /* If this SET is now setting PC to a label, we know it used to
5797 be a conditional or computed branch. */
5798 else if (dest == pc_rtx && GET_CODE (src) == LABEL_REF)
5800 /* Now emit a BARRIER after the unconditional jump. */
5801 if (NEXT_INSN (insn) == 0
5802 || GET_CODE (NEXT_INSN (insn)) != BARRIER)
5803 emit_barrier_after (insn);
5805 /* We reemit the jump in as many cases as possible just in
5806 case the form of an unconditional jump is significantly
5807	 different from a computed jump or conditional jump.
5809 If this insn has multiple sets, then reemitting the
5810 jump is nontrivial. So instead we just force rerecognition
5811 and hope for the best. */
5812 if (n_sets == 1)
5814 rtx new = emit_jump_insn_after (gen_jump (XEXP (src, 0)), insn);
5816 JUMP_LABEL (new) = XEXP (src, 0);
5817 LABEL_NUSES (XEXP (src, 0))++;
5818 delete_insn (insn);
5819 insn = new;
5821 /* Now emit a BARRIER after the unconditional jump. */
5822 if (NEXT_INSN (insn) == 0
5823 || GET_CODE (NEXT_INSN (insn)) != BARRIER)
5824 emit_barrier_after (insn);
5826 else
5827 INSN_CODE (insn) = -1;
5829 never_reached_warning (insn, NULL);
5831 /* Do not bother deleting any unreachable code,
5832 let jump/flow do that. */
5834 cse_jumps_altered = 1;
5835 sets[i].rtl = 0;
5838 /* If destination is volatile, invalidate it and then do no further
5839 processing for this assignment. */
5841 else if (do_not_record)
5843 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG)
5844 invalidate (dest, VOIDmode);
5845 else if (GET_CODE (dest) == MEM)
5847 /* Outgoing arguments for a libcall don't
5848 affect any recorded expressions. */
5849 if (! libcall_insn || insn == libcall_insn)
5850 invalidate (dest, VOIDmode);
5852 else if (GET_CODE (dest) == STRICT_LOW_PART
5853 || GET_CODE (dest) == ZERO_EXTRACT)
5854 invalidate (XEXP (dest, 0), GET_MODE (dest));
5855 sets[i].rtl = 0;
5858 if (sets[i].rtl != 0 && dest != SET_DEST (sets[i].rtl))
5859 sets[i].dest_hash = HASH (SET_DEST (sets[i].rtl), mode);
5861 #ifdef HAVE_cc0
5862 /* If setting CC0, record what it was set to, or a constant, if it
5863 is equivalent to a constant. If it is being set to a floating-point
5864 value, make a COMPARE with the appropriate constant of 0. If we
5865 don't do this, later code can interpret this as a test against
5866 const0_rtx, which can cause problems if we try to put it into an
5867 insn as a floating-point operand. */
5868 if (dest == cc0_rtx)
5870 this_insn_cc0 = src_const && mode != VOIDmode ? src_const : src;
5871 this_insn_cc0_mode = mode;
5872 if (FLOAT_MODE_P (mode))
5873 this_insn_cc0 = gen_rtx_COMPARE (VOIDmode, this_insn_cc0,
5874 CONST0_RTX (mode));
5876 #endif
5879 /* Now enter all non-volatile source expressions in the hash table
5880 if they are not already present.
5881 Record their equivalence classes in src_elt.
5882 This way we can insert the corresponding destinations into
5883 the same classes even if the actual sources are no longer in them
5884 (having been invalidated). */
5886 if (src_eqv && src_eqv_elt == 0 && sets[0].rtl != 0 && ! src_eqv_volatile
5887 && ! rtx_equal_p (src_eqv, SET_DEST (sets[0].rtl)))
5889 struct table_elt *elt;
5890 struct table_elt *classp = sets[0].src_elt;
5891 rtx dest = SET_DEST (sets[0].rtl);
5892 enum machine_mode eqvmode = GET_MODE (dest);
5894 if (GET_CODE (dest) == STRICT_LOW_PART)
5896 eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
5897 classp = 0;
5899 if (insert_regs (src_eqv, classp, 0))
5901 rehash_using_reg (src_eqv);
5902 src_eqv_hash = HASH (src_eqv, eqvmode);
5904 elt = insert (src_eqv, classp, src_eqv_hash, eqvmode);
5905 elt->in_memory = src_eqv_in_memory;
5906 src_eqv_elt = elt;
5908 /* Check to see if src_eqv_elt is the same as a set source which
5909 does not yet have an elt, and if so set the elt of the set source
5910 to src_eqv_elt. */
5911 for (i = 0; i < n_sets; i++)
5912 if (sets[i].rtl && sets[i].src_elt == 0
5913 && rtx_equal_p (SET_SRC (sets[i].rtl), src_eqv))
5914 sets[i].src_elt = src_eqv_elt;
5917 for (i = 0; i < n_sets; i++)
5918 if (sets[i].rtl && ! sets[i].src_volatile
5919 && ! rtx_equal_p (SET_SRC (sets[i].rtl), SET_DEST (sets[i].rtl)))
5921 if (GET_CODE (SET_DEST (sets[i].rtl)) == STRICT_LOW_PART)
5923 /* REG_EQUAL in setting a STRICT_LOW_PART
5924 gives an equivalent for the entire destination register,
5925 not just for the subreg being stored in now.
5926 This is a more interesting equivalence, so we arrange later
5927 to treat the entire reg as the destination. */
5928 sets[i].src_elt = src_eqv_elt;
5929 sets[i].src_hash = src_eqv_hash;
5931 else
5933 /* Insert source and constant equivalent into hash table, if not
5934 already present. */
5935 struct table_elt *classp = src_eqv_elt;
5936 rtx src = sets[i].src;
5937 rtx dest = SET_DEST (sets[i].rtl);
5938 enum machine_mode mode
5939 = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
5941 if (sets[i].src_elt == 0)
5943 /* Don't put a hard register source into the table if this is
5944 the last insn of a libcall. In this case, we only need
5945 to put src_eqv_elt in src_elt. */
5946 if (! find_reg_note (insn, REG_RETVAL, NULL_RTX))
5948 struct table_elt *elt;
5950 /* Note that these insert_regs calls cannot remove
5951 any of the src_elt's, because they would have failed to
5952 match if not still valid. */
5953 if (insert_regs (src, classp, 0))
5955 rehash_using_reg (src);
5956 sets[i].src_hash = HASH (src, mode);
5958 elt = insert (src, classp, sets[i].src_hash, mode);
5959 elt->in_memory = sets[i].src_in_memory;
5960 sets[i].src_elt = classp = elt;
5962 else
5963 sets[i].src_elt = classp;
5965 if (sets[i].src_const && sets[i].src_const_elt == 0
5966 && src != sets[i].src_const
5967 && ! rtx_equal_p (sets[i].src_const, src))
5968 sets[i].src_elt = insert (sets[i].src_const, classp,
5969 sets[i].src_const_hash, mode);
5972 else if (sets[i].src_elt == 0)
5973 /* If we did not insert the source into the hash table (e.g., it was
5974 volatile), note the equivalence class for the REG_EQUAL value, if any,
5975 so that the destination goes into that class. */
5976 sets[i].src_elt = src_eqv_elt;
5978 invalidate_from_clobbers (x);
5980 /* Some registers are invalidated by subroutine calls. Memory is
5981 invalidated by non-constant calls. */
5983 if (GET_CODE (insn) == CALL_INSN)
5985 if (! CONST_OR_PURE_CALL_P (insn))
5986 invalidate_memory ();
5987 invalidate_for_call ();
5990 /* Now invalidate everything set by this instruction.
5991 If a SUBREG or other funny destination is being set,
5992 sets[i].rtl is still nonzero, so here we invalidate the reg
5993 a part of which is being set. */
5995 for (i = 0; i < n_sets; i++)
5996 if (sets[i].rtl)
5998 /* We can't use the inner dest, because the mode associated with
5999 a ZERO_EXTRACT is significant. */
6000 rtx dest = SET_DEST (sets[i].rtl);
6002 /* Needed for registers to remove the register from its
6003 previous quantity's chain.
6004 Needed for memory if this is a nonvarying address, unless
6005 we have just done an invalidate_memory that covers even those. */
6006 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG)
6007 invalidate (dest, VOIDmode);
6008 else if (GET_CODE (dest) == MEM)
6010 /* Outgoing arguments for a libcall don't
6011 affect any recorded expressions. */
6012 if (! libcall_insn || insn == libcall_insn)
6013 invalidate (dest, VOIDmode);
6015 else if (GET_CODE (dest) == STRICT_LOW_PART
6016 || GET_CODE (dest) == ZERO_EXTRACT)
6017 invalidate (XEXP (dest, 0), GET_MODE (dest));
6020 /* A volatile ASM invalidates everything. */
6021 if (GET_CODE (insn) == INSN
6022 && GET_CODE (PATTERN (insn)) == ASM_OPERANDS
6023 && MEM_VOLATILE_P (PATTERN (insn)))
6024 flush_hash_table ();
6026 /* Make sure registers mentioned in destinations
6027 are safe for use in an expression to be inserted.
6028 This removes from the hash table
6029 any invalid entry that refers to one of these registers.
6031 We don't care about the return value from mention_regs because
6032 we are going to hash the SET_DEST values unconditionally. */
6034 for (i = 0; i < n_sets; i++)
6036 if (sets[i].rtl)
6038 rtx x = SET_DEST (sets[i].rtl);
6040 if (GET_CODE (x) != REG)
6041 mention_regs (x);
6042 else
6044 /* We used to rely on all references to a register becoming
6045 inaccessible when a register changes to a new quantity,
6046 since that changes the hash code. However, that is not
6047 safe, since after HASH_SIZE new quantities we get a
6048 hash 'collision' of a register with its own invalid
6049 entries. And since SUBREGs have been changed not to
6050 change their hash code with the hash code of the register,
6051 it wouldn't work any longer at all. So we have to check
6052 for any invalid references lying around now.
6053 This code is similar to the REG case in mention_regs,
6054 but it knows that reg_tick has been incremented, and
6055	 it leaves reg_in_table as -1.  */
6056 unsigned int regno = REGNO (x);
6057 unsigned int endregno
6058 = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
6059 : HARD_REGNO_NREGS (regno, GET_MODE (x)));
6060 unsigned int i;
6062 for (i = regno; i < endregno; i++)
6064 if (REG_IN_TABLE (i) >= 0)
6066 remove_invalid_refs (i);
6067 REG_IN_TABLE (i) = -1;
6074 /* We may have just removed some of the src_elt's from the hash table.
6075 So replace each one with the current head of the same class. */
6077 for (i = 0; i < n_sets; i++)
6078 if (sets[i].rtl)
6080 if (sets[i].src_elt && sets[i].src_elt->first_same_value == 0)
6081 /* If elt was removed, find current head of same class,
6082 or 0 if nothing remains of that class. */
6084 struct table_elt *elt = sets[i].src_elt;
6086 while (elt && elt->prev_same_value)
6087 elt = elt->prev_same_value;
6089 while (elt && elt->first_same_value == 0)
6090 elt = elt->next_same_value;
6091 sets[i].src_elt = elt ? elt->first_same_value : 0;
6095 /* Now insert the destinations into their equivalence classes. */
6097 for (i = 0; i < n_sets; i++)
6098 if (sets[i].rtl)
6100 rtx dest = SET_DEST (sets[i].rtl);
6101 rtx inner_dest = sets[i].inner_dest;
6102 struct table_elt *elt;
6104 /* Don't record value if we are not supposed to risk allocating
6105 floating-point values in registers that might be wider than
6106 memory. */
6107 if ((flag_float_store
6108 && GET_CODE (dest) == MEM
6109 && FLOAT_MODE_P (GET_MODE (dest)))
6110 /* Don't record BLKmode values, because we don't know the
6111 size of it, and can't be sure that other BLKmode values
6112 have the same or smaller size. */
6113 || GET_MODE (dest) == BLKmode
6114 /* Don't record values of destinations set inside a libcall block
6115 since we might delete the libcall. Things should have been set
6116 up so we won't want to reuse such a value, but we play it safe
6117 here. */
6118 || libcall_insn
6119 /* If we didn't put a REG_EQUAL value or a source into the hash
6120	 table, there is no point in recording DEST.  */
6121 || sets[i].src_elt == 0
6122 /* If DEST is a paradoxical SUBREG and SRC is a ZERO_EXTEND
6123 or SIGN_EXTEND, don't record DEST since it can cause
6124 some tracking to be wrong.
6126 ??? Think about this more later. */
6127 || (GET_CODE (dest) == SUBREG
6128 && (GET_MODE_SIZE (GET_MODE (dest))
6129 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
6130 && (GET_CODE (sets[i].src) == SIGN_EXTEND
6131 || GET_CODE (sets[i].src) == ZERO_EXTEND)))
6132 continue;
6134 /* STRICT_LOW_PART isn't part of the value BEING set,
6135 and neither is the SUBREG inside it.
6136 Note that in this case SETS[I].SRC_ELT is really SRC_EQV_ELT. */
6137 if (GET_CODE (dest) == STRICT_LOW_PART)
6138 dest = SUBREG_REG (XEXP (dest, 0));
6140 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG)
6141 /* Registers must also be inserted into chains for quantities. */
6142 if (insert_regs (dest, sets[i].src_elt, 1))
6144 /* If `insert_regs' changes something, the hash code must be
6145 recalculated. */
6146 rehash_using_reg (dest);
6147 sets[i].dest_hash = HASH (dest, GET_MODE (dest));
6150 if (GET_CODE (inner_dest) == MEM
6151 && GET_CODE (XEXP (inner_dest, 0)) == ADDRESSOF)
6152 /* Given (SET (MEM (ADDRESSOF (X))) Y) we don't want to say
6153 that (MEM (ADDRESSOF (X))) is equivalent to Y.
6154 Consider the case in which the address of the MEM is
6155 passed to a function, which alters the MEM. Then, if we
6156 later use Y instead of the MEM we'll miss the update. */
6157 elt = insert (dest, 0, sets[i].dest_hash, GET_MODE (dest));
6158 else
6159 elt = insert (dest, sets[i].src_elt,
6160 sets[i].dest_hash, GET_MODE (dest));
6162 elt->in_memory = (GET_CODE (sets[i].inner_dest) == MEM
6163 && (! RTX_UNCHANGING_P (sets[i].inner_dest)
6164 || FIXED_BASE_PLUS_P (XEXP (sets[i].inner_dest,
6165 0))));
6167 /* If we have (set (subreg:m1 (reg:m2 foo) 0) (bar:m1)), M1 is no
6168 narrower than M2, and both M1 and M2 are the same number of words,
6169 we are also doing (set (reg:m2 foo) (subreg:m2 (bar:m1) 0)) so
6170 make that equivalence as well.
6172 However, BAR may have equivalences for which gen_lowpart_if_possible
6173 will produce a simpler value than gen_lowpart_if_possible applied to
6174 BAR (e.g., if BAR was ZERO_EXTENDed from M2), so we will scan all
6175 BAR's equivalences. If we don't get a simplified form, make
6176 the SUBREG. It will not be used in an equivalence, but will
6177 cause two similar assignments to be detected.
6179 Note the loop below will find SUBREG_REG (DEST) since we have
6180 already entered SRC and DEST of the SET in the table. */
6182 if (GET_CODE (dest) == SUBREG
6183 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) - 1)
6184 / UNITS_PER_WORD)
6185 == (GET_MODE_SIZE (GET_MODE (dest)) - 1) / UNITS_PER_WORD)
6186 && (GET_MODE_SIZE (GET_MODE (dest))
6187 >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
6188 && sets[i].src_elt != 0)
6190 enum machine_mode new_mode = GET_MODE (SUBREG_REG (dest));
6191 struct table_elt *elt, *classp = 0;
6193 for (elt = sets[i].src_elt->first_same_value; elt;
6194 elt = elt->next_same_value)
6196 rtx new_src = 0;
6197 unsigned src_hash;
6198 struct table_elt *src_elt;
6200 /* Ignore invalid entries. */
6201 if (GET_CODE (elt->exp) != REG
6202 && ! exp_equiv_p (elt->exp, elt->exp, 1, 0))
6203 continue;
6205 new_src = gen_lowpart_if_possible (new_mode, elt->exp);
6206 if (new_src == 0)
6207 new_src = gen_rtx_SUBREG (new_mode, elt->exp, 0);
6209 src_hash = HASH (new_src, new_mode);
6210 src_elt = lookup (new_src, src_hash, new_mode);
6212	 /* Put the new source in the hash table if it isn't
6213 already. */
6214 if (src_elt == 0)
6216 if (insert_regs (new_src, classp, 0))
6218 rehash_using_reg (new_src);
6219 src_hash = HASH (new_src, new_mode);
6221 src_elt = insert (new_src, classp, src_hash, new_mode);
6222 src_elt->in_memory = elt->in_memory;
6224 else if (classp && classp != src_elt->first_same_value)
6225 /* Show that two things that we've seen before are
6226 actually the same. */
6227 merge_equiv_classes (src_elt, classp);
6229 classp = src_elt->first_same_value;
6230 /* Ignore invalid entries. */
6231 while (classp
6232 && GET_CODE (classp->exp) != REG
6233 && ! exp_equiv_p (classp->exp, classp->exp, 1, 0))
6234 classp = classp->next_same_value;
6239 /* Special handling for (set REG0 REG1) where REG0 is the
6240 "cheapest", cheaper than REG1. After cse, REG1 will probably not
6241 be used in the sequel, so (if easily done) change this insn to
6242 (set REG1 REG0) and replace REG1 with REG0 in the previous insn
6243 that computed their value. Then REG1 will become a dead store
6244 and won't cloud the situation for later optimizations.
6246 Do not make this change if REG1 is a hard register, because it will
6247 then be used in the sequel and we may be changing a two-operand insn
6248 into a three-operand insn.
6250 Also do not do this if we are operating on a copy of INSN.
6252 Also don't do this if INSN ends a libcall; this would cause an unrelated
6253 register to be set in the middle of a libcall, and we then get bad code
6254 if the libcall is deleted. */
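/* A sketch of the transformation (pseudo-register numbers invented):

     before:  (set (reg 101) (plus ...))     <- previous insn
              (set (reg 100) (reg 101))      <- this insn; reg 100 heads the class

     after:   (set (reg 100) (plus ...))
              (set (reg 101) (reg 100))      <- now likely a dead store

   Both rewrites are queued with validate_change and committed together by
   apply_change_group, so nothing changes unless both insns still match.  */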
6256 if (n_sets == 1 && sets[0].rtl && GET_CODE (SET_DEST (sets[0].rtl)) == REG
6257 && NEXT_INSN (PREV_INSN (insn)) == insn
6258 && GET_CODE (SET_SRC (sets[0].rtl)) == REG
6259 && REGNO (SET_SRC (sets[0].rtl)) >= FIRST_PSEUDO_REGISTER
6260 && REGNO_QTY_VALID_P (REGNO (SET_SRC (sets[0].rtl))))
6262 int src_q = REG_QTY (REGNO (SET_SRC (sets[0].rtl)));
6263 struct qty_table_elem *src_ent = &qty_table[src_q];
6265 if ((src_ent->first_reg == REGNO (SET_DEST (sets[0].rtl)))
6266 && ! find_reg_note (insn, REG_RETVAL, NULL_RTX))
6268 rtx prev = prev_nonnote_insn (insn);
6270 /* Do not swap the registers around if the previous instruction
6271 attaches a REG_EQUIV note to REG1.
6273 ??? It's not entirely clear whether we can transfer a REG_EQUIV
6274 from the pseudo that originally shadowed an incoming argument
6275 to another register. Some uses of REG_EQUIV might rely on it
6276 being attached to REG1 rather than REG2.
6278 This section previously turned the REG_EQUIV into a REG_EQUAL
6279 note. We cannot do that because REG_EQUIV may provide an
6280 uninitialised stack slot when REG_PARM_STACK_SPACE is used. */
6282 if (prev != 0 && GET_CODE (prev) == INSN
6283 && GET_CODE (PATTERN (prev)) == SET
6284 && SET_DEST (PATTERN (prev)) == SET_SRC (sets[0].rtl)
6285 && ! find_reg_note (prev, REG_EQUIV, NULL_RTX))
6287 rtx dest = SET_DEST (sets[0].rtl);
6288 rtx src = SET_SRC (sets[0].rtl);
6289 rtx note;
6291 validate_change (prev, &SET_DEST (PATTERN (prev)), dest, 1);
6292 validate_change (insn, &SET_DEST (sets[0].rtl), src, 1);
6293 validate_change (insn, &SET_SRC (sets[0].rtl), dest, 1);
6294 apply_change_group ();
6296 /* If there was a REG_WAS_0 note on PREV, remove it. Move
6297 any REG_WAS_0 note on INSN to PREV. */
6298 note = find_reg_note (prev, REG_WAS_0, NULL_RTX);
6299 if (note)
6300 remove_note (prev, note);
6302 note = find_reg_note (insn, REG_WAS_0, NULL_RTX);
6303 if (note)
6305 remove_note (insn, note);
6306 XEXP (note, 1) = REG_NOTES (prev);
6307 REG_NOTES (prev) = note;
6310 /* If INSN has a REG_EQUAL note, and this note mentions
6311 REG0, then we must delete it, because the value in
6312 REG0 has changed. If the note's value is REG1, we must
6313 also delete it because that is now this insn's dest. */
6314 note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
6315 if (note != 0
6316 && (reg_mentioned_p (dest, XEXP (note, 0))
6317 || rtx_equal_p (src, XEXP (note, 0))))
6318 remove_note (insn, note);
6323 /* If this is a conditional jump insn, record any known equivalences due to
6324 the condition being tested. */
6326 last_jump_equiv_class = 0;
6327 if (GET_CODE (insn) == JUMP_INSN
6328 && n_sets == 1 && GET_CODE (x) == SET
6329 && GET_CODE (SET_SRC (x)) == IF_THEN_ELSE)
6330 record_jump_equiv (insn, 0);
6332 #ifdef HAVE_cc0
6333 /* If the previous insn set CC0 and this insn no longer references CC0,
6334 delete the previous insn. Here we use the fact that nothing expects CC0
6335 to be valid over an insn, which is true until the final pass. */
6336 if (prev_insn && GET_CODE (prev_insn) == INSN
6337 && (tem = single_set (prev_insn)) != 0
6338 && SET_DEST (tem) == cc0_rtx
6339 && ! reg_mentioned_p (cc0_rtx, x))
6340 delete_insn (prev_insn);
6342 prev_insn_cc0 = this_insn_cc0;
6343 prev_insn_cc0_mode = this_insn_cc0_mode;
6344 #endif
6346 prev_insn = insn;
6349 /* Remove from the hash table all expressions that reference memory. */
6351 static void
6352 invalidate_memory ()
6354 int i;
6355 struct table_elt *p, *next;
6357 for (i = 0; i < HASH_SIZE; i++)
6358 for (p = table[i]; p; p = next)
6360 next = p->next_same_hash;
6361 if (p->in_memory)
6362 remove_from_table (p, i);
6366 /* If ADDR is an address that implicitly affects the stack pointer, return
6367 1 and update the register tables to show the effect. Else, return 0. */
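/* An illustrative case: a push such as (set (mem:SI (pre_dec:SI (reg sp))) X)
   has a PRE_DEC address, whose rtx class is 'a' and whose operand is the
   stack pointer, so we bump the stack pointer's REG_TICK and, in the rare
   case that the stack pointer made it into the hash table, invalidate it.  */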
6369 static int
6370 addr_affects_sp_p (addr)
6371 rtx addr;
6373 if (GET_RTX_CLASS (GET_CODE (addr)) == 'a'
6374 && GET_CODE (XEXP (addr, 0)) == REG
6375 && REGNO (XEXP (addr, 0)) == STACK_POINTER_REGNUM)
6377 if (REG_TICK (STACK_POINTER_REGNUM) >= 0)
6378 REG_TICK (STACK_POINTER_REGNUM)++;
6380 /* This should be *very* rare. */
6381 if (TEST_HARD_REG_BIT (hard_regs_in_table, STACK_POINTER_REGNUM))
6382 invalidate (stack_pointer_rtx, VOIDmode);
6384 return 1;
6387 return 0;
6390 /* Perform invalidation on the basis of everything about an insn
6391 except for invalidating the actual places that are SET in it.
6392 This includes the places CLOBBERed, and anything that might
6393 alias with something that is SET or CLOBBERed.
6395 X is the pattern of the insn. */
6397 static void
6398 invalidate_from_clobbers (x)
6399 rtx x;
6401 if (GET_CODE (x) == CLOBBER)
6403 rtx ref = XEXP (x, 0);
6404 if (ref)
6406 if (GET_CODE (ref) == REG || GET_CODE (ref) == SUBREG
6407 || GET_CODE (ref) == MEM)
6408 invalidate (ref, VOIDmode);
6409 else if (GET_CODE (ref) == STRICT_LOW_PART
6410 || GET_CODE (ref) == ZERO_EXTRACT)
6411 invalidate (XEXP (ref, 0), GET_MODE (ref));
6414 else if (GET_CODE (x) == PARALLEL)
6416 int i;
6417 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
6419 rtx y = XVECEXP (x, 0, i);
6420 if (GET_CODE (y) == CLOBBER)
6422 rtx ref = XEXP (y, 0);
6423 if (GET_CODE (ref) == REG || GET_CODE (ref) == SUBREG
6424 || GET_CODE (ref) == MEM)
6425 invalidate (ref, VOIDmode);
6426 else if (GET_CODE (ref) == STRICT_LOW_PART
6427 || GET_CODE (ref) == ZERO_EXTRACT)
6428 invalidate (XEXP (ref, 0), GET_MODE (ref));
6434 /* Process X, part of the REG_NOTES of an insn. Look at any REG_EQUAL notes
6435 and replace any registers in them with either an equivalent constant
6436 or the canonical form of the register. If we are inside an address,
6437 only do this if the address remains valid.
6439 OBJECT is 0 except when within a MEM in which case it is the MEM.
6441 Return the replacement for X. */
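/* A sketch of the effect (note contents invented): a REG_EQUAL note of
   (plus:SI (reg 105) (const_int 4)), where the quantity table says reg 105
   currently holds (const_int 8), comes back as
   (plus:SI (const_int 8) (const_int 4)); a register with no known constant
   equivalent is simply replaced by its canonical register.  */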
6443 static rtx
6444 cse_process_notes (x, object)
6445 rtx x;
6446 rtx object;
6448 enum rtx_code code = GET_CODE (x);
6449 const char *fmt = GET_RTX_FORMAT (code);
6450 int i;
6452 switch (code)
6454 case CONST_INT:
6455 case CONST:
6456 case SYMBOL_REF:
6457 case LABEL_REF:
6458 case CONST_DOUBLE:
6459 case CONST_VECTOR:
6460 case PC:
6461 case CC0:
6462 case LO_SUM:
6463 return x;
6465 case MEM:
6466 validate_change (x, &XEXP (x, 0),
6467 cse_process_notes (XEXP (x, 0), x), 0);
6468 return x;
6470 case EXPR_LIST:
6471 case INSN_LIST:
6472 if (REG_NOTE_KIND (x) == REG_EQUAL)
6473 XEXP (x, 0) = cse_process_notes (XEXP (x, 0), NULL_RTX);
6474 if (XEXP (x, 1))
6475 XEXP (x, 1) = cse_process_notes (XEXP (x, 1), NULL_RTX);
6476 return x;
6478 case SIGN_EXTEND:
6479 case ZERO_EXTEND:
6480 case SUBREG:
6482 rtx new = cse_process_notes (XEXP (x, 0), object);
6483 /* We don't substitute VOIDmode constants into these rtx,
6484 since they would impede folding. */
6485 if (GET_MODE (new) != VOIDmode)
6486 validate_change (object, &XEXP (x, 0), new, 0);
6487 return x;
6490 case REG:
6491 i = REG_QTY (REGNO (x));
6493 /* Return a constant or a constant register. */
6494 if (REGNO_QTY_VALID_P (REGNO (x)))
6496 struct qty_table_elem *ent = &qty_table[i];
6498 if (ent->const_rtx != NULL_RTX
6499 && (CONSTANT_P (ent->const_rtx)
6500 || GET_CODE (ent->const_rtx) == REG))
6502 rtx new = gen_lowpart_if_possible (GET_MODE (x), ent->const_rtx);
6503 if (new)
6504 return new;
6508 /* Otherwise, canonicalize this register. */
6509 return canon_reg (x, NULL_RTX);
6511 default:
6512 break;
6515 for (i = 0; i < GET_RTX_LENGTH (code); i++)
6516 if (fmt[i] == 'e')
6517 validate_change (object, &XEXP (x, i),
6518 cse_process_notes (XEXP (x, i), object), 0);
6520 return x;
6523 /* Find common subexpressions between the end test of a loop and the beginning
6524 of the loop. LOOP_START is the CODE_LABEL at the start of a loop.
6526 Often we have a loop where an expression in the exit test is used
6527 in the body of the loop. For example "while (*p) *q++ = *p++;".
6528 Because of the way we duplicate the loop exit test in front of the loop,
6529 however, we don't detect that common subexpression. This will be caught
6530 when global cse is implemented, but this is a quite common case.
6532 This function handles the most common cases of these common expressions.
6533 It is called after we have processed the basic block ending with the
6534 NOTE_INSN_LOOP_END note that ends a loop and the previous JUMP_INSN
6535 jumps to a label used only once. */
6537 static void
6538 cse_around_loop (loop_start)
6539 rtx loop_start;
6541 rtx insn;
6542 int i;
6543 struct table_elt *p;
6545 /* If the jump at the end of the loop doesn't go to the start, we don't
6546 do anything. */
6547 for (insn = PREV_INSN (loop_start);
6548 insn && (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) >= 0);
6549 insn = PREV_INSN (insn))
6552 if (insn == 0
6553 || GET_CODE (insn) != NOTE
6554 || NOTE_LINE_NUMBER (insn) != NOTE_INSN_LOOP_BEG)
6555 return;
6557 /* If the last insn of the loop (the end test) was an NE comparison,
6558 we will interpret it as an EQ comparison, since we fell through
6559 the loop. Any equivalences resulting from that comparison are
6560 therefore not valid and must be invalidated. */
6561 if (last_jump_equiv_class)
6562 for (p = last_jump_equiv_class->first_same_value; p;
6563 p = p->next_same_value)
6565 if (GET_CODE (p->exp) == MEM || GET_CODE (p->exp) == REG
6566 || (GET_CODE (p->exp) == SUBREG
6567 && GET_CODE (SUBREG_REG (p->exp)) == REG))
6568 invalidate (p->exp, VOIDmode);
6569 else if (GET_CODE (p->exp) == STRICT_LOW_PART
6570 || GET_CODE (p->exp) == ZERO_EXTRACT)
6571 invalidate (XEXP (p->exp, 0), GET_MODE (p->exp));
6574 /* Process insns starting after LOOP_START until we hit a CALL_INSN or
6575 a CODE_LABEL (we could handle a CALL_INSN, but it isn't worth it).
6577 The only thing we do with SET_DEST is invalidate entries, so we
6578 can safely process each SET in order. It is slightly less efficient
6579 to do so, but we only want to handle the most common cases.
6581 The gen_move_insn call in cse_set_around_loop may create new pseudos.
6582 These pseudos won't have valid entries in any of the tables indexed
6583 by register number, such as reg_qty. We avoid out-of-range array
6584 accesses by not processing any instructions created after cse started. */
6586 for (insn = NEXT_INSN (loop_start);
6587 GET_CODE (insn) != CALL_INSN && GET_CODE (insn) != CODE_LABEL
6588 && INSN_UID (insn) < max_insn_uid
6589 && ! (GET_CODE (insn) == NOTE
6590 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END);
6591 insn = NEXT_INSN (insn))
6593 if (INSN_P (insn)
6594 && (GET_CODE (PATTERN (insn)) == SET
6595 || GET_CODE (PATTERN (insn)) == CLOBBER))
6596 cse_set_around_loop (PATTERN (insn), insn, loop_start);
6597 else if (INSN_P (insn) && GET_CODE (PATTERN (insn)) == PARALLEL)
6598 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
6599 if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET
6600 || GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == CLOBBER)
6601 cse_set_around_loop (XVECEXP (PATTERN (insn), 0, i), insn,
6602 loop_start);
6606 /* Process one SET of an insn that was skipped. We ignore CLOBBERs
6607 since they are done elsewhere. This function is called via note_stores. */
6609 static void
6610 invalidate_skipped_set (dest, set, data)
6611 rtx set;
6612 rtx dest;
6613 void *data ATTRIBUTE_UNUSED;
6615 enum rtx_code code = GET_CODE (dest);
6617 if (code == MEM
6618 && ! addr_affects_sp_p (dest) /* If this is not a stack push ... */
6619 /* There are times when an address can appear varying and be a PLUS
6620 during this scan when it would be a fixed address were we to know
6621 the proper equivalences. So invalidate all memory if there is
6622 a BLKmode or nonscalar memory reference or a reference to a
6623 variable address. */
6624 && (MEM_IN_STRUCT_P (dest) || GET_MODE (dest) == BLKmode
6625 || cse_rtx_varies_p (XEXP (dest, 0), 0)))
6627 invalidate_memory ();
6628 return;
6631 if (GET_CODE (set) == CLOBBER
6632 #ifdef HAVE_cc0
6633 || dest == cc0_rtx
6634 #endif
6635 || dest == pc_rtx)
6636 return;
6638 if (code == STRICT_LOW_PART || code == ZERO_EXTRACT)
6639 invalidate (XEXP (dest, 0), GET_MODE (dest));
6640 else if (code == REG || code == SUBREG || code == MEM)
6641 invalidate (dest, VOIDmode);
6644 /* Invalidate all insns from START up to the end of the function or the
6645	 next label.  This is called when we wish to CSE around a block that is
6646 conditionally executed. */
6648 static void
6649 invalidate_skipped_block (start)
6650 rtx start;
6652 rtx insn;
6654 for (insn = start; insn && GET_CODE (insn) != CODE_LABEL;
6655 insn = NEXT_INSN (insn))
6657 if (! INSN_P (insn))
6658 continue;
6660 if (GET_CODE (insn) == CALL_INSN)
6662 if (! CONST_OR_PURE_CALL_P (insn))
6663 invalidate_memory ();
6664 invalidate_for_call ();
6667 invalidate_from_clobbers (PATTERN (insn));
6668 note_stores (PATTERN (insn), invalidate_skipped_set, NULL);
6672 /* If modifying X will modify the value in *DATA (which is really an
6673 `rtx *'), indicate that fact by setting the pointed to value to
6674 NULL_RTX. */
6676 static void
6677 cse_check_loop_start (x, set, data)
6678 rtx x;
6679 rtx set ATTRIBUTE_UNUSED;
6680 void *data;
6682 rtx *cse_check_loop_start_value = (rtx *) data;
6684 if (*cse_check_loop_start_value == NULL_RTX
6685 || GET_CODE (x) == CC0 || GET_CODE (x) == PC)
6686 return;
6688 if ((GET_CODE (x) == MEM && GET_CODE (*cse_check_loop_start_value) == MEM)
6689 || reg_overlap_mentioned_p (x, *cse_check_loop_start_value))
6690 *cse_check_loop_start_value = NULL_RTX;
6693 /* X is a SET or CLOBBER contained in INSN that was found near the start of
6694 a loop that starts with the label at LOOP_START.
6696 If X is a SET, we see if its SET_SRC is currently in our hash table.
6697 If so, we see if it has a value equal to some register used only in the
6698 loop exit code (as marked by jump.c).
6700 If those two conditions are true, we search backwards from the start of
6701 the loop to see if that same value was loaded into a register that still
6702 retains its value at the start of the loop.
6704 If so, we insert an insn after the load to copy the destination of that
6705 load into the equivalent register and (try to) replace our SET_SRC with that
6706 register.
6708 In any event, we invalidate whatever this SET or CLOBBER modifies. */
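/* Very roughly, with invented registers: suppose the loop exit test uses
   (reg 80) (marked REG_LOOP_TEST_P by jump.c), the loop body contains
   (set (reg 90) (mem X)) whose source is in the same equivalence class as
   reg 80, and some insn P before the loop start already did
   (set (reg 70) (mem X)).  Then we emit (set (reg 80) (reg 70)) after P and
   rewrite the body insn to (set (reg 90) (reg 80)) -- provided nothing
   between P and the loop start stores into anything referenced by (mem X).  */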
6710 static void
6711 cse_set_around_loop (x, insn, loop_start)
6712 rtx x;
6713 rtx insn;
6714 rtx loop_start;
6716 struct table_elt *src_elt;
6718 /* If this is a SET, see if we can replace SET_SRC, but ignore SETs that
6719 are setting PC or CC0 or whose SET_SRC is already a register. */
6720 if (GET_CODE (x) == SET
6721 && GET_CODE (SET_DEST (x)) != PC && GET_CODE (SET_DEST (x)) != CC0
6722 && GET_CODE (SET_SRC (x)) != REG)
6724 src_elt = lookup (SET_SRC (x),
6725 HASH (SET_SRC (x), GET_MODE (SET_DEST (x))),
6726 GET_MODE (SET_DEST (x)));
6728 if (src_elt)
6729 for (src_elt = src_elt->first_same_value; src_elt;
6730 src_elt = src_elt->next_same_value)
6731 if (GET_CODE (src_elt->exp) == REG && REG_LOOP_TEST_P (src_elt->exp)
6732 && COST (src_elt->exp) < COST (SET_SRC (x)))
6734 rtx p, set;
6736 /* Look for an insn in front of LOOP_START that sets
6737 something in the desired mode to SET_SRC (x) before we hit
6738 a label or CALL_INSN. */
6740 for (p = prev_nonnote_insn (loop_start);
6741 p && GET_CODE (p) != CALL_INSN
6742 && GET_CODE (p) != CODE_LABEL;
6743 p = prev_nonnote_insn (p))
6744 if ((set = single_set (p)) != 0
6745 && GET_CODE (SET_DEST (set)) == REG
6746 && GET_MODE (SET_DEST (set)) == src_elt->mode
6747 && rtx_equal_p (SET_SRC (set), SET_SRC (x)))
6749 /* We now have to ensure that nothing between P
6750 and LOOP_START modified anything referenced in
6751 SET_SRC (x). We know that nothing within the loop
6752 can modify it, or we would have invalidated it in
6753 the hash table. */
6754 rtx q;
6755 rtx cse_check_loop_start_value = SET_SRC (x);
6756 for (q = p; q != loop_start; q = NEXT_INSN (q))
6757 if (INSN_P (q))
6758 note_stores (PATTERN (q),
6759 cse_check_loop_start,
6760 &cse_check_loop_start_value);
6762 /* If nothing was changed and we can replace our
6763 SET_SRC, add an insn after P to copy its destination
6764 to what we will be replacing SET_SRC with. */
6765 if (cse_check_loop_start_value
6766 && validate_change (insn, &SET_SRC (x),
6767 src_elt->exp, 0))
6769 /* If this creates new pseudos, this is unsafe,
6770	 because the regno of a new pseudo is unsuitable
6771 to index into reg_qty when cse_insn processes
6772 the new insn. Therefore, if a new pseudo was
6773 created, discard this optimization. */
6774 int nregs = max_reg_num ();
6775 rtx move
6776 = gen_move_insn (src_elt->exp, SET_DEST (set));
6777 if (nregs != max_reg_num ())
6779 if (! validate_change (insn, &SET_SRC (x),
6780 SET_SRC (set), 0))
6781 abort ();
6783 else
6784 emit_insn_after (move, p);
6786 break;
6791 /* Deal with the destination of X affecting the stack pointer. */
6792 addr_affects_sp_p (SET_DEST (x));
6794 /* See comment on similar code in cse_insn for explanation of these
6795 tests. */
6796 if (GET_CODE (SET_DEST (x)) == REG || GET_CODE (SET_DEST (x)) == SUBREG
6797 || GET_CODE (SET_DEST (x)) == MEM)
6798 invalidate (SET_DEST (x), VOIDmode);
6799 else if (GET_CODE (SET_DEST (x)) == STRICT_LOW_PART
6800 || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
6801 invalidate (XEXP (SET_DEST (x), 0), GET_MODE (SET_DEST (x)));
6804 /* Find the end of INSN's basic block and return its range,
6805 the total number of SETs in all the insns of the block, the last insn of the
6806 block, and the branch path.
6808 The branch path indicates which branches should be followed. If a non-zero
6809 path size is specified, the block should be rescanned and a different set
6810 of branches will be taken. The branch path is only used if
6811 FLAG_CSE_FOLLOW_JUMPS or FLAG_CSE_SKIP_BLOCKS is non-zero.
6813 DATA is a pointer to a struct cse_basic_block_data, defined below, that is
6814 used to describe the block. It is filled in with the information about
6815 the current block. The incoming structure's branch path, if any, is used
6816 to construct the output branch path. */
6818 void
6819 cse_end_of_basic_block (insn, data, follow_jumps, after_loop, skip_blocks)
6820 rtx insn;
6821 struct cse_basic_block_data *data;
6822 int follow_jumps;
6823 int after_loop;
6824 int skip_blocks;
6826 rtx p = insn, q;
6827 int nsets = 0;
6828 int low_cuid = INSN_CUID (insn), high_cuid = INSN_CUID (insn);
6829 rtx next = INSN_P (insn) ? insn : next_real_insn (insn);
6830 int path_size = data->path_size;
6831 int path_entry = 0;
6832 int i;
6834 /* Update the previous branch path, if any. If the last branch was
6835 previously TAKEN, mark it NOT_TAKEN. If it was previously NOT_TAKEN,
6836 shorten the path by one and look at the previous branch. We know that
6837 at least one branch must have been taken if PATH_SIZE is non-zero. */
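/* For example (path contents invented): an incoming path of
   { TAKEN, TAKEN } is updated to { TAKEN, NOT_TAKEN }, while an incoming
   { TAKEN, NOT_TAKEN } is shortened by one and updated to { NOT_TAKEN }.  */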
6838 while (path_size > 0)
6840 if (data->path[path_size - 1].status != NOT_TAKEN)
6842 data->path[path_size - 1].status = NOT_TAKEN;
6843 break;
6845 else
6846 path_size--;
6849 /* If the first instruction is marked with QImode, that means we've
6850 already processed this block. Our caller will look at DATA->LAST
6851 to figure out where to go next. We want to return the next block
6852 in the instruction stream, not some branched-to block somewhere
6853	 else.  We accomplish this by pretending our caller forbade us to
6854 follow jumps, or skip blocks. */
6855 if (GET_MODE (insn) == QImode)
6856 follow_jumps = skip_blocks = 0;
6858 /* Scan to end of this basic block. */
6859 while (p && GET_CODE (p) != CODE_LABEL)
6861 /* Don't cse out the end of a loop. This makes a difference
6862 only for the unusual loops that always execute at least once;
6863 all other loops have labels there so we will stop in any case.
6864 Cse'ing out the end of the loop is dangerous because it
6865 might cause an invariant expression inside the loop
6866 to be reused after the end of the loop. This would make it
6867 hard to move the expression out of the loop in loop.c,
6868 especially if it is one of several equivalent expressions
6869 and loop.c would like to eliminate it.
6871 If we are running after loop.c has finished, we can ignore
6872 the NOTE_INSN_LOOP_END. */
6874 if (! after_loop && GET_CODE (p) == NOTE
6875 && NOTE_LINE_NUMBER (p) == NOTE_INSN_LOOP_END)
6876 break;
6878 /* Don't cse over a call to setjmp; on some machines (eg VAX)
6879 the regs restored by the longjmp come from
6880 a later time than the setjmp. */
6881 if (PREV_INSN (p) && GET_CODE (PREV_INSN (p)) == CALL_INSN
6882 && find_reg_note (PREV_INSN (p), REG_SETJMP, NULL))
6883 break;
6885 /* A PARALLEL can have lots of SETs in it,
6886 especially if it is really an ASM_OPERANDS. */
6887 if (INSN_P (p) && GET_CODE (PATTERN (p)) == PARALLEL)
6888 nsets += XVECLEN (PATTERN (p), 0);
6889 else if (GET_CODE (p) != NOTE)
6890 nsets += 1;
6892 /* Ignore insns made by CSE; they cannot affect the boundaries of
6893 the basic block. */
6895 if (INSN_UID (p) <= max_uid && INSN_CUID (p) > high_cuid)
6896 high_cuid = INSN_CUID (p);
6897 if (INSN_UID (p) <= max_uid && INSN_CUID (p) < low_cuid)
6898 low_cuid = INSN_CUID (p);
6900 /* See if this insn is in our branch path. If it is and we are to
6901 take it, do so. */
6902 if (path_entry < path_size && data->path[path_entry].branch == p)
6904 if (data->path[path_entry].status != NOT_TAKEN)
6905 p = JUMP_LABEL (p);
6907 /* Point to next entry in path, if any. */
6908 path_entry++;
6911 /* If this is a conditional jump, we can follow it if -fcse-follow-jumps
6912 was specified, we haven't reached our maximum path length, there are
6913 insns following the target of the jump, this is the only use of the
6914 jump label, and the target label is preceded by a BARRIER.
6916 Alternatively, we can follow the jump if it branches around a
6917 block of code and there are no other branches into the block.
6918 In this case invalidate_skipped_block will be called to invalidate any
6919 registers set in the block when following the jump. */
6921 else if ((follow_jumps || skip_blocks) && path_size < PATHLENGTH - 1
6922 && GET_CODE (p) == JUMP_INSN
6923 && GET_CODE (PATTERN (p)) == SET
6924 && GET_CODE (SET_SRC (PATTERN (p))) == IF_THEN_ELSE
6925 && JUMP_LABEL (p) != 0
6926 && LABEL_NUSES (JUMP_LABEL (p)) == 1
6927 && NEXT_INSN (JUMP_LABEL (p)) != 0)
6929 for (q = PREV_INSN (JUMP_LABEL (p)); q; q = PREV_INSN (q))
6930 if ((GET_CODE (q) != NOTE
6931 || NOTE_LINE_NUMBER (q) == NOTE_INSN_LOOP_END
6932 || (PREV_INSN (q) && GET_CODE (PREV_INSN (q)) == CALL_INSN
6933 && find_reg_note (PREV_INSN (q), REG_SETJMP, NULL)))
6934 && (GET_CODE (q) != CODE_LABEL || LABEL_NUSES (q) != 0))
6935 break;
6937 /* If we ran into a BARRIER, this code is an extension of the
6938 basic block when the branch is taken. */
6939 if (follow_jumps && q != 0 && GET_CODE (q) == BARRIER)
6941	 /* Don't allow ourselves to keep walking around an
6942 always-executed loop. */
6943 if (next_real_insn (q) == next)
6945 p = NEXT_INSN (p);
6946 continue;
6949 /* Similarly, don't put a branch in our path more than once. */
6950 for (i = 0; i < path_entry; i++)
6951 if (data->path[i].branch == p)
6952 break;
6954 if (i != path_entry)
6955 break;
6957 data->path[path_entry].branch = p;
6958 data->path[path_entry++].status = TAKEN;
6960 /* This branch now ends our path. It was possible that we
6961 didn't see this branch the last time around (when the
6962 insn in front of the target was a JUMP_INSN that was
6963 turned into a no-op). */
6964 path_size = path_entry;
6966 p = JUMP_LABEL (p);
6967 /* Mark block so we won't scan it again later. */
6968 PUT_MODE (NEXT_INSN (p), QImode);
6970 /* Detect a branch around a block of code. */
6971 else if (skip_blocks && q != 0 && GET_CODE (q) != CODE_LABEL)
6973 rtx tmp;
6975 if (next_real_insn (q) == next)
6977 p = NEXT_INSN (p);
6978 continue;
6981 for (i = 0; i < path_entry; i++)
6982 if (data->path[i].branch == p)
6983 break;
6985 if (i != path_entry)
6986 break;
6988 /* This is no_labels_between_p (p, q) with an added check for
6989 reaching the end of a function (in case Q precedes P). */
6990 for (tmp = NEXT_INSN (p); tmp && tmp != q; tmp = NEXT_INSN (tmp))
6991 if (GET_CODE (tmp) == CODE_LABEL)
6992 break;
6994 if (tmp == q)
6996 data->path[path_entry].branch = p;
6997 data->path[path_entry++].status = AROUND;
6999 path_size = path_entry;
7001 p = JUMP_LABEL (p);
7002 /* Mark block so we won't scan it again later. */
7003 PUT_MODE (NEXT_INSN (p), QImode);
7007 p = NEXT_INSN (p);
7010 data->low_cuid = low_cuid;
7011 data->high_cuid = high_cuid;
7012 data->nsets = nsets;
7013 data->last = p;
7015 /* If all jumps in the path are not taken, set our path length to zero
7016 so a rescan won't be done. */
7017 for (i = path_size - 1; i >= 0; i--)
7018 if (data->path[i].status != NOT_TAKEN)
7019 break;
7021 if (i == -1)
7022 data->path_size = 0;
7023 else
7024 data->path_size = path_size;
7026 /* End the current branch path. */
7027 data->path[path_size].branch = 0;
7030 /* Perform cse on the instructions of a function.
7031 F is the first instruction.
7032	 NREGS is one plus the highest pseudo-reg number used in the function.
7034 AFTER_LOOP is 1 if this is the cse call done after loop optimization
7035 (only if -frerun-cse-after-loop).
7037 Returns 1 if jump_optimize should be redone due to simplifications
7038 in conditional jump instructions. */
7041 cse_main (f, nregs, after_loop, file)
7042 rtx f;
7043 int nregs;
7044 int after_loop;
7045 FILE *file;
7047 struct cse_basic_block_data val;
7048 rtx insn = f;
7049 int i;
7051 cse_jumps_altered = 0;
7052 recorded_label_ref = 0;
7053 constant_pool_entries_cost = 0;
7054 val.path_size = 0;
7056 init_recog ();
7057 init_alias_analysis ();
7059 max_reg = nregs;
7061 max_insn_uid = get_max_uid ();
7063 reg_eqv_table = (struct reg_eqv_elem *)
7064 xmalloc (nregs * sizeof (struct reg_eqv_elem));
7066 #ifdef LOAD_EXTEND_OP
7068 /* Allocate scratch rtl here. cse_insn will fill in the memory reference
7069 and change the code and mode as appropriate. */
7070 memory_extend_rtx = gen_rtx_ZERO_EXTEND (VOIDmode, NULL_RTX);
7071 #endif
7073 /* Reset the counter indicating how many elements have been made
7074 thus far. */
7075 n_elements_made = 0;
7077 /* Find the largest uid. */
7079 max_uid = get_max_uid ();
7080 uid_cuid = (int *) xcalloc (max_uid + 1, sizeof (int));
7082 /* Compute the mapping from uids to cuids.
7083 CUIDs are numbers assigned to insns, like uids,
7084 except that cuids increase monotonically through the code.
7085 Don't assign cuids to line-number NOTEs, so that the distance in cuids
7086 between two insns is not affected by -g. */
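/* To illustrate: with -g, a line-number NOTE sitting between insns I1 and I2
   receives I1's cuid rather than a fresh one, so the cuid distance from I1
   to I2 stays 1 whether or not the debugging notes are present.  */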
7088 for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
7090 if (GET_CODE (insn) != NOTE
7091 || NOTE_LINE_NUMBER (insn) < 0)
7092 INSN_CUID (insn) = ++i;
7093 else
7094 /* Give a line number note the same cuid as preceding insn. */
7095 INSN_CUID (insn) = i;
7098 ggc_push_context ();
7100 /* Loop over basic blocks.
7101 Compute the maximum number of qty's needed for each basic block
7102 (which is 2 for each SET). */
7103 insn = f;
7104 while (insn)
7106 cse_altered = 0;
7107 cse_end_of_basic_block (insn, &val, flag_cse_follow_jumps, after_loop,
7108 flag_cse_skip_blocks);
7110 /* If this basic block was already processed or has no sets, skip it. */
7111 if (val.nsets == 0 || GET_MODE (insn) == QImode)
7113 PUT_MODE (insn, VOIDmode);
7114 insn = (val.last ? NEXT_INSN (val.last) : 0);
7115 val.path_size = 0;
7116 continue;
7119 cse_basic_block_start = val.low_cuid;
7120 cse_basic_block_end = val.high_cuid;
7121 max_qty = val.nsets * 2;
7123 if (file)
7124 fnotice (file, ";; Processing block from %d to %d, %d sets.\n",
7125 INSN_UID (insn), val.last ? INSN_UID (val.last) : 0,
7126 val.nsets);
7128 /* Make MAX_QTY bigger to give us room to optimize
7129 past the end of this basic block, if that should prove useful. */
7130 if (max_qty < 500)
7131 max_qty = 500;
7133 max_qty += max_reg;
7135 /* If this basic block is being extended by following certain jumps,
7136 (see `cse_end_of_basic_block'), we reprocess the code from the start.
7137 Otherwise, we start after this basic block. */
7138 if (val.path_size > 0)
7139 cse_basic_block (insn, val.last, val.path, 0);
7140 else
7142 int old_cse_jumps_altered = cse_jumps_altered;
7143 rtx temp;
7145 /* When cse changes a conditional jump to an unconditional
7146 jump, we want to reprocess the block, since it will give
7147 us a new branch path to investigate. */
7148 cse_jumps_altered = 0;
7149 temp = cse_basic_block (insn, val.last, val.path, ! after_loop);
7150 if (cse_jumps_altered == 0
7151 || (flag_cse_follow_jumps == 0 && flag_cse_skip_blocks == 0))
7152 insn = temp;
7154 cse_jumps_altered |= old_cse_jumps_altered;
7157 if (cse_altered)
7158 ggc_collect ();
7160 #ifdef USE_C_ALLOCA
7161 alloca (0);
7162 #endif
7165 ggc_pop_context ();
7167 if (max_elements_made < n_elements_made)
7168 max_elements_made = n_elements_made;
7170 /* Clean up. */
7171 end_alias_analysis ();
7172 free (uid_cuid);
7173 free (reg_eqv_table);
7175 return cse_jumps_altered || recorded_label_ref;
7178	 /* Process a single basic block.  FROM and TO are the limits of the basic
7179 block. NEXT_BRANCH points to the branch path when following jumps or
7180 a null path when not following jumps.
7182 AROUND_LOOP is non-zero if we are to try to cse around to the start of a
7183 loop. This is true when we are being called for the last time on a
7184 block and this CSE pass is before loop.c. */
7186 static rtx
7187 cse_basic_block (from, to, next_branch, around_loop)
7188 rtx from, to;
7189 struct branch_path *next_branch;
7190 int around_loop;
7192 rtx insn;
7193 int to_usage = 0;
7194 rtx libcall_insn = NULL_RTX;
7195 int num_insns = 0;
7197 /* This array is undefined before max_reg, so only allocate
7198 the space actually needed and adjust the start. */
7200 qty_table
7201 = (struct qty_table_elem *) xmalloc ((max_qty - max_reg)
7202 * sizeof (struct qty_table_elem));
7203 qty_table -= max_reg;
7205 new_basic_block ();
7207 /* TO might be a label. If so, protect it from being deleted. */
7208 if (to != 0 && GET_CODE (to) == CODE_LABEL)
7209 ++LABEL_NUSES (to);
7211 for (insn = from; insn != to; insn = NEXT_INSN (insn))
7213 enum rtx_code code = GET_CODE (insn);
7215 /* If we have processed 1,000 insns, flush the hash table to
7216 avoid extreme quadratic behavior. We must not include NOTEs
7217 in the count since there may be more of them when generating
7218 debugging information. If we clear the table at different
7219	 times, code generated with -g -O might be different from code
7220 generated with -O but not -g.
7222 ??? This is a real kludge and needs to be done some other way.
7223 Perhaps for 2.9. */
7224 if (code != NOTE && num_insns++ > 1000)
7226 flush_hash_table ();
7227 num_insns = 0;
7230 /* See if this is a branch that is part of the path. If so, and it is
7231 to be taken, do so. */
7232 if (next_branch->branch == insn)
7234 enum taken status = next_branch++->status;
7235 if (status != NOT_TAKEN)
7237 if (status == TAKEN)
7238 record_jump_equiv (insn, 1);
7239 else
7240 invalidate_skipped_block (NEXT_INSN (insn));
7242 /* Set the last insn as the jump insn; it doesn't affect cc0.
7243 Then follow this branch. */
7244 #ifdef HAVE_cc0
7245 prev_insn_cc0 = 0;
7246 #endif
7247 prev_insn = insn;
7248 insn = JUMP_LABEL (insn);
7249 continue;
7253 if (GET_MODE (insn) == QImode)
7254 PUT_MODE (insn, VOIDmode);
7256 if (GET_RTX_CLASS (code) == 'i')
7258 rtx p;
7260 /* Process notes first so we have all notes in canonical forms when
7261 looking for duplicate operations. */
7263 if (REG_NOTES (insn))
7264 REG_NOTES (insn) = cse_process_notes (REG_NOTES (insn), NULL_RTX);
7266 /* Track when we are inside a LIBCALL block. Inside such a block,
7267 we do not want to record destinations. The last insn of a
7268 LIBCALL block is not considered to be part of the block, since
7269 its destination is the result of the block and hence should be
7270 recorded. */
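/* Illustrative note (an assumed shape, not taken from this file's own
   comments): the pairing of notes that delimits a LIBCALL block looks
   roughly like

       (insn 10 ...)                      ;; REG_LIBCALL -> insn 14
       (insn 12 ...)
       (insn 14 (set (reg 100) (reg 0)))  ;; REG_RETVAL  -> insn 10

   so LIBCALL_INSN below is set at the insn carrying REG_LIBCALL and
   cleared again at the insn carrying REG_RETVAL.  */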
7272 if (REG_NOTES (insn) != 0)
7274 if ((p = find_reg_note (insn, REG_LIBCALL, NULL_RTX)))
7275 libcall_insn = XEXP (p, 0);
7276 else if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
7277 libcall_insn = 0;
7280 cse_insn (insn, libcall_insn);
7282 /* If we haven't already found an insn where we added a LABEL_REF,
7283 check this one. */
7284 if (GET_CODE (insn) == INSN && ! recorded_label_ref
7285 && for_each_rtx (&PATTERN (insn), check_for_label_ref,
7286 (void *) insn))
7287 recorded_label_ref = 1;
7290 /* If INSN is now an unconditional jump, skip to the end of our
7291 basic block by pretending that we just did the last insn in the
7292 basic block. If we are jumping to the end of our block, show
7293 that we can have one usage of TO. */
7295 if (any_uncondjump_p (insn))
7297 if (to == 0)
7299 free (qty_table + max_reg);
7300 return 0;
7303 if (JUMP_LABEL (insn) == to)
7304 to_usage = 1;
7306 /* Maybe TO was deleted because the jump is unconditional.
7307 If so, there is nothing left in this basic block. */
7308 /* ??? Perhaps it would be smarter to set TO
7309 to whatever follows this insn,
7310 and pretend the basic block had always ended here. */
7311 if (INSN_DELETED_P (to))
7312 break;
7314 insn = PREV_INSN (to);
7317 /* See if it is ok to keep on going past the label
7318 which used to end our basic block. Remember that we incremented
7319 the count of that label, so we decrement it here. If we made
7320 a jump unconditional, TO_USAGE will be one; in that case, we don't
7321 want to count the use in that jump. */
7323 if (to != 0 && NEXT_INSN (insn) == to
7324 && GET_CODE (to) == CODE_LABEL && --LABEL_NUSES (to) == to_usage)
7326 struct cse_basic_block_data val;
7327 rtx prev;
7329 insn = NEXT_INSN (to);
7331 /* If TO was the last insn in the function, we are done. */
7332 if (insn == 0)
7334 free (qty_table + max_reg);
7335 return 0;
7338 /* If TO was preceded by a BARRIER we are done with this block
7339 because it has no continuation. */
7340 prev = prev_nonnote_insn (to);
7341 if (prev && GET_CODE (prev) == BARRIER)
7343 free (qty_table + max_reg);
7344 return insn;
7347 /* Find the end of the following block. Note that we won't be
7348 following branches in this case. */
7349 to_usage = 0;
7350 val.path_size = 0;
7351 cse_end_of_basic_block (insn, &val, 0, 0, 0);
7353 /* If the tables we allocated have enough space left
7354 to handle all the SETs in the next basic block,
7355 continue through it. Otherwise, return,
7356 and that block will be scanned individually. */
7357 if (val.nsets * 2 + next_qty > max_qty)
7358 break;
7360 cse_basic_block_start = val.low_cuid;
7361 cse_basic_block_end = val.high_cuid;
7362 to = val.last;
7364 /* Prevent TO from being deleted if it is a label. */
7365 if (to != 0 && GET_CODE (to) == CODE_LABEL)
7366 ++LABEL_NUSES (to);
7368 /* Back up so we process the first insn in the extension. */
7369 insn = PREV_INSN (insn);
7373 if (next_qty > max_qty)
7374 abort ();
7376 /* If we are running before loop.c, we stopped on a NOTE_INSN_LOOP_END, and
7377 the previous insn is the only insn that branches to the head of a loop,
7378 we can cse into the loop. Don't do this if we changed the jump
7379 structure of a loop unless we aren't going to be following jumps. */
7381 insn = prev_nonnote_insn (to);
7382 if ((cse_jumps_altered == 0
7383 || (flag_cse_follow_jumps == 0 && flag_cse_skip_blocks == 0))
7384 && around_loop && to != 0
7385 && GET_CODE (to) == NOTE && NOTE_LINE_NUMBER (to) == NOTE_INSN_LOOP_END
7386 && GET_CODE (insn) == JUMP_INSN
7387 && JUMP_LABEL (insn) != 0
7388 && LABEL_NUSES (JUMP_LABEL (insn)) == 1)
7389 cse_around_loop (JUMP_LABEL (insn));
7391 free (qty_table + max_reg);
7393 return to ? NEXT_INSN (to) : 0;
7396 /* Called via for_each_rtx to see if an insn is using a LABEL_REF for which
7397 there isn't a REG_LABEL note. Return one if so. DATA is the insn. */
7399 static int
7400 check_for_label_ref (rtl, data)
7401 rtx *rtl;
7402 void *data;
7404 rtx insn = (rtx) data;
7406 /* If this insn uses a LABEL_REF and there isn't a REG_LABEL note for it,
7407 we must rerun jump since it needs to place the note. If this is a
7408 LABEL_REF for a CODE_LABEL that isn't in the insn chain, don't do this
7409 since no REG_LABEL will be added. */
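/* For example (illustrative only), if cse leaves behind an insn that
   computes a label's address directly, such as

       (insn 42 (set (reg 120) (label_ref 23)))

   and insn 42 carries no REG_LABEL note for code_label 23, we must report
   that fact so jump can be rerun to add the missing note.  */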
7410 return (GET_CODE (*rtl) == LABEL_REF
7411 && ! LABEL_REF_NONLOCAL_P (*rtl)
7412 && LABEL_P (XEXP (*rtl, 0))
7413 && INSN_UID (XEXP (*rtl, 0)) != 0
7414 && ! find_reg_note (insn, REG_LABEL, XEXP (*rtl, 0)));
7417 /* Count the number of times registers are used (not set) in X.
7418 COUNTS is an array in which we accumulate the count; INCR is how much
7419 to add to the count for each register usage.
7421 Don't count a usage of DEST, which is the SET_DEST of a SET which
7422 contains X in its SET_SRC. This is because such a SET does not
7423 modify the liveness of DEST. */
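/* A minimal usage sketch (hypothetical helper, mirroring the real caller
   delete_trivially_dead_insns further below): accumulate one use count per
   register over the whole insn chain.  */
#if 0
/* Hypothetical example; not called anywhere.  */
static void
example_count_all_uses ()
{
  int *counts = (int *) xcalloc (max_reg_num (), sizeof (int));
  rtx insn;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      count_reg_usage (insn, counts, NULL_RTX, 1);

  /* counts[N] now holds the number of uses of register N.  */
  free (counts);
}
#endif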
7425 static void
7426 count_reg_usage (x, counts, dest, incr)
7427 rtx x;
7428 int *counts;
7429 rtx dest;
7430 int incr;
7432 enum rtx_code code;
7433 const char *fmt;
7434 int i, j;
7436 if (x == 0)
7437 return;
7439 switch (code = GET_CODE (x))
7441 case REG:
7442 if (x != dest)
7443 counts[REGNO (x)] += incr;
7444 return;
7446 case PC:
7447 case CC0:
7448 case CONST:
7449 case CONST_INT:
7450 case CONST_DOUBLE:
7451 case CONST_VECTOR:
7452 case SYMBOL_REF:
7453 case LABEL_REF:
7454 return;
7456 case CLOBBER:
7457 /* If we are clobbering a MEM, mark any registers inside the address
7458 as being used. */
7459 if (GET_CODE (XEXP (x, 0)) == MEM)
7460 count_reg_usage (XEXP (XEXP (x, 0), 0), counts, NULL_RTX, incr);
7461 return;
7463 case SET:
7464 /* Unless we are setting a REG, count everything in SET_DEST. */
7465 if (GET_CODE (SET_DEST (x)) != REG)
7466 count_reg_usage (SET_DEST (x), counts, NULL_RTX, incr);
7468 /* If SRC has side-effects, then we can't delete this insn, so the
7469 usage of SET_DEST inside SRC counts.
7471 ??? Strictly-speaking, we might be preserving this insn
7472 because some other SET has side-effects, but that's hard
7473 to do and can't happen now. */
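/* Illustrative example: in

       (set (reg 100) (mem (post_inc (reg 100))))

   the source has a side effect, so the insn must be kept even if
   (reg 100) is otherwise unused; the occurrence of (reg 100) inside
   the source therefore counts as a real use.  */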
7474 count_reg_usage (SET_SRC (x), counts,
7475 side_effects_p (SET_SRC (x)) ? NULL_RTX : SET_DEST (x),
7476 incr);
7477 return;
7479 case CALL_INSN:
7480 count_reg_usage (CALL_INSN_FUNCTION_USAGE (x), counts, NULL_RTX, incr);
7481 /* Fall through. */
7483 case INSN:
7484 case JUMP_INSN:
7485 count_reg_usage (PATTERN (x), counts, NULL_RTX, incr);
7487 /* Things used in a REG_EQUAL note aren't dead since loop may try to
7488 use them. */
7490 count_reg_usage (REG_NOTES (x), counts, NULL_RTX, incr);
7491 return;
7493 case EXPR_LIST:
7494 case INSN_LIST:
7495 if (REG_NOTE_KIND (x) == REG_EQUAL
7496 || (REG_NOTE_KIND (x) != REG_NONNEG && GET_CODE (XEXP (x, 0)) == USE))
7497 count_reg_usage (XEXP (x, 0), counts, NULL_RTX, incr);
7498 count_reg_usage (XEXP (x, 1), counts, NULL_RTX, incr);
7499 return;
7501 default:
7502 break;
7505 fmt = GET_RTX_FORMAT (code);
7506 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7508 if (fmt[i] == 'e')
7509 count_reg_usage (XEXP (x, i), counts, dest, incr);
7510 else if (fmt[i] == 'E')
7511 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
7512 count_reg_usage (XVECEXP (x, i, j), counts, dest, incr);
7516 /* Return true if SET is live. */
7517 static bool
7518 set_live_p (set, insn, counts)
7519 rtx set;
7520 rtx insn ATTRIBUTE_UNUSED; /* Only used with HAVE_cc0. */
7521 int *counts;
7523 #ifdef HAVE_cc0
7524 rtx tem;
7525 #endif
7527 if (set_noop_p (set))
7528 ;
7530 #ifdef HAVE_cc0
7531 else if (GET_CODE (SET_DEST (set)) == CC0
7532 && !side_effects_p (SET_SRC (set))
7533 && ((tem = next_nonnote_insn (insn)) == 0
7534 || !INSN_P (tem)
7535 || !reg_referenced_p (cc0_rtx, PATTERN (tem))))
7536 return false;
7537 #endif
7538 else if (GET_CODE (SET_DEST (set)) != REG
7539 || REGNO (SET_DEST (set)) < FIRST_PSEUDO_REGISTER
7540 || counts[REGNO (SET_DEST (set))] != 0
7541 || side_effects_p (SET_SRC (set))
7542 /* An ADDRESSOF expression can turn into a use of the
7543 internal arg pointer, so always consider the
7544 internal arg pointer live. If it is truly dead,
7545 flow will delete the initializing insn. */
7546 || (SET_DEST (set) == current_function_internal_arg_pointer))
7547 return true;
7548 return false;
7551 /* Return true if INSN is live. */
7553 static bool
7554 insn_live_p (insn, counts)
7555 rtx insn;
7556 int *counts;
7558 int i;
7559 if (GET_CODE (PATTERN (insn)) == SET)
7560 return set_live_p (PATTERN (insn), insn, counts);
7561 else if (GET_CODE (PATTERN (insn)) == PARALLEL)
7563 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
7565 rtx elt = XVECEXP (PATTERN (insn), 0, i);
7567 if (GET_CODE (elt) == SET)
7569 if (set_live_p (elt, insn, counts))
7570 return true;
7572 else if (GET_CODE (elt) != CLOBBER && GET_CODE (elt) != USE)
7573 return true;
7575 return false;
7577 else
7578 return true;
7581 /* Return true if libcall is dead as a whole. */
7583 static bool
7584 dead_libcall_p (insn, counts)
7585 rtx insn;
7586 int *counts;
7588 rtx note;
7589 /* See if there's a REG_EQUAL note on this insn and try to
7590 replace the source with the REG_EQUAL expression.
7592 We assume that insns with REG_RETVALs can only be reg->reg
7593 copies at this point. */
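/* Illustrative example (assumed shape, not from the original comments):
   if the REG_RETVAL insn is

       (insn 30 (set (reg 100) (reg 0)))  ;; REG_EQUAL (mult (reg 101) (reg 102))

   and the (possibly simplified) REG_EQUAL expression validly replaces the
   source, insn 30 no longer depends on the rest of the libcall block, so
   the whole block can become dead.  */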
7594 note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
7595 if (note)
7597 rtx set = single_set (insn);
7598 rtx new = simplify_rtx (XEXP (note, 0));
7600 if (!new)
7601 new = XEXP (note, 0);
7603 /* While changing insn, we must update the counts accordingly. */
7604 count_reg_usage (insn, counts, NULL_RTX, -1);
7606 if (set && validate_change (insn, &SET_SRC (set), new, 0))
7608 count_reg_usage (insn, counts, NULL_RTX, 1);
7609 remove_note (insn, find_reg_note (insn, REG_RETVAL, NULL_RTX));
7610 remove_note (insn, note);
7611 return true;
7613 count_reg_usage (insn, counts, NULL_RTX, 1);
7615 return false;
7618 /* Scan all the insns and delete any that are dead; i.e., they store a register
7619 that is never used or they copy a register to itself.
7621 This is used to remove insns made obviously dead by cse, loop or other
7622 optimizations. It improves the heuristics in loop since it won't try to
7623 move dead invariants out of loops or make givs for dead quantities. The
7624 remaining passes of the compilation are also sped up. */
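/* Illustrative examples of trivially dead insns (assuming no side effects):

       (set (reg 130) (plus (reg 131) (const_int 4)))  ;; (reg 130) never used
       (set (reg 131) (reg 131))                        ;; no-op self copy

   The first is deleted once the use counts show that (reg 130) is never
   read; the second is a no-op set and is deleted regardless of counts.  */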
7626 int
7627 delete_trivially_dead_insns (insns, nreg)
7628 rtx insns;
7629 int nreg;
7631 int *counts;
7632 rtx insn, prev;
7633 int in_libcall = 0, dead_libcall = 0;
7634 int ndead = 0, nlastdead, niterations = 0;
7636 timevar_push (TV_DELETE_TRIVIALLY_DEAD);
7637 /* First count the number of times each register is used. */
7638 counts = (int *) xcalloc (nreg, sizeof (int));
7639 for (insn = next_real_insn (insns); insn; insn = next_real_insn (insn))
7640 count_reg_usage (insn, counts, NULL_RTX, 1);
7644 nlastdead = ndead;
7645 niterations++;
7646 /* Go from the last insn to the first and delete insns that only set unused
7647 registers or copy a register to itself. As we delete an insn, remove
7648 usage counts for registers it uses.
7650 The first jump optimization pass may leave a real insn as the last
7651 insn in the function. We must not skip that insn or we may end
7652 up deleting code that is not really dead. */
7653 insn = get_last_insn ();
7654 if (! INSN_P (insn))
7655 insn = prev_real_insn (insn);
7657 for (; insn; insn = prev)
7659 int live_insn = 0;
7661 prev = prev_real_insn (insn);
7663 /* Don't delete any insns that are part of a libcall block unless
7664 we can delete the whole libcall block.
7666 Flow or loop might get confused if we did that. Remember
7667 that we are scanning backwards. */
7668 if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
7670 in_libcall = 1;
7671 live_insn = 1;
7672 dead_libcall = dead_libcall_p (insn, counts);
7674 else if (in_libcall)
7675 live_insn = ! dead_libcall;
7676 else
7677 live_insn = insn_live_p (insn, counts);
7679 /* If this is a dead insn, delete it and show registers in it aren't
7680 being used. */
7682 if (! live_insn)
7684 count_reg_usage (insn, counts, NULL_RTX, -1);
7685 delete_insn_and_edges (insn);
7686 ndead++;
7689 if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
7691 in_libcall = 0;
7692 dead_libcall = 0;
7696 while (ndead != nlastdead);
7698 if (rtl_dump_file && ndead)
7699 fprintf (rtl_dump_file, "Deleted %i trivially dead insns; %i iterations\n",
7700 ndead, niterations);
7701 /* Clean up. */
7702 free (counts);
7703 timevar_pop (TV_DELETE_TRIVIALLY_DEAD);
7704 return ndead;