/* Common subexpression elimination for GNU compiler.
   Copyright (C) 1987, 88, 89, 92-99, 2000 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
#include "config.h"
/* stdio.h must precede rtl.h for FFS.  */
#include "system.h"
#include <setjmp.h>

#include "rtl.h"
#include "tm_p.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "flags.h"
#include "real.h"
#include "insn-config.h"
#include "recog.h"
#include "function.h"
#include "expr.h"
#include "toplev.h"
#include "output.h"
#include "ggc.h"
/* The basic idea of common subexpression elimination is to go
   through the code, keeping a record of expressions that would
   have the same value at the current scan point, and replacing
   expressions encountered with the cheapest equivalent expression.

   It is too complicated to keep track of the different possibilities
   when control paths merge in this code; so, at each label, we forget all
   that is known and start fresh.  This can be described as processing each
   extended basic block separately.  We have a separate pass to perform
   global CSE.

   Note CSE can turn a conditional or computed jump into a nop or
   an unconditional jump.  When this occurs we arrange to run the jump
   optimizer after CSE to delete the unreachable code.

   We use two data structures to record the equivalent expressions:
   a hash table for most expressions, and a vector of "quantity
   numbers" to record equivalent (pseudo) registers.

   The use of the special data structure for registers is desirable
   because it is faster.  It is possible because register references
   contain a fairly small number, the register number, taken from
   a contiguously allocated series, and two register references are
   identical if they have the same number.  General expressions
   do not have any such thing, so the only way to retrieve the
   information recorded on an expression other than a register
   is to keep it in a hash table.

Registers and "quantity numbers":

   At the start of each basic block, all of the (hardware and pseudo)
   registers used in the function are given distinct quantity
   numbers to indicate their contents.  During scan, when the code
   copies one register into another, we copy the quantity number.
   When a register is loaded in any other way, we allocate a new
   quantity number to describe the value generated by this operation.
   `reg_qty' records what quantity a register is currently thought
   of as containing.
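
   For illustration (a hypothetical scan, not taken from real output):
   after processing
	(set (reg 101) (reg 100))
   both registers share one quantity, so REG_QTY (101) == REG_QTY (100);
   a later
	(set (reg 100) (mem (reg 102)))
   allocates a fresh quantity number for register 100, leaving register
   101 with the old value's quantity.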
   All real quantity numbers are greater than or equal to `max_reg'.
   If register N has not been assigned a quantity, reg_qty[N] will equal N.

   Quantity numbers below `max_reg' do not exist and none of the `qty_table'
   entries should be referenced with an index below `max_reg'.

   We also maintain a bidirectional chain of registers for each
   quantity number.  The `qty_table' members `first_reg' and `last_reg',
   and `reg_eqv_table' members `next' and `prev' hold these chains.

   The first register in a chain is the one whose lifespan is least local.
   Among equals, it is the one that was seen first.
   We replace any equivalent register with that one.

   If two registers have the same quantity number, it must be true that
   REG expressions with qty_table `mode' must be in the hash table for both
   registers and must be in the same class.

   The converse is not true.  Since hard registers may be referenced in
   any mode, two REG expressions might be equivalent in the hash table
   but not have the same quantity number if the quantity of one of the
   registers does not have the same mode as those expressions.

Constants and quantity numbers:

   When a quantity has a known constant value, that value is stored
   in the appropriate qty_table `const_rtx'.  This is in addition to
   putting the constant in the hash table as is usual for non-regs.

   Whether a reg or a constant is preferred is determined by the configuration
   macro CONST_COSTS and will often depend on the constant value.  In any
   event, expressions containing constants can be simplified, by fold_rtx.

   When a quantity has a known nearly constant value (such as an address
   of a stack slot), that value is stored in the appropriate qty_table
   `const_rtx'.

   Integer constants don't have a machine mode.  However, cse
   determines the intended machine mode from the destination
   of the instruction that moves the constant.  The machine mode
   is recorded in the hash table along with the actual RTL
   constant expression so that different modes are kept separate.
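
   For example (an invented case): for
	(set (reg:SI 100) (const_int 5))
   the constant 5 is entered in the hash table under SImode, separately
   from any entry that records 5 used as a DImode value.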
Other expressions:

   To record known equivalences among expressions in general
   we use a hash table called `table'.  It has a fixed number of buckets
   that contain chains of `struct table_elt' elements for expressions.
   These chains connect the elements whose expressions have the same
   hash codes.

   Other chains through the same elements connect the elements which
   currently have equivalent values.

   Register references in an expression are canonicalized before hashing
   the expression.  This is done using `reg_qty' and qty_table `first_reg'.
   The hash code of a register reference is computed using the quantity
   number, not the register number.

   When the value of an expression changes, it is necessary to remove from the
   hash table not just that expression but all expressions whose values
   could be different as a result.

     1. If the value changing is in memory, except in special cases
     ANYTHING referring to memory could be changed.  That is because
     nobody knows where a pointer does not point.
     The function `invalidate_memory' removes what is necessary.

     The special cases are when the address is constant or is
     a constant plus a fixed register such as the frame pointer
     or a static chain pointer.  When such addresses are stored in,
     we can tell exactly which other such addresses must be invalidated
     due to overlap.  `invalidate' does this.
     All expressions that refer to non-constant
     memory addresses are also invalidated.  `invalidate_memory' does this.

     2. If the value changing is a register, all expressions
     containing references to that register, and only those,
     must be removed.

   Because searching the entire hash table for expressions that contain
   a register is very slow, we try to figure out when it isn't necessary.
   Precisely, this is necessary only when expressions have been
   entered in the hash table using this register, and then the value has
   changed, and then another expression wants to be added to refer to
   the register's new value.  This sequence of circumstances is rare
   within any one basic block.

   The vectors `reg_tick' and `reg_in_table' are used to detect this case.
   reg_tick[i] is incremented whenever a value is stored in register i.
   reg_in_table[i] holds -1 if no references to register i have been
   entered in the table; otherwise, it contains the value reg_tick[i] had
   when the references were entered.  If we want to enter a reference
   and reg_in_table[i] != reg_tick[i], we must scan and remove old references.
   Until we want to enter a new entry, the mere fact that the two vectors
   don't match causes the entries to be ignored if anyone tries to match them.
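
   A hypothetical sequence: suppose (plus (reg 100) (const_int 4)) is
   entered while reg_tick[100] == 1, so reg_in_table[100] becomes 1.
   A later store into register 100 merely bumps reg_tick[100] to 2; the
   stale table entry is ignored because the vectors disagree, and it is
   actually swept out only when a new reference to register 100 is about
   to be entered.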
   Registers themselves are entered in the hash table as well as in
   the equivalent-register chains.  However, the vectors `reg_tick'
   and `reg_in_table' do not apply to expressions which are simple
   register references.  These expressions are removed from the table
   immediately when they become invalid, and this can be done even if
   we do not immediately search for all the expressions that refer to
   the register.

   A CLOBBER rtx in an instruction invalidates its operand for further
   reuse.  A CLOBBER or SET rtx whose operand is a MEM:BLK
   invalidates everything that resides in memory.

Related expressions:

   Constant expressions that differ only by an additive integer
   are called related.  When a constant expression is put in
   the table, the related expression with no constant term
   is also entered.  These are made to point at each other
   so that it is possible to find out if there exists any
   register equivalent to an expression related to a given expression.  */
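
/* As an illustration of related values (hypothetical rtl): when
   (const (plus (symbol_ref "table") (const_int 8))) is entered, the base
   expression (symbol_ref "table") is entered too, and the two are linked
   on a circular `related_value' chain; a register known equivalent to the
   base can then be found from the offsetted form.  */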
/* One plus largest register number used in this function.  */

static int max_reg;

/* One plus largest instruction UID used in this function at time of
   cse_main call.  */

static int max_insn_uid;

/* Length of qty_table vector.  We know in advance we will not need
   a quantity number this big.  */

static int max_qty;

/* Next quantity number to be allocated.
   This is 1 + the largest number needed so far.  */

static int next_qty;

/* Per-qty information tracking.

   `first_reg' and `last_reg' track the head and tail of the
   chain of registers which currently contain this quantity.

   `mode' contains the machine mode of this quantity.

   `const_rtx' holds the rtx of the constant value of this
   quantity, if known.  A summation of the frame/arg pointer
   and a constant can also be entered here.  When this holds
   a known value, `const_insn' is the insn which stored the
   constant value.

   `comparison_{code,const,qty}' are used to track when a
   comparison between a quantity and some constant or register has
   been passed.  In such a case, we know the results of the comparison
   in case we see it again.  These members record a comparison that
   is known to be true.  `comparison_code' holds the rtx code of such
   a comparison, else it is set to UNKNOWN and the other two
   comparison members are undefined.  `comparison_const' holds
   the constant being compared against, or zero if the comparison
   is not against a constant.  `comparison_qty' holds the quantity
   being compared against when the result is known.  If the comparison
   is not with a register, `comparison_qty' is -1.  */
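
/* For example (an invented case): once control flow has passed a branch
   taken only when (eq (reg 100) (const_int 0)) holds, register 100's
   quantity can record comparison_code == EQ and comparison_const ==
   const0_rtx, so an identical test seen later in the same extended
   basic block is already known to be true.  */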
struct qty_table_elem
{
  rtx const_rtx;
  rtx const_insn;
  rtx comparison_const;
  int comparison_qty;
  int first_reg, last_reg;
  enum machine_mode mode;
  enum rtx_code comparison_code;
};

/* The table of all qtys, indexed by qty number.  */
static struct qty_table_elem *qty_table;
#ifdef HAVE_cc0
/* For machines that have a CC0, we do not record its value in the hash
   table since its use is guaranteed to be the insn immediately following
   its definition and any other insn is presumed to invalidate it.

   Instead, we store below the value last assigned to CC0.  If it should
   happen to be a constant, it is stored in preference to the actual
   assigned value.  In case it is a constant, we store the mode in which
   the constant should be interpreted.  */

static rtx prev_insn_cc0;
static enum machine_mode prev_insn_cc0_mode;
#endif

/* Previous actual insn.  0 if at first insn of basic block.  */

static rtx prev_insn;

/* Insn being scanned.  */

static rtx this_insn;

/* Indexed by register number, gives the number of the next (or
   previous) register in the chain of registers sharing the same
   value.

   Or -1 if this register is at the end of the chain.

   If reg_qty[N] == N, reg_eqv_table[N].next is undefined.  */
/* Per-register equivalence chain.  */
struct reg_eqv_elem
{
  int next, prev;
};

/* The table of all register equivalence chains.  */
static struct reg_eqv_elem *reg_eqv_table;
struct cse_reg_info
{
  /* Next in hash chain.  */
  struct cse_reg_info *hash_next;

  /* The next cse_reg_info structure in the free or used list.  */
  struct cse_reg_info *next;

  /* Search key.  */
  int regno;

  /* The quantity number of the register's current contents.  */
  int reg_qty;

  /* The number of times the register has been altered in the current
     basic block.  */
  int reg_tick;

  /* The REG_TICK value at which rtx's containing this register are
     valid in the hash table.  If this does not equal the current
     reg_tick value, such expressions existing in the hash table are
     invalid.  */
  int reg_in_table;
};

/* A free list of cse_reg_info entries.  */
static struct cse_reg_info *cse_reg_info_free_list;

/* A used list of cse_reg_info entries.  */
static struct cse_reg_info *cse_reg_info_used_list;
static struct cse_reg_info *cse_reg_info_used_list_end;
/* A mapping from registers to cse_reg_info data structures.  */
#define REGHASH_SHIFT 7
#define REGHASH_SIZE (1 << REGHASH_SHIFT)
#define REGHASH_MASK (REGHASH_SIZE - 1)
static struct cse_reg_info *reg_hash[REGHASH_SIZE];

#define REGHASH_FN(REGNO) \
	(((REGNO) ^ ((REGNO) >> REGHASH_SHIFT)) & REGHASH_MASK)

/* The last lookup we did into the cse_reg_info hash table.  This allows
   us to cache repeated lookups.  */
static int cached_regno;
static struct cse_reg_info *cached_cse_reg_info;
/* A HARD_REG_SET containing all the hard registers for which there is
   currently a REG expression in the hash table.  Note the difference
   from the above variables, which indicate if the REG is mentioned in some
   expression in the table.  */

static HARD_REG_SET hard_regs_in_table;

/* A HARD_REG_SET containing all the hard registers that are invalidated
   by a CALL_INSN.  */

static HARD_REG_SET regs_invalidated_by_call;

/* CUID of insn that starts the basic block currently being cse-processed.  */

static int cse_basic_block_start;

/* CUID of insn that ends the basic block currently being cse-processed.  */

static int cse_basic_block_end;

/* Vector mapping INSN_UIDs to cuids.
   The cuids are like uids but increase monotonically always.
   We use them to see whether a reg is used outside a given basic block.  */

static int *uid_cuid;

/* Highest UID in UID_CUID.  */
static int max_uid;

/* Get the cuid of an insn.  */

#define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])

/* Nonzero if cse has altered conditional jump insns
   in such a way that jump optimization should be redone.  */

static int cse_jumps_altered;

/* Nonzero if we put a LABEL_REF into the hash table.  Since we may have put
   it into an INSN without a REG_LABEL, we have to rerun jump after CSE
   to put in the note.  */
static int recorded_label_ref;

/* canon_hash stores 1 in do_not_record
   if it notices a reference to CC0, PC, or some other volatile
   subexpression.  */

static int do_not_record;

#ifdef LOAD_EXTEND_OP

/* Scratch rtl used when looking for load-extended copy of a MEM.  */
static rtx memory_extend_rtx;
#endif

/* canon_hash stores 1 in hash_arg_in_memory
   if it notices a reference to memory within the expression being hashed.  */

static int hash_arg_in_memory;
/* The hash table contains buckets which are chains of `struct table_elt's,
   each recording one expression's information.
   That expression is in the `exp' field.

   Those elements with the same hash code are chained in both directions
   through the `next_same_hash' and `prev_same_hash' fields.

   Each set of expressions with equivalent values
   are on a two-way chain through the `next_same_value'
   and `prev_same_value' fields, and all point with
   the `first_same_value' field at the first element in
   that chain.  The chain is in order of increasing cost.
   Each element's cost value is in its `cost' field.

   The `in_memory' field is nonzero for elements that
   involve any reference to memory.  These elements are removed
   whenever a write is done to an unidentified location in memory.
   To be safe, we assume that a memory address is unidentified unless
   the address is either a symbol constant or a constant plus
   the frame pointer or argument pointer.

   The `related_value' field is used to connect related expressions
   (that differ by adding an integer).
   The related expressions are chained in a circular fashion.
   `related_value' is zero for expressions for which this
   chain is not useful.

   The `cost' field stores the cost of this element's expression.

   The `is_const' flag is set if the element is a constant (including
   a fixed address).

   The `flag' field is used as a temporary during some search routines.

   The `mode' field is usually the same as GET_MODE (`exp'), but
   if `exp' is a CONST_INT and has no machine mode then the `mode'
   field is the mode it was being used as.  Each constant is
   recorded separately for each mode it is used with.  */
struct table_elt
{
  rtx exp;
  struct table_elt *next_same_hash;
  struct table_elt *prev_same_hash;
  struct table_elt *next_same_value;
  struct table_elt *prev_same_value;
  struct table_elt *first_same_value;
  struct table_elt *related_value;
  int cost;
  enum machine_mode mode;
  char in_memory;
  char is_const;
  char flag;
};

/* We don't want a lot of buckets, because we rarely have very many
   things stored in the hash table, and a lot of buckets slows
   down a lot of loops that happen frequently.  */
#define HASH_SHIFT 5
#define HASH_SIZE (1 << HASH_SHIFT)
#define HASH_MASK (HASH_SIZE - 1)
/* Compute hash code of X in mode M.  Special-case the case where X is a
   pseudo register (hard registers may require `do_not_record' to be set).  */

#define HASH(X, M) \
 ((GET_CODE (X) == REG && REGNO (X) >= FIRST_PSEUDO_REGISTER \
  ? (((unsigned) REG << 7) + (unsigned) REG_QTY (REGNO (X))) \
  : canon_hash (X, M)) & HASH_MASK)
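
/* An illustrative note (hypothetical values): a pseudo such as
   (reg:SI 100) is hashed from its quantity number via REG_QTY, not from
   the register number, so registers known to hold the same value hash
   alike; any other rtx falls through to canon_hash.  */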
/* Determine whether register number N is considered a fixed register for CSE.
   It is desirable to replace other regs with fixed regs, to reduce need for
   non-fixed hard regs.
   A reg wins if it is either the frame pointer or designated as fixed.  */
#define FIXED_REGNO_P(N)  \
  ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
   || fixed_regs[N] || global_regs[N])

/* Compute cost of X, as stored in the `cost' field of a table_elt.  Fixed
   hard registers and pointers into the frame are the cheapest with a cost
   of 0.  Next come pseudos with a cost of one and other hard registers with
   a cost of 2.  Aside from these special cases, call `rtx_cost'.  */

#define CHEAP_REGNO(N) \
  ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
   || (N) == STACK_POINTER_REGNUM || (N) == ARG_POINTER_REGNUM \
   || ((N) >= FIRST_VIRTUAL_REGISTER && (N) <= LAST_VIRTUAL_REGISTER) \
   || ((N) < FIRST_PSEUDO_REGISTER \
       && FIXED_REGNO_P (N) && REGNO_REG_CLASS (N) != NO_REGS))

/* A register is cheap if it is a user variable assigned to the register
   or if its register number always corresponds to a cheap register.  */

#define CHEAP_REG(N) \
  ((REG_USERVAR_P (N) && REGNO (N) < FIRST_PSEUDO_REGISTER) \
   || CHEAP_REGNO (REGNO (N)))

#define COST(X) \
  (GET_CODE (X) == REG \
   ? (CHEAP_REG (X) ? 0 \
      : REGNO (X) >= FIRST_PSEUDO_REGISTER ? 1 \
      : 2) \
   : notreg_cost(X))
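
/* Worked examples (illustrative only): COST of the frame pointer is 0
   via CHEAP_REG, COST of an ordinary pseudo such as (reg:SI 150) is 1,
   COST of a non-fixed hard register is 2, and anything that is not a
   REG, e.g. (plus (reg 150) (const_int 4)), falls through to
   notreg_cost.  */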
/* Get the info associated with register N.  */

#define GET_CSE_REG_INFO(N) \
  (((N) == cached_regno && cached_cse_reg_info) \
   ? cached_cse_reg_info : get_cse_reg_info ((N)))

/* Get the number of times this register has been updated in this
   basic block.  */

#define REG_TICK(N) ((GET_CSE_REG_INFO (N))->reg_tick)

/* Get the point at which REG was recorded in the table.  */

#define REG_IN_TABLE(N) ((GET_CSE_REG_INFO (N))->reg_in_table)

/* Get the quantity number for REG.  */

#define REG_QTY(N) ((GET_CSE_REG_INFO (N))->reg_qty)

/* Determine if the quantity number for register N represents a valid index
   into the qty_table.  */

#define REGNO_QTY_VALID_P(N) (REG_QTY (N) != (N))
#ifdef ADDRESS_COST
/* The ADDRESS_COST macro does not deal with ADDRESSOF nodes.  But,
   during CSE, such nodes are present.  Using an ADDRESSOF node which
   refers to the address of a REG is a good thing because we can then
   turn (MEM (ADDRESSOF (REG))) into just plain REG.  */
#define CSE_ADDRESS_COST(RTX) \
  ((GET_CODE (RTX) == ADDRESSOF && REG_P (XEXP ((RTX), 0))) \
   ? -1 : ADDRESS_COST(RTX))
#endif
static struct table_elt *table[HASH_SIZE];

/* Chain of `struct table_elt's made so far for this function
   but currently removed from the table.  */

static struct table_elt *free_element_chain;

/* Number of `struct table_elt' structures made so far for this function.  */

static int n_elements_made;

/* Maximum value `n_elements_made' has had so far in this compilation
   for functions previously processed.  */

static int max_elements_made;

/* Surviving equivalence class when two equivalence classes are merged
   by recording the effects of a jump in the last insn.  Zero if the
   last insn was not a conditional jump.  */

static struct table_elt *last_jump_equiv_class;

/* Set to the cost of a constant pool reference if one was found for a
   symbolic constant.  If this was found, it means we should try to
   convert constants into constant pool entries if they don't fit in
   the insn.  */

static int constant_pool_entries_cost;

/* Define maximum length of a branch path.  */

#define PATHLENGTH 10
/* This data describes a block that will be processed by cse_basic_block.  */

struct cse_basic_block_data
{
  /* Lowest CUID value of insns in block.  */
  int low_cuid;
  /* Highest CUID value of insns in block.  */
  int high_cuid;
  /* Total number of SETs in block.  */
  int nsets;
  /* Last insn in the block.  */
  rtx last;
  /* Size of current branch path, if any.  */
  int path_size;
  /* Current branch path, indicating which branches will be taken.  */
  struct branch_path
    {
      /* The branch insn.  */
      rtx branch;
      /* Whether it should be taken or not.  AROUND is the same as taken
	 except that it is used when the destination label is not preceded
	 by a BARRIER.  */
      enum taken {TAKEN, NOT_TAKEN, AROUND} status;
    } path[PATHLENGTH];
};
/* Nonzero if X has the form (PLUS frame-pointer integer).  We check for
   virtual regs here because the simplify_*_operation routines are called
   by integrate.c, which is called before virtual register instantiation.

   ?!? FIXED_BASE_PLUS_P and NONZERO_BASE_PLUS_P need to move into
   a header file so that their definitions can be shared with the
   simplification routines in simplify-rtx.c.  Until then, do not
   change these macros without also changing the copy in simplify-rtx.c.  */

#define FIXED_BASE_PLUS_P(X) \
  ((X) == frame_pointer_rtx || (X) == hard_frame_pointer_rtx \
   || ((X) == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]) \
   || (X) == virtual_stack_vars_rtx \
   || (X) == virtual_incoming_args_rtx \
   || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
       && (XEXP (X, 0) == frame_pointer_rtx \
	   || XEXP (X, 0) == hard_frame_pointer_rtx \
	   || (XEXP (X, 0) == arg_pointer_rtx \
	       && fixed_regs[ARG_POINTER_REGNUM]) \
	   || XEXP (X, 0) == virtual_stack_vars_rtx \
	   || XEXP (X, 0) == virtual_incoming_args_rtx)) \
   || GET_CODE (X) == ADDRESSOF)
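
/* Illustrative matches (hypothetical rtl): FIXED_BASE_PLUS_P accepts
   the frame pointer itself and (plus frame_pointer_rtx (const_int -8)),
   but rejects (plus (reg 100) (const_int -8)); a pseudo is not a fixed
   base.  */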
/* Similar, but also allows reference to the stack pointer.

   This used to include FIXED_BASE_PLUS_P, however, we can't assume that
   arg_pointer_rtx by itself is nonzero, because on at least one machine,
   the i960, the arg pointer is zero when it is unused.  */

#define NONZERO_BASE_PLUS_P(X) \
  ((X) == frame_pointer_rtx || (X) == hard_frame_pointer_rtx \
   || (X) == virtual_stack_vars_rtx \
   || (X) == virtual_incoming_args_rtx \
   || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
       && (XEXP (X, 0) == frame_pointer_rtx \
	   || XEXP (X, 0) == hard_frame_pointer_rtx \
	   || (XEXP (X, 0) == arg_pointer_rtx \
	       && fixed_regs[ARG_POINTER_REGNUM]) \
	   || XEXP (X, 0) == virtual_stack_vars_rtx \
	   || XEXP (X, 0) == virtual_incoming_args_rtx)) \
   || (X) == stack_pointer_rtx \
   || (X) == virtual_stack_dynamic_rtx \
   || (X) == virtual_outgoing_args_rtx \
   || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
       && (XEXP (X, 0) == stack_pointer_rtx \
	   || XEXP (X, 0) == virtual_stack_dynamic_rtx \
	   || XEXP (X, 0) == virtual_outgoing_args_rtx)) \
   || GET_CODE (X) == ADDRESSOF)
static int notreg_cost PARAMS ((rtx));
static void new_basic_block PARAMS ((void));
static void make_new_qty PARAMS ((int, enum machine_mode));
static void make_regs_eqv PARAMS ((int, int));
static void delete_reg_equiv PARAMS ((int));
static int mention_regs PARAMS ((rtx));
static int insert_regs PARAMS ((rtx, struct table_elt *, int));
static void remove_from_table PARAMS ((struct table_elt *, unsigned));
static struct table_elt *lookup PARAMS ((rtx, unsigned, enum machine_mode)),
       *lookup_for_remove PARAMS ((rtx, unsigned, enum machine_mode));
static rtx lookup_as_function PARAMS ((rtx, enum rtx_code));
static struct table_elt *insert PARAMS ((rtx, struct table_elt *, unsigned,
					 enum machine_mode));
static void merge_equiv_classes PARAMS ((struct table_elt *,
					 struct table_elt *));
static void invalidate PARAMS ((rtx, enum machine_mode));
static int cse_rtx_varies_p PARAMS ((rtx));
static void remove_invalid_refs PARAMS ((int));
static void remove_invalid_subreg_refs PARAMS ((int, int, enum machine_mode));
static void rehash_using_reg PARAMS ((rtx));
static void invalidate_memory PARAMS ((void));
static void invalidate_for_call PARAMS ((void));
static rtx use_related_value PARAMS ((rtx, struct table_elt *));
static unsigned canon_hash PARAMS ((rtx, enum machine_mode));
static unsigned safe_hash PARAMS ((rtx, enum machine_mode));
static int exp_equiv_p PARAMS ((rtx, rtx, int, int));
static rtx canon_reg PARAMS ((rtx, rtx));
static void find_best_addr PARAMS ((rtx, rtx *));
static enum rtx_code find_comparison_args PARAMS ((enum rtx_code, rtx *, rtx *,
						   enum machine_mode *,
						   enum machine_mode *));
static rtx fold_rtx PARAMS ((rtx, rtx));
static rtx equiv_constant PARAMS ((rtx));
static void record_jump_equiv PARAMS ((rtx, int));
static void record_jump_cond PARAMS ((enum rtx_code, enum machine_mode,
				      rtx, rtx, int));
static void cse_insn PARAMS ((rtx, rtx));
static int addr_affects_sp_p PARAMS ((rtx));
static void invalidate_from_clobbers PARAMS ((rtx));
static rtx cse_process_notes PARAMS ((rtx, rtx));
static void cse_around_loop PARAMS ((rtx));
static void invalidate_skipped_set PARAMS ((rtx, rtx, void *));
static void invalidate_skipped_block PARAMS ((rtx));
static void cse_check_loop_start PARAMS ((rtx, rtx, void *));
static void cse_set_around_loop PARAMS ((rtx, rtx, rtx));
static rtx cse_basic_block PARAMS ((rtx, rtx, struct branch_path *, int));
static void count_reg_usage PARAMS ((rtx, int *, rtx, int));
extern void dump_class PARAMS ((struct table_elt*));
static struct cse_reg_info* get_cse_reg_info PARAMS ((int));

static void flush_hash_table PARAMS ((void));
/* Dump the expressions in the equivalence class indicated by CLASSP.
   This function is used only for debugging.  */
void
dump_class (classp)
     struct table_elt *classp;
{
  struct table_elt *elt;

  fprintf (stderr, "Equivalence chain for ");
  print_rtl (stderr, classp->exp);
  fprintf (stderr, ": \n");

  for (elt = classp->first_same_value; elt; elt = elt->next_same_value)
    {
      print_rtl (stderr, elt->exp);
      fprintf (stderr, "\n");
    }
}
/* Internal function, to compute cost when X is not a register; called
   from COST macro to keep it simple.  */

static int
notreg_cost (x)
     rtx x;
{
  return ((GET_CODE (x) == SUBREG
	   && GET_CODE (SUBREG_REG (x)) == REG
	   && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
	   && GET_MODE_CLASS (GET_MODE (SUBREG_REG (x))) == MODE_INT
	   && (GET_MODE_SIZE (GET_MODE (x))
	       < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
	   && subreg_lowpart_p (x)
	   && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (GET_MODE (x)),
				     GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))))
	  ? (CHEAP_REG (SUBREG_REG (x)) ? 0
	     : (REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER ? 1
		: 2))
	  : rtx_cost (x, SET) * 2);
}
/* Return the right cost to give to an operation
   to make the cost of the corresponding register-to-register instruction
   N times that of a fast register-to-register instruction.  */

#define COSTS_N_INSNS(N) ((N) * 4 - 2)
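
/* So, purely arithmetically, COSTS_N_INSNS (1) == 2 and
   COSTS_N_INSNS (5) == 18; a fast register-to-register move costs 2
   units on this scale.  */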
/* Return an estimate of the cost of computing rtx X.
   One use is in cse, to decide which expression to keep in the hash table.
   Another is in rtl generation, to pick the cheapest way to multiply.
   Other uses like the latter are expected in the future.  */

int
rtx_cost (x, outer_code)
     rtx x;
     enum rtx_code outer_code ATTRIBUTE_UNUSED;
{
  register int i, j;
  register enum rtx_code code;
  register const char *fmt;
  register int total;

  if (x == 0)
    return 0;
  /* Compute the default costs of certain things.
     Note that RTX_COSTS can override the defaults.  */

  code = GET_CODE (x);
  switch (code)
    {
    case MULT:
      /* Count multiplication by 2**n as a shift,
	 because if we are considering it, we would output it as a shift.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
	  && exact_log2 (INTVAL (XEXP (x, 1))) >= 0)
	total = 2;
      else
	total = COSTS_N_INSNS (5);
      break;
    case DIV:
    case UDIV:
    case MOD:
    case UMOD:
      total = COSTS_N_INSNS (7);
      break;
    case USE:
      /* Used in loop.c and combine.c as a marker.  */
      total = 0;
      break;
    case ASM_OPERANDS:
      /* We don't want these to be used in substitutions because
	 we have no way of validating the resulting insn.  So assign
	 anything containing an ASM_OPERANDS a very high cost.  */
      total = 1000;
      break;
    default:
      total = 2;
    }
  switch (code)
    {
    case REG:
      return ! CHEAP_REG (x);

    case SUBREG:
      /* If we can't tie these modes, make this expensive.  The larger
	 the mode, the more expensive it is.  */
      if (! MODES_TIEABLE_P (GET_MODE (x), GET_MODE (SUBREG_REG (x))))
	return COSTS_N_INSNS (2
			      + GET_MODE_SIZE (GET_MODE (x)) / UNITS_PER_WORD);
      return 2;
#ifdef RTX_COSTS
      RTX_COSTS (x, code, outer_code);
#endif
#ifdef CONST_COSTS
      CONST_COSTS (x, code, outer_code);
#endif

    default:
#ifdef DEFAULT_RTX_COSTS
      DEFAULT_RTX_COSTS (x, code, outer_code);
#endif
      break;
    }

  /* Sum the costs of the sub-rtx's, plus cost of this operation,
     which is already in total.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      total += rtx_cost (XEXP (x, i), code);
    else if (fmt[i] == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
	total += rtx_cost (XVECEXP (x, i, j), code);

  return total;
}
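
/* A sketch of a typical evaluation (hypothetical operands): for
   (mult:SI (reg:SI 100) (const_int 4)) the MULT is costed as a shift,
   so total starts at 2, and the recursive walk then adds the costs of
   the operands; the result guides which equivalent expression cse
   keeps as canonical.  */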
static struct cse_reg_info *
get_cse_reg_info (regno)
     int regno;
{
  struct cse_reg_info **hash_head = &reg_hash[REGHASH_FN (regno)];
  struct cse_reg_info *p;

  for (p = *hash_head; p != NULL; p = p->hash_next)
    if (p->regno == regno)
      break;

  if (p == NULL)
    {
      /* Get a new cse_reg_info structure.  */
      if (cse_reg_info_free_list)
	{
	  p = cse_reg_info_free_list;
	  cse_reg_info_free_list = p->next;
	}
      else
	p = (struct cse_reg_info *) xmalloc (sizeof (struct cse_reg_info));

      /* Insert into hash table.  */
      p->hash_next = *hash_head;
      *hash_head = p;

      /* Initialize it.  */
      p->reg_tick = 1;
      p->reg_in_table = -1;
      p->reg_qty = regno;
      p->regno = regno;
      p->next = cse_reg_info_used_list;
      cse_reg_info_used_list = p;
      if (!cse_reg_info_used_list_end)
	cse_reg_info_used_list_end = p;
    }

  /* Cache this lookup; we tend to be looking up information about the
     same register several times in a row.  */
  cached_regno = regno;
  cached_cse_reg_info = p;

  return p;
}
/* Clear the hash table and initialize each register with its own quantity,
   for a new basic block.  */

static void
new_basic_block ()
{
  register int i;

  next_qty = max_reg;

  /* Clear out hash table state for this pass.  */

  bzero ((char *) reg_hash, sizeof reg_hash);

  if (cse_reg_info_used_list)
    {
      cse_reg_info_used_list_end->next = cse_reg_info_free_list;
      cse_reg_info_free_list = cse_reg_info_used_list;
      cse_reg_info_used_list = cse_reg_info_used_list_end = 0;
    }
  cached_cse_reg_info = 0;

  CLEAR_HARD_REG_SET (hard_regs_in_table);

  /* The per-quantity values used to be initialized here, but it is
     much faster to initialize each as it is made in `make_new_qty'.  */

  for (i = 0; i < HASH_SIZE; i++)
    {
      struct table_elt *first;

      first = table[i];
      if (first != NULL)
	{
	  struct table_elt *last = first;

	  table[i] = NULL;

	  while (last->next_same_hash != NULL)
	    last = last->next_same_hash;

	  /* Now relink this entire hash chain into
	     the free element list.  */

	  last->next_same_hash = free_element_chain;
	  free_element_chain = first;
	}
    }

  prev_insn = 0;

#ifdef HAVE_cc0
  prev_insn_cc0 = 0;
#endif
}
/* Say that register REG contains a quantity in mode MODE not in any
   register before and initialize that quantity.  */

static void
make_new_qty (reg, mode)
     register int reg;
     register enum machine_mode mode;
{
  register int q;
  register struct qty_table_elem *ent;
  register struct reg_eqv_elem *eqv;

  if (next_qty >= max_qty)
    abort ();

  q = REG_QTY (reg) = next_qty++;
  ent = &qty_table[q];
  ent->first_reg = reg;
  ent->last_reg = reg;
  ent->mode = mode;
  ent->const_rtx = ent->const_insn = NULL_RTX;
  ent->comparison_code = UNKNOWN;

  eqv = &reg_eqv_table[reg];
  eqv->next = eqv->prev = -1;
}
/* Make reg NEW equivalent to reg OLD.
   OLD is not changing; NEW is.  */

static void
make_regs_eqv (new, old)
     register int new, old;
{
  register int lastr, firstr;
  register int q = REG_QTY (old);
  register struct qty_table_elem *ent;

  ent = &qty_table[q];

  /* Nothing should become eqv until it has a "non-invalid" qty number.  */
  if (! REGNO_QTY_VALID_P (old))
    abort ();

  REG_QTY (new) = q;
  firstr = ent->first_reg;
  lastr = ent->last_reg;

  /* Prefer fixed hard registers to anything.  Prefer pseudo regs to other
     hard regs.  Among pseudos, if NEW will live longer than any other reg
     of the same qty, and that is beyond the current basic block,
     make it the new canonical replacement for this qty.  */
  if (! (firstr < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (firstr))
      /* Certain fixed registers might be of the class NO_REGS.  This means
	 that not only can they not be allocated by the compiler, but
	 they cannot be used in substitutions or canonicalizations
	 either.  */
      && (new >= FIRST_PSEUDO_REGISTER || REGNO_REG_CLASS (new) != NO_REGS)
      && ((new < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (new))
	  || (new >= FIRST_PSEUDO_REGISTER
	      && (firstr < FIRST_PSEUDO_REGISTER
		  || ((uid_cuid[REGNO_LAST_UID (new)] > cse_basic_block_end
		       || (uid_cuid[REGNO_FIRST_UID (new)]
			   < cse_basic_block_start))
		      && (uid_cuid[REGNO_LAST_UID (new)]
			  > uid_cuid[REGNO_LAST_UID (firstr)]))))))
    {
      reg_eqv_table[firstr].prev = new;
      reg_eqv_table[new].next = firstr;
      reg_eqv_table[new].prev = -1;
      ent->first_reg = new;
    }
  else
    {
      /* If NEW is a hard reg (known to be non-fixed), insert at end.
	 Otherwise, insert before any non-fixed hard regs that are at the
	 end.  Registers of class NO_REGS cannot be used as an
	 equivalent for anything.  */
      while (lastr < FIRST_PSEUDO_REGISTER && reg_eqv_table[lastr].prev >= 0
	     && (REGNO_REG_CLASS (lastr) == NO_REGS || ! FIXED_REGNO_P (lastr))
	     && new >= FIRST_PSEUDO_REGISTER)
	lastr = reg_eqv_table[lastr].prev;
      reg_eqv_table[new].next = reg_eqv_table[lastr].next;
      if (reg_eqv_table[lastr].next >= 0)
	reg_eqv_table[reg_eqv_table[lastr].next].prev = new;
      else
	qty_table[q].last_reg = new;
      reg_eqv_table[lastr].next = new;
      reg_eqv_table[new].prev = lastr;
    }
}
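
/* A worked illustration (hypothetical registers): if the chain for a
   quantity currently holds just pseudo 100, recording equivalence with
   the (fixed) frame pointer puts the hard register at the head, making
   it the preferred replacement, whereas an ordinary non-fixed hard
   register would be appended at the tail instead.  */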
/* Remove REG from its equivalence class.  */

static void
delete_reg_equiv (reg)
     register int reg;
{
  register struct qty_table_elem *ent;
  register int q = REG_QTY (reg);
  register int p, n;

  /* If invalid, do nothing.  */
  if (q == reg)
    return;

  ent = &qty_table[q];

  p = reg_eqv_table[reg].prev;
  n = reg_eqv_table[reg].next;

  if (n != -1)
    reg_eqv_table[n].prev = p;
  else
    ent->last_reg = p;
  if (p != -1)
    reg_eqv_table[p].next = n;
  else
    ent->first_reg = n;

  REG_QTY (reg) = reg;
}
/* Remove any invalid expressions from the hash table
   that refer to any of the registers contained in expression X.

   Make sure that newly inserted references to those registers
   as subexpressions will be considered valid.

   mention_regs is not called when a register itself
   is being stored in the table.

   Return 1 if we have done something that may have changed the hash code
   of X.  */

static int
mention_regs (x)
     rtx x;
{
  register enum rtx_code code;
  register int i, j;
  register const char *fmt;
  register int changed = 0;

  if (x == 0)
    return 0;

  code = GET_CODE (x);
  if (code == REG)
    {
      register int regno = REGNO (x);
      register int endregno
	= regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
		   : HARD_REGNO_NREGS (regno, GET_MODE (x)));
      int i;

      for (i = regno; i < endregno; i++)
	{
	  if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
	    remove_invalid_refs (i);

	  REG_IN_TABLE (i) = REG_TICK (i);
	}

      return 0;
    }
  /* If this is a SUBREG, we don't want to discard other SUBREGs of the same
     pseudo if they don't use overlapping words.  We handle only pseudos
     here for simplicity.  */
  if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == REG
      && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER)
    {
      int i = REGNO (SUBREG_REG (x));

      if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
	{
	  /* If reg_tick has been incremented more than once since
	     reg_in_table was last set, that means that the entire
	     register has been set before, so discard anything memorized
	     for the entire register, including all SUBREG expressions.  */
	  if (REG_IN_TABLE (i) != REG_TICK (i) - 1)
	    remove_invalid_refs (i);
	  else
	    remove_invalid_subreg_refs (i, SUBREG_WORD (x), GET_MODE (x));
	}

      REG_IN_TABLE (i) = REG_TICK (i);
      return 0;
    }
  /* If X is a comparison or a COMPARE and either operand is a register
     that does not have a quantity, give it one.  This is so that a later
     call to record_jump_equiv won't cause X to be assigned a different
     hash code and not found in the table after that call.

     It is not necessary to do this here, since rehash_using_reg can
     fix up the table later, but doing this here eliminates the need to
     call that expensive function in the most common case where the only
     use of the register is in the comparison.  */

  if (code == COMPARE || GET_RTX_CLASS (code) == '<')
    {
      if (GET_CODE (XEXP (x, 0)) == REG
	  && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
	if (insert_regs (XEXP (x, 0), NULL_PTR, 0))
	  {
	    rehash_using_reg (XEXP (x, 0));
	    changed = 1;
	  }

      if (GET_CODE (XEXP (x, 1)) == REG
	  && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
	if (insert_regs (XEXP (x, 1), NULL_PTR, 0))
	  {
	    rehash_using_reg (XEXP (x, 1));
	    changed = 1;
	  }
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      changed |= mention_regs (XEXP (x, i));
    else if (fmt[i] == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
	changed |= mention_regs (XVECEXP (x, i, j));

  return changed;
}
/* Update the register quantities for inserting X into the hash table
   with a value equivalent to CLASSP.
   (If the class does not contain a REG, it is irrelevant.)
   If MODIFIED is nonzero, X is a destination; it is being modified.
   Note that delete_reg_equiv should be called on a register
   before insert_regs is done on that register with MODIFIED != 0.

   Nonzero value means that elements of reg_qty have changed
   so X's hash code may be different.  */

static int
insert_regs (x, classp, modified)
     rtx x;
     struct table_elt *classp;
     int modified;
{
  if (GET_CODE (x) == REG)
    {
      register int regno = REGNO (x);
      register int qty_valid;

      /* If REGNO is in the equivalence table already but is of the
	 wrong mode for that equivalence, don't do anything here.  */

      qty_valid = REGNO_QTY_VALID_P (regno);
      if (qty_valid)
	{
	  struct qty_table_elem *ent = &qty_table[REG_QTY (regno)];

	  if (ent->mode != GET_MODE (x))
	    return 0;
	}

      if (modified || ! qty_valid)
	{
	  if (classp)
	    for (classp = classp->first_same_value;
		 classp != 0;
		 classp = classp->next_same_value)
	      if (GET_CODE (classp->exp) == REG
		  && GET_MODE (classp->exp) == GET_MODE (x))
		{
		  make_regs_eqv (regno, REGNO (classp->exp));
		  return 1;
		}

	  make_new_qty (regno, GET_MODE (x));
	  return 1;
	}

      return 0;
    }

  /* If X is a SUBREG, we will likely be inserting the inner register in the
     table.  If that register doesn't have an assigned quantity number at
     this point but does later, the insertion that we will be doing now will
     not be accessible because its hash code will have changed.  So assign
     a quantity number now.  */

  else if (GET_CODE (x) == SUBREG && GET_CODE (SUBREG_REG (x)) == REG
	   && ! REGNO_QTY_VALID_P (REGNO (SUBREG_REG (x))))
    {
      int regno = REGNO (SUBREG_REG (x));

      insert_regs (SUBREG_REG (x), NULL_PTR, 0);
      /* Mention_regs checks if REG_TICK is exactly one larger than
	 REG_IN_TABLE to find out if there was only a single preceding
	 invalidation - for the SUBREG - or another one, which would be
	 for the full register.  Since we don't invalidate the SUBREG
	 here first, we might have to bump up REG_TICK so that mention_regs
	 will do the right thing.  */
      if (REG_IN_TABLE (regno) >= 0
	  && REG_TICK (regno) == REG_IN_TABLE (regno) + 1)
	REG_TICK (regno)++;
      mention_regs (x);
      return 1;
    }

  else
    return mention_regs (x);
}
/* Look in or update the hash table.  */

/* Remove table element ELT from use in the table.
   HASH is its hash code, made using the HASH macro.
   It's an argument because often that is known in advance
   and we save much time not recomputing it.  */

static void
remove_from_table (elt, hash)
     register struct table_elt *elt;
     unsigned hash;
{
  if (elt == 0)
    return;

  /* Mark this element as removed.  See cse_insn.  */
  elt->first_same_value = 0;

  /* Remove the table element from its equivalence class.  */

  {
    register struct table_elt *prev = elt->prev_same_value;
    register struct table_elt *next = elt->next_same_value;

    if (next) next->prev_same_value = prev;

    if (prev)
      prev->next_same_value = next;
    else
      {
	register struct table_elt *newfirst = next;
	while (next)
	  {
	    next->first_same_value = newfirst;
	    next = next->next_same_value;
	  }
      }
  }

  /* Remove the table element from its hash bucket.  */

  {
    register struct table_elt *prev = elt->prev_same_hash;
    register struct table_elt *next = elt->next_same_hash;

    if (next) next->prev_same_hash = prev;

    if (prev)
      prev->next_same_hash = next;
    else if (table[hash] == elt)
      table[hash] = next;
    else
      {
	/* This entry is not in the proper hash bucket.  This can happen
	   when two classes were merged by `merge_equiv_classes'.  Search
	   for the hash bucket that it heads.  This happens only very
	   rarely, so the cost is acceptable.  */
	for (hash = 0; hash < HASH_SIZE; hash++)
	  if (table[hash] == elt)
	    table[hash] = next;
      }
  }

  /* Remove the table element from its related-value circular chain.  */

  if (elt->related_value != 0 && elt->related_value != elt)
    {
      register struct table_elt *p = elt->related_value;
      while (p->related_value != elt)
	p = p->related_value;
      p->related_value = elt->related_value;
      if (p->related_value == p)
	p->related_value = 0;
    }

  /* Now add it to the free element chain.  */
  elt->next_same_hash = free_element_chain;
  free_element_chain = elt;
}
/* Look up X in the hash table and return its table element,
   or 0 if X is not in the table.

   MODE is the machine-mode of X, or if X is an integer constant
   with VOIDmode then MODE is the mode with which X will be used.

   Here we are satisfied to find an expression whose tree structure
   looks like X.  */

static struct table_elt *
lookup (x, hash, mode)
     rtx x;
     unsigned hash;
     enum machine_mode mode;
{
  register struct table_elt *p;

  for (p = table[hash]; p; p = p->next_same_hash)
    if (mode == p->mode && ((x == p->exp && GET_CODE (x) == REG)
			    || exp_equiv_p (x, p->exp, GET_CODE (x) != REG, 0)))
      return p;

  return 0;
}
/* Like `lookup' but don't care whether the table element uses invalid regs.
   Also ignore discrepancies in the machine mode of a register.  */

static struct table_elt *
lookup_for_remove (x, hash, mode)
     rtx x;
     unsigned hash;
     enum machine_mode mode;
{
  register struct table_elt *p;

  if (GET_CODE (x) == REG)
    {
      int regno = REGNO (x);

      /* Don't check the machine mode when comparing registers;
	 invalidating (REG:SI 0) also invalidates (REG:DF 0).  */
      for (p = table[hash]; p; p = p->next_same_hash)
	if (GET_CODE (p->exp) == REG
	    && REGNO (p->exp) == regno)
	  return p;
    }
  else
    {
      for (p = table[hash]; p; p = p->next_same_hash)
	if (mode == p->mode && (x == p->exp || exp_equiv_p (x, p->exp, 0, 0)))
	  return p;
    }

  return 0;
}
/* Look for an expression equivalent to X and with code CODE.
   If one is found, return that expression.  */

static rtx
lookup_as_function (x, code)
     rtx x;
     enum rtx_code code;
{
  register struct table_elt *p = lookup (x, safe_hash (x, VOIDmode) & HASH_MASK,
					 GET_MODE (x));
  /* If we are looking for a CONST_INT, the mode doesn't really matter, as
     long as we are narrowing.  So if we looked in vain for a mode narrower
     than word_mode before, look for word_mode now.  */
  if (p == 0 && code == CONST_INT
      && GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (word_mode))
    {
      x = copy_rtx (x);
      PUT_MODE (x, word_mode);
      p = lookup (x, safe_hash (x, VOIDmode) & HASH_MASK, word_mode);
    }

  if (p == 0)
    return 0;

  for (p = p->first_same_value; p; p = p->next_same_value)
    {
      if (GET_CODE (p->exp) == code
	  /* Make sure this is a valid entry in the table.  */
	  && exp_equiv_p (p->exp, p->exp, 1, 0))
	return p->exp;
    }

  return 0;
}
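
/* A condensed usage sketch (hypothetical call site): a caller can ask,
   e.g. with
     lookup_as_function (x, CONST_INT)
   whether anything equivalent to X is known to be a constant integer;
   X's equivalence class is scanned for an element whose rtx code
   matches.  */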
/* Insert X in the hash table, assuming HASH is its hash code
   and CLASSP is an element of the class it should go in
   (or 0 if a new class should be made).
   It is inserted at the proper position to keep the class in
   the order cheapest first.

   MODE is the machine-mode of X, or if X is an integer constant
   with VOIDmode then MODE is the mode with which X will be used.

   For elements of equal cheapness, the most recent one
   goes in front, except that the first element in the list
   remains first unless a cheaper element is added.  The order of
   pseudo-registers does not matter, as canon_reg will be called to
   find the cheapest when a register is retrieved from the table.

   The in_memory field in the hash table element is set to 0.
   The caller must set it nonzero if appropriate.

   You should call insert_regs (X, CLASSP, MODIFY) before calling here,
   and if insert_regs returns a nonzero value
   you must then recompute its hash code before calling here.

   If necessary, update table showing constant values of quantities.  */

#define CHEAPER(X,Y) ((X)->cost < (Y)->cost)

static struct table_elt *
insert (x, classp, hash, mode)
     register rtx x;
     register struct table_elt *classp;
     unsigned hash;
     enum machine_mode mode;
{
  register struct table_elt *elt;

  /* If X is a register and we haven't made a quantity for it,
     something is wrong.  */
  if (GET_CODE (x) == REG && ! REGNO_QTY_VALID_P (REGNO (x)))
    abort ();

  /* If X is a hard register, show it is being put in the table.  */
  if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
    {
      int regno = REGNO (x);
      int endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
      int i;

      for (i = regno; i < endregno; i++)
	SET_HARD_REG_BIT (hard_regs_in_table, i);
    }

  /* If X is a label, show we recorded it.  */
  if (GET_CODE (x) == LABEL_REF
      || (GET_CODE (x) == CONST && GET_CODE (XEXP (x, 0)) == PLUS
	  && GET_CODE (XEXP (XEXP (x, 0), 0)) == LABEL_REF))
    recorded_label_ref = 1;

  /* Put an element for X into the right hash bucket.  */

  elt = free_element_chain;
  if (elt)
    {
      free_element_chain = elt->next_same_hash;
    }
  else
    {
      n_elements_made++;
      elt = (struct table_elt *) oballoc (sizeof (struct table_elt));
    }

  elt->exp = x;
  elt->cost = COST (x);
  elt->next_same_value = 0;
  elt->prev_same_value = 0;
  elt->next_same_hash = table[hash];
  elt->prev_same_hash = 0;
  elt->related_value = 0;
  elt->in_memory = 0;
  elt->mode = mode;
  elt->is_const = (CONSTANT_P (x)
		   /* GNU C++ takes advantage of this for `this'
		      (and other const values).  */
		   || (RTX_UNCHANGING_P (x)
		       && GET_CODE (x) == REG
		       && REGNO (x) >= FIRST_PSEUDO_REGISTER)
		   || FIXED_BASE_PLUS_P (x));
  if (table[hash])
    table[hash]->prev_same_hash = elt;
  table[hash] = elt;

  /* Put it into the proper value-class.  */
  if (classp)
    {
      classp = classp->first_same_value;
      if (CHEAPER (elt, classp))
	/* Insert at the head of the class.  */
	{
	  register struct table_elt *p;
	  elt->next_same_value = classp;
	  classp->prev_same_value = elt;
	  elt->first_same_value = elt;

	  for (p = classp; p; p = p->next_same_value)
	    p->first_same_value = elt;
	}
      else
	{
	  /* Insert not at head of the class.  */
	  /* Put it after the last element cheaper than X.  */
	  register struct table_elt *p, *next;
	  for (p = classp; (next = p->next_same_value) && CHEAPER (next, elt);
	       p = next);
	  /* Put it after P and before NEXT.  */
	  elt->next_same_value = next;
	  if (next)
	    next->prev_same_value = elt;
	  elt->prev_same_value = p;
	  p->next_same_value = elt;
	  elt->first_same_value = classp;
	}
    }
  else
    elt->first_same_value = elt;
  /* If this is a constant being set equivalent to a register or a register
     being set equivalent to a constant, note the constant equivalence.

     If this is a constant, it cannot be equivalent to a different constant,
     and a constant is the only thing that can be cheaper than a register.  So
     we know the register is the head of the class (before the constant was
     inserted).

     If this is a register that is not already known equivalent to a
     constant, we must check the entire class.

     If this is a register that is already known equivalent to an insn,
     update the qtys `const_insn' to show that `this_insn' is the latest
     insn making that quantity equivalent to the constant.  */

  if (elt->is_const && classp && GET_CODE (classp->exp) == REG
      && GET_CODE (x) != REG)
    {
      int exp_q = REG_QTY (REGNO (classp->exp));
      struct qty_table_elem *exp_ent = &qty_table[exp_q];

      exp_ent->const_rtx = gen_lowpart_if_possible (exp_ent->mode, x);
      exp_ent->const_insn = this_insn;
    }

  else if (GET_CODE (x) == REG
	   && classp
	   && ! qty_table[REG_QTY (REGNO (x))].const_rtx
	   && ! elt->is_const)
    {
      register struct table_elt *p;

      for (p = classp; p != 0; p = p->next_same_value)
	{
	  if (p->is_const && GET_CODE (p->exp) != REG)
	    {
	      int x_q = REG_QTY (REGNO (x));
	      struct qty_table_elem *x_ent = &qty_table[x_q];

	      x_ent->const_rtx = gen_lowpart_if_possible (GET_MODE (x), p->exp);
	      x_ent->const_insn = this_insn;
	      break;
	    }
	}
    }

  else if (GET_CODE (x) == REG
	   && qty_table[REG_QTY (REGNO (x))].const_rtx
	   && GET_MODE (x) == qty_table[REG_QTY (REGNO (x))].mode)
    qty_table[REG_QTY (REGNO (x))].const_insn = this_insn;

  /* If this is a constant with symbolic value,
     and it has a term with an explicit integer value,
     link it up with related expressions.  */
  if (GET_CODE (x) == CONST)
    {
      rtx subexp = get_related_value (x);
      unsigned subhash;
      struct table_elt *subelt, *subelt_prev;

      if (subexp != 0)
	{
	  /* Get the integer-free subexpression in the hash table.  */
	  subhash = safe_hash (subexp, mode) & HASH_MASK;
	  subelt = lookup (subexp, subhash, mode);
	  if (subelt == 0)
	    subelt = insert (subexp, NULL_PTR, subhash, mode);
	  /* Initialize SUBELT's circular chain if it has none.  */
	  if (subelt->related_value == 0)
	    subelt->related_value = subelt;
	  /* Find the element in the circular chain that precedes SUBELT.  */
	  subelt_prev = subelt;
	  while (subelt_prev->related_value != subelt)
	    subelt_prev = subelt_prev->related_value;
	  /* Put new ELT into SUBELT's circular chain just before SUBELT.
	     This way the element that follows SUBELT is the oldest one.  */
	  elt->related_value = subelt_prev->related_value;
	  subelt_prev->related_value = elt;
	}
    }

  return elt;
}
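
/* A condensed sketch of the expected call protocol (not a verbatim
   excerpt from this file):

     if (insert_regs (x, classp, 0))
       hash = HASH (x, mode);	/+ hash code may have changed +/
     elt = insert (x, classp, hash, mode);

   This mirrors the comment above insert: call insert_regs first, rehash
   if it returns nonzero, then insert.  */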
/* Given two equivalence classes, CLASS1 and CLASS2, put all the entries from
   CLASS2 into CLASS1.  This is done when we have reached an insn which makes
   the two classes equivalent.

   CLASS1 will be the surviving class; CLASS2 should not be used after this
   call.

   Any invalid entries in CLASS2 will not be copied.  */

static void
merge_equiv_classes (class1, class2)
     struct table_elt *class1, *class2;
{
  struct table_elt *elt, *next, *new;

  /* Ensure we start with the head of the classes.  */
  class1 = class1->first_same_value;
  class2 = class2->first_same_value;

  /* If they were already equal, forget it.  */
  if (class1 == class2)
    return;

  for (elt = class2; elt; elt = next)
    {
      unsigned hash;
      rtx exp = elt->exp;
      enum machine_mode mode = elt->mode;

      next = elt->next_same_value;

      /* Remove old entry, make a new one in CLASS1's class.
	 Don't do this for invalid entries as we cannot find their
	 hash code (it also isn't necessary).  */
      if (GET_CODE (exp) == REG || exp_equiv_p (exp, exp, 1, 0))
	{
	  hash_arg_in_memory = 0;
	  hash = HASH (exp, mode);

	  if (GET_CODE (exp) == REG)
	    delete_reg_equiv (REGNO (exp));

	  remove_from_table (elt, hash);

	  if (insert_regs (exp, class1, 0))
	    {
	      rehash_using_reg (exp);
	      hash = HASH (exp, mode);
	    }
	  new = insert (exp, class1, hash, mode);
	  new->in_memory = hash_arg_in_memory;
	}
    }
}
/* Flush the entire hash table.  */

static void
flush_hash_table ()
{
  int i;
  struct table_elt *p;

  for (i = 0; i < HASH_SIZE; i++)
    for (p = table[i]; p; p = table[i])
      {
	/* Note that invalidate can remove elements
	   after P in the current hash chain.  */
	if (GET_CODE (p->exp) == REG)
	  invalidate (p->exp, p->mode);
	else
	  remove_from_table (p, i);
      }
}
1709 /* Remove from the hash table, or mark as invalid, all expressions whose
1710 values could be altered by storing in X. X is a register, a subreg, or
1711 a memory reference with nonvarying address (because, when a memory
1712 reference with a varying address is stored in, all memory references are
1713 removed by invalidate_memory so specific invalidation is superfluous).
1714 FULL_MODE, if not VOIDmode, indicates that this much should be
1715 invalidated instead of just the amount indicated by the mode of X. This
1716 is only used for bitfield stores into memory.
1718 A nonvarying address may be just a register or just a symbol reference,
1719 or it may be either of those plus a numeric offset. */
1721 static void
1722 invalidate (x, full_mode)
1723 rtx x;
1724 enum machine_mode full_mode;
1726 register int i;
1727 register struct table_elt *p;
1729 switch (GET_CODE (x))
1731 case REG:
1733 /* If X is a register, dependencies on its contents are recorded
1734 through the qty number mechanism. Just change the qty number of
1735 the register, mark it as invalid for expressions that refer to it,
1736 and remove the register itself. */
1737 register int regno = REGNO (x);
1738 register unsigned hash = HASH (x, GET_MODE (x));
1740 /* Remove REGNO from any quantity list it might be on and indicate
1741 that its value might have changed. If it is a pseudo, remove its
1742 entry from the hash table.
1744 For a hard register, we do the first two actions above for any
1745 additional hard registers corresponding to X. Then, if any of these
1746 registers are in the table, we must remove any REG entries that
1747 overlap these registers. */
1749 delete_reg_equiv (regno);
1750 REG_TICK (regno)++;
1752 if (regno >= FIRST_PSEUDO_REGISTER)
1754 /* Because a register can be referenced in more than one mode,
1755 we might have to remove more than one table entry. */
1756 struct table_elt *elt;
1758 while ((elt = lookup_for_remove (x, hash, GET_MODE (x))))
1759 remove_from_table (elt, hash);
1761 else
1763 HOST_WIDE_INT in_table
1764 = TEST_HARD_REG_BIT (hard_regs_in_table, regno);
1765 int endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
1766 int tregno, tendregno;
1767 register struct table_elt *p, *next;
1769 CLEAR_HARD_REG_BIT (hard_regs_in_table, regno);
1771 for (i = regno + 1; i < endregno; i++)
1773 in_table |= TEST_HARD_REG_BIT (hard_regs_in_table, i);
1774 CLEAR_HARD_REG_BIT (hard_regs_in_table, i);
1775 delete_reg_equiv (i);
1776 REG_TICK (i)++;
1779 if (in_table)
1780 for (hash = 0; hash < HASH_SIZE; hash++)
1781 for (p = table[hash]; p; p = next)
1783 next = p->next_same_hash;
1785 if (GET_CODE (p->exp) != REG
1786 || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
1787 continue;
1789 tregno = REGNO (p->exp);
1790 tendregno
1791 = tregno + HARD_REGNO_NREGS (tregno, GET_MODE (p->exp));
1792 if (tendregno > regno && tregno < endregno)
1793 remove_from_table (p, hash);
1797 return;
1799 case SUBREG:
1800 invalidate (SUBREG_REG (x), VOIDmode);
1801 return;
1803 case PARALLEL:
1804 for (i = XVECLEN (x, 0) - 1; i >= 0 ; --i)
1805 invalidate (XVECEXP (x, 0, i), VOIDmode);
1806 return;
1808 case EXPR_LIST:
1809 /* This is part of a disjoint return value; extract the location in
1810 question ignoring the offset. */
1811 invalidate (XEXP (x, 0), VOIDmode);
1812 return;
1814 case MEM:
1815 /* Remove all hash table elements that refer to overlapping pieces of
1816 memory. */
1817 if (full_mode == VOIDmode)
1818 full_mode = GET_MODE (x);
1820 for (i = 0; i < HASH_SIZE; i++)
1822 register struct table_elt *next;
1824 for (p = table[i]; p; p = next)
1826 next = p->next_same_hash;
1827 if (p->in_memory
1828 && (GET_CODE (p->exp) != MEM
1829 || true_dependence (x, full_mode, p->exp,
1830 cse_rtx_varies_p)))
1831 remove_from_table (p, i);
1834 return;
1836 default:
1837 abort ();
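/* The hard-register case above boils down to a half-open interval
   overlap test: X occupies [regno, endregno) and a table entry occupies
   [tregno, tendregno); they share a register exactly when each range
   starts before the other ends.  A stand-alone sketch:  */
#if 0
static int
hard_reg_ranges_overlap (regno, endregno, tregno, tendregno)
     int regno, endregno, tregno, tendregno;
{
  return tendregno > regno && tregno < endregno;
}
#endif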
1841 /* Remove all expressions that refer to register REGNO,
1842 since they are already invalid, and we are about to
1843 mark that register valid again and don't want the old
1844 expressions to reappear as valid. */
1846 static void
1847 remove_invalid_refs (regno)
1848 int regno;
1850 register int i;
1851 register struct table_elt *p, *next;
1853 for (i = 0; i < HASH_SIZE; i++)
1854 for (p = table[i]; p; p = next)
1856 next = p->next_same_hash;
1857 if (GET_CODE (p->exp) != REG
1858 && refers_to_regno_p (regno, regno + 1, p->exp, NULL_PTR))
1859 remove_from_table (p, i);
1863 /* Likewise, remove expressions that refer to a SUBREG of register REGNO with SUBREG_WORD WORD and mode MODE. */
1864 static void
1865 remove_invalid_subreg_refs (regno, word, mode)
1866 int regno;
1867 int word;
1868 enum machine_mode mode;
1870 register int i;
1871 register struct table_elt *p, *next;
1872 int end = word + (GET_MODE_SIZE (mode) - 1) / UNITS_PER_WORD;
1874 for (i = 0; i < HASH_SIZE; i++)
1875 for (p = table[i]; p; p = next)
1877 rtx exp;
1878 next = p->next_same_hash;
1880 exp = p->exp;
1881 if (GET_CODE (p->exp) != REG
1882 && (GET_CODE (exp) != SUBREG
1883 || GET_CODE (SUBREG_REG (exp)) != REG
1884 || REGNO (SUBREG_REG (exp)) != regno
1885 || (((SUBREG_WORD (exp)
1886 + (GET_MODE_SIZE (GET_MODE (exp)) - 1) / UNITS_PER_WORD)
1887 >= word)
1888 && SUBREG_WORD (exp) <= end))
1889 && refers_to_regno_p (regno, regno + 1, p->exp, NULL_PTR))
1890 remove_from_table (p, i);
1894 /* Recompute the hash codes of any valid entries in the hash table that
1895 reference X, if X is a register, or SUBREG_REG (X) if X is a SUBREG.
1897 This is called when we make a jump equivalence. */
1899 static void
1900 rehash_using_reg (x)
1901 rtx x;
1903 unsigned int i;
1904 struct table_elt *p, *next;
1905 unsigned hash;
1907 if (GET_CODE (x) == SUBREG)
1908 x = SUBREG_REG (x);
1910 /* If X is not a register or if the register is known not to be in any
1911 valid entries in the table, we have no work to do. */
1913 if (GET_CODE (x) != REG
1914 || REG_IN_TABLE (REGNO (x)) < 0
1915 || REG_IN_TABLE (REGNO (x)) != REG_TICK (REGNO (x)))
1916 return;
1918 /* Scan all hash chains looking for valid entries that mention X.
1919 If we find one and it is in the wrong hash chain, move it. We can skip
1920 objects that are registers, since they are handled specially. */
1922 for (i = 0; i < HASH_SIZE; i++)
1923 for (p = table[i]; p; p = next)
1925 next = p->next_same_hash;
1926 if (GET_CODE (p->exp) != REG && reg_mentioned_p (x, p->exp)
1927 && exp_equiv_p (p->exp, p->exp, 1, 0)
1928 && i != (hash = safe_hash (p->exp, p->mode) & HASH_MASK))
1930 if (p->next_same_hash)
1931 p->next_same_hash->prev_same_hash = p->prev_same_hash;
1933 if (p->prev_same_hash)
1934 p->prev_same_hash->next_same_hash = p->next_same_hash;
1935 else
1936 table[i] = p->next_same_hash;
1938 p->next_same_hash = table[hash];
1939 p->prev_same_hash = 0;
1940 if (table[hash])
1941 table[hash]->prev_same_hash = p;
1942 table[hash] = p;
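/* A minimal sketch of the chain surgery above, assuming a hypothetical
   doubly linked node type: unlink P from the chain headed at OLD_HEAD
   and push it on the front of the chain headed at NEW_HEAD.  */
#if 0
struct el { struct el *next_same_hash, *prev_same_hash; };

static void
move_to_chain (old_head, new_head, p)
     struct el **old_head, **new_head, *p;
{
  if (p->next_same_hash)
    p->next_same_hash->prev_same_hash = p->prev_same_hash;
  if (p->prev_same_hash)
    p->prev_same_hash->next_same_hash = p->next_same_hash;
  else
    *old_head = p->next_same_hash;

  p->next_same_hash = *new_head;
  p->prev_same_hash = 0;
  if (*new_head)
    (*new_head)->prev_same_hash = p;
  *new_head = p;
}
#endif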
1947 /* Remove from the hash table any expression that is a call-clobbered
1948 register. Also update the TICK values of those registers. */
1950 static void
1951 invalidate_for_call ()
1953 int regno, endregno;
1954 int i;
1955 unsigned hash;
1956 struct table_elt *p, *next;
1957 int in_table = 0;
1959 /* Go through all the hard registers. For each that is clobbered in
1960 a CALL_INSN, remove the register from quantity chains and update
1961 reg_tick if defined. Also see if any of these registers is currently
1962 in the table. */
1964 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1965 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
1967 delete_reg_equiv (regno);
1968 if (REG_TICK (regno) >= 0)
1969 REG_TICK (regno)++;
1971 in_table |= (TEST_HARD_REG_BIT (hard_regs_in_table, regno) != 0);
1974 /* In the case where we have no call-clobbered hard registers in the
1975 table, we are done. Otherwise, scan the table and remove any
1976 entry that overlaps a call-clobbered register. */
1978 if (in_table)
1979 for (hash = 0; hash < HASH_SIZE; hash++)
1980 for (p = table[hash]; p; p = next)
1982 next = p->next_same_hash;
1984 if (GET_CODE (p->exp) != REG
1985 || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
1986 continue;
1988 regno = REGNO (p->exp);
1989 endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (p->exp));
1991 for (i = regno; i < endregno; i++)
1992 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
1994 remove_from_table (p, hash);
1995 break;
2000 /* Given an expression X of type CONST,
2001 and ELT which is its table entry (or 0 if it
2002 is not in the hash table),
2003 return an alternate expression for X as a register plus integer.
2004 If none can be found, return 0. */
2006 static rtx
2007 use_related_value (x, elt)
2008 rtx x;
2009 struct table_elt *elt;
2011 register struct table_elt *relt = 0;
2012 register struct table_elt *p, *q;
2013 HOST_WIDE_INT offset;
2015 /* First, is there anything related known?
2016 If we have a table element, we can tell from that.
2017 Otherwise, must look it up. */
2019 if (elt != 0 && elt->related_value != 0)
2020 relt = elt;
2021 else if (elt == 0 && GET_CODE (x) == CONST)
2023 rtx subexp = get_related_value (x);
2024 if (subexp != 0)
2025 relt = lookup (subexp,
2026 safe_hash (subexp, GET_MODE (subexp)) & HASH_MASK,
2027 GET_MODE (subexp));
2030 if (relt == 0)
2031 return 0;
2033 /* Search all related table entries for one that has an
2034 equivalent register. */
2036 p = relt;
2037 while (1)
2039 /* This loop is strange in that it is executed in two different cases.
2040 The first is when X is already in the table. Then it is searching
2041 the RELATED_VALUE list of X's class (RELT). The second case is when
2042 X is not in the table. Then RELT points to a class for the related
2043 value.
2045 Ensure that, whatever case we are in, we ignore classes that have
2046 the same value as X. */
2048 if (rtx_equal_p (x, p->exp))
2049 q = 0;
2050 else
2051 for (q = p->first_same_value; q; q = q->next_same_value)
2052 if (GET_CODE (q->exp) == REG)
2053 break;
2055 if (q)
2056 break;
2058 p = p->related_value;
2060 /* We went all the way around, so there is nothing to be found.
2061 Alternatively, perhaps RELT was in the table for some other reason
2062 and it has no related values recorded. */
2063 if (p == relt || p == 0)
2064 break;
2067 if (q == 0)
2068 return 0;
2070 offset = (get_integer_term (x) - get_integer_term (p->exp));
2071 /* Note: OFFSET may be 0 if P->exp and X are related by commutativity. */
2072 return plus_constant (q->exp, offset);
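/* A worked example, with hypothetical register numbers: if X is
   (const (plus (symbol_ref "a") (const_int 12))) and the table already
   knows that (const (plus (symbol_ref "a") (const_int 4))) lives in
   (reg 55), the related-value ring lets us return
   (plus (reg 55) (const_int 8)), since 12 - 4 = 8.  */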
2075 /* Hash an rtx. We are careful to make sure the value is never negative.
2076 Equivalent registers hash identically.
2077 MODE is used in hashing for CONST_INTs only;
2078 otherwise the mode of X is used.
2080 Store 1 in do_not_record if any subexpression is volatile.
2082 Store 1 in hash_arg_in_memory if X contains a MEM rtx
2083 which does not have the RTX_UNCHANGING_P bit set.
2085 Note that cse_insn knows that the hash code of a MEM expression
2086 is just (int) MEM plus the hash code of the address. */
2088 static unsigned
2089 canon_hash (x, mode)
2090 rtx x;
2091 enum machine_mode mode;
2093 register int i, j;
2094 register unsigned hash = 0;
2095 register enum rtx_code code;
2096 register const char *fmt;
2098 /* repeat is used to turn tail-recursion into iteration. */
2099 repeat:
2100 if (x == 0)
2101 return hash;
2103 code = GET_CODE (x);
2104 switch (code)
2106 case REG:
2108 register int regno = REGNO (x);
2110 /* On some machines, we can't record any non-fixed hard register,
2111 because extending its life will cause reload problems. We
2112 consider ap, fp, and sp to be fixed for this purpose.
2114 We also consider CCmode registers to be fixed for this purpose;
2115 failure to do so leads to a failure to simplify conditionals
2116 of the 0<100 type.
2118 On all machines, we can't record any global registers. */
2120 if (regno < FIRST_PSEUDO_REGISTER
2121 && (global_regs[regno]
2122 || (SMALL_REGISTER_CLASSES
2123 && ! fixed_regs[regno]
2124 && regno != FRAME_POINTER_REGNUM
2125 && regno != HARD_FRAME_POINTER_REGNUM
2126 && regno != ARG_POINTER_REGNUM
2127 && regno != STACK_POINTER_REGNUM
2128 && GET_MODE_CLASS (GET_MODE (x)) != MODE_CC)))
2130 do_not_record = 1;
2131 return 0;
2133 hash += ((unsigned) REG << 7) + (unsigned) REG_QTY (regno);
2134 return hash;
2137 /* We handle SUBREG of a REG specially because the underlying
2138 reg changes its hash value with every value change; we don't
2139 want to have to forget unrelated subregs when one subreg changes. */
2140 case SUBREG:
2142 if (GET_CODE (SUBREG_REG (x)) == REG)
2144 hash += (((unsigned) SUBREG << 7)
2145 + REGNO (SUBREG_REG (x)) + SUBREG_WORD (x));
2146 return hash;
2148 break;
2151 case CONST_INT:
2153 unsigned HOST_WIDE_INT tem = INTVAL (x);
2154 hash += ((unsigned) CONST_INT << 7) + (unsigned) mode + tem;
2155 return hash;
2158 case CONST_DOUBLE:
2159 /* This is like the general case, except that it only counts
2160 the integers representing the constant. */
2161 hash += (unsigned) code + (unsigned) GET_MODE (x);
2162 if (GET_MODE (x) != VOIDmode)
2163 for (i = 2; i < GET_RTX_LENGTH (CONST_DOUBLE); i++)
2165 unsigned HOST_WIDE_INT tem = XWINT (x, i);
2166 hash += tem;
2168 else
2169 hash += ((unsigned) CONST_DOUBLE_LOW (x)
2170 + (unsigned) CONST_DOUBLE_HIGH (x));
2171 return hash;
2173 /* Assume there is only one rtx object for any given label. */
2174 case LABEL_REF:
2175 hash
2176 += ((unsigned) LABEL_REF << 7) + (unsigned long) XEXP (x, 0);
2177 return hash;
2179 case SYMBOL_REF:
2180 hash
2181 += ((unsigned) SYMBOL_REF << 7) + (unsigned long) XSTR (x, 0);
2182 return hash;
2184 case MEM:
2185 /* We don't record if marked volatile or if BLKmode since we don't
2186 know the size of the move. */
2187 if (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode)
2189 do_not_record = 1;
2190 return 0;
2192 if (! RTX_UNCHANGING_P (x) || FIXED_BASE_PLUS_P (XEXP (x, 0)))
2194 hash_arg_in_memory = 1;
2196 /* Now that we have already found this special case,
2197 might as well speed it up as much as possible. */
2198 hash += (unsigned) MEM;
2199 x = XEXP (x, 0);
2200 goto repeat;
2202 case PRE_DEC:
2203 case PRE_INC:
2204 case POST_DEC:
2205 case POST_INC:
2206 case PC:
2207 case CC0:
2208 case CALL:
2209 case UNSPEC_VOLATILE:
2210 do_not_record = 1;
2211 return 0;
2213 case ASM_OPERANDS:
2214 if (MEM_VOLATILE_P (x))
2216 do_not_record = 1;
2217 return 0;
2219 break;
2221 default:
2222 break;
2225 i = GET_RTX_LENGTH (code) - 1;
2226 hash += (unsigned) code + (unsigned) GET_MODE (x);
2227 fmt = GET_RTX_FORMAT (code);
2228 for (; i >= 0; i--)
2230 if (fmt[i] == 'e')
2232 rtx tem = XEXP (x, i);
2234 /* If we are about to do the last recursive call
2235 needed at this level, change it into iteration.
2236 This function is called enough to be worth it. */
2237 if (i == 0)
2239 x = tem;
2240 goto repeat;
2242 hash += canon_hash (tem, 0);
2244 else if (fmt[i] == 'E')
2245 for (j = 0; j < XVECLEN (x, i); j++)
2246 hash += canon_hash (XVECEXP (x, i, j), 0);
2247 else if (fmt[i] == 's')
2249 register unsigned char *p = (unsigned char *) XSTR (x, i);
2250 if (p)
2251 while (*p)
2252 hash += *p++;
2254 else if (fmt[i] == 'i')
2256 register unsigned tem = XINT (x, i);
2257 hash += tem;
2259 else if (fmt[i] == '0' || fmt[i] == 't')
2260 /* unused */;
2261 else
2262 abort ();
2264 return hash;
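/* A stand-alone sketch of the `goto repeat' idiom above, with a
   hypothetical tree type: recurse on all operands but one, and loop on
   the last operand visited (canon_hash scans FMT backwards, so that is
   operand 0).  A deep chain through first operands then costs no extra
   stack.  */
#if 0
struct tnode { int code, n_kids; struct tnode *kid[4]; };

static unsigned
tree_hash (t)
     struct tnode *t;
{
  unsigned h = 0;
  int i;

 repeat:
  if (t == 0)
    return h;
  h += (unsigned) t->code;
  for (i = t->n_kids - 1; i >= 0; i--)
    {
      if (i == 0)
	{
	  t = t->kid[0];	/* tail call turned into iteration */
	  goto repeat;
	}
      h += tree_hash (t->kid[i]);
    }
  return h;
}
#endif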
2267 /* Like canon_hash but with no side effects. */
2269 static unsigned
2270 safe_hash (x, mode)
2271 rtx x;
2272 enum machine_mode mode;
2274 int save_do_not_record = do_not_record;
2275 int save_hash_arg_in_memory = hash_arg_in_memory;
2276 unsigned hash = canon_hash (x, mode);
2277 hash_arg_in_memory = save_hash_arg_in_memory;
2278 do_not_record = save_do_not_record;
2279 return hash;
2282 /* Return 1 iff X and Y would canonicalize into the same thing,
2283 without actually constructing the canonicalization of either one.
2284 If VALIDATE is nonzero,
2285 we assume X is an expression being processed from the rtl
2286 and Y was found in the hash table. We check register refs
2287 in Y for being marked as valid.
2289 If EQUAL_VALUES is nonzero, we allow a register to match a constant value
2290 that is known to be in the register. Ordinarily, we don't allow them
2291 to match, because letting them match would cause unpredictable results
2292 in all the places that search a hash table chain for an equivalent
2293 for a given value. A possible equivalent that has different structure
2294 has its hash code computed from different data. Whether the hash code
2295 is the same as that of the given value is pure luck. */
2297 static int
2298 exp_equiv_p (x, y, validate, equal_values)
2299 rtx x, y;
2300 int validate;
2301 int equal_values;
2303 register int i, j;
2304 register enum rtx_code code;
2305 register const char *fmt;
2307 /* Note: it is incorrect to assume an expression is equivalent to itself
2308 if VALIDATE is nonzero. */
2309 if (x == y && !validate)
2310 return 1;
2311 if (x == 0 || y == 0)
2312 return x == y;
2314 code = GET_CODE (x);
2315 if (code != GET_CODE (y))
2317 if (!equal_values)
2318 return 0;
2320 /* If X is a constant and Y is a register or vice versa, they may be
2321 equivalent. We only have to validate if Y is a register. */
2322 if (CONSTANT_P (x) && GET_CODE (y) == REG
2323 && REGNO_QTY_VALID_P (REGNO (y)))
2325 int y_q = REG_QTY (REGNO (y));
2326 struct qty_table_elem *y_ent = &qty_table[y_q];
2328 if (GET_MODE (y) == y_ent->mode
2329 && rtx_equal_p (x, y_ent->const_rtx)
2330 && (! validate || REG_IN_TABLE (REGNO (y)) == REG_TICK (REGNO (y))))
2331 return 1;
2334 if (CONSTANT_P (y) && code == REG
2335 && REGNO_QTY_VALID_P (REGNO (x)))
2337 int x_q = REG_QTY (REGNO (x));
2338 struct qty_table_elem *x_ent = &qty_table[x_q];
2340 if (GET_MODE (x) == x_ent->mode
2341 && rtx_equal_p (y, x_ent->const_rtx))
2342 return 1;
2345 return 0;
2348 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
2349 if (GET_MODE (x) != GET_MODE (y))
2350 return 0;
2352 switch (code)
2354 case PC:
2355 case CC0:
2356 return x == y;
2358 case CONST_INT:
2359 return INTVAL (x) == INTVAL (y);
2361 case LABEL_REF:
2362 return XEXP (x, 0) == XEXP (y, 0);
2364 case SYMBOL_REF:
2365 return XSTR (x, 0) == XSTR (y, 0);
2367 case REG:
2369 int regno = REGNO (y);
2370 int endregno
2371 = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
2372 : HARD_REGNO_NREGS (regno, GET_MODE (y)));
2373 int i;
2375 /* If the quantities are not the same, the expressions are not
2376 equivalent. If they are and we are not to validate, they
2377 are equivalent. Otherwise, ensure all regs are up-to-date. */
2379 if (REG_QTY (REGNO (x)) != REG_QTY (regno))
2380 return 0;
2382 if (! validate)
2383 return 1;
2385 for (i = regno; i < endregno; i++)
2386 if (REG_IN_TABLE (i) != REG_TICK (i))
2387 return 0;
2389 return 1;
2392 /* For commutative operations, check both orders. */
2393 case PLUS:
2394 case MULT:
2395 case AND:
2396 case IOR:
2397 case XOR:
2398 case NE:
2399 case EQ:
2400 return ((exp_equiv_p (XEXP (x, 0), XEXP (y, 0), validate, equal_values)
2401 && exp_equiv_p (XEXP (x, 1), XEXP (y, 1),
2402 validate, equal_values))
2403 || (exp_equiv_p (XEXP (x, 0), XEXP (y, 1),
2404 validate, equal_values)
2405 && exp_equiv_p (XEXP (x, 1), XEXP (y, 0),
2406 validate, equal_values)));
2408 default:
2409 break;
2412 /* Compare the elements. If any pair of corresponding elements
2413 fails to match, return 0 for the whole thing. */
2415 fmt = GET_RTX_FORMAT (code);
2416 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2418 switch (fmt[i])
2420 case 'e':
2421 if (! exp_equiv_p (XEXP (x, i), XEXP (y, i), validate, equal_values))
2422 return 0;
2423 break;
2425 case 'E':
2426 if (XVECLEN (x, i) != XVECLEN (y, i))
2427 return 0;
2428 for (j = 0; j < XVECLEN (x, i); j++)
2429 if (! exp_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j),
2430 validate, equal_values))
2431 return 0;
2432 break;
2434 case 's':
2435 if (strcmp (XSTR (x, i), XSTR (y, i)))
2436 return 0;
2437 break;
2439 case 'i':
2440 if (XINT (x, i) != XINT (y, i))
2441 return 0;
2442 break;
2444 case 'w':
2445 if (XWINT (x, i) != XWINT (y, i))
2446 return 0;
2447 break;
2449 case '0':
2450 case 't':
2451 break;
2453 default:
2454 abort ();
2458 return 1;
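/* For instance, the commutative cases above make
   (plus:SI (reg 60) (reg 61)) and (plus:SI (reg 61) (reg 60)) compare
   as equivalent, by retrying the operands in the opposite order
   (the register numbers here are only illustrative).  */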
2461 /* Return 1 if X has a value that can vary even between two
2462 executions of the program. 0 means X can be compared reliably
2463 against certain constants or near-constants. */
2465 static int
2466 cse_rtx_varies_p (x)
2467 register rtx x;
2469 /* We need not check for X and the equivalence class being of the same
2470 mode because if X is equivalent to a constant in some mode, it
2471 doesn't vary in any mode. */
2473 if (GET_CODE (x) == REG
2474 && REGNO_QTY_VALID_P (REGNO (x)))
2476 int x_q = REG_QTY (REGNO (x));
2477 struct qty_table_elem *x_ent = &qty_table[x_q];
2479 if (GET_MODE (x) == x_ent->mode
2480 && x_ent->const_rtx != NULL_RTX)
2481 return 0;
2484 if (GET_CODE (x) == PLUS
2485 && GET_CODE (XEXP (x, 1)) == CONST_INT
2486 && GET_CODE (XEXP (x, 0)) == REG
2487 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
2489 int x0_q = REG_QTY (REGNO (XEXP (x, 0)));
2490 struct qty_table_elem *x0_ent = &qty_table[x0_q];
2492 if ((GET_MODE (XEXP (x, 0)) == x0_ent->mode)
2493 && x0_ent->const_rtx != NULL_RTX)
2494 return 0;
2497 /* This can happen as the result of virtual register instantiation, if
2498 the initial constant is too large to be a valid address. This gives
2499 us a three instruction sequence, load large offset into a register,
2500 load fp minus a constant into a register, then a MEM which is the
2501 sum of the two `constant' registers. */
2502 if (GET_CODE (x) == PLUS
2503 && GET_CODE (XEXP (x, 0)) == REG
2504 && GET_CODE (XEXP (x, 1)) == REG
2505 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0)))
2506 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
2508 int x0_q = REG_QTY (REGNO (XEXP (x, 0)));
2509 int x1_q = REG_QTY (REGNO (XEXP (x, 1)));
2510 struct qty_table_elem *x0_ent = &qty_table[x0_q];
2511 struct qty_table_elem *x1_ent = &qty_table[x1_q];
2513 if ((GET_MODE (XEXP (x, 0)) == x0_ent->mode)
2514 && x0_ent->const_rtx != NULL_RTX
2515 && (GET_MODE (XEXP (x, 1)) == x1_ent->mode)
2516 && x1_ent->const_rtx != NULL_RTX)
2517 return 0;
2520 return rtx_varies_p (x);
2523 /* Canonicalize an expression:
2524 replace each register reference inside it
2525 with the "oldest" equivalent register.
2527 If INSN is non-zero and we are replacing a pseudo with a hard register
2528 or vice versa, validate_change is used to ensure that INSN remains valid
2529 after we make our substitution. The calls are made with IN_GROUP non-zero
2530 so apply_change_group must be called upon the outermost return from this
2531 function (unless INSN is zero). The result of apply_change_group can
2532 generally be discarded since the changes we are making are optional. */
2534 static rtx
2535 canon_reg (x, insn)
2536 rtx x;
2537 rtx insn;
2539 register int i;
2540 register enum rtx_code code;
2541 register const char *fmt;
2543 if (x == 0)
2544 return x;
2546 code = GET_CODE (x);
2547 switch (code)
2549 case PC:
2550 case CC0:
2551 case CONST:
2552 case CONST_INT:
2553 case CONST_DOUBLE:
2554 case SYMBOL_REF:
2555 case LABEL_REF:
2556 case ADDR_VEC:
2557 case ADDR_DIFF_VEC:
2558 return x;
2560 case REG:
2562 register int first;
2563 register int q;
2564 register struct qty_table_elem *ent;
2566 /* Never replace a hard reg, because hard regs can appear
2567 in more than one machine mode, and we must preserve the mode
2568 of each occurrence. Also, some hard regs appear in
2569 MEMs that are shared and mustn't be altered. Don't try to
2570 replace any reg that maps to a reg of class NO_REGS. */
2571 if (REGNO (x) < FIRST_PSEUDO_REGISTER
2572 || ! REGNO_QTY_VALID_P (REGNO (x)))
2573 return x;
2575 q = REG_QTY (REGNO(x));
2576 ent = &qty_table[q];
2577 first = ent->first_reg;
2578 return (first >= FIRST_PSEUDO_REGISTER ? regno_reg_rtx[first]
2579 : REGNO_REG_CLASS (first) == NO_REGS ? x
2580 : gen_rtx_REG (ent->mode, first));
2583 default:
2584 break;
2587 fmt = GET_RTX_FORMAT (code);
2588 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2590 register int j;
2592 if (fmt[i] == 'e')
2594 rtx new = canon_reg (XEXP (x, i), insn);
2595 int insn_code;
2597 /* If replacing pseudo with hard reg or vice versa, ensure the
2598 insn remains valid. Likewise if the insn has MATCH_DUPs. */
2599 if (insn != 0 && new != 0
2600 && GET_CODE (new) == REG && GET_CODE (XEXP (x, i)) == REG
2601 && (((REGNO (new) < FIRST_PSEUDO_REGISTER)
2602 != (REGNO (XEXP (x, i)) < FIRST_PSEUDO_REGISTER))
2603 || (insn_code = recog_memoized (insn)) < 0
2604 || insn_data[insn_code].n_dups > 0))
2605 validate_change (insn, &XEXP (x, i), new, 1);
2606 else
2607 XEXP (x, i) = new;
2609 else if (fmt[i] == 'E')
2610 for (j = 0; j < XVECLEN (x, i); j++)
2611 XVECEXP (x, i, j) = canon_reg (XVECEXP (x, i, j), insn);
2614 return x;
2617 /* LOC is a location within INSN that is an operand address (the contents of
2618 a MEM). Find the best equivalent address to use that is valid for this
2619 insn.
2621 On most CISC machines, complicated address modes are costly, and rtx_cost
2622 is a good approximation for that cost. However, most RISC machines have
2623 only a few (usually only one) memory reference formats. If an address is
2624 valid at all, it is often just as cheap as any other address. Hence, for
2625 RISC machines, we use the configuration macro `ADDRESS_COST' to compare the
2626 costs of various addresses. For two addresses of equal cost, choose the one
2627 with the highest `rtx_cost' value as that has the potential of eliminating
2628 the most insns. For equal costs, we choose the first in the equivalence
2629 class. Note that we ignore the fact that pseudo registers are cheaper
2630 than hard registers here, because we would also prefer the pseudo registers. */
2633 static void
2634 find_best_addr (insn, loc)
2635 rtx insn;
2636 rtx *loc;
2638 struct table_elt *elt;
2639 rtx addr = *loc;
2640 #ifdef ADDRESS_COST
2641 struct table_elt *p;
2642 int found_better = 1;
2643 #endif
2644 int save_do_not_record = do_not_record;
2645 int save_hash_arg_in_memory = hash_arg_in_memory;
2646 int addr_volatile;
2647 int regno;
2648 unsigned hash;
2650 /* Do not try to replace constant addresses or addresses of local and
2651 argument slots. These MEM expressions are made only once and inserted
2652 in many instructions, as well as being used to control symbol table
2653 output. It is not safe to clobber them.
2655 There are some uncommon cases where the address is already in a register
2656 for some reason, but we cannot take advantage of that because we have
2657 no easy way to unshare the MEM. In addition, looking up all stack
2658 addresses is costly. */
2659 if ((GET_CODE (addr) == PLUS
2660 && GET_CODE (XEXP (addr, 0)) == REG
2661 && GET_CODE (XEXP (addr, 1)) == CONST_INT
2662 && (regno = REGNO (XEXP (addr, 0)),
2663 regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM
2664 || regno == ARG_POINTER_REGNUM))
2665 || (GET_CODE (addr) == REG
2666 && (regno = REGNO (addr), regno == FRAME_POINTER_REGNUM
2667 || regno == HARD_FRAME_POINTER_REGNUM
2668 || regno == ARG_POINTER_REGNUM))
2669 || GET_CODE (addr) == ADDRESSOF
2670 || CONSTANT_ADDRESS_P (addr))
2671 return;
2673 /* If this address is not simply a register, try to fold it. This will
2674 sometimes simplify the expression. Many simplifications
2675 will not be valid, but some, usually applying the associative rule, will
2676 be valid and produce better code. */
2677 if (GET_CODE (addr) != REG)
2679 rtx folded = fold_rtx (copy_rtx (addr), NULL_RTX);
2681 if (1
2682 #ifdef ADDRESS_COST
2683 && (CSE_ADDRESS_COST (folded) < CSE_ADDRESS_COST (addr)
2684 || (CSE_ADDRESS_COST (folded) == CSE_ADDRESS_COST (addr)
2685 && rtx_cost (folded, MEM) > rtx_cost (addr, MEM)))
2686 #else
2687 && rtx_cost (folded, MEM) < rtx_cost (addr, MEM)
2688 #endif
2689 && validate_change (insn, loc, folded, 0))
2690 addr = folded;
2693 /* If this address is not in the hash table, we can't look for equivalences
2694 of the whole address. Also, ignore if volatile. */
2696 do_not_record = 0;
2697 hash = HASH (addr, Pmode);
2698 addr_volatile = do_not_record;
2699 do_not_record = save_do_not_record;
2700 hash_arg_in_memory = save_hash_arg_in_memory;
2702 if (addr_volatile)
2703 return;
2705 elt = lookup (addr, hash, Pmode);
2707 #ifndef ADDRESS_COST
2708 if (elt)
2710 int our_cost = elt->cost;
2712 /* Find the lowest cost below ours that works. */
2713 for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
2714 if (elt->cost < our_cost
2715 && (GET_CODE (elt->exp) == REG
2716 || exp_equiv_p (elt->exp, elt->exp, 1, 0))
2717 && validate_change (insn, loc,
2718 canon_reg (copy_rtx (elt->exp), NULL_RTX), 0))
2719 return;
2721 #else
2723 if (elt)
2725 /* We need to find the best (under the criteria documented above) entry
2726 in the class that is valid. We use the `flag' field to indicate
2727 choices that were invalid and iterate until we can't find a better
2728 one that hasn't already been tried. */
2730 for (p = elt->first_same_value; p; p = p->next_same_value)
2731 p->flag = 0;
2733 while (found_better)
2735 int best_addr_cost = CSE_ADDRESS_COST (*loc);
2736 int best_rtx_cost = (elt->cost + 1) >> 1;
2737 struct table_elt *best_elt = elt;
2739 found_better = 0;
2740 for (p = elt->first_same_value; p; p = p->next_same_value)
2741 if (! p->flag)
2743 if ((GET_CODE (p->exp) == REG
2744 || exp_equiv_p (p->exp, p->exp, 1, 0))
2745 && (CSE_ADDRESS_COST (p->exp) < best_addr_cost
2746 || (CSE_ADDRESS_COST (p->exp) == best_addr_cost
2747 && (p->cost + 1) >> 1 > best_rtx_cost)))
2749 found_better = 1;
2750 best_addr_cost = CSE_ADDRESS_COST (p->exp);
2751 best_rtx_cost = (p->cost + 1) >> 1;
2752 best_elt = p;
2756 if (found_better)
2758 if (validate_change (insn, loc,
2759 canon_reg (copy_rtx (best_elt->exp),
2760 NULL_RTX), 0))
2761 return;
2762 else
2763 best_elt->flag = 1;
2768 /* If the address is a binary operation with the first operand a register
2769 and the second a constant, do the same as above, but looking for
2770 equivalences of the register. Then try to simplify before checking for
2771 the best address to use. This catches a few cases: First is when we
2772 have REG+const and the register is another REG+const. We can often merge
2773 the constants and eliminate one insn and one register. It may also be
2774 that a machine has a cheap REG+REG+const. Finally, this improves the
2775 code on the Alpha for unaligned byte stores. */
2777 if (flag_expensive_optimizations
2778 && (GET_RTX_CLASS (GET_CODE (*loc)) == '2'
2779 || GET_RTX_CLASS (GET_CODE (*loc)) == 'c')
2780 && GET_CODE (XEXP (*loc, 0)) == REG
2781 && GET_CODE (XEXP (*loc, 1)) == CONST_INT)
2783 rtx c = XEXP (*loc, 1);
2785 do_not_record = 0;
2786 hash = HASH (XEXP (*loc, 0), Pmode);
2787 do_not_record = save_do_not_record;
2788 hash_arg_in_memory = save_hash_arg_in_memory;
2790 elt = lookup (XEXP (*loc, 0), hash, Pmode);
2791 if (elt == 0)
2792 return;
2794 /* We need to find the best (under the criteria documented above) entry
2795 in the class that is valid. We use the `flag' field to indicate
2796 choices that were invalid and iterate until we can't find a better
2797 one that hasn't already been tried. */
2799 for (p = elt->first_same_value; p; p = p->next_same_value)
2800 p->flag = 0;
2802 while (found_better)
2804 int best_addr_cost = CSE_ADDRESS_COST (*loc);
2805 int best_rtx_cost = (COST (*loc) + 1) >> 1;
2806 struct table_elt *best_elt = elt;
2807 rtx best_rtx = *loc;
2808 int count;
2810 /* In the worst case this is an O(n^2) algorithm, so limit our search
2811 to the first 32 elements on the list. This avoids trouble
2812 compiling code with very long basic blocks that can easily
2813 call simplify_gen_binary so many times that we run out of
2814 memory. */
2816 found_better = 0;
2817 for (p = elt->first_same_value, count = 0;
2818 p && count < 32;
2819 p = p->next_same_value, count++)
2820 if (! p->flag
2821 && (GET_CODE (p->exp) == REG
2822 || exp_equiv_p (p->exp, p->exp, 1, 0)))
2824 rtx new = simplify_gen_binary (GET_CODE (*loc), Pmode,
2825 p->exp, c);
2827 if ((CSE_ADDRESS_COST (new) < best_addr_cost
2828 || (CSE_ADDRESS_COST (new) == best_addr_cost
2829 && (COST (new) + 1) >> 1 > best_rtx_cost)))
2831 found_better = 1;
2832 best_addr_cost = CSE_ADDRESS_COST (new);
2833 best_rtx_cost = (COST (new) + 1) >> 1;
2834 best_elt = p;
2835 best_rtx = new;
2839 if (found_better)
2841 if (validate_change (insn, loc,
2842 canon_reg (copy_rtx (best_rtx),
2843 NULL_RTX), 0))
2844 return;
2845 else
2846 best_elt->flag = 1;
2850 #endif
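/* A stand-alone sketch of the selection strategy used twice above,
   with hypothetical types and a hypothetical validation hook: keep
   picking the cheapest candidate not yet rejected; a candidate that
   fails validation is flagged so it is never retried.  */
#if 0
struct cand { struct cand *next; int cost, flag; };

static struct cand *
pick_best (list)
     struct cand *list;
{
  struct cand *p, *best;

  for (p = list; p; p = p->next)
    p->flag = 0;

  while (1)
    {
      best = 0;
      for (p = list; p; p = p->next)
	if (! p->flag && (best == 0 || p->cost < best->cost))
	  best = p;
      if (best == 0)
	return 0;		/* every candidate was rejected */
      if (try_candidate (best))	/* hypothetical validate_change analogue */
	return best;
      best->flag = 1;
    }
}
#endif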
2853 /* Given an operation (CODE, *PARG1, *PARG2), where code is a comparison
2854 operation (EQ, NE, GT, etc.), follow it back through the hash table
2855 to find out what values are actually being compared.
2857 *PARG1 and *PARG2 are updated to contain the rtx representing the values
2858 actually being compared. For example, if *PARG1 was (cc0) and *PARG2
2859 was (const_int 0), *PARG1 and *PARG2 will be set to the objects that were
2860 compared to produce cc0.
2862 The return value is the comparison operator: either CODE itself or
2863 the code corresponding to the inverse of the comparison. */
2865 static enum rtx_code
2866 find_comparison_args (code, parg1, parg2, pmode1, pmode2)
2867 enum rtx_code code;
2868 rtx *parg1, *parg2;
2869 enum machine_mode *pmode1, *pmode2;
2871 rtx arg1, arg2;
2873 arg1 = *parg1, arg2 = *parg2;
2875 /* If ARG2 is const0_rtx, see what ARG1 is equivalent to. */
2877 while (arg2 == CONST0_RTX (GET_MODE (arg1)))
2879 /* Set non-zero when we find something of interest. */
2880 rtx x = 0;
2881 int reverse_code = 0;
2882 struct table_elt *p = 0;
2884 /* If arg1 is a COMPARE, extract the comparison arguments from it.
2885 On machines with CC0, this is the only case that can occur, since
2886 fold_rtx will return the COMPARE or item being compared with zero
2887 when given CC0. */
2889 if (GET_CODE (arg1) == COMPARE && arg2 == const0_rtx)
2890 x = arg1;
2892 /* If ARG1 is a comparison operator and CODE is testing for
2893 STORE_FLAG_VALUE, get the inner arguments. */
2895 else if (GET_RTX_CLASS (GET_CODE (arg1)) == '<')
2897 if (code == NE
2898 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
2899 && code == LT && STORE_FLAG_VALUE == -1)
2900 #ifdef FLOAT_STORE_FLAG_VALUE
2901 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_FLOAT
2902 && (REAL_VALUE_NEGATIVE
2903 (FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)))))
2904 #endif
2906 x = arg1;
2907 else if (code == EQ
2908 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
2909 && code == GE && STORE_FLAG_VALUE == -1)
2910 #ifdef FLOAT_STORE_FLAG_VALUE
2911 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_FLOAT
2912 && (REAL_VALUE_NEGATIVE
2913 (FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)))))
2914 #endif
2916 x = arg1, reverse_code = 1;
2919 /* ??? We could also check for
2921 (ne (and (eq (...) (const_int 1))) (const_int 0))
2923 and related forms, but let's wait until we see them occurring. */
2925 if (x == 0)
2926 /* Look up ARG1 in the hash table and see if it has an equivalence
2927 that lets us see what is being compared. */
2928 p = lookup (arg1, safe_hash (arg1, GET_MODE (arg1)) & HASH_MASK,
2929 GET_MODE (arg1));
2930 if (p) p = p->first_same_value;
2932 for (; p; p = p->next_same_value)
2934 enum machine_mode inner_mode = GET_MODE (p->exp);
2936 /* If the entry isn't valid, skip it. */
2937 if (! exp_equiv_p (p->exp, p->exp, 1, 0))
2938 continue;
2940 if (GET_CODE (p->exp) == COMPARE
2941 /* Another possibility is that this machine has a compare insn
2942 that includes the comparison code. In that case, ARG1 would
2943 be equivalent to a comparison operation that would set ARG1 to
2944 either STORE_FLAG_VALUE or zero. If this is an NE operation,
2945 ORIG_CODE is the actual comparison being done; if it is an EQ,
2946 we must reverse ORIG_CODE. On machines with a negative value
2947 for STORE_FLAG_VALUE, also look at LT and GE operations. */
2948 || ((code == NE
2949 || (code == LT
2950 && GET_MODE_CLASS (inner_mode) == MODE_INT
2951 && (GET_MODE_BITSIZE (inner_mode)
2952 <= HOST_BITS_PER_WIDE_INT)
2953 && (STORE_FLAG_VALUE
2954 & ((HOST_WIDE_INT) 1
2955 << (GET_MODE_BITSIZE (inner_mode) - 1))))
2956 #ifdef FLOAT_STORE_FLAG_VALUE
2957 || (code == LT
2958 && GET_MODE_CLASS (inner_mode) == MODE_FLOAT
2959 && (REAL_VALUE_NEGATIVE
2960 (FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)))))
2961 #endif
2963 && GET_RTX_CLASS (GET_CODE (p->exp)) == '<'))
2965 x = p->exp;
2966 break;
2968 else if ((code == EQ
2969 || (code == GE
2970 && GET_MODE_CLASS (inner_mode) == MODE_INT
2971 && (GET_MODE_BITSIZE (inner_mode)
2972 <= HOST_BITS_PER_WIDE_INT)
2973 && (STORE_FLAG_VALUE
2974 & ((HOST_WIDE_INT) 1
2975 << (GET_MODE_BITSIZE (inner_mode) - 1))))
2976 #ifdef FLOAT_STORE_FLAG_VALUE
2977 || (code == GE
2978 && GET_MODE_CLASS (inner_mode) == MODE_FLOAT
2979 && (REAL_VALUE_NEGATIVE
2980 (FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)))))
2981 #endif
2983 && GET_RTX_CLASS (GET_CODE (p->exp)) == '<')
2985 reverse_code = 1;
2986 x = p->exp;
2987 break;
2990 /* If this is fp + constant, the equivalent is a better operand since
2991 it may let us predict the value of the comparison. */
2992 else if (NONZERO_BASE_PLUS_P (p->exp))
2994 arg1 = p->exp;
2995 continue;
2999 /* If we didn't find a useful equivalence for ARG1, we are done.
3000 Otherwise, set up for the next iteration. */
3001 if (x == 0)
3002 break;
3004 arg1 = XEXP (x, 0), arg2 = XEXP (x, 1);
3005 if (GET_RTX_CLASS (GET_CODE (x)) == '<')
3006 code = GET_CODE (x);
3008 if (reverse_code)
3009 code = reverse_condition (code);
3012 /* Return our results. Return the modes from before fold_rtx
3013 because fold_rtx might produce const_int, and then it's too late. */
3014 *pmode1 = GET_MODE (arg1), *pmode2 = GET_MODE (arg2);
3015 *parg1 = fold_rtx (arg1, 0), *parg2 = fold_rtx (arg2, 0);
3017 return code;
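/* A worked example, with hypothetical register numbers: folding
   (eq (reg 70) (const_int 0)) where the table records (reg 70) as
   equivalent to (gt (reg 68) (reg 69)) yields code LE with *PARG1 =
   (reg 68) and *PARG2 = (reg 69): EQ against zero means the GT was
   false, so the condition comes back reversed.  */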
3020 /* If X is a nontrivial arithmetic operation on an argument
3021 for which a constant value can be determined, return
3022 the result of operating on that value, as a constant.
3023 Otherwise, return X, possibly with one or more operands
3024 modified by recursive calls to this function.
3026 If X is a register whose contents are known, we do NOT
3027 return those contents here. equiv_constant is called to
3028 perform that task.
3030 INSN is the insn that we may be modifying. If it is 0, make a copy
3031 of X before modifying it. */
3033 static rtx
3034 fold_rtx (x, insn)
3035 rtx x;
3036 rtx insn;
3038 register enum rtx_code code;
3039 register enum machine_mode mode;
3040 register const char *fmt;
3041 register int i;
3042 rtx new = 0;
3043 int copied = 0;
3044 int must_swap = 0;
3046 /* Folded equivalents of first two operands of X. */
3047 rtx folded_arg0;
3048 rtx folded_arg1;
3050 /* Constant equivalents of first three operands of X;
3051 0 when no such equivalent is known. */
3052 rtx const_arg0;
3053 rtx const_arg1;
3054 rtx const_arg2;
3056 /* The mode of the first operand of X. We need this for sign and zero
3057 extends. */
3058 enum machine_mode mode_arg0;
3060 if (x == 0)
3061 return x;
3063 mode = GET_MODE (x);
3064 code = GET_CODE (x);
3065 switch (code)
3067 case CONST:
3068 case CONST_INT:
3069 case CONST_DOUBLE:
3070 case SYMBOL_REF:
3071 case LABEL_REF:
3072 case REG:
3073 /* No use simplifying an EXPR_LIST
3074 since they are used only for lists of args
3075 in a function call's REG_EQUAL note. */
3076 case EXPR_LIST:
3077 /* Changing anything inside an ADDRESSOF is incorrect; we don't
3078 want to (e.g.,) make (addressof (const_int 0)) just because
3079 the location is known to be zero. */
3080 case ADDRESSOF:
3081 return x;
3083 #ifdef HAVE_cc0
3084 case CC0:
3085 return prev_insn_cc0;
3086 #endif
3088 case PC:
3089 /* If the next insn is a CODE_LABEL followed by a jump table,
3090 PC's value is a LABEL_REF pointing to that label. That
3091 lets us fold switch statements on the Vax. */
3092 if (insn && GET_CODE (insn) == JUMP_INSN)
3094 rtx next = next_nonnote_insn (insn);
3096 if (next && GET_CODE (next) == CODE_LABEL
3097 && NEXT_INSN (next) != 0
3098 && GET_CODE (NEXT_INSN (next)) == JUMP_INSN
3099 && (GET_CODE (PATTERN (NEXT_INSN (next))) == ADDR_VEC
3100 || GET_CODE (PATTERN (NEXT_INSN (next))) == ADDR_DIFF_VEC))
3101 return gen_rtx_LABEL_REF (Pmode, next);
3103 break;
3105 case SUBREG:
3106 /* See if we previously assigned a constant value to this SUBREG. */
3107 if ((new = lookup_as_function (x, CONST_INT)) != 0
3108 || (new = lookup_as_function (x, CONST_DOUBLE)) != 0)
3109 return new;
3111 /* If this is a paradoxical SUBREG, we have no idea what value the
3112 extra bits would have. However, if the operand is equivalent
3113 to a SUBREG whose operand is the same as our mode, and all the
3114 modes are within a word, we can just use the inner operand
3115 because these SUBREGs just say how to treat the register.
3117 Similarly if we find an integer constant. */
3119 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3121 enum machine_mode imode = GET_MODE (SUBREG_REG (x));
3122 struct table_elt *elt;
3124 if (GET_MODE_SIZE (mode) <= UNITS_PER_WORD
3125 && GET_MODE_SIZE (imode) <= UNITS_PER_WORD
3126 && (elt = lookup (SUBREG_REG (x), HASH (SUBREG_REG (x), imode),
3127 imode)) != 0)
3128 for (elt = elt->first_same_value;
3129 elt; elt = elt->next_same_value)
3131 if (CONSTANT_P (elt->exp)
3132 && GET_MODE (elt->exp) == VOIDmode)
3133 return elt->exp;
3135 if (GET_CODE (elt->exp) == SUBREG
3136 && GET_MODE (SUBREG_REG (elt->exp)) == mode
3137 && exp_equiv_p (elt->exp, elt->exp, 1, 0))
3138 return copy_rtx (SUBREG_REG (elt->exp));
3141 return x;
3144 /* Fold SUBREG_REG. If it changed, see if we can simplify the SUBREG.
3145 We might be able to if the SUBREG is extracting a single word in an
3146 integral mode or extracting the low part. */
3148 folded_arg0 = fold_rtx (SUBREG_REG (x), insn);
3149 const_arg0 = equiv_constant (folded_arg0);
3150 if (const_arg0)
3151 folded_arg0 = const_arg0;
3153 if (folded_arg0 != SUBREG_REG (x))
3155 new = 0;
3157 if (GET_MODE_CLASS (mode) == MODE_INT
3158 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
3159 && GET_MODE (SUBREG_REG (x)) != VOIDmode)
3160 new = operand_subword (folded_arg0, SUBREG_WORD (x), 0,
3161 GET_MODE (SUBREG_REG (x)));
3162 if (new == 0 && subreg_lowpart_p (x))
3163 new = gen_lowpart_if_possible (mode, folded_arg0);
3164 if (new)
3165 return new;
3168 /* If this is a narrowing SUBREG and our operand is a REG, see if
3169 we can find an equivalence for REG that is an arithmetic operation
3170 in a wider mode where both operands are paradoxical SUBREGs
3171 from objects of our result mode. In that case, we couldn't report
3172 an equivalent value for that operation, since we don't know what the
3173 extra bits will be. But we can find an equivalence for this SUBREG
3174 by folding that operation in the narrow mode. This allows us to
3175 fold arithmetic in narrow modes when the machine only supports
3176 word-sized arithmetic.
3178 Also look for a case where we have a SUBREG whose operand is the
3179 same as our result. If both modes are smaller than a word, we
3180 are simply interpreting a register in different modes and we
3181 can use the inner value. */
3183 if (GET_CODE (folded_arg0) == REG
3184 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (folded_arg0))
3185 && subreg_lowpart_p (x))
3187 struct table_elt *elt;
3189 /* We can use HASH here since we know that canon_hash won't be
3190 called. */
3191 elt = lookup (folded_arg0,
3192 HASH (folded_arg0, GET_MODE (folded_arg0)),
3193 GET_MODE (folded_arg0));
3195 if (elt)
3196 elt = elt->first_same_value;
3198 for (; elt; elt = elt->next_same_value)
3200 enum rtx_code eltcode = GET_CODE (elt->exp);
3202 /* Just check for unary and binary operations. */
3203 if (GET_RTX_CLASS (GET_CODE (elt->exp)) == '1'
3204 && GET_CODE (elt->exp) != SIGN_EXTEND
3205 && GET_CODE (elt->exp) != ZERO_EXTEND
3206 && GET_CODE (XEXP (elt->exp, 0)) == SUBREG
3207 && GET_MODE (SUBREG_REG (XEXP (elt->exp, 0))) == mode)
3209 rtx op0 = SUBREG_REG (XEXP (elt->exp, 0));
3211 if (GET_CODE (op0) != REG && ! CONSTANT_P (op0))
3212 op0 = fold_rtx (op0, NULL_RTX);
3214 op0 = equiv_constant (op0);
3215 if (op0)
3216 new = simplify_unary_operation (GET_CODE (elt->exp), mode,
3217 op0, mode);
3219 else if ((GET_RTX_CLASS (GET_CODE (elt->exp)) == '2'
3220 || GET_RTX_CLASS (GET_CODE (elt->exp)) == 'c')
3221 && eltcode != DIV && eltcode != MOD
3222 && eltcode != UDIV && eltcode != UMOD
3223 && eltcode != ASHIFTRT && eltcode != LSHIFTRT
3224 && eltcode != ROTATE && eltcode != ROTATERT
3225 && ((GET_CODE (XEXP (elt->exp, 0)) == SUBREG
3226 && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 0)))
3227 == mode))
3228 || CONSTANT_P (XEXP (elt->exp, 0)))
3229 && ((GET_CODE (XEXP (elt->exp, 1)) == SUBREG
3230 && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 1)))
3231 == mode))
3232 || CONSTANT_P (XEXP (elt->exp, 1))))
3234 rtx op0 = gen_lowpart_common (mode, XEXP (elt->exp, 0));
3235 rtx op1 = gen_lowpart_common (mode, XEXP (elt->exp, 1));
3237 if (op0 && GET_CODE (op0) != REG && ! CONSTANT_P (op0))
3238 op0 = fold_rtx (op0, NULL_RTX);
3240 if (op0)
3241 op0 = equiv_constant (op0);
3243 if (op1 && GET_CODE (op1) != REG && ! CONSTANT_P (op1))
3244 op1 = fold_rtx (op1, NULL_RTX);
3246 if (op1)
3247 op1 = equiv_constant (op1);
3249 /* If we are looking for the low SImode part of
3250 (ashift:DI c (const_int 32)), it doesn't work
3251 to compute that in SImode, because a 32-bit shift
3252 in SImode is unpredictable. We know the value is 0. */
3253 if (op0 && op1
3254 && GET_CODE (elt->exp) == ASHIFT
3255 && GET_CODE (op1) == CONST_INT
3256 && INTVAL (op1) >= GET_MODE_BITSIZE (mode))
3258 if (INTVAL (op1) < GET_MODE_BITSIZE (GET_MODE (elt->exp)))
3260 /* If the count fits in the inner mode's width,
3261 but exceeds the outer mode's width,
3262 the value will get truncated to 0
3263 by the subreg. */
3264 new = const0_rtx;
3265 else
3266 /* If the count exceeds even the inner mode's width,
3267 don't fold this expression. */
3268 new = 0;
3270 else if (op0 && op1)
3271 new = simplify_binary_operation (GET_CODE (elt->exp), mode,
3272 op0, op1);
3275 else if (GET_CODE (elt->exp) == SUBREG
3276 && GET_MODE (SUBREG_REG (elt->exp)) == mode
3277 && (GET_MODE_SIZE (GET_MODE (folded_arg0))
3278 <= UNITS_PER_WORD)
3279 && exp_equiv_p (elt->exp, elt->exp, 1, 0))
3280 new = copy_rtx (SUBREG_REG (elt->exp));
3282 if (new)
3283 return new;
3287 return x;
3289 case NOT:
3290 case NEG:
3291 /* If we have (NOT Y), see if Y is known to be (NOT Z).
3292 If so, (NOT Y) simplifies to Z. Similarly for NEG. */
3293 new = lookup_as_function (XEXP (x, 0), code);
3294 if (new)
3295 return fold_rtx (copy_rtx (XEXP (new, 0)), insn);
3296 break;
3298 case MEM:
3299 /* If we are not actually processing an insn, don't try to find the
3300 best address. Not only don't we care, but we could modify the
3301 MEM in an invalid way since we have no insn to validate against. */
3302 if (insn != 0)
3303 find_best_addr (insn, &XEXP (x, 0));
3306 /* Even if we don't fold in the insn itself,
3307 we can safely do so here, in hopes of getting a constant. */
3308 rtx addr = fold_rtx (XEXP (x, 0), NULL_RTX);
3309 rtx base = 0;
3310 HOST_WIDE_INT offset = 0;
3312 if (GET_CODE (addr) == REG
3313 && REGNO_QTY_VALID_P (REGNO (addr)))
3315 int addr_q = REG_QTY (REGNO (addr));
3316 struct qty_table_elem *addr_ent = &qty_table[addr_q];
3318 if (GET_MODE (addr) == addr_ent->mode
3319 && addr_ent->const_rtx != NULL_RTX)
3320 addr = addr_ent->const_rtx;
3323 /* If address is constant, split it into a base and integer offset. */
3324 if (GET_CODE (addr) == SYMBOL_REF || GET_CODE (addr) == LABEL_REF)
3325 base = addr;
3326 else if (GET_CODE (addr) == CONST && GET_CODE (XEXP (addr, 0)) == PLUS
3327 && GET_CODE (XEXP (XEXP (addr, 0), 1)) == CONST_INT)
3329 base = XEXP (XEXP (addr, 0), 0);
3330 offset = INTVAL (XEXP (XEXP (addr, 0), 1));
3332 else if (GET_CODE (addr) == LO_SUM
3333 && GET_CODE (XEXP (addr, 1)) == SYMBOL_REF)
3334 base = XEXP (addr, 1);
3335 else if (GET_CODE (addr) == ADDRESSOF)
3336 return change_address (x, VOIDmode, addr);
3338 /* If this is a constant pool reference, we can fold it into its
3339 constant to allow better value tracking. */
3340 if (base && GET_CODE (base) == SYMBOL_REF
3341 && CONSTANT_POOL_ADDRESS_P (base))
3343 rtx constant = get_pool_constant (base);
3344 enum machine_mode const_mode = get_pool_mode (base);
3345 rtx new;
3347 if (CONSTANT_P (constant) && GET_CODE (constant) != CONST_INT)
3348 constant_pool_entries_cost = COST (constant);
3350 /* If we are loading the full constant, we have an equivalence. */
3351 if (offset == 0 && mode == const_mode)
3352 return constant;
3354 /* If this actually isn't a constant (weird!), we can't do
3355 anything. Otherwise, handle the two most common cases:
3356 extracting a word from a multi-word constant, and extracting
3357 the low-order bits. Other cases don't seem common enough to
3358 worry about. */
3359 if (! CONSTANT_P (constant))
3360 return x;
3362 if (GET_MODE_CLASS (mode) == MODE_INT
3363 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
3364 && offset % UNITS_PER_WORD == 0
3365 && (new = operand_subword (constant,
3366 offset / UNITS_PER_WORD,
3367 0, const_mode)) != 0)
3368 return new;
3370 if (((BYTES_BIG_ENDIAN
3371 && offset == GET_MODE_SIZE (GET_MODE (constant)) - 1)
3372 || (! BYTES_BIG_ENDIAN && offset == 0))
3373 && (new = gen_lowpart_if_possible (mode, constant)) != 0)
3374 return new;
3377 /* If this is a reference to a label at a known position in a jump
3378 table, we also know its value. */
3379 if (base && GET_CODE (base) == LABEL_REF)
3381 rtx label = XEXP (base, 0);
3382 rtx table_insn = NEXT_INSN (label);
3384 if (table_insn && GET_CODE (table_insn) == JUMP_INSN
3385 && GET_CODE (PATTERN (table_insn)) == ADDR_VEC)
3387 rtx table = PATTERN (table_insn);
3389 if (offset >= 0
3390 && (offset / GET_MODE_SIZE (GET_MODE (table))
3391 < XVECLEN (table, 0)))
3392 return XVECEXP (table, 0,
3393 offset / GET_MODE_SIZE (GET_MODE (table)));
3395 if (table_insn && GET_CODE (table_insn) == JUMP_INSN
3396 && GET_CODE (PATTERN (table_insn)) == ADDR_DIFF_VEC)
3398 rtx table = PATTERN (table_insn);
3400 if (offset >= 0
3401 && (offset / GET_MODE_SIZE (GET_MODE (table))
3402 < XVECLEN (table, 1)))
3404 offset /= GET_MODE_SIZE (GET_MODE (table));
3405 new = gen_rtx_MINUS (Pmode, XVECEXP (table, 1, offset),
3406 XEXP (table, 0));
3408 if (GET_MODE (table) != Pmode)
3409 new = gen_rtx_TRUNCATE (GET_MODE (table), new);
3411 /* Indicate this is a constant. This isn't a
3412 valid form of CONST, but it will only be used
3413 to fold the next insns and then discarded, so
3414 it should be safe.
3416 Note this expression must be explicitly discarded,
3417 by cse_insn, else it may end up in a REG_EQUAL note
3418 and "escape" to cause problems elsewhere. */
3419 return gen_rtx_CONST (GET_MODE (new), new);
3424 return x;
3427 case ASM_OPERANDS:
3428 for (i = XVECLEN (x, 3) - 1; i >= 0; i--)
3429 validate_change (insn, &XVECEXP (x, 3, i),
3430 fold_rtx (XVECEXP (x, 3, i), insn), 0);
3431 break;
3433 default:
3434 break;
3437 const_arg0 = 0;
3438 const_arg1 = 0;
3439 const_arg2 = 0;
3440 mode_arg0 = VOIDmode;
3442 /* Try folding our operands.
3443 Then see which ones have constant values known. */
3445 fmt = GET_RTX_FORMAT (code);
3446 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3447 if (fmt[i] == 'e')
3449 rtx arg = XEXP (x, i);
3450 rtx folded_arg = arg, const_arg = 0;
3451 enum machine_mode mode_arg = GET_MODE (arg);
3452 rtx cheap_arg, expensive_arg;
3453 rtx replacements[2];
3454 int j;
3456 /* Most arguments are cheap, so handle them specially. */
3457 switch (GET_CODE (arg))
3459 case REG:
3460 /* This is the same as calling equiv_constant; it is duplicated
3461 here for speed. */
3462 if (REGNO_QTY_VALID_P (REGNO (arg)))
3464 int arg_q = REG_QTY (REGNO (arg));
3465 struct qty_table_elem *arg_ent = &qty_table[arg_q];
3467 if (arg_ent->const_rtx != NULL_RTX
3468 && GET_CODE (arg_ent->const_rtx) != REG
3469 && GET_CODE (arg_ent->const_rtx) != PLUS)
3470 const_arg
3471 = gen_lowpart_if_possible (GET_MODE (arg),
3472 arg_ent->const_rtx);
3474 break;
3476 case CONST:
3477 case CONST_INT:
3478 case SYMBOL_REF:
3479 case LABEL_REF:
3480 case CONST_DOUBLE:
3481 const_arg = arg;
3482 break;
3484 #ifdef HAVE_cc0
3485 case CC0:
3486 folded_arg = prev_insn_cc0;
3487 mode_arg = prev_insn_cc0_mode;
3488 const_arg = equiv_constant (folded_arg);
3489 break;
3490 #endif
3492 default:
3493 folded_arg = fold_rtx (arg, insn);
3494 const_arg = equiv_constant (folded_arg);
3497 /* For the first three operands, see if the operand
3498 is constant or equivalent to a constant. */
3499 switch (i)
3501 case 0:
3502 folded_arg0 = folded_arg;
3503 const_arg0 = const_arg;
3504 mode_arg0 = mode_arg;
3505 break;
3506 case 1:
3507 folded_arg1 = folded_arg;
3508 const_arg1 = const_arg;
3509 break;
3510 case 2:
3511 const_arg2 = const_arg;
3512 break;
3515 /* Pick the least expensive of the folded argument and an
3516 equivalent constant argument. */
3517 if (const_arg == 0 || const_arg == folded_arg
3518 || COST (const_arg) > COST (folded_arg))
3519 cheap_arg = folded_arg, expensive_arg = const_arg;
3520 else
3521 cheap_arg = const_arg, expensive_arg = folded_arg;
3523 /* Try to replace the operand with the cheapest of the two
3524 possibilities. If it doesn't work and this is either of the first
3525 two operands of a commutative operation, try swapping them.
3526 If THAT fails, try the more expensive, provided it is cheaper
3527 than what is already there. */
3529 if (cheap_arg == XEXP (x, i))
3530 continue;
3532 if (insn == 0 && ! copied)
3534 x = copy_rtx (x);
3535 copied = 1;
3538 replacements[0] = cheap_arg, replacements[1] = expensive_arg;
3539 for (j = 0;
3540 j < 2 && replacements[j]
3541 && COST (replacements[j]) < COST (XEXP (x, i));
3542 j++)
3544 if (validate_change (insn, &XEXP (x, i), replacements[j], 0))
3545 break;
3547 if (code == NE || code == EQ || GET_RTX_CLASS (code) == 'c')
3549 validate_change (insn, &XEXP (x, i), XEXP (x, 1 - i), 1);
3550 validate_change (insn, &XEXP (x, 1 - i), replacements[j], 1);
3552 if (apply_change_group ())
3554 /* Swap them back to be invalid so that this loop can
3555 continue and flag them to be swapped back later. */
3556 rtx tem;
3558 tem = XEXP (x, 0); XEXP (x, 0) = XEXP (x, 1);
3559 XEXP (x, 1) = tem;
3560 must_swap = 1;
3561 break;
3567 else
3569 if (fmt[i] == 'E')
3570 /* Don't try to fold inside of a vector of expressions.
3571 Doing nothing is harmless. */
3572 {;}
3575 /* If a commutative operation, place a constant integer as the second
3576 operand unless the first operand is also a constant integer. Otherwise,
3577 place any constant second unless the first operand is also a constant. */
3579 if (code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
3581 if (must_swap || (const_arg0
3582 && (const_arg1 == 0
3583 || (GET_CODE (const_arg0) == CONST_INT
3584 && GET_CODE (const_arg1) != CONST_INT))))
3586 register rtx tem = XEXP (x, 0);
3588 if (insn == 0 && ! copied)
3590 x = copy_rtx (x);
3591 copied = 1;
3594 validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
3595 validate_change (insn, &XEXP (x, 1), tem, 1);
3596 if (apply_change_group ())
3598 tem = const_arg0, const_arg0 = const_arg1, const_arg1 = tem;
3599 tem = folded_arg0, folded_arg0 = folded_arg1, folded_arg1 = tem;
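/* For example, assuming insn validation succeeds,
   (plus:SI (const_int 4) (reg 60)) is rewritten here as
   (plus:SI (reg 60) (const_int 4)), so later lookups need only
   consider one ordering.  */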
3604 /* If X is an arithmetic operation, see if we can simplify it. */
3606 switch (GET_RTX_CLASS (code))
3608 case '1':
3610 int is_const = 0;
3612 /* We can't simplify extension ops unless we know the
3613 original mode. */
3614 if ((code == ZERO_EXTEND || code == SIGN_EXTEND)
3615 && mode_arg0 == VOIDmode)
3616 break;
3618 /* If we had a CONST, strip it off and put it back later if we
3619 fold. */
3620 if (const_arg0 != 0 && GET_CODE (const_arg0) == CONST)
3621 is_const = 1, const_arg0 = XEXP (const_arg0, 0);
3623 new = simplify_unary_operation (code, mode,
3624 const_arg0 ? const_arg0 : folded_arg0,
3625 mode_arg0);
3626 if (new != 0 && is_const)
3627 new = gen_rtx_CONST (mode, new);
3629 break;
3631 case '<':
3632 /* See what items are actually being compared and set FOLDED_ARG[01]
3633 to those values and CODE to the actual comparison code. If any are
3634 constant, set CONST_ARG0 and CONST_ARG1 appropriately. We needn't
3635 do anything if both operands are already known to be constant. */
3637 if (const_arg0 == 0 || const_arg1 == 0)
3639 struct table_elt *p0, *p1;
3640 rtx true = const_true_rtx, false = const0_rtx;
3641 enum machine_mode mode_arg1;
3643 #ifdef FLOAT_STORE_FLAG_VALUE
3644 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
3646 true = (CONST_DOUBLE_FROM_REAL_VALUE
3647 (FLOAT_STORE_FLAG_VALUE (mode), mode));
3648 false = CONST0_RTX (mode);
3650 #endif
3652 code = find_comparison_args (code, &folded_arg0, &folded_arg1,
3653 &mode_arg0, &mode_arg1);
3654 const_arg0 = equiv_constant (folded_arg0);
3655 const_arg1 = equiv_constant (folded_arg1);
3657 /* If the mode is VOIDmode or a MODE_CC mode, we don't know
3658 what kinds of things are being compared, so we can't do
3659 anything with this comparison. */
3661 if (mode_arg0 == VOIDmode || GET_MODE_CLASS (mode_arg0) == MODE_CC)
3662 break;
3664 /* If we do not now have two constants being compared, see
3665 if we can nevertheless deduce some things about the
3666 comparison. */
3667 if (const_arg0 == 0 || const_arg1 == 0)
3669 /* Is FOLDED_ARG0 frame-pointer plus a constant? Or
3670 non-explicit constant? These aren't zero, but we
3671 don't know their sign. */
3672 if (const_arg1 == const0_rtx
3673 && (NONZERO_BASE_PLUS_P (folded_arg0)
3674 #if 0 /* Sad to say, on sysvr4, #pragma weak can make a symbol address
3675 come out as 0. */
3676 || GET_CODE (folded_arg0) == SYMBOL_REF
3677 #endif
3678 || GET_CODE (folded_arg0) == LABEL_REF
3679 || GET_CODE (folded_arg0) == CONST))
3681 if (code == EQ)
3682 return false;
3683 else if (code == NE)
3684 return true;
3687 /* See if the two operands are the same. We don't do this
3688 for IEEE floating-point since we can't assume x == x
3689 since x might be a NaN. */
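/* For example, (eq:DF (reg 70) (reg 70)) may not be folded to true
   under IEEE arithmetic unless -ffast-math is in effect: the register
   might hold a NaN, and a NaN never compares equal to itself.  The
   register number is hypothetical.  */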
3691 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3692 || ! FLOAT_MODE_P (mode_arg0) || flag_fast_math)
3693 && (folded_arg0 == folded_arg1
3694 || (GET_CODE (folded_arg0) == REG
3695 && GET_CODE (folded_arg1) == REG
3696 && (REG_QTY (REGNO (folded_arg0))
3697 == REG_QTY (REGNO (folded_arg1))))
3698 || ((p0 = lookup (folded_arg0,
3699 (safe_hash (folded_arg0, mode_arg0)
3700 & HASH_MASK), mode_arg0))
3701 && (p1 = lookup (folded_arg1,
3702 (safe_hash (folded_arg1, mode_arg0)
3703 & HASH_MASK), mode_arg0))
3704 && p0->first_same_value == p1->first_same_value)))
3705 return ((code == EQ || code == LE || code == GE
3706 || code == LEU || code == GEU)
3707 ? true : false);
3709 /* If FOLDED_ARG0 is a register, see if the comparison we are
3710 doing now is either the same as we did before or the reverse
3711 (we only check the reverse if not floating-point). */
3712 else if (GET_CODE (folded_arg0) == REG)
3714 int qty = REG_QTY (REGNO (folded_arg0));
3716 if (REGNO_QTY_VALID_P (REGNO (folded_arg0)))
3718 struct qty_table_elem *ent = &qty_table[qty];
3720 if ((comparison_dominates_p (ent->comparison_code, code)
3721 || (! FLOAT_MODE_P (mode_arg0)
3722 && comparison_dominates_p (ent->comparison_code,
3723 reverse_condition (code))))
3724 && (rtx_equal_p (ent->comparison_const, folded_arg1)
3725 || (const_arg1
3726 && rtx_equal_p (ent->comparison_const,
3727 const_arg1))
3728 || (GET_CODE (folded_arg1) == REG
3729 && (REG_QTY (REGNO (folded_arg1)) == ent->comparison_qty))))
3730 return (comparison_dominates_p (ent->comparison_code, code)
3731 ? true : false);
3737 /* If we are comparing against zero, see if the first operand is
3738 equivalent to an IOR with a constant. If so, we may be able to
3739 determine the result of this comparison. */
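/* For instance, if (reg 66) was loaded from (ior:SI (reg 65)
   (const_int 1)), then (eq (reg 66) (const_int 0)) is known false and
   the corresponding NE is known true; if the IOR constant instead has
   the sign bit set, (lt (reg 66) (const_int 0)) is known true as
   well.  Register numbers are hypothetical.  */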
3741 if (const_arg1 == const0_rtx)
3743 rtx y = lookup_as_function (folded_arg0, IOR);
3744 rtx inner_const;
3746 if (y != 0
3747 && (inner_const = equiv_constant (XEXP (y, 1))) != 0
3748 && GET_CODE (inner_const) == CONST_INT
3749 && INTVAL (inner_const) != 0)
3751 int sign_bitnum = GET_MODE_BITSIZE (mode_arg0) - 1;
3752 int has_sign = (HOST_BITS_PER_WIDE_INT >= sign_bitnum
3753 && (INTVAL (inner_const)
3754 & ((HOST_WIDE_INT) 1 << sign_bitnum)));
3755 rtx true = const_true_rtx, false = const0_rtx;
3757 #ifdef FLOAT_STORE_FLAG_VALUE
3758 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
3760 true = (CONST_DOUBLE_FROM_REAL_VALUE
3761 (FLOAT_STORE_FLAG_VALUE (mode), mode));
3762 false = CONST0_RTX (mode);
3764 #endif
3766 switch (code)
3768 case EQ:
3769 return false;
3770 case NE:
3771 return true;
3772 case LT: case LE:
3773 if (has_sign)
3774 return true;
3775 break;
3776 case GT: case GE:
3777 if (has_sign)
3778 return false;
3779 break;
3780 default:
3781 break;
3786 new = simplify_relational_operation (code, mode_arg0,
3787 const_arg0 ? const_arg0 : folded_arg0,
3788 const_arg1 ? const_arg1 : folded_arg1);
3789 #ifdef FLOAT_STORE_FLAG_VALUE
3790 if (new != 0 && GET_MODE_CLASS (mode) == MODE_FLOAT)
3792 if (new == const0_rtx)
3793 new = CONST0_RTX (mode);
3794 else
3795 new = (CONST_DOUBLE_FROM_REAL_VALUE
3796 (FLOAT_STORE_FLAG_VALUE (mode), mode));
3798 #endif
3799 break;
3801 case '2':
3802 case 'c':
3803 switch (code)
3805 case PLUS:
3806 /* If the second operand is a LABEL_REF, see if the first is a MINUS
3807 with that LABEL_REF as its second operand. If so, the result is
3808 the first operand of that MINUS. This handles switches with an
3809 ADDR_DIFF_VEC table. */
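/* For example, the hypothetical tablejump address
       (plus (minus (label_ref L2) (label_ref L1)) (label_ref L1))
   simplifies directly to (label_ref L2).  */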
3810 if (const_arg1 && GET_CODE (const_arg1) == LABEL_REF)
3812 rtx y
3813 = GET_CODE (folded_arg0) == MINUS ? folded_arg0
3814 : lookup_as_function (folded_arg0, MINUS);
3816 if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
3817 && XEXP (XEXP (y, 1), 0) == XEXP (const_arg1, 0))
3818 return XEXP (y, 0);
3820 /* Now try for a CONST of a MINUS like the above. */
3821 if ((y = (GET_CODE (folded_arg0) == CONST ? folded_arg0
3822 : lookup_as_function (folded_arg0, CONST))) != 0
3823 && GET_CODE (XEXP (y, 0)) == MINUS
3824 && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
3825 && XEXP (XEXP (XEXP (y, 0),1), 0) == XEXP (const_arg1, 0))
3826 return XEXP (XEXP (y, 0), 0);
3829 /* Likewise if the operands are in the other order. */
3830 if (const_arg0 && GET_CODE (const_arg0) == LABEL_REF)
3832 rtx y
3833 = GET_CODE (folded_arg1) == MINUS ? folded_arg1
3834 : lookup_as_function (folded_arg1, MINUS);
3836 if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
3837 && XEXP (XEXP (y, 1), 0) == XEXP (const_arg0, 0))
3838 return XEXP (y, 0);
3840 /* Now try for a CONST of a MINUS like the above. */
3841 if ((y = (GET_CODE (folded_arg1) == CONST ? folded_arg1
3842 : lookup_as_function (folded_arg1, CONST))) != 0
3843 && GET_CODE (XEXP (y, 0)) == MINUS
3844 && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
3845 && XEXP (XEXP (XEXP (y, 0),1), 0) == XEXP (const_arg0, 0))
3846 return XEXP (XEXP (y, 0), 0);
3849 /* If second operand is a register equivalent to a negative
3850 CONST_INT, see if we can find a register equivalent to the
3851 positive constant. Make a MINUS if so. Don't do this for
3852 a non-negative constant since we might then alternate between
3853 choosing positive and negative constants. Having the positive
3854 constant previously-used is the more common case. Be sure
3855 the resulting constant is non-negative; if const_arg1 were
3856 the smallest negative number this would overflow: depending
3857 on the mode, this would either just be the same value (and
3858 hence not save anything) or be incorrect. */
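/* E.g., if (reg 66) is known equivalent to (const_int 4), then
   (plus (reg 65) (const_int -4)) can become
   (minus (reg 65) (reg 66)), reusing the positive constant already in
   a register.  Register numbers are hypothetical.  */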
3859 if (const_arg1 != 0 && GET_CODE (const_arg1) == CONST_INT
3860 && INTVAL (const_arg1) < 0
3861 /* This used to test
3863 - INTVAL (const_arg1) >= 0
3865 But the Sun V5.0 compilers mis-compiled that test. So
3866 instead we test for the problematic value in a more direct
3867 manner and hope the Sun compilers get it correct. */
3868 && INTVAL (const_arg1) !=
3869 ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1))
3870 && GET_CODE (folded_arg1) == REG)
3872 rtx new_const = GEN_INT (- INTVAL (const_arg1));
3873 struct table_elt *p
3874 = lookup (new_const, safe_hash (new_const, mode) & HASH_MASK,
3875 mode);
3877 if (p)
3878 for (p = p->first_same_value; p; p = p->next_same_value)
3879 if (GET_CODE (p->exp) == REG)
3880 return simplify_gen_binary (MINUS, mode, folded_arg0,
3881 canon_reg (p->exp, NULL_RTX));
3883 goto from_plus;
3885 case MINUS:
3886 /* If we have (MINUS Y C), see if Y is known to be (PLUS Z C2).
3887 If so, produce (PLUS Z C2-C). */
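/* E.g., if (reg 66) is known to be (plus (reg 65) (const_int 8)),
   then (minus (reg 66) (const_int 3)) folds to
   (plus (reg 65) (const_int 5)).  Register numbers are
   hypothetical.  */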
3888 if (const_arg1 != 0 && GET_CODE (const_arg1) == CONST_INT)
3890 rtx y = lookup_as_function (XEXP (x, 0), PLUS);
3891 if (y && GET_CODE (XEXP (y, 1)) == CONST_INT)
3892 return fold_rtx (plus_constant (copy_rtx (y),
3893 -INTVAL (const_arg1)),
3894 NULL_RTX);
3897 /* ... fall through ... */
3899 from_plus:
3900 case SMIN: case SMAX: case UMIN: case UMAX:
3901 case IOR: case AND: case XOR:
3902 case MULT: case DIV: case UDIV:
3903 case ASHIFT: case LSHIFTRT: case ASHIFTRT:
3904 /* If we have (<op> <reg> <const_int>) for an associative OP and REG
3905 is known to be of similar form, we may be able to replace the
3906 operation with a combined operation. This may eliminate the
3907 intermediate operation if every use is simplified in this way.
3908 Note that the similar optimization done by combine.c only works
3909 if the intermediate operation's result has only one reference. */
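/* E.g., if (reg 66) is known to be (ashift:SI (reg 65) (const_int 2)),
   then (ashift:SI (reg 66) (const_int 3)) can be rewritten as
   (ashift:SI (reg 65) (const_int 5)); the two shift counts are
   composed with the ASSOCIATE_CODE chosen below.  Register numbers
   are hypothetical.  */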
3911 if (GET_CODE (folded_arg0) == REG
3912 && const_arg1 && GET_CODE (const_arg1) == CONST_INT)
3914 int is_shift
3915 = (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT);
3916 rtx y = lookup_as_function (folded_arg0, code);
3917 rtx inner_const;
3918 enum rtx_code associate_code;
3919 rtx new_const;
3921 if (y == 0
3922 || 0 == (inner_const
3923 = equiv_constant (fold_rtx (XEXP (y, 1), 0)))
3924 || GET_CODE (inner_const) != CONST_INT
3925 /* If we have compiled a statement like
3926 "if (x == (x & mask1))", and now are looking at
3927 "x & mask2", we will have a case where the first operand
3928 of Y is the same as our first operand. Unless we detect
3929 this case, an infinite loop will result. */
3930 || XEXP (y, 0) == folded_arg0)
3931 break;
3933 /* Don't associate these operations if they are a PLUS with the
3934 same constant and it is a power of two. These might be doable
3935 with a pre- or post-increment. Similarly for two subtracts of
3936 identical powers of two with post-decrement.
3938 if (code == PLUS && INTVAL (const_arg1) == INTVAL (inner_const)
3939 && ((HAVE_PRE_INCREMENT
3940 && exact_log2 (INTVAL (const_arg1)) >= 0)
3941 || (HAVE_POST_INCREMENT
3942 && exact_log2 (INTVAL (const_arg1)) >= 0)
3943 || (HAVE_PRE_DECREMENT
3944 && exact_log2 (- INTVAL (const_arg1)) >= 0)
3945 || (HAVE_POST_DECREMENT
3946 && exact_log2 (- INTVAL (const_arg1)) >= 0)))
3947 break;
3949 /* Compute the code used to compose the constants. For example,
3950 A/C1/C2 is A/(C1 * C2), so if CODE == DIV, we want MULT. */
3952 associate_code
3953 = (code == MULT || code == DIV || code == UDIV ? MULT
3954 : is_shift || code == PLUS || code == MINUS ? PLUS : code);
3956 new_const = simplify_binary_operation (associate_code, mode,
3957 const_arg1, inner_const);
3959 if (new_const == 0)
3960 break;
3962 /* If we are associating shift operations, don't let this
3963 produce a shift of the size of the object or larger.
3964 This could occur when we follow a sign-extend by a right
3965 shift on a machine that does a sign-extend as a pair
3966 of shifts. */
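/* E.g., in SImode a combined shift count of 32 or more is rejected,
   except that an ASHIFTRT may be clamped to a count of 31, which
   produces the same all-sign-bits result.  */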
3968 if (is_shift && GET_CODE (new_const) == CONST_INT
3969 && INTVAL (new_const) >= GET_MODE_BITSIZE (mode))
3971 /* As an exception, we can turn an ASHIFTRT of this
3972 form into a shift of the number of bits - 1. */
3973 if (code == ASHIFTRT)
3974 new_const = GEN_INT (GET_MODE_BITSIZE (mode) - 1);
3975 else
3976 break;
3979 y = copy_rtx (XEXP (y, 0));
3981 /* If Y contains our first operand (the most common way this
3982 can happen is if Y is a MEM), we would go into an infinite
3983 loop if we tried to fold it. So don't in that case. */
3985 if (! reg_mentioned_p (folded_arg0, y))
3986 y = fold_rtx (y, insn);
3988 return simplify_gen_binary (code, mode, y, new_const);
3990 break;
3992 default:
3993 break;
3996 new = simplify_binary_operation (code, mode,
3997 const_arg0 ? const_arg0 : folded_arg0,
3998 const_arg1 ? const_arg1 : folded_arg1);
3999 break;
4001 case 'o':
4002 /* (lo_sum (high X) X) is simply X. */
4003 if (code == LO_SUM && const_arg0 != 0
4004 && GET_CODE (const_arg0) == HIGH
4005 && rtx_equal_p (XEXP (const_arg0, 0), const_arg1))
4006 return const_arg1;
4007 break;
4009 case '3':
4010 case 'b':
4011 new = simplify_ternary_operation (code, mode, mode_arg0,
4012 const_arg0 ? const_arg0 : folded_arg0,
4013 const_arg1 ? const_arg1 : folded_arg1,
4014 const_arg2 ? const_arg2 : XEXP (x, 2));
4015 break;
4017 case 'x':
4018 /* Always eliminate CONSTANT_P_RTX at this stage. */
4019 if (code == CONSTANT_P_RTX)
4020 return (const_arg0 ? const1_rtx : const0_rtx);
4021 break;
4024 return new ? new : x;
4027 /* Return a constant value currently equivalent to X.
4028 Return 0 if we don't know one. */
4030 static rtx
4031 equiv_constant (x)
4032 rtx x;
4034 if (GET_CODE (x) == REG
4035 && REGNO_QTY_VALID_P (REGNO (x)))
4037 int x_q = REG_QTY (REGNO (x));
4038 struct qty_table_elem *x_ent = &qty_table[x_q];
4040 if (x_ent->const_rtx)
4041 x = gen_lowpart_if_possible (GET_MODE (x), x_ent->const_rtx);
4044 if (x == 0 || CONSTANT_P (x))
4045 return x;
4047 /* If X is a MEM, try to fold it outside the context of any insn to see if
4048 it might be equivalent to a constant. That handles the case where it
4049 is a constant-pool reference. Then try to look it up in the hash table
4050 in case it is something whose value we have seen before. */
4052 if (GET_CODE (x) == MEM)
4054 struct table_elt *elt;
4056 x = fold_rtx (x, NULL_RTX);
4057 if (CONSTANT_P (x))
4058 return x;
4060 elt = lookup (x, safe_hash (x, GET_MODE (x)) & HASH_MASK, GET_MODE (x));
4061 if (elt == 0)
4062 return 0;
4064 for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
4065 if (elt->is_const && CONSTANT_P (elt->exp))
4066 return elt->exp;
4069 return 0;
4072 /* Assuming that X is an rtx (e.g., MEM, REG or SUBREG) for a fixed-point
4073 number, return an rtx (MEM, SUBREG, or CONST_INT) that refers to the
4074 least-significant part of X.
4075 MODE specifies how big a part of X to return.
4077 If the requested operation cannot be done, 0 is returned.
4079 This is similar to gen_lowpart in emit-rtl.c. */
4081 rtx
4082 gen_lowpart_if_possible (mode, x)
4083 enum machine_mode mode;
4084 register rtx x;
4086 rtx result = gen_lowpart_common (mode, x);
4088 if (result)
4089 return result;
4090 else if (GET_CODE (x) == MEM)
4092 /* This is the only other case we handle. */
4093 register int offset = 0;
4094 rtx new;
4096 if (WORDS_BIG_ENDIAN)
4097 offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
4098 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
4099 if (BYTES_BIG_ENDIAN)
4100 /* Adjust the address so that the address-after-the-data is
4101 unchanged. */
4102 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
4103 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
4104 new = gen_rtx_MEM (mode, plus_constant (XEXP (x, 0), offset));
4105 if (! memory_address_p (mode, XEXP (new, 0)))
4106 return 0;
4107 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
4108 MEM_COPY_ATTRIBUTES (new, x);
4109 return new;
4111 else
4112 return 0;
4115 /* Given INSN, a jump insn, TAKEN indicates if we are following the "taken"
4116 branch. It will be zero if not.
4118 In certain cases, this can cause us to add an equivalence. For example,
4119 if we are following the taken case of
4120 if (i == 2)
4121 we can add the fact that `i' and `2' are now equivalent.
4123 In any case, we can record that this comparison was passed. If the same
4124 comparison is seen later, we will know its value. */
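/* For instance, following the not-taken arm of `if (i < j)' records
   `i >= j' with REVERSED_NONEQUALITY set, since that reversal is not
   valid for IEEE floating-point: when one operand is a NaN, both the
   original and the reversed comparison are false.  */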
4126 static void
4127 record_jump_equiv (insn, taken)
4128 rtx insn;
4129 int taken;
4131 int cond_known_true;
4132 rtx op0, op1;
4133 enum machine_mode mode, mode0, mode1;
4134 int reversed_nonequality = 0;
4135 enum rtx_code code;
4137 /* Ensure this is the right kind of insn. */
4138 if (! condjump_p (insn) || simplejump_p (insn))
4139 return;
4141 /* See if this jump condition is known true or false. */
4142 if (taken)
4143 cond_known_true = (XEXP (SET_SRC (PATTERN (insn)), 2) == pc_rtx);
4144 else
4145 cond_known_true = (XEXP (SET_SRC (PATTERN (insn)), 1) == pc_rtx);
4147 /* Get the type of comparison being done and the operands being compared.
4148 If we had to reverse a non-equality condition, record that fact so we
4149 know that it isn't valid for floating-point. */
4150 code = GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 0));
4151 op0 = fold_rtx (XEXP (XEXP (SET_SRC (PATTERN (insn)), 0), 0), insn);
4152 op1 = fold_rtx (XEXP (XEXP (SET_SRC (PATTERN (insn)), 0), 1), insn);
4154 code = find_comparison_args (code, &op0, &op1, &mode0, &mode1);
4155 if (! cond_known_true)
4157 reversed_nonequality = (code != EQ && code != NE);
4158 code = reverse_condition (code);
4160 /* Don't remember if we can't find the inverse. */
4161 if (code == UNKNOWN)
4162 return;
4165 /* The mode is the mode of the non-constant. */
4166 mode = mode0;
4167 if (mode1 != VOIDmode)
4168 mode = mode1;
4170 record_jump_cond (code, mode, op0, op1, reversed_nonequality);
4173 /* We know that comparison CODE applied to OP0 and OP1 in MODE is true.
4174 REVERSED_NONEQUALITY is nonzero if CODE had to be swapped.
4175 Make any useful entries we can with that information. Called from
4176 above function and called recursively. */
4178 static void
4179 record_jump_cond (code, mode, op0, op1, reversed_nonequality)
4180 enum rtx_code code;
4181 enum machine_mode mode;
4182 rtx op0, op1;
4183 int reversed_nonequality;
4185 unsigned op0_hash, op1_hash;
4186 int op0_in_memory, op1_in_memory;
4187 struct table_elt *op0_elt, *op1_elt;
4189 /* If OP0 and OP1 are known equal, and either is a paradoxical SUBREG,
4190 we know that they are also equal in the smaller mode (this is also
4191 true for all smaller modes whether or not there is a SUBREG, but
4192 is not worth testing for with no SUBREG). */
4194 /* Note that GET_MODE (op0) may not equal MODE. */
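/* E.g., knowing (eq (subreg:SI (reg:HI 65) 0) (reg:SI 66)) lets us
   also record (reg:HI 65) as equal to the low HImode part of
   (reg:SI 66): equality in the wider, paradoxical mode implies
   equality of the narrower inner value.  Register numbers are
   hypothetical.  */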
4195 if (code == EQ && GET_CODE (op0) == SUBREG
4196 && (GET_MODE_SIZE (GET_MODE (op0))
4197 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
4199 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
4200 rtx tem = gen_lowpart_if_possible (inner_mode, op1);
4202 record_jump_cond (code, mode, SUBREG_REG (op0),
4203 tem ? tem : gen_rtx_SUBREG (inner_mode, op1, 0),
4204 reversed_nonequality);
4207 if (code == EQ && GET_CODE (op1) == SUBREG
4208 && (GET_MODE_SIZE (GET_MODE (op1))
4209 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
4211 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
4212 rtx tem = gen_lowpart_if_possible (inner_mode, op0);
4214 record_jump_cond (code, mode, SUBREG_REG (op1),
4215 tem ? tem : gen_rtx_SUBREG (inner_mode, op0, 0),
4216 reversed_nonequality);
4219 /* Similarly, if this is an NE comparison, and either is a SUBREG
4220 making a smaller mode, we know the whole thing is also NE. */
4222 /* Note that GET_MODE (op0) may not equal MODE;
4223 if we test MODE instead, we can get an infinite recursion
4224 alternating between two modes each wider than MODE. */
4226 if (code == NE && GET_CODE (op0) == SUBREG
4227 && subreg_lowpart_p (op0)
4228 && (GET_MODE_SIZE (GET_MODE (op0))
4229 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
4231 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
4232 rtx tem = gen_lowpart_if_possible (inner_mode, op1);
4234 record_jump_cond (code, mode, SUBREG_REG (op0),
4235 tem ? tem : gen_rtx_SUBREG (inner_mode, op1, 0),
4236 reversed_nonequality);
4239 if (code == NE && GET_CODE (op1) == SUBREG
4240 && subreg_lowpart_p (op1)
4241 && (GET_MODE_SIZE (GET_MODE (op1))
4242 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
4244 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
4245 rtx tem = gen_lowpart_if_possible (inner_mode, op0);
4247 record_jump_cond (code, mode, SUBREG_REG (op1),
4248 tem ? tem : gen_rtx_SUBREG (inner_mode, op0, 0),
4249 reversed_nonequality);
4252 /* Hash both operands. */
4254 do_not_record = 0;
4255 hash_arg_in_memory = 0;
4256 op0_hash = HASH (op0, mode);
4257 op0_in_memory = hash_arg_in_memory;
4259 if (do_not_record)
4260 return;
4262 do_not_record = 0;
4263 hash_arg_in_memory = 0;
4264 op1_hash = HASH (op1, mode);
4265 op1_in_memory = hash_arg_in_memory;
4267 if (do_not_record)
4268 return;
4270 /* Look up both operands. */
4271 op0_elt = lookup (op0, op0_hash, mode);
4272 op1_elt = lookup (op1, op1_hash, mode);
4274 /* If both operands are already equivalent or if they are not in the
4275 table but are identical, do nothing. */
4276 if ((op0_elt != 0 && op1_elt != 0
4277 && op0_elt->first_same_value == op1_elt->first_same_value)
4278 || op0 == op1 || rtx_equal_p (op0, op1))
4279 return;
4281 /* If we aren't setting two things equal all we can do is save this
4282 comparison. Similarly if this is floating-point. In the latter
4283 case, OP1 might be zero and both -0.0 and 0.0 are equal to it.
4284 If we record the equality, we might inadvertently delete code
4285 whose intent was to change -0 to +0. */
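/* E.g., the sequence `if (d == 0.0) d = 0.0;' exists to turn a -0.0
   into +0.0.  Since -0.0 compares equal to 0.0, recording D as equal
   to 0.0 on the taken arm could delete that store, so for
   floating-point we remember only the comparison itself.  */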
4287 if (code != EQ || FLOAT_MODE_P (GET_MODE (op0)))
4289 struct qty_table_elem *ent;
4290 int qty;
4292 /* If we reversed a floating-point comparison, if OP0 is not a
4293 register, or if OP1 is neither a register nor a constant, we can't
4294 do anything. */
4296 if (GET_CODE (op1) != REG)
4297 op1 = equiv_constant (op1);
4299 if ((reversed_nonequality && FLOAT_MODE_P (mode))
4300 || GET_CODE (op0) != REG || op1 == 0)
4301 return;
4303 /* Put OP0 in the hash table if it isn't already. This gives it a
4304 new quantity number. */
4305 if (op0_elt == 0)
4307 if (insert_regs (op0, NULL_PTR, 0))
4309 rehash_using_reg (op0);
4310 op0_hash = HASH (op0, mode);
4312 /* If OP0 is contained in OP1, this changes its hash code
4313 as well. Faster to rehash than to check, except
4314 for the simple case of a constant. */
4315 if (! CONSTANT_P (op1))
4316 op1_hash = HASH (op1,mode);
4319 op0_elt = insert (op0, NULL_PTR, op0_hash, mode);
4320 op0_elt->in_memory = op0_in_memory;
4323 qty = REG_QTY (REGNO (op0));
4324 ent = &qty_table[qty];
4326 ent->comparison_code = code;
4327 if (GET_CODE (op1) == REG)
4329 /* Look it up again--in case op0 and op1 are the same. */
4330 op1_elt = lookup (op1, op1_hash, mode);
4332 /* Put OP1 in the hash table so it gets a new quantity number. */
4333 if (op1_elt == 0)
4335 if (insert_regs (op1, NULL_PTR, 0))
4337 rehash_using_reg (op1);
4338 op1_hash = HASH (op1, mode);
4341 op1_elt = insert (op1, NULL_PTR, op1_hash, mode);
4342 op1_elt->in_memory = op1_in_memory;
4345 ent->comparison_const = NULL_RTX;
4346 ent->comparison_qty = REG_QTY (REGNO (op1));
4348 else
4350 ent->comparison_const = op1;
4351 ent->comparison_qty = -1;
4354 return;
4357 /* If either side is still missing an equivalence, make it now,
4358 then merge the equivalences. */
4360 if (op0_elt == 0)
4362 if (insert_regs (op0, NULL_PTR, 0))
4364 rehash_using_reg (op0);
4365 op0_hash = HASH (op0, mode);
4368 op0_elt = insert (op0, NULL_PTR, op0_hash, mode);
4369 op0_elt->in_memory = op0_in_memory;
4372 if (op1_elt == 0)
4374 if (insert_regs (op1, NULL_PTR, 0))
4376 rehash_using_reg (op1);
4377 op1_hash = HASH (op1, mode);
4380 op1_elt = insert (op1, NULL_PTR, op1_hash, mode);
4381 op1_elt->in_memory = op1_in_memory;
4384 merge_equiv_classes (op0_elt, op1_elt);
4385 last_jump_equiv_class = op0_elt;
4388 /* CSE processing for one instruction.
4389 First simplify sources and addresses of all assignments
4390 in the instruction, using previously-computed equivalent values.
4391 Then install the new sources and destinations in the table
4392 of available values.
4394 If LIBCALL_INSN is nonzero, don't record any equivalence made in
4395 the insn. It means that INSN is inside a libcall block. In this
4396 case LIBCALL_INSN is the corresponding insn with REG_LIBCALL. */
4398 /* Data on one SET contained in the instruction. */
4400 struct set
4402 /* The SET rtx itself. */
4403 rtx rtl;
4404 /* The SET_SRC of the rtx (the original value, if it is changing). */
4405 rtx src;
4406 /* The hash-table element for the SET_SRC of the SET. */
4407 struct table_elt *src_elt;
4408 /* Hash value for the SET_SRC. */
4409 unsigned src_hash;
4410 /* Hash value for the SET_DEST. */
4411 unsigned dest_hash;
4412 /* The SET_DEST, with SUBREG, etc., stripped. */
4413 rtx inner_dest;
4414 /* Nonzero if the SET_SRC is in memory. */
4415 char src_in_memory;
4416 /* Nonzero if the SET_SRC contains something
4417 whose value cannot be predicted and understood. */
4418 char src_volatile;
4419 /* Original machine mode, in case it becomes a CONST_INT. */
4420 enum machine_mode mode;
4421 /* A constant equivalent for SET_SRC, if any. */
4422 rtx src_const;
4423 /* Original SET_SRC value used for libcall notes. */
4424 rtx orig_src;
4425 /* Hash value of constant equivalent for SET_SRC. */
4426 unsigned src_const_hash;
4427 /* Table entry for constant equivalent for SET_SRC, if any. */
4428 struct table_elt *src_const_elt;
4431 static void
4432 cse_insn (insn, libcall_insn)
4433 rtx insn;
4434 rtx libcall_insn;
4436 register rtx x = PATTERN (insn);
4437 register int i;
4438 rtx tem;
4439 register int n_sets = 0;
4441 #ifdef HAVE_cc0
4442 /* Records what this insn does to set CC0. */
4443 rtx this_insn_cc0 = 0;
4444 enum machine_mode this_insn_cc0_mode = VOIDmode;
4445 #endif
4447 rtx src_eqv = 0;
4448 struct table_elt *src_eqv_elt = 0;
4449 int src_eqv_volatile = 0;
4450 int src_eqv_in_memory = 0;
4451 unsigned src_eqv_hash = 0;
4453 struct set *sets = (struct set *) NULL_PTR;
4455 this_insn = insn;
4457 /* Find all the SETs and CLOBBERs in this instruction.
4458 Record all the SETs in the array `sets' and count them.
4459 Also determine whether there is a CLOBBER that invalidates
4460 all memory references, or all references at varying addresses. */
4462 if (GET_CODE (insn) == CALL_INSN)
4464 for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
4465 if (GET_CODE (XEXP (tem, 0)) == CLOBBER)
4466 invalidate (SET_DEST (XEXP (tem, 0)), VOIDmode);
4469 if (GET_CODE (x) == SET)
4471 sets = (struct set *) alloca (sizeof (struct set));
4472 sets[0].rtl = x;
4474 /* Ignore SETs that are unconditional jumps.
4475 They never need cse processing, so this does not hurt.
4476 The reason is not efficiency but rather
4477 so that we can test at the end for instructions
4478 that have been simplified to unconditional jumps
4479 and not be misled by unchanged instructions
4480 that were unconditional jumps to begin with. */
4481 if (SET_DEST (x) == pc_rtx
4482 && GET_CODE (SET_SRC (x)) == LABEL_REF)
4485 /* Don't count call-insns, (set (reg 0) (call ...)), as a set.
4486 The hard function value register is used only once, to copy to
4487 someplace else, so it isn't worth cse'ing (and on 80386 is unsafe)!
4488 Ensure we invalidate the destination register. On the 80386 no
4489 other code would invalidate it since it is a fixed_reg.
4490 We need not check the return of apply_change_group; see canon_reg. */
4492 else if (GET_CODE (SET_SRC (x)) == CALL)
4494 canon_reg (SET_SRC (x), insn);
4495 apply_change_group ();
4496 fold_rtx (SET_SRC (x), insn);
4497 invalidate (SET_DEST (x), VOIDmode);
4499 else
4500 n_sets = 1;
4502 else if (GET_CODE (x) == PARALLEL)
4504 register int lim = XVECLEN (x, 0);
4506 sets = (struct set *) alloca (lim * sizeof (struct set));
4508 /* Find all regs explicitly clobbered in this insn,
4509 and ensure they are not replaced with any other regs
4510 elsewhere in this insn.
4511 When a reg that is clobbered is also used for input,
4512 we should presume that that is for a reason,
4513 and we should not substitute some other register
4514 which is not supposed to be clobbered.
4515 Therefore, this loop cannot be merged into the one below
4516 because a CALL may precede a CLOBBER and refer to the
4517 value clobbered. We must not let a canonicalization do
4518 anything in that case. */
4519 for (i = 0; i < lim; i++)
4521 register rtx y = XVECEXP (x, 0, i);
4522 if (GET_CODE (y) == CLOBBER)
4524 rtx clobbered = XEXP (y, 0);
4526 if (GET_CODE (clobbered) == REG
4527 || GET_CODE (clobbered) == SUBREG)
4528 invalidate (clobbered, VOIDmode);
4529 else if (GET_CODE (clobbered) == STRICT_LOW_PART
4530 || GET_CODE (clobbered) == ZERO_EXTRACT)
4531 invalidate (XEXP (clobbered, 0), GET_MODE (clobbered));
4535 for (i = 0; i < lim; i++)
4537 register rtx y = XVECEXP (x, 0, i);
4538 if (GET_CODE (y) == SET)
4540 /* As above, we ignore unconditional jumps and call-insns and
4541 ignore the result of apply_change_group. */
4542 if (GET_CODE (SET_SRC (y)) == CALL)
4544 canon_reg (SET_SRC (y), insn);
4545 apply_change_group ();
4546 fold_rtx (SET_SRC (y), insn);
4547 invalidate (SET_DEST (y), VOIDmode);
4549 else if (SET_DEST (y) == pc_rtx
4550 && GET_CODE (SET_SRC (y)) == LABEL_REF)
4552 else
4553 sets[n_sets++].rtl = y;
4555 else if (GET_CODE (y) == CLOBBER)
4557 /* If we clobber memory, canon the address.
4558 This does nothing when a register is clobbered
4559 because we have already invalidated the reg. */
4560 if (GET_CODE (XEXP (y, 0)) == MEM)
4561 canon_reg (XEXP (y, 0), NULL_RTX);
4563 else if (GET_CODE (y) == USE
4564 && ! (GET_CODE (XEXP (y, 0)) == REG
4565 && REGNO (XEXP (y, 0)) < FIRST_PSEUDO_REGISTER))
4566 canon_reg (y, NULL_RTX);
4567 else if (GET_CODE (y) == CALL)
4569 /* The result of apply_change_group can be ignored; see
4570 canon_reg. */
4571 canon_reg (y, insn);
4572 apply_change_group ();
4573 fold_rtx (y, insn);
4577 else if (GET_CODE (x) == CLOBBER)
4579 if (GET_CODE (XEXP (x, 0)) == MEM)
4580 canon_reg (XEXP (x, 0), NULL_RTX);
4583 /* Canonicalize a USE of a pseudo register or memory location. */
4584 else if (GET_CODE (x) == USE
4585 && ! (GET_CODE (XEXP (x, 0)) == REG
4586 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER))
4587 canon_reg (XEXP (x, 0), NULL_RTX);
4588 else if (GET_CODE (x) == CALL)
4590 /* The result of apply_change_group can be ignored; see canon_reg. */
4591 canon_reg (x, insn);
4592 apply_change_group ();
4593 fold_rtx (x, insn);
4596 /* Store the equivalent value in SRC_EQV, if different, or if the DEST
4597 is a STRICT_LOW_PART. The latter condition is necessary because SRC_EQV
4598 is handled specially for this case, and if it isn't set, then there will
4599 be no equivalence for the destination. */
4600 if (n_sets == 1 && REG_NOTES (insn) != 0
4601 && (tem = find_reg_note (insn, REG_EQUAL, NULL_RTX)) != 0
4602 && (! rtx_equal_p (XEXP (tem, 0), SET_SRC (sets[0].rtl))
4603 || GET_CODE (SET_DEST (sets[0].rtl)) == STRICT_LOW_PART))
4604 src_eqv = canon_reg (XEXP (tem, 0), NULL_RTX);
4606 /* Canonicalize sources and addresses of destinations.
4607 We do this in a separate pass to avoid problems when a MATCH_DUP is
4608 present in the insn pattern. In that case, we want to ensure that
4609 we don't break the duplicate nature of the pattern. So we will replace
4610 both operands at the same time. Otherwise, we would fail to find an
4611 equivalent substitution in the loop calling validate_change below.
4613 We used to suppress canonicalization of DEST if it appears in SRC,
4614 but we don't do this any more. */
4616 for (i = 0; i < n_sets; i++)
4618 rtx dest = SET_DEST (sets[i].rtl);
4619 rtx src = SET_SRC (sets[i].rtl);
4620 rtx new = canon_reg (src, insn);
4621 int insn_code;
4623 sets[i].orig_src = src;
4624 if ((GET_CODE (new) == REG && GET_CODE (src) == REG
4625 && ((REGNO (new) < FIRST_PSEUDO_REGISTER)
4626 != (REGNO (src) < FIRST_PSEUDO_REGISTER)))
4627 || (insn_code = recog_memoized (insn)) < 0
4628 || insn_data[insn_code].n_dups > 0)
4629 validate_change (insn, &SET_SRC (sets[i].rtl), new, 1);
4630 else
4631 SET_SRC (sets[i].rtl) = new;
4633 if (GET_CODE (dest) == ZERO_EXTRACT || GET_CODE (dest) == SIGN_EXTRACT)
4635 validate_change (insn, &XEXP (dest, 1),
4636 canon_reg (XEXP (dest, 1), insn), 1);
4637 validate_change (insn, &XEXP (dest, 2),
4638 canon_reg (XEXP (dest, 2), insn), 1);
4641 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
4642 || GET_CODE (dest) == ZERO_EXTRACT
4643 || GET_CODE (dest) == SIGN_EXTRACT)
4644 dest = XEXP (dest, 0);
4646 if (GET_CODE (dest) == MEM)
4647 canon_reg (dest, insn);
4650 /* Now that we have done all the replacements, we can apply the change
4651 group and see if they all work. Note that this will cause some
4652 canonicalizations that would have worked individually not to be applied
4653 because some other canonicalization didn't work, but this should not
4654 occur often.
4656 The result of apply_change_group can be ignored; see canon_reg. */
4658 apply_change_group ();
4660 /* Set sets[i].src_elt to the class each source belongs to.
4661 Detect assignments from or to volatile things
4662 and set sets[i] to zero so they will be ignored
4663 in the rest of this function.
4665 Nothing in this loop changes the hash table or the register chains. */
4667 for (i = 0; i < n_sets; i++)
4669 register rtx src, dest;
4670 register rtx src_folded;
4671 register struct table_elt *elt = 0, *p;
4672 enum machine_mode mode;
4673 rtx src_eqv_here;
4674 rtx src_const = 0;
4675 rtx src_related = 0;
4676 struct table_elt *src_const_elt = 0;
4677 int src_cost = 10000, src_eqv_cost = 10000, src_folded_cost = 10000;
4678 int src_related_cost = 10000, src_elt_cost = 10000;
4679 /* Set non-zero if we need to call force_const_mem on the
4680 contents of src_folded before using it. */
4681 int src_folded_force_flag = 0;
4683 dest = SET_DEST (sets[i].rtl);
4684 src = SET_SRC (sets[i].rtl);
4686 /* If SRC is a constant that has no machine mode,
4687 hash it with the destination's machine mode.
4688 This way we can keep different modes separate. */
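/* E.g., (const_int 5) has VOIDmode, so when it is stored into an
   SImode register it is hashed in SImode, while the same constant
   stored into a DImode register is hashed in DImode; the two never
   end up in one equivalence class of mixed modes.  */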
4690 mode = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
4691 sets[i].mode = mode;
4693 if (src_eqv)
4695 enum machine_mode eqvmode = mode;
4696 if (GET_CODE (dest) == STRICT_LOW_PART)
4697 eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
4698 do_not_record = 0;
4699 hash_arg_in_memory = 0;
4700 src_eqv = fold_rtx (src_eqv, insn);
4701 src_eqv_hash = HASH (src_eqv, eqvmode);
4703 /* Find the equivalence class for the equivalent expression. */
4705 if (!do_not_record)
4706 src_eqv_elt = lookup (src_eqv, src_eqv_hash, eqvmode);
4708 src_eqv_volatile = do_not_record;
4709 src_eqv_in_memory = hash_arg_in_memory;
4712 /* If this is a STRICT_LOW_PART assignment, src_eqv corresponds to the
4713 value of the INNER register, not the destination. So it is not
4714 a valid substitution for the source. But save it for later. */
4715 if (GET_CODE (dest) == STRICT_LOW_PART)
4716 src_eqv_here = 0;
4717 else
4718 src_eqv_here = src_eqv;
4720 /* Simplify any foldable subexpressions in SRC. Then get the fully-
4721 simplified result, which may not necessarily be valid. */
4722 src_folded = fold_rtx (src, insn);
4724 #if 0
4725 /* ??? This caused bad code to be generated for the m68k port with -O2.
4726 Suppose src is (CONST_INT -1), and that after truncation src_folded
4727 is (CONST_INT 3). Suppose src_folded is then used for src_const.
4728 At the end we will add src and src_const to the same equivalence
4729 class. We now have 3 and -1 on the same equivalence class. This
4730 causes later instructions to be mis-optimized. */
4731 /* If storing a constant in a bitfield, pre-truncate the constant
4732 so we will be able to record it later. */
4733 if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
4734 || GET_CODE (SET_DEST (sets[i].rtl)) == SIGN_EXTRACT)
4736 rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
4738 if (GET_CODE (src) == CONST_INT
4739 && GET_CODE (width) == CONST_INT
4740 && INTVAL (width) < HOST_BITS_PER_WIDE_INT
4741 && (INTVAL (src) & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
4742 src_folded
4743 = GEN_INT (INTVAL (src) & (((HOST_WIDE_INT) 1
4744 << INTVAL (width)) - 1));
4746 #endif
4748 /* Compute SRC's hash code, and also notice if it
4749 should not be recorded at all. In that case,
4750 prevent any further processing of this assignment. */
4751 do_not_record = 0;
4752 hash_arg_in_memory = 0;
4754 sets[i].src = src;
4755 sets[i].src_hash = HASH (src, mode);
4756 sets[i].src_volatile = do_not_record;
4757 sets[i].src_in_memory = hash_arg_in_memory;
4759 /* If SRC is a MEM, there is a REG_EQUIV note for SRC, and DEST is
4760 a pseudo that is set more than once, do not record SRC. Using
4761 SRC as a replacement for anything else will be incorrect in that
4762 situation. Note that this usually occurs only for stack slots,
4763 in which case all the RTL would be referring to SRC, so we don't
4764 lose any optimization opportunities by not having SRC in the
4765 hash table. */
4767 if (GET_CODE (src) == MEM
4768 && find_reg_note (insn, REG_EQUIV, src) != 0
4769 && GET_CODE (dest) == REG
4770 && REGNO (dest) >= FIRST_PSEUDO_REGISTER
4771 && REG_N_SETS (REGNO (dest)) != 1)
4772 sets[i].src_volatile = 1;
4774 #if 0
4775 /* It is no longer clear why we used to do this, but it doesn't
4776 appear to still be needed. So let's try without it since this
4777 code hurts cse'ing widened ops. */
4778 /* If source is a perverse subreg (such as QI treated as an SI),
4779 treat it as volatile. It may do the work of an SI in one context
4780 where the extra bits are not being used, but cannot replace an SI
4781 in general. */
4782 if (GET_CODE (src) == SUBREG
4783 && (GET_MODE_SIZE (GET_MODE (src))
4784 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))))
4785 sets[i].src_volatile = 1;
4786 #endif
4788 /* Locate all possible equivalent forms for SRC. Try to replace
4789 SRC in the insn with each cheaper equivalent.
4791 We have the following types of equivalents: SRC itself, a folded
4792 version, a value given in a REG_EQUAL note, or a value related
4793 to a constant.
4795 Each of these equivalents may be part of an additional class
4796 of equivalents (if more than one is in the table, they must be in
4797 the same class; we check for this).
4799 If the source is volatile, we don't do any table lookups.
4801 We note any constant equivalent for possible later use in a
4802 REG_NOTE. */
4804 if (!sets[i].src_volatile)
4805 elt = lookup (src, sets[i].src_hash, mode);
4807 sets[i].src_elt = elt;
4809 if (elt && src_eqv_here && src_eqv_elt)
4811 if (elt->first_same_value != src_eqv_elt->first_same_value)
4813 /* The REG_EQUAL is indicating that two formerly distinct
4814 classes are now equivalent. So merge them. */
4815 merge_equiv_classes (elt, src_eqv_elt);
4816 src_eqv_hash = HASH (src_eqv, elt->mode);
4817 src_eqv_elt = lookup (src_eqv, src_eqv_hash, elt->mode);
4820 src_eqv_here = 0;
4823 else if (src_eqv_elt)
4824 elt = src_eqv_elt;
4826 /* Try to find a constant somewhere and record it in `src_const'.
4827 Record its table element, if any, in `src_const_elt'. Look in
4828 any known equivalences first. (If the constant is not in the
4829 table, also set `sets[i].src_const_hash'). */
4830 if (elt)
4831 for (p = elt->first_same_value; p; p = p->next_same_value)
4832 if (p->is_const)
4834 src_const = p->exp;
4835 src_const_elt = elt;
4836 break;
4839 if (src_const == 0
4840 && (CONSTANT_P (src_folded)
4841 /* Consider (minus (label_ref L1) (label_ref L2)) as
4842 "constant" here so we will record it. This allows us
4843 to fold switch statements when an ADDR_DIFF_VEC is used. */
4844 || (GET_CODE (src_folded) == MINUS
4845 && GET_CODE (XEXP (src_folded, 0)) == LABEL_REF
4846 && GET_CODE (XEXP (src_folded, 1)) == LABEL_REF)))
4847 src_const = src_folded, src_const_elt = elt;
4848 else if (src_const == 0 && src_eqv_here && CONSTANT_P (src_eqv_here))
4849 src_const = src_eqv_here, src_const_elt = src_eqv_elt;
4851 /* If we don't know if the constant is in the table, get its
4852 hash code and look it up. */
4853 if (src_const && src_const_elt == 0)
4855 sets[i].src_const_hash = HASH (src_const, mode);
4856 src_const_elt = lookup (src_const, sets[i].src_const_hash, mode);
4859 sets[i].src_const = src_const;
4860 sets[i].src_const_elt = src_const_elt;
4862 /* If the constant and our source are both in the table, mark them as
4863 equivalent. Otherwise, if a constant is in the table but the source
4864 isn't, set ELT to it. */
4865 if (src_const_elt && elt
4866 && src_const_elt->first_same_value != elt->first_same_value)
4867 merge_equiv_classes (elt, src_const_elt);
4868 else if (src_const_elt && elt == 0)
4869 elt = src_const_elt;
4871 /* See if there is a register linearly related to a constant
4872 equivalent of SRC. */
4873 if (src_const
4874 && (GET_CODE (src_const) == CONST
4875 || (src_const_elt && src_const_elt->related_value != 0)))
4877 src_related = use_related_value (src_const, src_const_elt);
4878 if (src_related)
4880 struct table_elt *src_related_elt
4881 = lookup (src_related, HASH (src_related, mode), mode);
4882 if (src_related_elt && elt)
4884 if (elt->first_same_value
4885 != src_related_elt->first_same_value)
4886 /* This can occur when we previously saw a CONST
4887 involving a SYMBOL_REF and then see the SYMBOL_REF
4888 twice. Merge the involved classes. */
4889 merge_equiv_classes (elt, src_related_elt);
4891 src_related = 0;
4892 src_related_elt = 0;
4894 else if (src_related_elt && elt == 0)
4895 elt = src_related_elt;
4899 /* See if we have a CONST_INT that is already in a register in a
4900 wider mode. */
4902 if (src_const && src_related == 0 && GET_CODE (src_const) == CONST_INT
4903 && GET_MODE_CLASS (mode) == MODE_INT
4904 && GET_MODE_BITSIZE (mode) < BITS_PER_WORD)
4906 enum machine_mode wider_mode;
4908 for (wider_mode = GET_MODE_WIDER_MODE (mode);
4909 GET_MODE_BITSIZE (wider_mode) <= BITS_PER_WORD
4910 && src_related == 0;
4911 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
4913 struct table_elt *const_elt
4914 = lookup (src_const, HASH (src_const, wider_mode), wider_mode);
4916 if (const_elt == 0)
4917 continue;
4919 for (const_elt = const_elt->first_same_value;
4920 const_elt; const_elt = const_elt->next_same_value)
4921 if (GET_CODE (const_elt->exp) == REG)
4923 src_related = gen_lowpart_if_possible (mode,
4924 const_elt->exp);
4925 break;
4930 /* Another possibility is that we have an AND with a constant in
4931 a mode narrower than a word. If so, it might have been generated
4932 as part of an "if" which would narrow the AND. If we already
4933 have done the AND in a wider mode, we can use a SUBREG of that
4934 value. */
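/* E.g., having already computed X & 0xf in SImode, a later QImode
   computation of the same AND can use the low byte of the SImode
   result via a SUBREG instead of redoing the mask.  */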
4936 if (flag_expensive_optimizations && ! src_related
4937 && GET_CODE (src) == AND && GET_CODE (XEXP (src, 1)) == CONST_INT
4938 && GET_MODE_SIZE (mode) < UNITS_PER_WORD)
4940 enum machine_mode tmode;
4941 rtx new_and = gen_rtx_AND (VOIDmode, NULL_RTX, XEXP (src, 1));
4943 for (tmode = GET_MODE_WIDER_MODE (mode);
4944 GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
4945 tmode = GET_MODE_WIDER_MODE (tmode))
4947 rtx inner = gen_lowpart_if_possible (tmode, XEXP (src, 0));
4948 struct table_elt *larger_elt;
4950 if (inner)
4952 PUT_MODE (new_and, tmode);
4953 XEXP (new_and, 0) = inner;
4954 larger_elt = lookup (new_and, HASH (new_and, tmode), tmode);
4955 if (larger_elt == 0)
4956 continue;
4958 for (larger_elt = larger_elt->first_same_value;
4959 larger_elt; larger_elt = larger_elt->next_same_value)
4960 if (GET_CODE (larger_elt->exp) == REG)
4962 src_related
4963 = gen_lowpart_if_possible (mode, larger_elt->exp);
4964 break;
4967 if (src_related)
4968 break;
4973 #ifdef LOAD_EXTEND_OP
4974 /* See if a MEM has already been loaded with a widening operation;
4975 if it has, we can use a subreg of that. Many CISC machines
4976 also have such operations, but this is only likely to be
4977 beneficial on these machines. */
4979 if (flag_expensive_optimizations && src_related == 0
4980 && (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
4981 && GET_MODE_CLASS (mode) == MODE_INT
4982 && GET_CODE (src) == MEM && ! do_not_record
4983 && LOAD_EXTEND_OP (mode) != NIL)
4985 enum machine_mode tmode;
4987 /* Set what we are trying to extend and the operation it might
4988 have been extended with. */
4989 PUT_CODE (memory_extend_rtx, LOAD_EXTEND_OP (mode));
4990 XEXP (memory_extend_rtx, 0) = src;
4992 for (tmode = GET_MODE_WIDER_MODE (mode);
4993 GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
4994 tmode = GET_MODE_WIDER_MODE (tmode))
4996 struct table_elt *larger_elt;
4998 PUT_MODE (memory_extend_rtx, tmode);
4999 larger_elt = lookup (memory_extend_rtx,
5000 HASH (memory_extend_rtx, tmode), tmode);
5001 if (larger_elt == 0)
5002 continue;
5004 for (larger_elt = larger_elt->first_same_value;
5005 larger_elt; larger_elt = larger_elt->next_same_value)
5006 if (GET_CODE (larger_elt->exp) == REG)
5008 src_related = gen_lowpart_if_possible (mode,
5009 larger_elt->exp);
5010 break;
5013 if (src_related)
5014 break;
5017 #endif /* LOAD_EXTEND_OP */
5019 if (src == src_folded)
5020 src_folded = 0;
5022 /* At this point, ELT, if non-zero, points to a class of expressions
5023 equivalent to the source of this SET and SRC, SRC_EQV, SRC_FOLDED,
5024 and SRC_RELATED, if non-zero, each contain additional equivalent
5025 expressions. Prune these latter expressions by deleting expressions
5026 already in the equivalence class.
5028 Check for an equivalent identical to the destination. If found,
5029 this is the preferred equivalent since it will likely lead to
5030 elimination of the insn. Indicate this by placing it in
5031 `src_related'. */
5033 if (elt) elt = elt->first_same_value;
5034 for (p = elt; p; p = p->next_same_value)
5036 enum rtx_code code = GET_CODE (p->exp);
5038 /* If the expression is not valid, ignore it. Then we do not
5039 have to check for validity below. In most cases, we can use
5040 `rtx_equal_p', since canonicalization has already been done. */
5041 if (code != REG && ! exp_equiv_p (p->exp, p->exp, 1, 0))
5042 continue;
5044 /* Also skip paradoxical subregs, unless that's what we're
5045 looking for. */
5046 if (code == SUBREG
5047 && (GET_MODE_SIZE (GET_MODE (p->exp))
5048 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))
5049 && ! (src != 0
5050 && GET_CODE (src) == SUBREG
5051 && GET_MODE (src) == GET_MODE (p->exp)
5052 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
5053 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))))
5054 continue;
5056 if (src && GET_CODE (src) == code && rtx_equal_p (src, p->exp))
5057 src = 0;
5058 else if (src_folded && GET_CODE (src_folded) == code
5059 && rtx_equal_p (src_folded, p->exp))
5060 src_folded = 0;
5061 else if (src_eqv_here && GET_CODE (src_eqv_here) == code
5062 && rtx_equal_p (src_eqv_here, p->exp))
5063 src_eqv_here = 0;
5064 else if (src_related && GET_CODE (src_related) == code
5065 && rtx_equal_p (src_related, p->exp))
5066 src_related = 0;
5068 /* If this is the same as the destination of the insn, we want
5069 to prefer it. Copy it to src_related. The code below will
5070 then give it a negative cost. */
5071 if (GET_CODE (dest) == code && rtx_equal_p (p->exp, dest))
5072 src_related = dest;
5076 /* Find the cheapest valid equivalent, trying all the available
5077 possibilities. Prefer items not in the hash table to ones
5078 that are when they are equal cost. Note that we can never
5079 worsen an insn as the current contents will also succeed.
5080 If we find an equivalent identical to the destination, use it as best,
5081 since this insn will probably be eliminated in that case. */
5082 if (src)
5084 if (rtx_equal_p (src, dest))
5085 src_cost = -1;
5086 else
5087 src_cost = COST (src);
5090 if (src_eqv_here)
5092 if (rtx_equal_p (src_eqv_here, dest))
5093 src_eqv_cost = -1;
5094 else
5095 src_eqv_cost = COST (src_eqv_here);
5098 if (src_folded)
5100 if (rtx_equal_p (src_folded, dest))
5101 src_folded_cost = -1;
5102 else
5103 src_folded_cost = COST (src_folded);
5106 if (src_related)
5108 if (rtx_equal_p (src_related, dest))
5109 src_related_cost = -1;
5110 else
5111 src_related_cost = COST (src_related);
5114 /* If this was an indirect jump insn, a known label will really be
5115 cheaper even though it looks more expensive. */
5116 if (dest == pc_rtx && src_const && GET_CODE (src_const) == LABEL_REF)
5117 src_folded = src_const, src_folded_cost = -1;
5119 /* Terminate loop when replacement made. This must terminate since
5120 the current contents will be tested and will always be valid. */
5121 while (1)
5123 rtx trial;
5125 /* Skip invalid entries. */
5126 while (elt && GET_CODE (elt->exp) != REG
5127 && ! exp_equiv_p (elt->exp, elt->exp, 1, 0))
5128 elt = elt->next_same_value;
5130 /* A paradoxical subreg would be bad here: it'll be the right
5131 size, but later may be adjusted so that the upper bits aren't
5132 what we want. So reject it. */
5133 if (elt != 0
5134 && GET_CODE (elt->exp) == SUBREG
5135 && (GET_MODE_SIZE (GET_MODE (elt->exp))
5136 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))
5137 /* It is okay, though, if the rtx we're trying to match
5138 will ignore any of the bits we can't predict. */
5139 && ! (src != 0
5140 && GET_CODE (src) == SUBREG
5141 && GET_MODE (src) == GET_MODE (elt->exp)
5142 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
5143 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))))
5145 elt = elt->next_same_value;
5146 continue;
5149 if (elt) src_elt_cost = elt->cost;
5151 /* Find cheapest and skip it for the next time. For items
5152 of equal cost, use this order:
5153 src_folded, src, src_eqv, src_related and hash table entry. */
5154 if (src_folded_cost <= src_cost
5155 && src_folded_cost <= src_eqv_cost
5156 && src_folded_cost <= src_related_cost
5157 && src_folded_cost <= src_elt_cost)
5159 trial = src_folded, src_folded_cost = 10000;
5160 if (src_folded_force_flag)
5161 trial = force_const_mem (mode, trial);
5163 else if (src_cost <= src_eqv_cost
5164 && src_cost <= src_related_cost
5165 && src_cost <= src_elt_cost)
5166 trial = src, src_cost = 10000;
5167 else if (src_eqv_cost <= src_related_cost
5168 && src_eqv_cost <= src_elt_cost)
5169 trial = copy_rtx (src_eqv_here), src_eqv_cost = 10000;
5170 else if (src_related_cost <= src_elt_cost)
5171 trial = copy_rtx (src_related), src_related_cost = 10000;
5172 else
5174 trial = copy_rtx (elt->exp);
5175 elt = elt->next_same_value;
5176 src_elt_cost = 10000;
5179 /* We don't normally have an insn matching (set (pc) (pc)), so
5180 check for this separately here. We will delete such an
5181 insn below.
5183 Tablejump insns contain a USE of the table, so simply replacing
5184 the operand with the constant won't match. This is simply an
5185 unconditional branch, however, and is therefore valid. Just
5186 insert the substitution here and we will delete and re-emit
5187 the insn later. */
5189 if (n_sets == 1 && dest == pc_rtx
5190 && (trial == pc_rtx
5191 || (GET_CODE (trial) == LABEL_REF
5192 && ! condjump_p (insn))))
5194 /* If TRIAL is a label in front of a jump table, we are
5195 really falling through the switch (this is how casesi
5196 insns work), so we must branch around the table. */
5197 if (GET_CODE (trial) == CODE_LABEL
5198 && NEXT_INSN (trial) != 0
5199 && GET_CODE (NEXT_INSN (trial)) == JUMP_INSN
5200 && (GET_CODE (PATTERN (NEXT_INSN (trial))) == ADDR_DIFF_VEC
5201 || GET_CODE (PATTERN (NEXT_INSN (trial))) == ADDR_VEC))
5203 trial = gen_rtx_LABEL_REF (Pmode, get_label_after (trial));
5205 if (trial == pc_rtx)
5207 SET_SRC (sets[i].rtl) = trial;
5208 cse_jumps_altered = 1;
5209 break;
5212 /* We must actually validate the change. Consider a target
5213 where unconditional jumps are more complex than
5214 (set (pc) (label_ref)) such as the fr30. */
5215 if (validate_change (insn, &SET_SRC (sets[i].rtl), trial, 0))
5216 cse_jumps_altered = 1;
5217 break;
5220 /* Look for a substitution that makes a valid insn. */
5221 else if (validate_change (insn, &SET_SRC (sets[i].rtl), trial, 0))
5223 /* If we just made a substitution inside a libcall, then we
5224 need to make the same substitution in any notes attached
5225 to the RETVAL insn. */
5226 if (libcall_insn
5227 && (GET_CODE (sets[i].orig_src) == REG
5228 || GET_CODE (sets[i].orig_src) == SUBREG
5229 || GET_CODE (sets[i].orig_src) == MEM))
5230 replace_rtx (REG_NOTES (libcall_insn), sets[i].orig_src,
5231 canon_reg (SET_SRC (sets[i].rtl), insn));
5233 /* The result of apply_change_group can be ignored; see
5234 canon_reg. */
5236 validate_change (insn, &SET_SRC (sets[i].rtl),
5237 canon_reg (SET_SRC (sets[i].rtl), insn),
5239 apply_change_group ();
5240 break;
5243 /* If we previously found constant pool entries for
5244 constants and this is a constant, try making a
5245 pool entry. Put it in src_folded unless we already have done
5246 this since that is where it likely came from. */
5248 else if (constant_pool_entries_cost
5249 && CONSTANT_P (trial)
5250 && ! (GET_CODE (trial) == CONST
5251 && GET_CODE (XEXP (trial, 0)) == TRUNCATE)
5252 && (src_folded == 0
5253 || (GET_CODE (src_folded) != MEM
5254 && ! src_folded_force_flag))
5255 && GET_MODE_CLASS (mode) != MODE_CC
5256 && mode != VOIDmode)
5258 src_folded_force_flag = 1;
5259 src_folded = trial;
5260 src_folded_cost = constant_pool_entries_cost;
5264 src = SET_SRC (sets[i].rtl);
5266 /* In general, it is good to have a SET with SET_SRC == SET_DEST.
5267 However, there is an important exception: If both are registers
5268 that are not the head of their equivalence class, replace SET_SRC
5269 with the head of the class. If we do not do this, we will have
5270 both registers live over a portion of the basic block. This way,
5271 their lifetimes will likely abut instead of overlapping. */
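/* E.g., if pseudos 65 and 66 share a quantity whose head is 65, a
   no-op copy (set (reg 66) (reg 66)) is rewritten below as
   (set (reg 66) (reg 65)), letting the lifetime of 65 end where that
   of 66 begins.  Register numbers are hypothetical.  */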
5272 if (GET_CODE (dest) == REG
5273 && REGNO_QTY_VALID_P (REGNO (dest)))
5275 int dest_q = REG_QTY (REGNO (dest));
5276 struct qty_table_elem *dest_ent = &qty_table[dest_q];
5278 if (dest_ent->mode == GET_MODE (dest)
5279 && dest_ent->first_reg != REGNO (dest)
5280 && GET_CODE (src) == REG && REGNO (src) == REGNO (dest)
5281 /* Don't do this if the original insn had a hard reg as
5282 SET_SRC or SET_DEST. */
5283 && (GET_CODE (sets[i].src) != REG
5284 || REGNO (sets[i].src) >= FIRST_PSEUDO_REGISTER)
5285 && (GET_CODE (dest) != REG || REGNO (dest) >= FIRST_PSEUDO_REGISTER))
5286 /* We can't call canon_reg here because it won't do anything if
5287 SRC is a hard register. */
5289 int src_q = REG_QTY (REGNO (src));
5290 struct qty_table_elem *src_ent = &qty_table[src_q];
5291 int first = src_ent->first_reg;
5292 rtx new_src
5293 = (first >= FIRST_PSEUDO_REGISTER
5294 ? regno_reg_rtx[first] : gen_rtx_REG (GET_MODE (src), first));
5296 /* We must use validate_change even for this, because this
5297 might be a special no-op instruction, suitable only to
5298 tag notes onto. */
5299 if (validate_change (insn, &SET_SRC (sets[i].rtl), new_src, 0))
5301 src = new_src;
5302 /* If we had a constant that is cheaper than what we are now
5303 setting SRC to, use that constant. We ignored it when we
5304 thought we could make this into a no-op. */
5305 if (src_const && COST (src_const) < COST (src)
5306 && validate_change (insn, &SET_SRC (sets[i].rtl), src_const,
5308 src = src_const;
5313 /* If we made a change, recompute SRC values. */
5314 if (src != sets[i].src)
5316 do_not_record = 0;
5317 hash_arg_in_memory = 0;
5318 sets[i].src = src;
5319 sets[i].src_hash = HASH (src, mode);
5320 sets[i].src_volatile = do_not_record;
5321 sets[i].src_in_memory = hash_arg_in_memory;
5322 sets[i].src_elt = lookup (src, sets[i].src_hash, mode);
5325 /* If this is a single SET, we are setting a register, and we have an
5326 equivalent constant, we want to add a REG_NOTE. We don't want
5327 to write a REG_EQUAL note for a constant pseudo since verifying that
5328 that pseudo hasn't been eliminated is a pain. Such a note also
5329 won't help anything.
5331 Avoid a REG_EQUAL note for (CONST (MINUS (LABEL_REF) (LABEL_REF)))
5332 which can be created for a reference to a compile time computable
5333 entry in a jump table. */
5335 if (n_sets == 1 && src_const && GET_CODE (dest) == REG
5336 && GET_CODE (src_const) != REG
5337 && ! (GET_CODE (src_const) == CONST
5338 && GET_CODE (XEXP (src_const, 0)) == MINUS
5339 && GET_CODE (XEXP (XEXP (src_const, 0), 0)) == LABEL_REF
5340 && GET_CODE (XEXP (XEXP (src_const, 0), 1)) == LABEL_REF))
5342 tem = find_reg_note (insn, REG_EQUAL, NULL_RTX);
5344 /* Make sure that the rtx is not shared with any other insn. */
5345 src_const = copy_rtx (src_const);
5347 /* Record the actual constant value in a REG_EQUAL note, making
5348 a new one if one does not already exist. */
5349 if (tem)
5350 XEXP (tem, 0) = src_const;
5351 else
5352 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_EQUAL,
5353 src_const, REG_NOTES (insn));
5355 /* If storing a constant value in a register that
5356 previously held the constant value 0,
5357 record this fact with a REG_WAS_0 note on this insn.
5359 Note that the *register* is required to have previously held 0,
5360 not just any register in the quantity and we must point to the
5361 insn that set that register to zero.
5363 Rather than track each register individually, we just see if
5364 the last set for this quantity was for this register. */
5366 if (REGNO_QTY_VALID_P (REGNO (dest)))
5368 int dest_q = REG_QTY (REGNO (dest));
5369 struct qty_table_elem *dest_ent = &qty_table[dest_q];
5371 if (dest_ent->const_rtx == const0_rtx)
5373 /* See if we previously had a REG_WAS_0 note. */
5374 rtx note = find_reg_note (insn, REG_WAS_0, NULL_RTX);
5375 rtx const_insn = dest_ent->const_insn;
5377 if ((tem = single_set (const_insn)) != 0
5378 && rtx_equal_p (SET_DEST (tem), dest))
5380 if (note)
5381 XEXP (note, 0) = const_insn;
5382 else
5383 REG_NOTES (insn)
5384 = gen_rtx_INSN_LIST (REG_WAS_0, const_insn,
5385 REG_NOTES (insn));
5391 /* Now deal with the destination. */
5392 do_not_record = 0;
5394 /* Look within any SIGN_EXTRACT or ZERO_EXTRACT
5395 to the MEM or REG within it. */
5396 while (GET_CODE (dest) == SIGN_EXTRACT
5397 || GET_CODE (dest) == ZERO_EXTRACT
5398 || GET_CODE (dest) == SUBREG
5399 || GET_CODE (dest) == STRICT_LOW_PART)
5400 dest = XEXP (dest, 0);
5402 sets[i].inner_dest = dest;
5404 if (GET_CODE (dest) == MEM)
5406 #ifdef PUSH_ROUNDING
5407 /* Stack pushes invalidate the stack pointer. */
5408 rtx addr = XEXP (dest, 0);
5409 if ((GET_CODE (addr) == PRE_DEC || GET_CODE (addr) == PRE_INC
5410 || GET_CODE (addr) == POST_DEC || GET_CODE (addr) == POST_INC)
5411 && XEXP (addr, 0) == stack_pointer_rtx)
5412 invalidate (stack_pointer_rtx, Pmode);
5413 #endif
5414 dest = fold_rtx (dest, insn);
5417 /* Compute the hash code of the destination now,
5418 before the effects of this instruction are recorded,
5419 since the register values used in the address computation
5420 are those before this instruction. */
5421 sets[i].dest_hash = HASH (dest, mode);
5423 /* Don't enter a bit-field in the hash table
5424 because the value in it after the store
5425 may not equal what was stored, due to truncation. */
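/* A worked instance of the test below, assuming a 4-bit field
   (INTVAL (width) == 4): the mask (HOST_WIDE_INT) (-1) << 4 covers
   every bit above the field. Storing (const_int 12) sets no bits
   above the field (12 & ~0xf == 0), so the value survives the store
   and may be recorded; (const_int 28) would be truncated to 12
   (28 & ~0xf != 0), so it must not be. */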
5427 if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
5428 || GET_CODE (SET_DEST (sets[i].rtl)) == SIGN_EXTRACT)
5430 rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
5432 if (src_const != 0 && GET_CODE (src_const) == CONST_INT
5433 && GET_CODE (width) == CONST_INT
5434 && INTVAL (width) < HOST_BITS_PER_WIDE_INT
5435 && ! (INTVAL (src_const)
5436 & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
5437 /* Exception: if the value is constant,
5438 and it won't be truncated, record it. */
5440 else
5442 /* This is chosen so that the destination will be invalidated
5443 but no new value will be recorded.
5444 We must invalidate because sometimes constant
5445 values can be recorded for bitfields. */
5446 sets[i].src_elt = 0;
5447 sets[i].src_volatile = 1;
5448 src_eqv = 0;
5449 src_eqv_elt = 0;
5453 /* If only one set in a JUMP_INSN and it is now a no-op, we can delete
5454 the insn. */
5455 else if (n_sets == 1 && dest == pc_rtx && src == pc_rtx)
5457 /* One less use of the label this insn used to jump to. */
5458 if (JUMP_LABEL (insn) != 0)
5459 --LABEL_NUSES (JUMP_LABEL (insn));
5460 PUT_CODE (insn, NOTE);
5461 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
5462 NOTE_SOURCE_FILE (insn) = 0;
5463 cse_jumps_altered = 1;
5464 /* No more processing for this set. */
5465 sets[i].rtl = 0;
5468 /* If this SET is now setting PC to a label, we know it used to
5469 be a conditional or computed branch. So we see if we can follow
5470 it. If it was a computed branch, delete it and re-emit. */
5471 else if (dest == pc_rtx && GET_CODE (src) == LABEL_REF)
5473 /* If this is not in the format for a simple branch and
5474 this is the only SET in the insn, re-emit it. */
5475 if (! simplejump_p (insn) && n_sets == 1)
5477 rtx new = emit_jump_insn_before (gen_jump (XEXP (src, 0)), insn);
5478 JUMP_LABEL (new) = XEXP (src, 0);
5479 LABEL_NUSES (XEXP (src, 0))++;
5480 insn = new;
5482 else
5483 /* Otherwise, force rerecognition, since it probably had
5484 a different pattern before.
5485 This shouldn't really be necessary, since whatever
5486 changed the source value above should have done this.
5487 Until the right place is found, might as well do this here. */
5488 INSN_CODE (insn) = -1;
5490 never_reached_warning (insn);
5492 /* Now emit a BARRIER after the unconditional jump. Do not bother
5493 deleting any unreachable code, let jump/flow do that. */
5494 if (NEXT_INSN (insn) != 0
5495 && GET_CODE (NEXT_INSN (insn)) != BARRIER)
5496 emit_barrier_after (insn);
5498 cse_jumps_altered = 1;
5499 sets[i].rtl = 0;
5502 /* If destination is volatile, invalidate it and then do no further
5503 processing for this assignment. */
5505 else if (do_not_record)
5507 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG
5508 || GET_CODE (dest) == MEM)
5509 invalidate (dest, VOIDmode);
5510 else if (GET_CODE (dest) == STRICT_LOW_PART
5511 || GET_CODE (dest) == ZERO_EXTRACT)
5512 invalidate (XEXP (dest, 0), GET_MODE (dest));
5513 sets[i].rtl = 0;
5516 if (sets[i].rtl != 0 && dest != SET_DEST (sets[i].rtl))
5517 sets[i].dest_hash = HASH (SET_DEST (sets[i].rtl), mode);
5519 #ifdef HAVE_cc0
5520 /* If setting CC0, record what it was set to, or a constant, if it
5521 is equivalent to a constant. If it is being set to a floating-point
5522 value, make a COMPARE with the appropriate constant of 0. If we
5523 don't do this, later code can interpret this as a test against
5524 const0_rtx, which can cause problems if we try to put it into an
5525 insn as a floating-point operand. */
5526 if (dest == cc0_rtx)
5528 this_insn_cc0 = src_const && mode != VOIDmode ? src_const : src;
5529 this_insn_cc0_mode = mode;
5530 if (FLOAT_MODE_P (mode))
5531 this_insn_cc0 = gen_rtx_COMPARE (VOIDmode, this_insn_cc0,
5532 CONST0_RTX (mode));
5534 #endif
5537 /* Now enter all non-volatile source expressions in the hash table
5538 if they are not already present.
5539 Record their equivalence classes in src_elt.
5540 This way we can insert the corresponding destinations into
5541 the same classes even if the actual sources are no longer in them
5542 (having been invalidated). */
5544 if (src_eqv && src_eqv_elt == 0 && sets[0].rtl != 0 && ! src_eqv_volatile
5545 && ! rtx_equal_p (src_eqv, SET_DEST (sets[0].rtl)))
5547 register struct table_elt *elt;
5548 register struct table_elt *classp = sets[0].src_elt;
5549 rtx dest = SET_DEST (sets[0].rtl);
5550 enum machine_mode eqvmode = GET_MODE (dest);
5552 if (GET_CODE (dest) == STRICT_LOW_PART)
5554 eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
5555 classp = 0;
5557 if (insert_regs (src_eqv, classp, 0))
5559 rehash_using_reg (src_eqv);
5560 src_eqv_hash = HASH (src_eqv, eqvmode);
5562 elt = insert (src_eqv, classp, src_eqv_hash, eqvmode);
5563 elt->in_memory = src_eqv_in_memory;
5564 src_eqv_elt = elt;
5566 /* Check to see if src_eqv_elt is the same as a set source which
5567 does not yet have an elt, and if so set the elt of the set source
5568 to src_eqv_elt. */
5569 for (i = 0; i < n_sets; i++)
5570 if (sets[i].rtl && sets[i].src_elt == 0
5571 && rtx_equal_p (SET_SRC (sets[i].rtl), src_eqv))
5572 sets[i].src_elt = src_eqv_elt;
5575 for (i = 0; i < n_sets; i++)
5576 if (sets[i].rtl && ! sets[i].src_volatile
5577 && ! rtx_equal_p (SET_SRC (sets[i].rtl), SET_DEST (sets[i].rtl)))
5579 if (GET_CODE (SET_DEST (sets[i].rtl)) == STRICT_LOW_PART)
5581 /* REG_EQUAL in setting a STRICT_LOW_PART
5582 gives an equivalent for the entire destination register,
5583 not just for the subreg being stored in now.
5584 This is a more interesting equivalence, so we arrange later
5585 to treat the entire reg as the destination. */
5586 sets[i].src_elt = src_eqv_elt;
5587 sets[i].src_hash = src_eqv_hash;
5589 else
5591 /* Insert source and constant equivalent into hash table, if not
5592 already present. */
5593 register struct table_elt *classp = src_eqv_elt;
5594 register rtx src = sets[i].src;
5595 register rtx dest = SET_DEST (sets[i].rtl);
5596 enum machine_mode mode
5597 = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
5599 if (sets[i].src_elt == 0)
5601 /* Don't put a hard register source into the table if this is
5602 the last insn of a libcall. In this case, we only need
5603 to put src_eqv_elt in src_elt. */
5604 if (GET_CODE (src) != REG
5605 || REGNO (src) >= FIRST_PSEUDO_REGISTER
5606 || ! find_reg_note (insn, REG_RETVAL, NULL_RTX))
5608 register struct table_elt *elt;
5610 /* Note that these insert_regs calls cannot remove
5611 any of the src_elt's, because they would have failed to
5612 match if not still valid. */
5613 if (insert_regs (src, classp, 0))
5615 rehash_using_reg (src);
5616 sets[i].src_hash = HASH (src, mode);
5618 elt = insert (src, classp, sets[i].src_hash, mode);
5619 elt->in_memory = sets[i].src_in_memory;
5620 sets[i].src_elt = classp = elt;
5622 else
5623 sets[i].src_elt = classp;
5625 if (sets[i].src_const && sets[i].src_const_elt == 0
5626 && src != sets[i].src_const
5627 && ! rtx_equal_p (sets[i].src_const, src))
5628 sets[i].src_elt = insert (sets[i].src_const, classp,
5629 sets[i].src_const_hash, mode);
5632 else if (sets[i].src_elt == 0)
5633 /* If we did not insert the source into the hash table (e.g., it was
5634 volatile), note the equivalence class for the REG_EQUAL value, if any,
5635 so that the destination goes into that class. */
5636 sets[i].src_elt = src_eqv_elt;
5638 invalidate_from_clobbers (x);
5640 /* Some registers are invalidated by subroutine calls. Memory is
5641 invalidated by non-constant calls. */
5643 if (GET_CODE (insn) == CALL_INSN)
5645 if (! CONST_CALL_P (insn))
5646 invalidate_memory ();
5647 invalidate_for_call ();
5650 /* Now invalidate everything set by this instruction.
5651 If a SUBREG or other funny destination is being set,
5652 sets[i].rtl is still nonzero, so here we invalidate the reg
5653 a part of which is being set. */
5655 for (i = 0; i < n_sets; i++)
5656 if (sets[i].rtl)
5658 /* We can't use the inner dest, because the mode associated with
5659 a ZERO_EXTRACT is significant. */
5660 register rtx dest = SET_DEST (sets[i].rtl);
5662 /* Needed for registers to remove the register from its
5663 previous quantity's chain.
5664 Needed for memory if this is a nonvarying address, unless
5665 we have just done an invalidate_memory that covers even those. */
5666 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG
5667 || GET_CODE (dest) == MEM)
5668 invalidate (dest, VOIDmode);
5669 else if (GET_CODE (dest) == STRICT_LOW_PART
5670 || GET_CODE (dest) == ZERO_EXTRACT)
5671 invalidate (XEXP (dest, 0), GET_MODE (dest));
5674 /* A volatile ASM invalidates everything. */
5675 if (GET_CODE (insn) == INSN
5676 && GET_CODE (PATTERN (insn)) == ASM_OPERANDS
5677 && MEM_VOLATILE_P (PATTERN (insn)))
5678 flush_hash_table ();
5680 /* Make sure registers mentioned in destinations
5681 are safe for use in an expression to be inserted.
5682 This removes from the hash table
5683 any invalid entry that refers to one of these registers.
5685 We don't care about the return value from mention_regs because
5686 we are going to hash the SET_DEST values unconditionally. */
5688 for (i = 0; i < n_sets; i++)
5690 if (sets[i].rtl)
5692 rtx x = SET_DEST (sets[i].rtl);
5694 if (GET_CODE (x) != REG)
5695 mention_regs (x);
5696 else
5698 /* We used to rely on all references to a register becoming
5699 inaccessible when a register changes to a new quantity,
5700 since that changes the hash code. However, that is not
5701 safe, since after HASH_SIZE new quantities we get a
5702 hash 'collision' of a register with its own invalid
5703 entries. And since SUBREGs have been changed so that their
5704 hash code no longer varies with the hash code of the register,
5705 it wouldn't work any longer at all. So we have to check
5706 for any invalid references lying around now.
5707 This code is similar to the REG case in mention_regs,
5708 but it knows that reg_tick has been incremented, and
5709 it leaves reg_in_table as -1. */
5710 register int regno = REGNO (x);
5711 register int endregno
5712 = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
5713 : HARD_REGNO_NREGS (regno, GET_MODE (x)));
5714 int i;
5716 for (i = regno; i < endregno; i++)
5718 if (REG_IN_TABLE (i) >= 0)
5720 remove_invalid_refs (i);
5721 REG_IN_TABLE (i) = -1;
5728 /* We may have just removed some of the src_elt's from the hash table.
5729 So replace each one with the current head of the same class. */
5731 for (i = 0; i < n_sets; i++)
5732 if (sets[i].rtl)
5734 if (sets[i].src_elt && sets[i].src_elt->first_same_value == 0)
5735 /* If elt was removed, find current head of same class,
5736 or 0 if nothing remains of that class. */
5738 register struct table_elt *elt = sets[i].src_elt;
5740 while (elt && elt->prev_same_value)
5741 elt = elt->prev_same_value;
5743 while (elt && elt->first_same_value == 0)
5744 elt = elt->next_same_value;
5745 sets[i].src_elt = elt ? elt->first_same_value : 0;
5749 /* Now insert the destinations into their equivalence classes. */
5751 for (i = 0; i < n_sets; i++)
5752 if (sets[i].rtl)
5754 register rtx dest = SET_DEST (sets[i].rtl);
5755 rtx inner_dest = sets[i].inner_dest;
5756 register struct table_elt *elt;
5758 /* Don't record value if we are not supposed to risk allocating
5759 floating-point values in registers that might be wider than
5760 memory. */
5761 if ((flag_float_store
5762 && GET_CODE (dest) == MEM
5763 && FLOAT_MODE_P (GET_MODE (dest)))
5764 /* Don't record BLKmode values, because we don't know their
5765 size, and can't be sure that other BLKmode values
5766 have the same or smaller size. */
5767 || GET_MODE (dest) == BLKmode
5768 /* Don't record values of destinations set inside a libcall block
5769 since we might delete the libcall. Things should have been set
5770 up so we won't want to reuse such a value, but we play it safe
5771 here. */
5772 || libcall_insn
5773 /* If we didn't put a REG_EQUAL value or a source into the hash
5774 table, there is no point in recording DEST. */
5775 || sets[i].src_elt == 0
5776 /* If DEST is a paradoxical SUBREG and SRC is a ZERO_EXTEND
5777 or SIGN_EXTEND, don't record DEST since it can cause
5778 some tracking to be wrong.
5780 ??? Think about this more later. */
5781 || (GET_CODE (dest) == SUBREG
5782 && (GET_MODE_SIZE (GET_MODE (dest))
5783 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
5784 && (GET_CODE (sets[i].src) == SIGN_EXTEND
5785 || GET_CODE (sets[i].src) == ZERO_EXTEND)))
5786 continue;
5788 /* STRICT_LOW_PART isn't part of the value BEING set,
5789 and neither is the SUBREG inside it.
5790 Note that in this case SETS[I].SRC_ELT is really SRC_EQV_ELT. */
5791 if (GET_CODE (dest) == STRICT_LOW_PART)
5792 dest = SUBREG_REG (XEXP (dest, 0));
5794 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG)
5795 /* Registers must also be inserted into chains for quantities. */
5796 if (insert_regs (dest, sets[i].src_elt, 1))
5798 /* If `insert_regs' changes something, the hash code must be
5799 recalculated. */
5800 rehash_using_reg (dest);
5801 sets[i].dest_hash = HASH (dest, GET_MODE (dest));
5804 if (GET_CODE (inner_dest) == MEM
5805 && GET_CODE (XEXP (inner_dest, 0)) == ADDRESSOF)
5806 /* Given (SET (MEM (ADDRESSOF (X))) Y) we don't want to say
5807 that (MEM (ADDRESSOF (X))) is equivalent to Y.
5808 Consider the case in which the address of the MEM is
5809 passed to a function, which alters the MEM. Then, if we
5810 later use Y instead of the MEM we'll miss the update. */
5811 elt = insert (dest, 0, sets[i].dest_hash, GET_MODE (dest));
5812 else
5813 elt = insert (dest, sets[i].src_elt,
5814 sets[i].dest_hash, GET_MODE (dest));
5816 elt->in_memory = (GET_CODE (sets[i].inner_dest) == MEM
5817 && (! RTX_UNCHANGING_P (sets[i].inner_dest)
5818 || FIXED_BASE_PLUS_P (XEXP (sets[i].inner_dest,
5819 0))));
5821 /* If we have (set (subreg:m1 (reg:m2 foo) 0) (bar:m1)), M1 is no
5822 narrower than M2, and both M1 and M2 are the same number of words,
5823 we are also doing (set (reg:m2 foo) (subreg:m2 (bar:m1) 0)) so
5824 make that equivalence as well.
5826 However, BAR may have equivalences for which gen_lowpart_if_possible
5827 will produce a simpler value than it produces when applied to
5828 BAR itself (e.g., if BAR was ZERO_EXTENDed from M2), so we will scan all
5829 BAR's equivalences. If we don't get a simplified form, make
5830 the SUBREG. It will not be used in an equivalence, but will
5831 cause two similar assignments to be detected.
5833 Note the loop below will find SUBREG_REG (DEST) since we have
5834 already entered SRC and DEST of the SET in the table. */
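/* A hypothetical instance, assuming a 32-bit target where SImode and
   HImode occupy one word each:

       (set (subreg:SI (reg:HI 100) 0) (reg:SI 101))

   also tells us

       (set (reg:HI 100) (subreg:HI (reg:SI 101) 0))

   so the loop below enters the HImode low part of each of BAR's
   equivalents as equivalent to (reg:HI 100). */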
5836 if (GET_CODE (dest) == SUBREG
5837 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) - 1)
5838 / UNITS_PER_WORD)
5839 == (GET_MODE_SIZE (GET_MODE (dest)) - 1)/ UNITS_PER_WORD)
5840 && (GET_MODE_SIZE (GET_MODE (dest))
5841 >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
5842 && sets[i].src_elt != 0)
5844 enum machine_mode new_mode = GET_MODE (SUBREG_REG (dest));
5845 struct table_elt *elt, *classp = 0;
5847 for (elt = sets[i].src_elt->first_same_value; elt;
5848 elt = elt->next_same_value)
5850 rtx new_src = 0;
5851 unsigned src_hash;
5852 struct table_elt *src_elt;
5854 /* Ignore invalid entries. */
5855 if (GET_CODE (elt->exp) != REG
5856 && ! exp_equiv_p (elt->exp, elt->exp, 1, 0))
5857 continue;
5859 new_src = gen_lowpart_if_possible (new_mode, elt->exp);
5860 if (new_src == 0)
5861 new_src = gen_rtx_SUBREG (new_mode, elt->exp, 0);
5863 src_hash = HASH (new_src, new_mode);
5864 src_elt = lookup (new_src, src_hash, new_mode);
5866 /* Put the new source in the hash table if it isn't
5867 already. */
5868 if (src_elt == 0)
5870 if (insert_regs (new_src, classp, 0))
5872 rehash_using_reg (new_src);
5873 src_hash = HASH (new_src, new_mode);
5875 src_elt = insert (new_src, classp, src_hash, new_mode);
5876 src_elt->in_memory = elt->in_memory;
5878 else if (classp && classp != src_elt->first_same_value)
5879 /* Show that two things that we've seen before are
5880 actually the same. */
5881 merge_equiv_classes (src_elt, classp);
5883 classp = src_elt->first_same_value;
5884 /* Ignore invalid entries. */
5885 while (classp
5886 && GET_CODE (classp->exp) != REG
5887 && ! exp_equiv_p (classp->exp, classp->exp, 1, 0))
5888 classp = classp->next_same_value;
5893 /* Special handling for (set REG0 REG1)
5894 where REG0 is the "cheapest", cheaper than REG1.
5895 After cse, REG1 will probably not be used in the sequel,
5896 so (if easily done) change this insn to (set REG1 REG0) and
5897 replace REG1 with REG0 in the previous insn that computed their value.
5898 Then REG1 will become a dead store and won't cloud the situation
5899 for later optimizations.
5901 Do not make this change if REG1 is a hard register, because it will
5902 then be used in the sequel and we may be changing a two-operand insn
5903 into a three-operand insn.
5905 Also do not do this if we are operating on a copy of INSN.
5907 Also don't do this if INSN ends a libcall; this would cause an unrelated
5908 register to be set in the middle of a libcall, and we then get bad code
5909 if the libcall is deleted. */
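/* The rewrite described above, with hypothetical pseudo numbers and
   (reg 100) the cheaper register:

       prev: (set (reg 101) (plus (reg 102) (reg 103)))
       insn: (set (reg 100) (reg 101))

   becomes

       prev: (set (reg 100) (plus (reg 102) (reg 103)))
       insn: (set (reg 101) (reg 100))

   and the copy into (reg 101) is now likely a dead store. */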
5911 if (n_sets == 1 && sets[0].rtl && GET_CODE (SET_DEST (sets[0].rtl)) == REG
5912 && NEXT_INSN (PREV_INSN (insn)) == insn
5913 && GET_CODE (SET_SRC (sets[0].rtl)) == REG
5914 && REGNO (SET_SRC (sets[0].rtl)) >= FIRST_PSEUDO_REGISTER
5915 && REGNO_QTY_VALID_P (REGNO (SET_SRC (sets[0].rtl))))
5917 int src_q = REG_QTY (REGNO (SET_SRC (sets[0].rtl)));
5918 struct qty_table_elem *src_ent = &qty_table[src_q];
5920 if ((src_ent->first_reg == REGNO (SET_DEST (sets[0].rtl)))
5921 && ! find_reg_note (insn, REG_RETVAL, NULL_RTX))
5923 rtx prev = PREV_INSN (insn);
5924 while (prev && GET_CODE (prev) == NOTE)
5925 prev = PREV_INSN (prev);
5927 if (prev && GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SET
5928 && SET_DEST (PATTERN (prev)) == SET_SRC (sets[0].rtl))
5930 rtx dest = SET_DEST (sets[0].rtl);
5931 rtx note = find_reg_note (prev, REG_EQUIV, NULL_RTX);
5933 validate_change (prev, & SET_DEST (PATTERN (prev)), dest, 1);
5934 validate_change (insn, & SET_DEST (sets[0].rtl),
5935 SET_SRC (sets[0].rtl), 1);
5936 validate_change (insn, & SET_SRC (sets[0].rtl), dest, 1);
5937 apply_change_group ();
5939 /* If REG1 was equivalent to a constant, REG0 is not. */
5940 if (note)
5941 PUT_REG_NOTE_KIND (note, REG_EQUAL);
5943 /* If there was a REG_WAS_0 note on PREV, remove it. Move
5944 any REG_WAS_0 note on INSN to PREV. */
5945 note = find_reg_note (prev, REG_WAS_0, NULL_RTX);
5946 if (note)
5947 remove_note (prev, note);
5949 note = find_reg_note (insn, REG_WAS_0, NULL_RTX);
5950 if (note)
5952 remove_note (insn, note);
5953 XEXP (note, 1) = REG_NOTES (prev);
5954 REG_NOTES (prev) = note;
5957 /* If INSN has a REG_EQUAL note, and this note mentions REG0,
5958 then we must delete it, because the value in REG0 has changed. */
5959 note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
5960 if (note && reg_mentioned_p (dest, XEXP (note, 0)))
5961 remove_note (insn, note);
5966 /* If this is a conditional jump insn, record any known equivalences due to
5967 the condition being tested. */
5969 last_jump_equiv_class = 0;
5970 if (GET_CODE (insn) == JUMP_INSN
5971 && n_sets == 1 && GET_CODE (x) == SET
5972 && GET_CODE (SET_SRC (x)) == IF_THEN_ELSE)
5973 record_jump_equiv (insn, 0);
5975 #ifdef HAVE_cc0
5976 /* If the previous insn set CC0 and this insn no longer references CC0,
5977 delete the previous insn. Here we use the fact that nothing expects CC0
5978 to be valid over an insn, which is true until the final pass. */
5979 if (prev_insn && GET_CODE (prev_insn) == INSN
5980 && (tem = single_set (prev_insn)) != 0
5981 && SET_DEST (tem) == cc0_rtx
5982 && ! reg_mentioned_p (cc0_rtx, x))
5984 PUT_CODE (prev_insn, NOTE);
5985 NOTE_LINE_NUMBER (prev_insn) = NOTE_INSN_DELETED;
5986 NOTE_SOURCE_FILE (prev_insn) = 0;
5989 prev_insn_cc0 = this_insn_cc0;
5990 prev_insn_cc0_mode = this_insn_cc0_mode;
5991 #endif
5993 prev_insn = insn;
5996 /* Remove from the hash table all expressions that reference memory. */
5998 static void
5999 invalidate_memory ()
6001 register int i;
6002 register struct table_elt *p, *next;
6004 for (i = 0; i < HASH_SIZE; i++)
6005 for (p = table[i]; p; p = next)
6007 next = p->next_same_hash;
6008 if (p->in_memory)
6009 remove_from_table (p, i);
6013 /* If ADDR is an address that implicitly affects the stack pointer, return
6014 1 and update the register tables to show the effect. Else, return 0. */
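/* For example, a push such as (set (mem:SI (pre_dec:SI (reg sp))) ...)
   hands this function the address (pre_dec:SI (reg sp)); we bump
   REG_TICK for the stack pointer, so stale equivalences that mention
   it can no longer match, and return 1. */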
6016 static int
6017 addr_affects_sp_p (addr)
6018 register rtx addr;
6020 if ((GET_CODE (addr) == PRE_DEC || GET_CODE (addr) == PRE_INC
6021 || GET_CODE (addr) == POST_DEC || GET_CODE (addr) == POST_INC)
6022 && GET_CODE (XEXP (addr, 0)) == REG
6023 && REGNO (XEXP (addr, 0)) == STACK_POINTER_REGNUM)
6025 if (REG_TICK (STACK_POINTER_REGNUM) >= 0)
6026 REG_TICK (STACK_POINTER_REGNUM)++;
6028 /* This should be *very* rare. */
6029 if (TEST_HARD_REG_BIT (hard_regs_in_table, STACK_POINTER_REGNUM))
6030 invalidate (stack_pointer_rtx, VOIDmode);
6032 return 1;
6035 return 0;
6038 /* Perform invalidation on the basis of everything about an insn
6039 except for invalidating the actual places that are SET in it.
6040 This includes the places CLOBBERed, and anything that might
6041 alias with something that is SET or CLOBBERed.
6043 X is the pattern of the insn. */
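/* For instance, given a pattern like (register numbers hypothetical)

       (parallel [(set (reg:SI 100) (plus:SI (reg:SI 101) (reg:SI 102)))
                  (clobber (reg:SI 104))])

   this function invalidates only (reg:SI 104); the SET destination
   itself is invalidated by our caller. */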
6045 static void
6046 invalidate_from_clobbers (x)
6047 rtx x;
6049 if (GET_CODE (x) == CLOBBER)
6051 rtx ref = XEXP (x, 0);
6052 if (ref)
6054 if (GET_CODE (ref) == REG || GET_CODE (ref) == SUBREG
6055 || GET_CODE (ref) == MEM)
6056 invalidate (ref, VOIDmode);
6057 else if (GET_CODE (ref) == STRICT_LOW_PART
6058 || GET_CODE (ref) == ZERO_EXTRACT)
6059 invalidate (XEXP (ref, 0), GET_MODE (ref));
6062 else if (GET_CODE (x) == PARALLEL)
6064 register int i;
6065 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
6067 register rtx y = XVECEXP (x, 0, i);
6068 if (GET_CODE (y) == CLOBBER)
6070 rtx ref = XEXP (y, 0);
6071 if (GET_CODE (ref) == REG || GET_CODE (ref) == SUBREG
6072 || GET_CODE (ref) == MEM)
6073 invalidate (ref, VOIDmode);
6074 else if (GET_CODE (ref) == STRICT_LOW_PART
6075 || GET_CODE (ref) == ZERO_EXTRACT)
6076 invalidate (XEXP (ref, 0), GET_MODE (ref));
6082 /* Process X, part of the REG_NOTES of an insn. Look at any REG_EQUAL notes
6083 and replace any registers in them with either an equivalent constant
6084 or the canonical form of the register. If we are inside an address,
6085 only do this if the address remains valid.
6087 OBJECT is 0 except when within a MEM in which case it is the MEM.
6089 Return the replacement for X. */
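/* For example (register number hypothetical), if a REG_EQUAL note
   holds (plus:SI (reg:SI 105) (const_int 4)) and the quantity of
   (reg:SI 105) is known to be (const_int 8), the register is replaced,
   giving (plus:SI (const_int 8) (const_int 4)), which later folding
   can reduce to (const_int 12). */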
6091 static rtx
6092 cse_process_notes (x, object)
6093 rtx x;
6094 rtx object;
6096 enum rtx_code code = GET_CODE (x);
6097 const char *fmt = GET_RTX_FORMAT (code);
6098 int i;
6100 switch (code)
6102 case CONST_INT:
6103 case CONST:
6104 case SYMBOL_REF:
6105 case LABEL_REF:
6106 case CONST_DOUBLE:
6107 case PC:
6108 case CC0:
6109 case LO_SUM:
6110 return x;
6112 case MEM:
6113 XEXP (x, 0) = cse_process_notes (XEXP (x, 0), x);
6114 return x;
6116 case EXPR_LIST:
6117 case INSN_LIST:
6118 if (REG_NOTE_KIND (x) == REG_EQUAL)
6119 XEXP (x, 0) = cse_process_notes (XEXP (x, 0), NULL_RTX);
6120 if (XEXP (x, 1))
6121 XEXP (x, 1) = cse_process_notes (XEXP (x, 1), NULL_RTX);
6122 return x;
6124 case SIGN_EXTEND:
6125 case ZERO_EXTEND:
6126 case SUBREG:
6128 rtx new = cse_process_notes (XEXP (x, 0), object);
6129 /* We don't substitute VOIDmode constants into these rtx,
6130 since they would impede folding. */
6131 if (GET_MODE (new) != VOIDmode)
6132 validate_change (object, &XEXP (x, 0), new, 0);
6133 return x;
6136 case REG:
6137 i = REG_QTY (REGNO (x));
6139 /* Return a constant or a constant register. */
6140 if (REGNO_QTY_VALID_P (REGNO (x)))
6142 struct qty_table_elem *ent = &qty_table[i];
6144 if (ent->const_rtx != NULL_RTX
6145 && (CONSTANT_P (ent->const_rtx)
6146 || GET_CODE (ent->const_rtx) == REG))
6148 rtx new = gen_lowpart_if_possible (GET_MODE (x), ent->const_rtx);
6149 if (new)
6150 return new;
6154 /* Otherwise, canonicalize this register. */
6155 return canon_reg (x, NULL_RTX);
6157 default:
6158 break;
6161 for (i = 0; i < GET_RTX_LENGTH (code); i++)
6162 if (fmt[i] == 'e')
6163 validate_change (object, &XEXP (x, i),
6164 cse_process_notes (XEXP (x, i), object), 0);
6166 return x;
6169 /* Find common subexpressions between the end test of a loop and the beginning
6170 of the loop. LOOP_START is the CODE_LABEL at the start of a loop.
6172 Often we have a loop where an expression in the exit test is used
6173 in the body of the loop. For example "while (*p) *q++ = *p++;".
6174 Because of the way we duplicate the loop exit test in front of the loop,
6175 however, we don't detect that common subexpression. This will be caught
6176 when global cse is implemented, but this is a quite common case.
6178 This function handles the most common cases of these common expressions.
6179 It is called after we have processed the basic block ending with the
6180 NOTE_INSN_LOOP_END note that ends a loop, when the preceding JUMP_INSN
6181 jumps to a label used only once. */
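/* In the "while (*p) *q++ = *p++;" example above, jump.c has placed a
   copy of the exit test, including a load of *p, in front of the loop;
   the load of *p in the body computes the same value, but the
   intervening CODE_LABEL makes us forget everything we knew, so the
   ordinary per-extended-basic-block scan never connects the two loads.
   The code below patches up exactly such cases. */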
6183 static void
6184 cse_around_loop (loop_start)
6185 rtx loop_start;
6187 rtx insn;
6188 int i;
6189 struct table_elt *p;
6191 /* If the jump at the end of the loop doesn't go to the start, we don't
6192 do anything. */
6193 for (insn = PREV_INSN (loop_start);
6194 insn && (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) >= 0);
6195 insn = PREV_INSN (insn))
6198 if (insn == 0
6199 || GET_CODE (insn) != NOTE
6200 || NOTE_LINE_NUMBER (insn) != NOTE_INSN_LOOP_BEG)
6201 return;
6203 /* If the last insn of the loop (the end test) was an NE comparison,
6204 we will interpret it as an EQ comparison, since we fell through
6205 the loop. Any equivalences resulting from that comparison are
6206 therefore not valid and must be invalidated. */
6207 if (last_jump_equiv_class)
6208 for (p = last_jump_equiv_class->first_same_value; p;
6209 p = p->next_same_value)
6211 if (GET_CODE (p->exp) == MEM || GET_CODE (p->exp) == REG
6212 || (GET_CODE (p->exp) == SUBREG
6213 && GET_CODE (SUBREG_REG (p->exp)) == REG))
6214 invalidate (p->exp, VOIDmode);
6215 else if (GET_CODE (p->exp) == STRICT_LOW_PART
6216 || GET_CODE (p->exp) == ZERO_EXTRACT)
6217 invalidate (XEXP (p->exp, 0), GET_MODE (p->exp));
6220 /* Process insns starting after LOOP_START until we hit a CALL_INSN or
6221 a CODE_LABEL (we could handle a CALL_INSN, but it isn't worth it).
6223 The only thing we do with SET_DEST is invalidate entries, so we
6224 can safely process each SET in order. It is slightly less efficient
6225 to do so, but we only want to handle the most common cases.
6227 The gen_move_insn call in cse_set_around_loop may create new pseudos.
6228 These pseudos won't have valid entries in any of the tables indexed
6229 by register number, such as reg_qty. We avoid out-of-range array
6230 accesses by not processing any instructions created after cse started. */
6232 for (insn = NEXT_INSN (loop_start);
6233 GET_CODE (insn) != CALL_INSN && GET_CODE (insn) != CODE_LABEL
6234 && INSN_UID (insn) < max_insn_uid
6235 && ! (GET_CODE (insn) == NOTE
6236 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END);
6237 insn = NEXT_INSN (insn))
6239 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
6240 && (GET_CODE (PATTERN (insn)) == SET
6241 || GET_CODE (PATTERN (insn)) == CLOBBER))
6242 cse_set_around_loop (PATTERN (insn), insn, loop_start);
6243 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
6244 && GET_CODE (PATTERN (insn)) == PARALLEL)
6245 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
6246 if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET
6247 || GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == CLOBBER)
6248 cse_set_around_loop (XVECEXP (PATTERN (insn), 0, i), insn,
6249 loop_start);
6253 /* Process one SET of an insn that was skipped. We ignore CLOBBERs
6254 since they are done elsewhere. This function is called via note_stores. */
6256 static void
6257 invalidate_skipped_set (dest, set, data)
6258 rtx dest;
6259 rtx set;
6260 void *data ATTRIBUTE_UNUSED;
6262 enum rtx_code code = GET_CODE (dest);
6264 if (code == MEM
6265 && ! addr_affects_sp_p (dest) /* If this is not a stack push ... */
6266 /* There are times when an address can appear varying and be a PLUS
6267 during this scan when it would be a fixed address were we to know
6268 the proper equivalences. So invalidate all memory if there is
6269 a BLKmode or nonscalar memory reference or a reference to a
6270 variable address. */
6271 && (MEM_IN_STRUCT_P (dest) || GET_MODE (dest) == BLKmode
6272 || cse_rtx_varies_p (XEXP (dest, 0))))
6274 invalidate_memory ();
6275 return;
6278 if (GET_CODE (set) == CLOBBER
6279 #ifdef HAVE_cc0
6280 || dest == cc0_rtx
6281 #endif
6282 || dest == pc_rtx)
6283 return;
6285 if (code == STRICT_LOW_PART || code == ZERO_EXTRACT)
6286 invalidate (XEXP (dest, 0), GET_MODE (dest));
6287 else if (code == REG || code == SUBREG || code == MEM)
6288 invalidate (dest, VOIDmode);
6291 /* Invalidate all insns from START up to the end of the function or the
6292 next label. This is called when we wish to CSE around a block that is
6293 conditionally executed. */
6295 static void
6296 invalidate_skipped_block (start)
6297 rtx start;
6299 rtx insn;
6301 for (insn = start; insn && GET_CODE (insn) != CODE_LABEL;
6302 insn = NEXT_INSN (insn))
6304 if (GET_RTX_CLASS (GET_CODE (insn)) != 'i')
6305 continue;
6307 if (GET_CODE (insn) == CALL_INSN)
6309 if (! CONST_CALL_P (insn))
6310 invalidate_memory ();
6311 invalidate_for_call ();
6314 invalidate_from_clobbers (PATTERN (insn));
6315 note_stores (PATTERN (insn), invalidate_skipped_set, NULL);
6319 /* If modifying X will modify the value in *DATA (which is really an
6320 `rtx *'), indicate that fact by setting the pointed to value to
6321 NULL_RTX. */
6323 static void
6324 cse_check_loop_start (x, set, data)
6325 rtx x;
6326 rtx set ATTRIBUTE_UNUSED;
6327 void *data;
6329 rtx *cse_check_loop_start_value = (rtx *) data;
6331 if (*cse_check_loop_start_value == NULL_RTX
6332 || GET_CODE (x) == CC0 || GET_CODE (x) == PC)
6333 return;
6335 if ((GET_CODE (x) == MEM && GET_CODE (*cse_check_loop_start_value) == MEM)
6336 || reg_overlap_mentioned_p (x, *cse_check_loop_start_value))
6337 *cse_check_loop_start_value = NULL_RTX;
6340 /* X is a SET or CLOBBER contained in INSN that was found near the start of
6341 a loop that starts with the label at LOOP_START.
6343 If X is a SET, we see if its SET_SRC is currently in our hash table.
6344 If so, we see if it has a value equal to some register used only in the
6345 loop exit code (as marked by jump.c).
6347 If those two conditions are true, we search backwards from the start of
6348 the loop to see if that same value was loaded into a register that still
6349 retains its value at the start of the loop.
6351 If so, we insert an insn after the load to copy the destination of that
6352 load into the equivalent register and (try to) replace our SET_SRC with that
6353 register.
6355 In any event, we invalidate whatever this SET or CLOBBER modifies. */
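/* A sketch of a successful replacement (pseudo numbers hypothetical).
   Suppose (reg 130) is marked REG_LOOP_TEST_P and is known equivalent
   to the SET_SRC of insn I inside the loop:

       before loop: P: (set (reg 120) (mem (reg 125)))
       in loop:     I: (set (reg 121) (mem (reg 125)))

   If nothing between P and the loop start stores into (mem (reg 125)),
   we emit (set (reg 130) (reg 120)) after P and change insn I to
   (set (reg 121) (reg 130)). */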
6357 static void
6358 cse_set_around_loop (x, insn, loop_start)
6359 rtx x;
6360 rtx insn;
6361 rtx loop_start;
6363 struct table_elt *src_elt;
6365 /* If this is a SET, see if we can replace SET_SRC, but ignore SETs that
6366 are setting PC or CC0 or whose SET_SRC is already a register. */
6367 if (GET_CODE (x) == SET
6368 && GET_CODE (SET_DEST (x)) != PC && GET_CODE (SET_DEST (x)) != CC0
6369 && GET_CODE (SET_SRC (x)) != REG)
6371 src_elt = lookup (SET_SRC (x),
6372 HASH (SET_SRC (x), GET_MODE (SET_DEST (x))),
6373 GET_MODE (SET_DEST (x)));
6375 if (src_elt)
6376 for (src_elt = src_elt->first_same_value; src_elt;
6377 src_elt = src_elt->next_same_value)
6378 if (GET_CODE (src_elt->exp) == REG && REG_LOOP_TEST_P (src_elt->exp)
6379 && COST (src_elt->exp) < COST (SET_SRC (x)))
6381 rtx p, set;
6383 /* Look for an insn in front of LOOP_START that sets
6384 something in the desired mode to SET_SRC (x) before we hit
6385 a label or CALL_INSN. */
6387 for (p = prev_nonnote_insn (loop_start);
6388 p && GET_CODE (p) != CALL_INSN
6389 && GET_CODE (p) != CODE_LABEL;
6390 p = prev_nonnote_insn (p))
6391 if ((set = single_set (p)) != 0
6392 && GET_CODE (SET_DEST (set)) == REG
6393 && GET_MODE (SET_DEST (set)) == src_elt->mode
6394 && rtx_equal_p (SET_SRC (set), SET_SRC (x)))
6396 /* We now have to ensure that nothing between P
6397 and LOOP_START modified anything referenced in
6398 SET_SRC (x). We know that nothing within the loop
6399 can modify it, or we would have invalidated it in
6400 the hash table. */
6401 rtx q;
6402 rtx cse_check_loop_start_value = SET_SRC (x);
6403 for (q = p; q != loop_start; q = NEXT_INSN (q))
6404 if (GET_RTX_CLASS (GET_CODE (q)) == 'i')
6405 note_stores (PATTERN (q),
6406 cse_check_loop_start,
6407 &cse_check_loop_start_value);
6409 /* If nothing was changed and we can replace our
6410 SET_SRC, add an insn after P to copy its destination
6411 to what we will be replacing SET_SRC with. */
6412 if (cse_check_loop_start_value
6413 && validate_change (insn, &SET_SRC (x),
6414 src_elt->exp, 0))
6416 /* If this creates new pseudos, this is unsafe,
6417 because the regno of a new pseudo is unsuitable
6418 to index into reg_qty when cse_insn processes
6419 the new insn. Therefore, if a new pseudo was
6420 created, discard this optimization. */
6421 int nregs = max_reg_num ();
6422 rtx move
6423 = gen_move_insn (src_elt->exp, SET_DEST (set));
6424 if (nregs != max_reg_num ())
6426 if (! validate_change (insn, &SET_SRC (x),
6427 SET_SRC (set), 0))
6428 abort ();
6430 else
6431 emit_insn_after (move, p);
6433 break;
6438 /* Deal with the destination of X affecting the stack pointer. */
6439 addr_affects_sp_p (SET_DEST (x));
6441 /* See comment on similar code in cse_insn for explanation of these
6442 tests. */
6443 if (GET_CODE (SET_DEST (x)) == REG || GET_CODE (SET_DEST (x)) == SUBREG
6444 || GET_CODE (SET_DEST (x)) == MEM)
6445 invalidate (SET_DEST (x), VOIDmode);
6446 else if (GET_CODE (SET_DEST (x)) == STRICT_LOW_PART
6447 || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
6448 invalidate (XEXP (SET_DEST (x), 0), GET_MODE (SET_DEST (x)));
6451 /* Find the end of INSN's basic block and return its range,
6452 the total number of SETs in all the insns of the block, the last insn of the
6453 block, and the branch path.
6455 The branch path indicates which branches should be followed. If a non-zero
6456 path size is specified, the block should be rescanned and a different set
6457 of branches will be taken. The branch path is only used if
6458 FLAG_CSE_FOLLOW_JUMPS or FLAG_CSE_SKIP_BLOCKS is non-zero.
6460 DATA is a pointer to a struct cse_basic_block_data, defined below, that is
6461 used to describe the block. It is filled in with the information about
6462 the current block. The incoming structure's branch path, if any, is used
6463 to construct the output branch path. */
6465 void
6466 cse_end_of_basic_block (insn, data, follow_jumps, after_loop, skip_blocks)
6467 rtx insn;
6468 struct cse_basic_block_data *data;
6469 int follow_jumps;
6470 int after_loop;
6471 int skip_blocks;
6473 rtx p = insn, q;
6474 int nsets = 0;
6475 int low_cuid = INSN_CUID (insn), high_cuid = INSN_CUID (insn);
6476 rtx next = GET_RTX_CLASS (GET_CODE (insn)) == 'i' ? insn : next_real_insn (insn);
6477 int path_size = data->path_size;
6478 int path_entry = 0;
6479 int i;
6481 /* Update the previous branch path, if any. If the last branch was
6482 previously TAKEN, mark it NOT_TAKEN. If it was previously NOT_TAKEN,
6483 shorten the path by one and look at the previous branch. We know that
6484 at least one branch must have been taken if PATH_SIZE is non-zero. */
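/* Spelling out the rule above: a path recorded as (TAKEN, TAKEN)
   becomes (TAKEN, NOT_TAKEN) on the next call; one recorded as
   (TAKEN, NOT_TAKEN) drops its final entry and becomes (NOT_TAKEN).
   The effect is a binary countdown over the branch decisions, so each
   combination is scanned once. */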
6485 while (path_size > 0)
6487 if (data->path[path_size - 1].status != NOT_TAKEN)
6489 data->path[path_size - 1].status = NOT_TAKEN;
6490 break;
6492 else
6493 path_size--;
6496 /* If the first instruction is marked with QImode, that means we've
6497 already processed this block. Our caller will look at DATA->LAST
6498 to figure out where to go next. We want to return the next block
6499 in the instruction stream, not some branched-to block somewhere
6500 else. We accomplish this by pretending our caller forbade us to
6501 follow jumps or skip blocks. */
6502 if (GET_MODE (insn) == QImode)
6503 follow_jumps = skip_blocks = 0;
6505 /* Scan to end of this basic block. */
6506 while (p && GET_CODE (p) != CODE_LABEL)
6508 /* Don't cse out the end of a loop. This makes a difference
6509 only for the unusual loops that always execute at least once;
6510 all other loops have labels there so we will stop in any case.
6511 Cse'ing out the end of the loop is dangerous because it
6512 might cause an invariant expression inside the loop
6513 to be reused after the end of the loop. This would make it
6514 hard to move the expression out of the loop in loop.c,
6515 especially if it is one of several equivalent expressions
6516 and loop.c would like to eliminate it.
6518 If we are running after loop.c has finished, we can ignore
6519 the NOTE_INSN_LOOP_END. */
6521 if (! after_loop && GET_CODE (p) == NOTE
6522 && NOTE_LINE_NUMBER (p) == NOTE_INSN_LOOP_END)
6523 break;
6525 /* Don't cse over a call to setjmp; on some machines (eg vax)
6526 the regs restored by the longjmp come from
6527 a later time than the setjmp. */
6528 if (GET_CODE (p) == NOTE
6529 && NOTE_LINE_NUMBER (p) == NOTE_INSN_SETJMP)
6530 break;
6532 /* A PARALLEL can have lots of SETs in it,
6533 especially if it is really an ASM_OPERANDS. */
6534 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
6535 && GET_CODE (PATTERN (p)) == PARALLEL)
6536 nsets += XVECLEN (PATTERN (p), 0);
6537 else if (GET_CODE (p) != NOTE)
6538 nsets += 1;
6540 /* Ignore insns made by CSE; they cannot affect the boundaries of
6541 the basic block. */
6543 if (INSN_UID (p) <= max_uid && INSN_CUID (p) > high_cuid)
6544 high_cuid = INSN_CUID (p);
6545 if (INSN_UID (p) <= max_uid && INSN_CUID (p) < low_cuid)
6546 low_cuid = INSN_CUID (p);
6548 /* See if this insn is in our branch path. If it is and we are to
6549 take it, do so. */
6550 if (path_entry < path_size && data->path[path_entry].branch == p)
6552 if (data->path[path_entry].status != NOT_TAKEN)
6553 p = JUMP_LABEL (p);
6555 /* Point to next entry in path, if any. */
6556 path_entry++;
6559 /* If this is a conditional jump, we can follow it if -fcse-follow-jumps
6560 was specified, we haven't reached our maximum path length, there are
6561 insns following the target of the jump, this is the only use of the
6562 jump label, and the target label is preceded by a BARRIER.
6564 Alternatively, we can follow the jump if it branches around a
6565 block of code and there are no other branches into the block.
6566 In this case invalidate_skipped_block will be called to invalidate any
6567 registers set in the block when following the jump. */
6569 else if ((follow_jumps || skip_blocks) && path_size < PATHLENGTH - 1
6570 && GET_CODE (p) == JUMP_INSN
6571 && GET_CODE (PATTERN (p)) == SET
6572 && GET_CODE (SET_SRC (PATTERN (p))) == IF_THEN_ELSE
6573 && JUMP_LABEL (p) != 0
6574 && LABEL_NUSES (JUMP_LABEL (p)) == 1
6575 && NEXT_INSN (JUMP_LABEL (p)) != 0)
6577 for (q = PREV_INSN (JUMP_LABEL (p)); q; q = PREV_INSN (q))
6578 if ((GET_CODE (q) != NOTE
6579 || NOTE_LINE_NUMBER (q) == NOTE_INSN_LOOP_END
6580 || NOTE_LINE_NUMBER (q) == NOTE_INSN_SETJMP)
6581 && (GET_CODE (q) != CODE_LABEL || LABEL_NUSES (q) != 0))
6582 break;
6584 /* If we ran into a BARRIER, this code is an extension of the
6585 basic block when the branch is taken. */
6586 if (follow_jumps && q != 0 && GET_CODE (q) == BARRIER)
6588 /* Don't allow ourselves to keep walking around an
6589 always-executed loop. */
6590 if (next_real_insn (q) == next)
6592 p = NEXT_INSN (p);
6593 continue;
6596 /* Similarly, don't put a branch in our path more than once. */
6597 for (i = 0; i < path_entry; i++)
6598 if (data->path[i].branch == p)
6599 break;
6601 if (i != path_entry)
6602 break;
6604 data->path[path_entry].branch = p;
6605 data->path[path_entry++].status = TAKEN;
6607 /* This branch now ends our path. It was possible that we
6608 didn't see this branch the last time around (when the
6609 insn in front of the target was a JUMP_INSN that was
6610 turned into a no-op). */
6611 path_size = path_entry;
6613 p = JUMP_LABEL (p);
6614 /* Mark block so we won't scan it again later. */
6615 PUT_MODE (NEXT_INSN (p), QImode);
6617 /* Detect a branch around a block of code. */
6618 else if (skip_blocks && q != 0 && GET_CODE (q) != CODE_LABEL)
6620 register rtx tmp;
6622 if (next_real_insn (q) == next)
6624 p = NEXT_INSN (p);
6625 continue;
6628 for (i = 0; i < path_entry; i++)
6629 if (data->path[i].branch == p)
6630 break;
6632 if (i != path_entry)
6633 break;
6635 /* This is no_labels_between_p (p, q) with an added check for
6636 reaching the end of a function (in case Q precedes P). */
6637 for (tmp = NEXT_INSN (p); tmp && tmp != q; tmp = NEXT_INSN (tmp))
6638 if (GET_CODE (tmp) == CODE_LABEL)
6639 break;
6641 if (tmp == q)
6643 data->path[path_entry].branch = p;
6644 data->path[path_entry++].status = AROUND;
6646 path_size = path_entry;
6648 p = JUMP_LABEL (p);
6649 /* Mark block so we won't scan it again later. */
6650 PUT_MODE (NEXT_INSN (p), QImode);
6654 p = NEXT_INSN (p);
6657 data->low_cuid = low_cuid;
6658 data->high_cuid = high_cuid;
6659 data->nsets = nsets;
6660 data->last = p;
6662 /* If no jump in the path was taken, set our path length to zero
6663 so a rescan won't be done. */
6664 for (i = path_size - 1; i >= 0; i--)
6665 if (data->path[i].status != NOT_TAKEN)
6666 break;
6668 if (i == -1)
6669 data->path_size = 0;
6670 else
6671 data->path_size = path_size;
6673 /* End the current branch path. */
6674 data->path[path_size].branch = 0;
6677 /* Perform cse on the instructions of a function.
6678 F is the first instruction.
6679 NREGS is one plus the highest pseudo-reg number used in the instructions.
6681 AFTER_LOOP is 1 if this is the cse call done after loop optimization
6682 (only if -frerun-cse-after-loop).
6684 Returns 1 if jump_optimize should be redone due to simplifications
6685 in conditional jump instructions. */
6687 int
6688 cse_main (f, nregs, after_loop, file)
6689 rtx f;
6690 int nregs;
6691 int after_loop;
6692 FILE *file;
6694 struct cse_basic_block_data val;
6695 register rtx insn = f;
6696 register int i;
6698 cse_jumps_altered = 0;
6699 recorded_label_ref = 0;
6700 constant_pool_entries_cost = 0;
6701 val.path_size = 0;
6703 init_recog ();
6704 init_alias_analysis ();
6706 max_reg = nregs;
6708 max_insn_uid = get_max_uid ();
6710 reg_eqv_table = (struct reg_eqv_elem *)
6711 xmalloc (nregs * sizeof (struct reg_eqv_elem));
6713 #ifdef LOAD_EXTEND_OP
6715 /* Allocate scratch rtl here. cse_insn will fill in the memory reference
6716 and change the code and mode as appropriate. */
6717 memory_extend_rtx = gen_rtx_ZERO_EXTEND (VOIDmode, NULL_RTX);
6718 #endif
6720 /* Discard all the free elements of the previous function
6721 since they are allocated in the temporary obstack.
6722 bzero ((char *) table, sizeof table);
6723 free_element_chain = 0;
6724 n_elements_made = 0;
6726 /* Find the largest uid. */
6728 max_uid = get_max_uid ();
6729 uid_cuid = (int *) xcalloc (max_uid + 1, sizeof (int));
6731 /* Compute the mapping from uids to cuids.
6732 CUIDs are numbers assigned to insns, like uids,
6733 except that cuids increase monotonically through the code.
6734 Don't assign cuids to line-number NOTEs, so that the distance in cuids
6735 between two insns is not affected by -g. */
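/* For instance (uids hypothetical): a first insn with uid 40 gets
   cuid 1; a line-number note following it, say uid 41, also gets
   cuid 1; the next real insn, say uid 17, gets cuid 2. Deleting the
   note therefore leaves every cuid distance unchanged. */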
6737 for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
6739 if (GET_CODE (insn) != NOTE
6740 || NOTE_LINE_NUMBER (insn) < 0)
6741 INSN_CUID (insn) = ++i;
6742 else
6743 /* Give a line number note the same cuid as preceding insn. */
6744 INSN_CUID (insn) = i;
6747 /* Initialize which registers are clobbered by calls. */
6749 CLEAR_HARD_REG_SET (regs_invalidated_by_call);
6751 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
6752 if ((call_used_regs[i]
6753 /* Used to check !fixed_regs[i] here, but that isn't safe;
6754 fixed regs are still call-clobbered, and sched can get
6755 confused if they can "live across calls".
6757 The frame pointer is always preserved across calls. The arg
6758 pointer is if it is fixed. The stack pointer usually is, unless
6759 RETURN_POPS_ARGS, in which case an explicit CLOBBER
6760 will be present. If we are generating PIC code, the PIC offset
6761 table register is preserved across calls. */
6763 && i != STACK_POINTER_REGNUM
6764 && i != FRAME_POINTER_REGNUM
6765 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
6766 && i != HARD_FRAME_POINTER_REGNUM
6767 #endif
6768 #if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
6769 && ! (i == ARG_POINTER_REGNUM && fixed_regs[i])
6770 #endif
6771 #if defined (PIC_OFFSET_TABLE_REGNUM) && !defined (PIC_OFFSET_TABLE_REG_CALL_CLOBBERED)
6772 && ! (i == PIC_OFFSET_TABLE_REGNUM && flag_pic)
6773 #endif
6775 || global_regs[i])
6776 SET_HARD_REG_BIT (regs_invalidated_by_call, i);
6778 if (ggc_p)
6779 ggc_push_context ();
6781 /* Loop over basic blocks.
6782 Compute the maximum number of qty's needed for each basic block
6783 (which is 2 for each SET). */
6784 insn = f;
6785 while (insn)
6787 cse_end_of_basic_block (insn, &val, flag_cse_follow_jumps, after_loop,
6788 flag_cse_skip_blocks);
6790 /* If this basic block was already processed or has no sets, skip it. */
6791 if (val.nsets == 0 || GET_MODE (insn) == QImode)
6793 PUT_MODE (insn, VOIDmode);
6794 insn = (val.last ? NEXT_INSN (val.last) : 0);
6795 val.path_size = 0;
6796 continue;
6799 cse_basic_block_start = val.low_cuid;
6800 cse_basic_block_end = val.high_cuid;
6801 max_qty = val.nsets * 2;
6803 if (file)
6804 fnotice (file, ";; Processing block from %d to %d, %d sets.\n",
6805 INSN_UID (insn), val.last ? INSN_UID (val.last) : 0,
6806 val.nsets);
6808 /* Make MAX_QTY bigger to give us room to optimize
6809 past the end of this basic block, if that should prove useful. */
6810 if (max_qty < 500)
6811 max_qty = 500;
6813 max_qty += max_reg;
6815 /* If this basic block is being extended by following certain jumps,
6816 (see `cse_end_of_basic_block'), we reprocess the code from the start.
6817 Otherwise, we start after this basic block. */
6818 if (val.path_size > 0)
6819 cse_basic_block (insn, val.last, val.path, 0);
6820 else
6822 int old_cse_jumps_altered = cse_jumps_altered;
6823 rtx temp;
6825 /* When cse changes a conditional jump to an unconditional
6826 jump, we want to reprocess the block, since it will give
6827 us a new branch path to investigate. */
6828 cse_jumps_altered = 0;
6829 temp = cse_basic_block (insn, val.last, val.path, ! after_loop);
6830 if (cse_jumps_altered == 0
6831 || (flag_cse_follow_jumps == 0 && flag_cse_skip_blocks == 0))
6832 insn = temp;
6834 cse_jumps_altered |= old_cse_jumps_altered;
6837 if (ggc_p)
6838 ggc_collect ();
6840 #ifdef USE_C_ALLOCA
6841 alloca (0);
6842 #endif
6845 if (ggc_p)
6846 ggc_pop_context ();
6848 if (max_elements_made < n_elements_made)
6849 max_elements_made = n_elements_made;
6851 /* Clean up. */
6852 end_alias_analysis ();
6853 free (uid_cuid);
6854 free (reg_eqv_table);
6856 return cse_jumps_altered || recorded_label_ref;
6859 /* Process a single basic block. FROM and TO are the limits of the basic
6860 block. NEXT_BRANCH points to the branch path when following jumps or
6861 a null path when not following jumps.
6863 AROUND_LOOP is non-zero if we are to try to cse around to the start of a
6864 loop. This is true when we are being called for the last time on a
6865 block and this CSE pass is before loop.c. */
6867 static rtx
6868 cse_basic_block (from, to, next_branch, around_loop)
6869 register rtx from, to;
6870 struct branch_path *next_branch;
6871 int around_loop;
6873 register rtx insn;
6874 int to_usage = 0;
6875 rtx libcall_insn = NULL_RTX;
6876 int num_insns = 0;
6878 /* This array is undefined before max_reg, so only allocate
6879 the space actually needed and adjust the start. */
6881 qty_table
6882 = (struct qty_table_elem *) xmalloc ((max_qty - max_reg)
6883 * sizeof (struct qty_table_elem));
6884 qty_table -= max_reg;
6886 new_basic_block ();
6888 /* TO might be a label. If so, protect it from being deleted. */
6889 if (to != 0 && GET_CODE (to) == CODE_LABEL)
6890 ++LABEL_NUSES (to);
6892 for (insn = from; insn != to; insn = NEXT_INSN (insn))
6894 register enum rtx_code code = GET_CODE (insn);
6896 /* If we have processed 1,000 insns, flush the hash table to
6897 avoid extreme quadratic behavior. We must not include NOTEs
6898 in the count since there may be more of them when generating
6899 debugging information. If we clear the table at different
6900 times, code generated with -g -O might be different than code
6901 generated with -O but not -g.
6903 ??? This is a real kludge and needs to be done some other way.
6904 Perhaps for 2.9. */
6905 if (code != NOTE && num_insns++ > 1000)
6907 flush_hash_table ();
6908 num_insns = 0;
6911 /* See if this is a branch that is part of the path. If so, and it is
6912 to be taken, do so. */
6913 if (next_branch->branch == insn)
6915 enum taken status = next_branch++->status;
6916 if (status != NOT_TAKEN)
6918 if (status == TAKEN)
6919 record_jump_equiv (insn, 1);
6920 else
6921 invalidate_skipped_block (NEXT_INSN (insn));
6923 /* Set the last insn as the jump insn; it doesn't affect cc0.
6924 Then follow this branch. */
6925 #ifdef HAVE_cc0
6926 prev_insn_cc0 = 0;
6927 #endif
6928 prev_insn = insn;
6929 insn = JUMP_LABEL (insn);
6930 continue;
6934 if (GET_MODE (insn) == QImode)
6935 PUT_MODE (insn, VOIDmode);
6937 if (GET_RTX_CLASS (code) == 'i')
6939 rtx p;
6941 /* Process notes first so we have all notes in canonical forms when
6942 looking for duplicate operations. */
6944 if (REG_NOTES (insn))
6945 REG_NOTES (insn) = cse_process_notes (REG_NOTES (insn), NULL_RTX);
6947 /* Track when we are inside a LIBCALL block. Inside such a block,
6948 we do not want to record destinations. The last insn of a
6949 LIBCALL block is not considered to be part of the block, since
6950 its destination is the result of the block and hence should be
6951 recorded. */
6953 if ((p = find_reg_note (insn, REG_LIBCALL, NULL_RTX)))
6954 libcall_insn = XEXP (p, 0);
6955 else if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
6956 libcall_insn = NULL_RTX;
6958 cse_insn (insn, libcall_insn);
6961 /* If INSN is now an unconditional jump, skip to the end of our
6962 basic block by pretending that we just did the last insn in the
6963 basic block. If we are jumping to the end of our block, show
6964 that we can have one usage of TO. */
6966 if (simplejump_p (insn))
6968 if (to == 0)
6970 free (qty_table + max_reg);
6971 return 0;
6974 if (JUMP_LABEL (insn) == to)
6975 to_usage = 1;
6977 /* Maybe TO was deleted because the jump is unconditional.
6978 If so, there is nothing left in this basic block. */
6979 /* ??? Perhaps it would be smarter to set TO
6980 to whatever follows this insn,
6981 and pretend the basic block had always ended here. */
6982 if (INSN_DELETED_P (to))
6983 break;
6985 insn = PREV_INSN (to);
6988 /* See if it is ok to keep on going past the label
6989 which used to end our basic block. Remember that we incremented
6990 the count of that label, so we decrement it here. If we made
6991 a jump unconditional, TO_USAGE will be one; in that case, we don't
6992 want to count the use in that jump. */
6994 if (to != 0 && NEXT_INSN (insn) == to
6995 && GET_CODE (to) == CODE_LABEL && --LABEL_NUSES (to) == to_usage)
6997 struct cse_basic_block_data val;
6998 rtx prev;
7000 insn = NEXT_INSN (to);
7002 /* If TO was the last insn in the function, we are done. */
7003 if (insn == 0)
7005 free (qty_table + max_reg);
7006 return 0;
7009 /* If TO was preceded by a BARRIER we are done with this block
7010 because it has no continuation. */
7011 prev = prev_nonnote_insn (to);
7012 if (prev && GET_CODE (prev) == BARRIER)
7014 free (qty_table + max_reg);
7015 return insn;
7018 /* Find the end of the following block. Note that we won't be
7019 following branches in this case. */
7020 to_usage = 0;
7021 val.path_size = 0;
7022 cse_end_of_basic_block (insn, &val, 0, 0, 0);
7024 /* If the tables we allocated have enough space left
7025 to handle all the SETs in the next basic block,
7026 continue through it. Otherwise, return,
7027 and that block will be scanned individually. */
7028 if (val.nsets * 2 + next_qty > max_qty)
7029 break;
7031 cse_basic_block_start = val.low_cuid;
7032 cse_basic_block_end = val.high_cuid;
7033 to = val.last;
7035 /* Prevent TO from being deleted if it is a label. */
7036 if (to != 0 && GET_CODE (to) == CODE_LABEL)
7037 ++LABEL_NUSES (to);
7039 /* Back up so we process the first insn in the extension. */
7040 insn = PREV_INSN (insn);
7044 if (next_qty > max_qty)
7045 abort ();
7047 /* If we are running before loop.c, we stopped on a NOTE_INSN_LOOP_END, and
7048 the previous insn is the only insn that branches to the head of a loop,
7049 we can cse into the loop. Don't do this if we changed the jump
7050 structure of a loop unless we aren't going to be following jumps. */
7052 if ((cse_jumps_altered == 0
7053 || (flag_cse_follow_jumps == 0 && flag_cse_skip_blocks == 0))
7054 && around_loop && to != 0
7055 && GET_CODE (to) == NOTE && NOTE_LINE_NUMBER (to) == NOTE_INSN_LOOP_END
7056 && GET_CODE (PREV_INSN (to)) == JUMP_INSN
7057 && JUMP_LABEL (PREV_INSN (to)) != 0
7058 && LABEL_NUSES (JUMP_LABEL (PREV_INSN (to))) == 1)
7059 cse_around_loop (JUMP_LABEL (PREV_INSN (to)));
7061 free (qty_table + max_reg);
7063 return to ? NEXT_INSN (to) : 0;
7066 /* Count the number of times registers are used (not set) in X.
7067 COUNTS is an array in which we accumulate the count, INCR is how much
7068 we count each register usage.
7070 Don't count a usage of DEST, which is the SET_DEST of a SET which
7071 contains X in its SET_SRC. This is because such a SET does not
7072 modify the liveness of DEST. */
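/* So for (set (reg 100) (plus (reg 100) (const_int 1))) the use of
   (reg 100) in the source is not counted; if nothing else reads
   (reg 100), its count stays 0 and delete_trivially_dead_insns below
   may remove the insn. (Register number hypothetical.) */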
7074 static void
7075 count_reg_usage (x, counts, dest, incr)
7076 rtx x;
7077 int *counts;
7078 rtx dest;
7079 int incr;
7081 enum rtx_code code;
7082 const char *fmt;
7083 int i, j;
7085 if (x == 0)
7086 return;
7088 switch (code = GET_CODE (x))
7090 case REG:
7091 if (x != dest)
7092 counts[REGNO (x)] += incr;
7093 return;
7095 case PC:
7096 case CC0:
7097 case CONST:
7098 case CONST_INT:
7099 case CONST_DOUBLE:
7100 case SYMBOL_REF:
7101 case LABEL_REF:
7102 return;
7104 case CLOBBER:
7105 /* If we are clobbering a MEM, mark any registers inside the address
7106 as being used. */
7107 if (GET_CODE (XEXP (x, 0)) == MEM)
7108 count_reg_usage (XEXP (XEXP (x, 0), 0), counts, NULL_RTX, incr);
7109 return;
7111 case SET:
7112 /* Unless we are setting a REG, count everything in SET_DEST. */
7113 if (GET_CODE (SET_DEST (x)) != REG)
7114 count_reg_usage (SET_DEST (x), counts, NULL_RTX, incr);
7116 /* If SRC has side-effects, then we can't delete this insn, so the
7117 usage of SET_DEST inside SRC counts.
7119 ??? Strictly-speaking, we might be preserving this insn
7120 because some other SET has side-effects, but that's hard
7121 to do and can't happen now. */
7122 count_reg_usage (SET_SRC (x), counts,
7123 side_effects_p (SET_SRC (x)) ? NULL_RTX : SET_DEST (x),
7124 incr);
7125 return;
7127 case CALL_INSN:
7128 count_reg_usage (CALL_INSN_FUNCTION_USAGE (x), counts, NULL_RTX, incr);
7130 /* ... falls through ... */
7131 case INSN:
7132 case JUMP_INSN:
7133 count_reg_usage (PATTERN (x), counts, NULL_RTX, incr);
7135 /* Things used in a REG_EQUAL note aren't dead since loop may try to
7136 use them. */
7138 count_reg_usage (REG_NOTES (x), counts, NULL_RTX, incr);
7139 return;
7141 case EXPR_LIST:
7142 case INSN_LIST:
7143 if (REG_NOTE_KIND (x) == REG_EQUAL
7144 || (REG_NOTE_KIND (x) != REG_NONNEG && GET_CODE (XEXP (x, 0)) == USE))
7145 count_reg_usage (XEXP (x, 0), counts, NULL_RTX, incr);
7146 count_reg_usage (XEXP (x, 1), counts, NULL_RTX, incr);
7147 return;
7149 default:
7150 break;
7153 fmt = GET_RTX_FORMAT (code);
7154 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7156 if (fmt[i] == 'e')
7157 count_reg_usage (XEXP (x, i), counts, dest, incr);
7158 else if (fmt[i] == 'E')
7159 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
7160 count_reg_usage (XVECEXP (x, i, j), counts, dest, incr);

/* Scan all the insns and delete any that are dead; i.e., they store a
   register that is never used or they copy a register to itself.

   This is used to remove insns made obviously dead by cse, loop or other
   optimizations.  It improves the heuristics in loop since it won't try to
   move dead invariants out of loops or make givs for dead quantities.  The
   remaining passes of the compilation are also sped up.  */
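
/* Two typical victims (the RTL is illustrative):

       (set (reg 60) (reg 60))          ;; copies a register to itself
       (set (reg 61) (const_int 0))     ;; counts[61] == 0: never used

   Neither source has side effects, so both insns are removed by the
   backward scan below.  */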

void
delete_trivially_dead_insns (insns, nreg)
     rtx insns;
     int nreg;
{
  int *counts;
  rtx insn, prev;
#ifdef HAVE_cc0
  rtx tem;
#endif
  int i;
  int in_libcall = 0, dead_libcall = 0;

  /* First count the number of times each register is used.  */
  counts = (int *) xcalloc (nreg, sizeof (int));
  for (insn = next_real_insn (insns); insn; insn = next_real_insn (insn))
    count_reg_usage (insn, counts, NULL_RTX, 1);

  /* Go from the last insn to the first and delete insns that only set unused
     registers or copy a register to itself.  As we delete an insn, remove
     usage counts for registers it uses.

     The first jump optimization pass may leave a real insn as the last
     insn in the function.  We must not skip that insn or we may end
     up deleting code that is not really dead.  */
  insn = get_last_insn ();
  if (GET_RTX_CLASS (GET_CODE (insn)) != 'i')
    insn = prev_real_insn (insn);

  for ( ; insn; insn = prev)
    {
      int live_insn = 0;
      rtx note;

      prev = prev_real_insn (insn);

      /* Don't delete any insns that are part of a libcall block unless
         we can delete the whole libcall block.

         Flow or loop might get confused if we did that.  Remember
         that we are scanning backwards.  */
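
      /* (A libcall block is bracketed by notes: its first insn carries a
         REG_LIBCALL note and its last a REG_RETVAL note, so this backward
         scan enters a block at REG_RETVAL and leaves it at REG_LIBCALL;
         see the reset at the bottom of this loop.)  */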
      if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
        {
          in_libcall = 1;
          live_insn = 1;
          dead_libcall = 0;

          /* See if there's a REG_EQUAL note on this insn and try to
             replace the source with the REG_EQUAL expression.

             We assume that insns with REG_RETVALs can only be reg->reg
             copies at this point.  */
          note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
          if (note)
            {
              rtx set = single_set (insn);
              rtx new = simplify_rtx (XEXP (note, 0));

              if (!new)
                new = XEXP (note, 0);

              if (set && validate_change (insn, &SET_SRC (set), new, 0))
                {
                  remove_note (insn,
                               find_reg_note (insn, REG_RETVAL, NULL_RTX));
                  dead_libcall = 1;
                }
            }
        }
      else if (in_libcall)
        live_insn = ! dead_libcall;
      else if (GET_CODE (PATTERN (insn)) == SET)
        {
          if ((GET_CODE (SET_DEST (PATTERN (insn))) == REG
               || GET_CODE (SET_DEST (PATTERN (insn))) == SUBREG)
              && rtx_equal_p (SET_DEST (PATTERN (insn)),
                              SET_SRC (PATTERN (insn))))
            ;

#ifdef HAVE_cc0
          else if (GET_CODE (SET_DEST (PATTERN (insn))) == CC0
                   && ! side_effects_p (SET_SRC (PATTERN (insn)))
                   && ((tem = next_nonnote_insn (insn)) == 0
                       || GET_RTX_CLASS (GET_CODE (tem)) != 'i'
                       || ! reg_referenced_p (cc0_rtx, PATTERN (tem))))
            ;
#endif
          else if (GET_CODE (SET_DEST (PATTERN (insn))) != REG
                   || REGNO (SET_DEST (PATTERN (insn))) < FIRST_PSEUDO_REGISTER
                   || counts[REGNO (SET_DEST (PATTERN (insn)))] != 0
                   || side_effects_p (SET_SRC (PATTERN (insn)))
                   /* An ADDRESSOF expression can turn into a use of the
                      internal arg pointer, so always consider the
                      internal arg pointer live.  If it is truly dead,
                      flow will delete the initializing insn.  */
                   || (SET_DEST (PATTERN (insn))
                       == current_function_internal_arg_pointer))
            live_insn = 1;
        }
      else if (GET_CODE (PATTERN (insn)) == PARALLEL)
        for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
          {
            rtx elt = XVECEXP (PATTERN (insn), 0, i);

            if (GET_CODE (elt) == SET)
              {
                if ((GET_CODE (SET_DEST (elt)) == REG
                     || GET_CODE (SET_DEST (elt)) == SUBREG)
                    && rtx_equal_p (SET_DEST (elt), SET_SRC (elt)))
                  ;

#ifdef HAVE_cc0
                else if (GET_CODE (SET_DEST (elt)) == CC0
                         && ! side_effects_p (SET_SRC (elt))
                         && ((tem = next_nonnote_insn (insn)) == 0
                             || GET_RTX_CLASS (GET_CODE (tem)) != 'i'
                             || ! reg_referenced_p (cc0_rtx, PATTERN (tem))))
                  ;
#endif
                else if (GET_CODE (SET_DEST (elt)) != REG
                         || REGNO (SET_DEST (elt)) < FIRST_PSEUDO_REGISTER
                         || counts[REGNO (SET_DEST (elt))] != 0
                         || side_effects_p (SET_SRC (elt))
                         /* An ADDRESSOF expression can turn into a use of
                            the internal arg pointer, so always consider the
                            internal arg pointer live.  If it is truly dead,
                            flow will delete the initializing insn.  */
                         || (SET_DEST (elt)
                             == current_function_internal_arg_pointer))
                  live_insn = 1;
              }
            else if (GET_CODE (elt) != CLOBBER && GET_CODE (elt) != USE)
              live_insn = 1;
          }
      else
        live_insn = 1;

      /* If this is a dead insn, delete it and show that the registers in
         it aren't being used.  */
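
      /* Deleting, say, (set (reg 70) (reg 71)) decrements the count for
         (reg 71); since we are scanning backwards, the insn that set
         (reg 71) may itself look dead by the time we reach it.  (Register
         numbers are illustrative.)  */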

      if (! live_insn)
        {
          count_reg_usage (insn, counts, NULL_RTX, -1);
          delete_insn (insn);
        }

      if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
        {
          in_libcall = 0;
          dead_libcall = 0;
        }
    }

  /* Clean up.  */
  free (counts);
}