/* Common subexpression elimination for GNU compiler.
   Copyright (C) 1987, 88, 89, 92-99, 2000 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
#include "config.h"
/* stdio.h must precede rtl.h for FFS.  */
#include "system.h"
#include <setjmp.h>

#include "rtl.h"
#include "tm_p.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "flags.h"
#include "real.h"
#include "insn-config.h"
#include "recog.h"
#include "function.h"
#include "expr.h"
#include "toplev.h"
#include "output.h"
#include "ggc.h"
/* The basic idea of common subexpression elimination is to go
   through the code, keeping a record of expressions that would
   have the same value at the current scan point, and replacing
   expressions encountered with the cheapest equivalent expression.

   It is too complicated to keep track of the different possibilities
   when control paths merge in this code; so, at each label, we forget all
   that is known and start fresh.  This can be described as processing each
   extended basic block separately.  We have a separate pass to perform
   global CSE.

   Note CSE can turn a conditional or computed jump into a nop or
   an unconditional jump.  When this occurs we arrange to run the jump
   optimizer after CSE to delete the unreachable code.
   We use two data structures to record the equivalent expressions:
   a hash table for most expressions, and a vector of "quantity
   numbers" to record equivalent (pseudo) registers.

   The use of the special data structure for registers is desirable
   because it is faster.  It is possible because register references
   contain a fairly small number, the register number, taken from
   a contiguously allocated series, and two register references are
   identical if they have the same number.  General expressions
   do not have any such thing, so the only way to retrieve the
   information recorded on an expression other than a register
   is to keep it in a hash table.
   Registers and "quantity numbers":

   At the start of each basic block, all of the (hardware and pseudo)
   registers used in the function are given distinct quantity
   numbers to indicate their contents.  During scan, when the code
   copies one register into another, we copy the quantity number.
   When a register is loaded in any other way, we allocate a new
   quantity number to describe the value generated by this operation.
   `reg_qty' records what quantity a register is currently thought
   of as containing.

   All real quantity numbers are greater than or equal to `max_reg'.
   If register N has not been assigned a quantity, reg_qty[N] will equal N.

   Quantity numbers below `max_reg' do not exist and none of the `qty_table'
   entries should be referenced with an index below `max_reg'.
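
   For illustration (register and quantity numbers here are made up):
   if `max_reg' is 100, then after scanning

	(set (reg 105) (reg 103))

   register 105 shares 103's quantity, i.e. reg_qty[105] == reg_qty[103],
   whereas a load such as (set (reg 105) (mem ...)) would instead give
   105 a fresh quantity (see `make_regs_eqv' and `make_new_qty' below).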
   We also maintain a bidirectional chain of registers for each
   quantity number.  The `qty_table' members `first_reg' and `last_reg',
   and `reg_eqv_table' members `next' and `prev' hold these chains.

   The first register in a chain is the one whose lifespan is least local.
   Among equals, it is the one that was seen first.
   We replace any equivalent register with that one.

   If two registers have the same quantity number, then REG expressions
   with the qty_table `mode' must be in the hash table for both registers
   and must be in the same class.

   The converse is not true.  Since hard registers may be referenced in
   any mode, two REG expressions might be equivalent in the hash table
   but not have the same quantity number if the quantity of one
   of the registers does not have the same mode as those expressions.
   Constants and quantity numbers:

   When a quantity has a known constant value, that value is stored
   in the appropriate qty_table `const_rtx'.  This is in addition to
   putting the constant in the hash table as is usual for non-regs.

   Whether a reg or a constant is preferred is determined by the configuration
   macro CONST_COSTS and will often depend on the constant value.  In any
   event, expressions containing constants can be simplified by fold_rtx.

   When a quantity has a known nearly constant value (such as an address
   of a stack slot), that value is stored in the appropriate qty_table
   `const_rtx'.

   Integer constants don't have a machine mode.  However, cse
   determines the intended machine mode from the destination
   of the instruction that moves the constant.  The machine mode
   is recorded in the hash table along with the actual RTL
   constant expression so that different modes are kept separate.
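
   For example (an illustrative insn):  after

	(set (reg:SI 110) (const_int 4))

   the CONST_INT 4 is entered in the hash table with mode SImode, so
   the same constant used later in another mode hashes to a distinct
   entry rather than being confused with the SImode one.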
   Other expressions:

   To record known equivalences among expressions in general
   we use a hash table called `table'.  It has a fixed number of buckets
   that contain chains of `struct table_elt' elements for expressions.
   These chains connect the elements whose expressions have the same
   hash codes.

   Other chains through the same elements connect the elements which
   currently have equivalent values.

   Register references in an expression are canonicalized before hashing
   the expression.  This is done using `reg_qty' and qty_table `first_reg'.
   The hash code of a register reference is computed using the quantity
   number, not the register number.

   When the value of an expression changes, it is necessary to remove from the
   hash table not just that expression but all expressions whose values
   could be different as a result.
   1. If the value changing is in memory, except in special cases
   ANYTHING referring to memory could be changed.  That is because
   nobody knows where a pointer does not point.
   The function `invalidate_memory' removes what is necessary.

   The special cases are when the address is constant or is
   a constant plus a fixed register such as the frame pointer
   or a static chain pointer.  When such addresses are stored in,
   we can tell exactly which other such addresses must be invalidated
   due to overlap.  `invalidate' does this.
   All expressions that refer to non-constant
   memory addresses are also invalidated.  `invalidate_memory' does this.

   2. If the value changing is a register, all expressions
   containing references to that register, and only those,
   must be removed.

   Because searching the entire hash table for expressions that contain
   a register is very slow, we try to figure out when it isn't necessary.
   Precisely, this is necessary only when expressions have been
   entered in the hash table using this register, and then the value has
   changed, and then another expression wants to be added to refer to
   the register's new value.  This sequence of circumstances is rare
   within any one basic block.
   The vectors `reg_tick' and `reg_in_table' are used to detect this case.
   reg_tick[i] is incremented whenever a value is stored in register i.
   reg_in_table[i] holds -1 if no references to register i have been
   entered in the table; otherwise, it contains the value reg_tick[i] had
   when the references were entered.  If we want to enter a reference
   and reg_in_table[i] != reg_tick[i], we must scan and remove old references.
   Until we want to enter a new entry, the mere fact that the two vectors
   don't match causes the entries to be ignored if anyone tries to match them.
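
   As a sketch with made-up numbers:  an expression using register 7 is
   entered while reg_tick[7] == 3, so reg_in_table[7] becomes 3.  A later
   store into register 7 bumps reg_tick[7] to 4 but scans nothing.  Only
   when another expression mentioning register 7 is about to be entered
   does the mismatch reg_in_table[7] != reg_tick[7] trigger the scan in
   `remove_invalid_refs' (see `mention_regs' below).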
   Registers themselves are entered in the hash table as well as in
   the equivalent-register chains.  However, the vectors `reg_tick'
   and `reg_in_table' do not apply to expressions which are simple
   register references.  These expressions are removed from the table
   immediately when they become invalid, and this can be done even if
   we do not immediately search for all the expressions that refer to
   the register.

   A CLOBBER rtx in an instruction invalidates its operand for further
   reuse.  A CLOBBER or SET rtx whose operand is a MEM:BLK
   invalidates everything that resides in memory.

   Related expressions:

   Constant expressions that differ only by an additive integer
   are called related.  When a constant expression is put in
   the table, the related expression with no constant term
   is also entered.  These are made to point at each other
   so that it is possible to find out if there exists any
   register equivalent to an expression related to a given expression.  */
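
/* For instance (an illustrative pair):  when
   (const (plus (symbol_ref "tbl") (const_int 8))) is entered,
   (symbol_ref "tbl") is entered as well, and the two table elements are
   linked through their `related_value' fields, so a register known to
   hold `tbl' can be recovered from the offsetted expression (see
   `use_related_value').  */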

/* One plus largest register number used in this function.  */

static int max_reg;

/* One plus largest instruction UID used in this function at time of
   cse_main call.  */

static int max_insn_uid;

/* Length of qty_table vector.  We know in advance we will not need
   a quantity number this big.  */

static int max_qty;

/* Next quantity number to be allocated.
   This is 1 + the largest number needed so far.  */

static int next_qty;
/* Per-qty information tracking.

   `first_reg' and `last_reg' track the head and tail of the
   chain of registers which currently contain this quantity.

   `mode' contains the machine mode of this quantity.

   `const_rtx' holds the rtx of the constant value of this
   quantity, if known.  A sum of the frame/arg pointer
   and a constant can also be entered here.  When this holds
   a known value, `const_insn' is the insn which stored the
   constant value.

   `comparison_{code,const,qty}' are used to track when a
   comparison between a quantity and some constant or register has
   been passed.  In such a case, we know the results of the comparison
   in case we see it again.  These members record a comparison that
   is known to be true.  `comparison_code' holds the rtx code of such
   a comparison, else it is set to UNKNOWN and the other two
   comparison members are undefined.  `comparison_const' holds
   the constant being compared against, or zero if the comparison
   is not against a constant.  `comparison_qty' holds the quantity
   being compared against when the result is known.  If the comparison
   is not with a register, `comparison_qty' is -1.  */

struct qty_table_elem
{
  rtx const_rtx;
  rtx const_insn;
  rtx comparison_const;
  int comparison_qty;
  int first_reg, last_reg;
  enum machine_mode mode;
  enum rtx_code comparison_code;
};

/* The table of all qtys, indexed by qty number.  */
static struct qty_table_elem *qty_table;

#ifdef HAVE_cc0
/* For machines that have a CC0, we do not record its value in the hash
   table since its use is guaranteed to be in the insn immediately following
   its definition and any other insn is presumed to invalidate it.

   Instead, we store below the value last assigned to CC0.  If it should
   happen to be a constant, it is stored in preference to the actual
   assigned value.  In case it is a constant, we store the mode in which
   the constant should be interpreted.  */

static rtx prev_insn_cc0;
static enum machine_mode prev_insn_cc0_mode;
#endif

/* Previous actual insn.  0 if at first insn of basic block.  */

static rtx prev_insn;

/* Insn being scanned.  */

static rtx this_insn;
/* Indexed by register number, gives the number of the next (or
   previous) register in the chain of registers sharing the same
   value.

   Or -1 if this register is at the end of the chain.

   If reg_qty[N] == N, reg_eqv_table[N].next is undefined.  */

/* Per-register equivalence chain.  */
struct reg_eqv_elem
{
  int next, prev;
};

/* The table of all register equivalence chains.  */
static struct reg_eqv_elem *reg_eqv_table;
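
/* For example, walking the registers currently known to hold quantity Q
   (a sketch; `q' and `r' are hypothetical locals):

     int r;
     for (r = qty_table[q].first_reg; r != -1; r = reg_eqv_table[r].next)
       use (r);

   where `use' stands for whatever is done with each register.  */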

struct cse_reg_info
{
  /* Next in hash chain.  */
  struct cse_reg_info *hash_next;

  /* The next cse_reg_info structure in the free or used list.  */
  struct cse_reg_info *next;

  /* Search key.  */
  int regno;

  /* The quantity number of the register's current contents.  */
  int reg_qty;

  /* The number of times the register has been altered in the current
     basic block.  */
  int reg_tick;

  /* The REG_TICK value at which rtx's containing this register are
     valid in the hash table.  If this does not equal the current
     reg_tick value, such expressions existing in the hash table are
     invalid.  */
  int reg_in_table;
};

/* A free list of cse_reg_info entries.  */
static struct cse_reg_info *cse_reg_info_free_list;

/* A used list of cse_reg_info entries.  */
static struct cse_reg_info *cse_reg_info_used_list;
static struct cse_reg_info *cse_reg_info_used_list_end;

/* A mapping from registers to cse_reg_info data structures.  */
#define REGHASH_SHIFT	7
#define REGHASH_SIZE	(1 << REGHASH_SHIFT)
#define REGHASH_MASK	(REGHASH_SIZE - 1)
static struct cse_reg_info *reg_hash[REGHASH_SIZE];

#define REGHASH_FN(REGNO)	\
	(((REGNO) ^ ((REGNO) >> REGHASH_SHIFT)) & REGHASH_MASK)
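
/* Worked example (illustrative):  REGHASH_FN (130) folds the high bits
   into the low ones:  (130 ^ (130 >> 7)) & 127 == (130 ^ 1) & 127 == 3,
   so pseudo 130 lands in bucket 3 of reg_hash.  */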

/* The last lookup we did into the cse_reg_info hash table.  This allows
   us to cache repeated lookups.  */
static int cached_regno;
static struct cse_reg_info *cached_cse_reg_info;

/* A HARD_REG_SET containing all the hard registers for which there is
   currently a REG expression in the hash table.  Note the difference
   from the above variables, which indicate if the REG is mentioned in some
   expression in the table.  */

static HARD_REG_SET hard_regs_in_table;

/* A HARD_REG_SET containing all the hard registers that are invalidated
   by a CALL_INSN.  */

static HARD_REG_SET regs_invalidated_by_call;

/* CUID of insn that starts the basic block currently being cse-processed.  */

static int cse_basic_block_start;

/* CUID of insn that ends the basic block currently being cse-processed.  */

static int cse_basic_block_end;

/* Vector mapping INSN_UIDs to cuids.
   The cuids are like uids but always increase monotonically.
   We use them to see whether a reg is used outside a given basic block.  */

static int *uid_cuid;

/* Highest UID in UID_CUID.  */
static int max_uid;

/* Get the cuid of an insn.  */

#define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])

/* Nonzero if this pass has made changes, and therefore it's
   worthwhile to run the garbage collector.  */

static int cse_altered;

/* Nonzero if cse has altered conditional jump insns
   in such a way that jump optimization should be redone.  */

static int cse_jumps_altered;

/* Nonzero if we put a LABEL_REF into the hash table.  Since we may have put
   it into an INSN without a REG_LABEL, we have to rerun jump after CSE
   to put in the note.  */
static int recorded_label_ref;

/* canon_hash stores 1 in do_not_record
   if it notices a reference to CC0, PC, or some other volatile
   subexpression.  */

static int do_not_record;

#ifdef LOAD_EXTEND_OP

/* Scratch rtl used when looking for load-extended copy of a MEM.  */
static rtx memory_extend_rtx;
#endif

/* canon_hash stores 1 in hash_arg_in_memory
   if it notices a reference to memory within the expression being hashed.  */

static int hash_arg_in_memory;

/* The hash table contains buckets which are chains of `struct table_elt's,
   each recording one expression's information.
   That expression is in the `exp' field.

   Those elements with the same hash code are chained in both directions
   through the `next_same_hash' and `prev_same_hash' fields.

   Each set of expressions with equivalent values
   is on a two-way chain through the `next_same_value'
   and `prev_same_value' fields, and all point with
   the `first_same_value' field at the first element in
   that chain.  The chain is in order of increasing cost.
   Each element's cost value is in its `cost' field.

   The `in_memory' field is nonzero for elements that
   involve any reference to memory.  These elements are removed
   whenever a write is done to an unidentified location in memory.
   To be safe, we assume that a memory address is unidentified unless
   the address is either a symbol constant or a constant plus
   the frame pointer or argument pointer.

   The `related_value' field is used to connect related expressions
   (that differ by adding an integer).
   The related expressions are chained in a circular fashion.
   `related_value' is zero for expressions for which this
   chain is not useful.

   The `cost' field stores the cost of this element's expression.

   The `is_const' flag is set if the element is a constant (including
   a fixed address).

   The `flag' field is used as a temporary during some search routines.

   The `mode' field is usually the same as GET_MODE (`exp'), but
   if `exp' is a CONST_INT and has no machine mode then the `mode'
   field is the mode it was being used as.  Each constant is
   recorded separately for each mode it is used with.  */

struct table_elt
{
  rtx exp;
  struct table_elt *next_same_hash;
  struct table_elt *prev_same_hash;
  struct table_elt *next_same_value;
  struct table_elt *prev_same_value;
  struct table_elt *first_same_value;
  struct table_elt *related_value;
  int cost;
  enum machine_mode mode;
  char in_memory;
  char is_const;
  char flag;
};

/* We don't want a lot of buckets, because we rarely have very many
   things stored in the hash table, and a lot of buckets slows
   down a lot of loops that happen frequently.  */
#define HASH_SHIFT	5
#define HASH_SIZE	(1 << HASH_SHIFT)
#define HASH_MASK	(HASH_SIZE - 1)

/* Compute hash code of X in mode M.  Special-case the case where X is a
   pseudo register (hard registers may require `do_not_record' to be set).  */

#define HASH(X, M)	\
 ((GET_CODE (X) == REG && REGNO (X) >= FIRST_PSEUDO_REGISTER	\
  ? (((unsigned) REG << 7) + (unsigned) REG_QTY (REGNO (X)))	\
  : canon_hash (X, M)) & HASH_MASK)
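
/* Typical use (a sketch; `x' and `mode' stand for locals of the caller):

     unsigned hash = HASH (x, mode);
     struct table_elt *elt = lookup (x, hash, mode);

   The same hash value is then passed along to `insert' or
   `remove_from_table' so it need not be recomputed.  */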

/* Determine whether register number N is considered a fixed register for CSE.
   It is desirable to replace other regs with fixed regs, to reduce need for
   non-fixed hard regs.
   A reg wins if it is either the frame pointer or designated as fixed.  */
#define FIXED_REGNO_P(N)  \
  ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
   || fixed_regs[N] || global_regs[N])

/* Compute cost of X, as stored in the `cost' field of a table_elt.  Fixed
   hard registers and pointers into the frame are the cheapest with a cost
   of 0.  Next come pseudos with a cost of one and other hard registers with
   a cost of 2.  Aside from these special cases, call `rtx_cost'.  */

#define CHEAP_REGNO(N) \
  ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM	\
   || (N) == STACK_POINTER_REGNUM || (N) == ARG_POINTER_REGNUM		\
   || ((N) >= FIRST_VIRTUAL_REGISTER && (N) <= LAST_VIRTUAL_REGISTER)	\
   || ((N) < FIRST_PSEUDO_REGISTER					\
       && FIXED_REGNO_P (N) && REGNO_REG_CLASS (N) != NO_REGS))

/* A register is cheap if it is a user variable assigned to the register
   or if its register number always corresponds to a cheap register.  */

#define CHEAP_REG(N) \
  ((REG_USERVAR_P (N) && REGNO (N) < FIRST_PSEUDO_REGISTER) \
   || CHEAP_REGNO (REGNO (N)))

#define COST(X)						\
  (GET_CODE (X) == REG					\
   ? (CHEAP_REG (X) ? 0					\
      : REGNO (X) >= FIRST_PSEUDO_REGISTER ? 1		\
      : 2)						\
   : notreg_cost(X))
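
/* Spelled out (illustrative, not exhaustive):  COST of the frame pointer
   or of a user variable living in a hard register is 0, COST of an
   ordinary pseudo is 1, COST of a non-fixed hard register is 2, and
   everything else goes through `notreg_cost' and ultimately `rtx_cost'.
   `insert' keeps each equivalence class sorted by this value, cheapest
   first.  */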

/* Get the info associated with register N.  */

#define GET_CSE_REG_INFO(N) 			\
  (((N) == cached_regno && cached_cse_reg_info)	\
   ? cached_cse_reg_info : get_cse_reg_info ((N)))

/* Get the number of times this register has been updated in this
   basic block.  */

#define REG_TICK(N) ((GET_CSE_REG_INFO (N))->reg_tick)

/* Get the point at which REG was recorded in the table.  */

#define REG_IN_TABLE(N) ((GET_CSE_REG_INFO (N))->reg_in_table)

/* Get the quantity number for REG.  */

#define REG_QTY(N) ((GET_CSE_REG_INFO (N))->reg_qty)

/* Determine if the quantity number for register X represents a valid index
   into the qty_table.  */

#define REGNO_QTY_VALID_P(N) (REG_QTY (N) != (N))

#ifdef ADDRESS_COST
/* The ADDRESS_COST macro does not deal with ADDRESSOF nodes.  But,
   during CSE, such nodes are present.  Using an ADDRESSOF node which
   refers to the address of a REG is a good thing because we can then
   turn (MEM (ADDRESSOF (REG))) into just plain REG.  */
#define CSE_ADDRESS_COST(RTX) \
  ((GET_CODE (RTX) == ADDRESSOF && REG_P (XEXP ((RTX), 0))) \
   ? -1 : ADDRESS_COST(RTX))
#endif

static struct table_elt *table[HASH_SIZE];

/* Chain of `struct table_elt's made so far for this function
   but currently removed from the table.  */

static struct table_elt *free_element_chain;

/* Number of `struct table_elt' structures made so far for this function.  */

static int n_elements_made;

/* Maximum value `n_elements_made' has had so far in this compilation
   for functions previously processed.  */

static int max_elements_made;

/* Surviving equivalence class when two equivalence classes are merged
   by recording the effects of a jump in the last insn.  Zero if the
   last insn was not a conditional jump.  */

static struct table_elt *last_jump_equiv_class;

/* Set to the cost of a constant pool reference if one was found for a
   symbolic constant.  If this was found, it means we should try to
   convert constants into constant pool entries if they don't fit in
   the insn.  */

static int constant_pool_entries_cost;

/* Define maximum length of a branch path.  */

#define PATHLENGTH	10

/* This data describes a block that will be processed by cse_basic_block.  */

struct cse_basic_block_data
{
  /* Lowest CUID value of insns in block.  */
  int low_cuid;
  /* Highest CUID value of insns in block.  */
  int high_cuid;
  /* Total number of SETs in block.  */
  int nsets;
  /* Last insn in the block.  */
  rtx last;
  /* Size of current branch path, if any.  */
  int path_size;
  /* Current branch path, indicating which branches will be taken.  */
  struct branch_path
    {
      /* The branch insn.  */
      rtx branch;
      /* Whether it should be taken or not.  AROUND is the same as taken
	 except that it is used when the destination label is not preceded
	 by a BARRIER.  */
      enum taken {TAKEN, NOT_TAKEN, AROUND} status;
    } path[PATHLENGTH];
};

/* Nonzero if X has the form (PLUS frame-pointer integer).  We check for
   virtual regs here because the simplify_*_operation routines are called
   by integrate.c, which is called before virtual register instantiation.

   ?!? FIXED_BASE_PLUS_P and NONZERO_BASE_PLUS_P need to move into
   a header file so that their definitions can be shared with the
   simplification routines in simplify-rtx.c.  Until then, do not
   change these macros without also changing the copy in simplify-rtx.c.  */

#define FIXED_BASE_PLUS_P(X)					\
  ((X) == frame_pointer_rtx || (X) == hard_frame_pointer_rtx	\
   || ((X) == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])\
   || (X) == virtual_stack_vars_rtx				\
   || (X) == virtual_incoming_args_rtx				\
   || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
       && (XEXP (X, 0) == frame_pointer_rtx			\
	   || XEXP (X, 0) == hard_frame_pointer_rtx		\
	   || (XEXP (X, 0) == arg_pointer_rtx			\
	       && fixed_regs[ARG_POINTER_REGNUM])		\
	   || XEXP (X, 0) == virtual_stack_vars_rtx		\
	   || XEXP (X, 0) == virtual_incoming_args_rtx))	\
   || GET_CODE (X) == ADDRESSOF)

/* Similar, but also allows reference to the stack pointer.

   This used to include FIXED_BASE_PLUS_P; however, we can't assume that
   arg_pointer_rtx by itself is nonzero, because on at least one machine,
   the i960, the arg pointer is zero when it is unused.  */

#define NONZERO_BASE_PLUS_P(X)					\
  ((X) == frame_pointer_rtx || (X) == hard_frame_pointer_rtx	\
   || (X) == virtual_stack_vars_rtx				\
   || (X) == virtual_incoming_args_rtx				\
   || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
       && (XEXP (X, 0) == frame_pointer_rtx			\
	   || XEXP (X, 0) == hard_frame_pointer_rtx		\
	   || (XEXP (X, 0) == arg_pointer_rtx			\
	       && fixed_regs[ARG_POINTER_REGNUM])		\
	   || XEXP (X, 0) == virtual_stack_vars_rtx		\
	   || XEXP (X, 0) == virtual_incoming_args_rtx))	\
   || (X) == stack_pointer_rtx					\
   || (X) == virtual_stack_dynamic_rtx				\
   || (X) == virtual_outgoing_args_rtx				\
   || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
       && (XEXP (X, 0) == stack_pointer_rtx			\
	   || XEXP (X, 0) == virtual_stack_dynamic_rtx		\
	   || XEXP (X, 0) == virtual_outgoing_args_rtx))	\
   || GET_CODE (X) == ADDRESSOF)

static int notreg_cost		PARAMS ((rtx));
static void new_basic_block	PARAMS ((void));
static void make_new_qty	PARAMS ((int, enum machine_mode));
static void make_regs_eqv	PARAMS ((int, int));
static void delete_reg_equiv	PARAMS ((int));
static int mention_regs	PARAMS ((rtx));
static int insert_regs		PARAMS ((rtx, struct table_elt *, int));
static void remove_from_table	PARAMS ((struct table_elt *, unsigned));
static struct table_elt *lookup	PARAMS ((rtx, unsigned, enum machine_mode)),
       *lookup_for_remove	PARAMS ((rtx, unsigned, enum machine_mode));
static rtx lookup_as_function	PARAMS ((rtx, enum rtx_code));
static struct table_elt *insert PARAMS ((rtx, struct table_elt *, unsigned,
					 enum machine_mode));
static void merge_equiv_classes PARAMS ((struct table_elt *,
					 struct table_elt *));
static void invalidate		PARAMS ((rtx, enum machine_mode));
static int cse_rtx_varies_p	PARAMS ((rtx));
static void remove_invalid_refs	PARAMS ((int));
static void remove_invalid_subreg_refs	PARAMS ((int, int, enum machine_mode));
static void rehash_using_reg	PARAMS ((rtx));
static void invalidate_memory	PARAMS ((void));
static void invalidate_for_call	PARAMS ((void));
static rtx use_related_value	PARAMS ((rtx, struct table_elt *));
static unsigned canon_hash	PARAMS ((rtx, enum machine_mode));
static unsigned safe_hash	PARAMS ((rtx, enum machine_mode));
static int exp_equiv_p		PARAMS ((rtx, rtx, int, int));
static rtx canon_reg		PARAMS ((rtx, rtx));
static void find_best_addr	PARAMS ((rtx, rtx *));
static enum rtx_code find_comparison_args PARAMS ((enum rtx_code, rtx *, rtx *,
						   enum machine_mode *,
						   enum machine_mode *));
static rtx fold_rtx		PARAMS ((rtx, rtx));
static rtx equiv_constant	PARAMS ((rtx));
static void record_jump_equiv	PARAMS ((rtx, int));
static void record_jump_cond	PARAMS ((enum rtx_code, enum machine_mode,
					 rtx, rtx, int));
static void cse_insn		PARAMS ((rtx, rtx));
static int addr_affects_sp_p	PARAMS ((rtx));
static void invalidate_from_clobbers PARAMS ((rtx));
static rtx cse_process_notes	PARAMS ((rtx, rtx));
static void cse_around_loop	PARAMS ((rtx));
static void invalidate_skipped_set PARAMS ((rtx, rtx, void *));
static void invalidate_skipped_block PARAMS ((rtx));
static void cse_check_loop_start PARAMS ((rtx, rtx, void *));
static void cse_set_around_loop	PARAMS ((rtx, rtx, rtx));
static rtx cse_basic_block	PARAMS ((rtx, rtx, struct branch_path *, int));
static void count_reg_usage	PARAMS ((rtx, int *, rtx, int));
extern void dump_class          PARAMS ((struct table_elt*));
static struct cse_reg_info* get_cse_reg_info PARAMS ((int));

static void flush_hash_table	PARAMS ((void));

/* Dump the expressions in the equivalence class indicated by CLASSP.
   This function is used only for debugging.  */
void
dump_class (classp)
     struct table_elt *classp;
{
  struct table_elt *elt;

  fprintf (stderr, "Equivalence chain for ");
  print_rtl (stderr, classp->exp);
  fprintf (stderr, ": \n");

  for (elt = classp->first_same_value; elt; elt = elt->next_same_value)
    {
      print_rtl (stderr, elt->exp);
      fprintf (stderr, "\n");
    }
}
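
/* Most useful from within a debugger, e.g.
     (gdb) call dump_class (elt)
   for some `struct table_elt *elt' of interest.  */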

/* Internal function, to compute cost when X is not a register; called
   from COST macro to keep it simple.  */

static int
notreg_cost (x)
     rtx x;
{
  return ((GET_CODE (x) == SUBREG
	   && GET_CODE (SUBREG_REG (x)) == REG
	   && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
	   && GET_MODE_CLASS (GET_MODE (SUBREG_REG (x))) == MODE_INT
	   && (GET_MODE_SIZE (GET_MODE (x))
	       < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
	   && subreg_lowpart_p (x)
	   && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (GET_MODE (x)),
				     GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))))
	  ? (CHEAP_REG (SUBREG_REG (x)) ? 0
	     : (REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER ? 1
		: 2))
	  : rtx_cost (x, SET) * 2);
}

/* Return the right cost to give to an operation
   to make the cost of the corresponding register-to-register instruction
   N times that of a fast register-to-register instruction.  */

#define COSTS_N_INSNS(N) ((N) * 4 - 2)
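
/* Concretely:  COSTS_N_INSNS (1) == 2, matching the default cost that
   rtx_cost assigns a simple operation, and COSTS_N_INSNS (7) == 26, so
   a division is priced like seven fast reg-reg insns.  */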

/* Return an estimate of the cost of computing rtx X.
   One use is in cse, to decide which expression to keep in the hash table.
   Another is in rtl generation, to pick the cheapest way to multiply.
   Other uses like the latter are expected in the future.  */

int
rtx_cost (x, outer_code)
     rtx x;
     enum rtx_code outer_code ATTRIBUTE_UNUSED;
{
  register int i, j;
  register enum rtx_code code;
  register const char *fmt;
  register int total;

  if (x == 0)
    return 0;

  /* Compute the default costs of certain things.
     Note that RTX_COSTS can override the defaults.  */

  code = GET_CODE (x);
  switch (code)
    {
    case MULT:
      /* Count multiplication by 2**n as a shift,
	 because if we are considering it, we would output it as a shift.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
	  && exact_log2 (INTVAL (XEXP (x, 1))) >= 0)
	total = 2;
      else
	total = COSTS_N_INSNS (5);
      break;
    case DIV:
    case UDIV:
    case MOD:
    case UMOD:
      total = COSTS_N_INSNS (7);
      break;
    case USE:
      /* Used in loop.c and combine.c as a marker.  */
      total = 0;
      break;
    case ASM_OPERANDS:
      /* We don't want these to be used in substitutions because
	 we have no way of validating the resulting insn.  So assign
	 anything containing an ASM_OPERANDS a very high cost.  */
      total = 1000;
      break;
    default:
      total = 2;
    }

  switch (code)
    {
    case REG:
      return ! CHEAP_REG (x);

    case SUBREG:
      /* If we can't tie these modes, make this expensive.  The larger
	 the mode, the more expensive it is.  */
      if (! MODES_TIEABLE_P (GET_MODE (x), GET_MODE (SUBREG_REG (x))))
	return COSTS_N_INSNS (2
			      + GET_MODE_SIZE (GET_MODE (x)) / UNITS_PER_WORD);
      return 2;
#ifdef RTX_COSTS
      RTX_COSTS (x, code, outer_code);
#endif
#ifdef CONST_COSTS
      CONST_COSTS (x, code, outer_code);
#endif

    default:
#ifdef DEFAULT_RTX_COSTS
      DEFAULT_RTX_COSTS (x, code, outer_code);
#endif
      break;
    }

  /* Sum the costs of the sub-rtx's, plus cost of this operation,
     which is already in total.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      total += rtx_cost (XEXP (x, i), code);
    else if (fmt[i] == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
	total += rtx_cost (XVECEXP (x, i, j), code);

  return total;
}

static struct cse_reg_info *
get_cse_reg_info (regno)
     int regno;
{
  struct cse_reg_info **hash_head = &reg_hash[REGHASH_FN (regno)];
  struct cse_reg_info *p;

  for (p = *hash_head; p != NULL; p = p->hash_next)
    if (p->regno == regno)
      break;

  if (p == NULL)
    {
      /* Get a new cse_reg_info structure.  */
      if (cse_reg_info_free_list)
	{
	  p = cse_reg_info_free_list;
	  cse_reg_info_free_list = p->next;
	}
      else
	p = (struct cse_reg_info *) xmalloc (sizeof (struct cse_reg_info));

      /* Insert into hash table.  */
      p->hash_next = *hash_head;
      *hash_head = p;

      /* Initialize it.  */
      p->reg_tick = 1;
      p->reg_in_table = -1;
      p->reg_qty = regno;
      p->regno = regno;
      p->next = cse_reg_info_used_list;
      cse_reg_info_used_list = p;
      if (!cse_reg_info_used_list_end)
	cse_reg_info_used_list_end = p;
    }

  /* Cache this lookup; we tend to be looking up information about the
     same register several times in a row.  */
  cached_regno = regno;
  cached_cse_reg_info = p;

  return p;
}

/* Clear the hash table and initialize each register with its own quantity,
   for a new basic block.  */

static void
new_basic_block ()
{
  register int i;

  next_qty = max_reg;

  /* Clear out hash table state for this pass.  */

  bzero ((char *) reg_hash, sizeof reg_hash);

  if (cse_reg_info_used_list)
    {
      cse_reg_info_used_list_end->next = cse_reg_info_free_list;
      cse_reg_info_free_list = cse_reg_info_used_list;
      cse_reg_info_used_list = cse_reg_info_used_list_end = 0;
    }
  cached_cse_reg_info = 0;

  CLEAR_HARD_REG_SET (hard_regs_in_table);

  /* The per-quantity values used to be initialized here, but it is
     much faster to initialize each as it is made in `make_new_qty'.  */

  for (i = 0; i < HASH_SIZE; i++)
    {
      struct table_elt *first;

      first = table[i];
      if (first != NULL)
	{
	  struct table_elt *last = first;

	  table[i] = NULL;

	  while (last->next_same_hash != NULL)
	    last = last->next_same_hash;

	  /* Now relink this entire hash chain into
	     the free element list.  */

	  last->next_same_hash = free_element_chain;
	  free_element_chain = first;
	}
    }

  prev_insn = 0;

#ifdef HAVE_cc0
  prev_insn_cc0 = 0;
#endif
}

/* Say that register REG contains a quantity in mode MODE not in any
   register before and initialize that quantity.  */

static void
make_new_qty (reg, mode)
     register int reg;
     register enum machine_mode mode;
{
  register int q;
  register struct qty_table_elem *ent;
  register struct reg_eqv_elem *eqv;

  if (next_qty >= max_qty)
    abort ();

  q = REG_QTY (reg) = next_qty++;
  ent = &qty_table[q];
  ent->first_reg = reg;
  ent->last_reg = reg;
  ent->mode = mode;
  ent->const_rtx = ent->const_insn = NULL_RTX;
  ent->comparison_code = UNKNOWN;

  eqv = &reg_eqv_table[reg];
  eqv->next = eqv->prev = -1;
}

/* Make reg NEW equivalent to reg OLD.
   OLD is not changing; NEW is.  */

static void
make_regs_eqv (new, old)
     register int new, old;
{
  register int lastr, firstr;
  register int q = REG_QTY (old);
  register struct qty_table_elem *ent;

  ent = &qty_table[q];

  /* Nothing should become eqv until it has a "non-invalid" qty number.  */
  if (! REGNO_QTY_VALID_P (old))
    abort ();

  REG_QTY (new) = q;
  firstr = ent->first_reg;
  lastr = ent->last_reg;

  /* Prefer fixed hard registers to anything.  Prefer pseudo regs to other
     hard regs.  Among pseudos, if NEW will live longer than any other reg
     of the same qty, and that is beyond the current basic block,
     make it the new canonical replacement for this qty.  */
  if (! (firstr < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (firstr))
      /* Certain fixed registers might be of the class NO_REGS.  This means
	 that not only can they not be allocated by the compiler, but
	 they cannot be used in substitutions or canonicalizations
	 either.  */
      && (new >= FIRST_PSEUDO_REGISTER || REGNO_REG_CLASS (new) != NO_REGS)
      && ((new < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (new))
	  || (new >= FIRST_PSEUDO_REGISTER
	      && (firstr < FIRST_PSEUDO_REGISTER
		  || ((uid_cuid[REGNO_LAST_UID (new)] > cse_basic_block_end
		       || (uid_cuid[REGNO_FIRST_UID (new)]
			   < cse_basic_block_start))
		      && (uid_cuid[REGNO_LAST_UID (new)]
			  > uid_cuid[REGNO_LAST_UID (firstr)]))))))
    {
      reg_eqv_table[firstr].prev = new;
      reg_eqv_table[new].next = firstr;
      reg_eqv_table[new].prev = -1;
      ent->first_reg = new;
    }
  else
    {
      /* If NEW is a hard reg (known to be non-fixed), insert at end.
	 Otherwise, insert before any non-fixed hard regs that are at the
	 end.  Registers of class NO_REGS cannot be used as an
	 equivalent for anything.  */
      while (lastr < FIRST_PSEUDO_REGISTER && reg_eqv_table[lastr].prev >= 0
	     && (REGNO_REG_CLASS (lastr) == NO_REGS || ! FIXED_REGNO_P (lastr))
	     && new >= FIRST_PSEUDO_REGISTER)
	lastr = reg_eqv_table[lastr].prev;
      reg_eqv_table[new].next = reg_eqv_table[lastr].next;
      if (reg_eqv_table[lastr].next >= 0)
	reg_eqv_table[reg_eqv_table[lastr].next].prev = new;
      else
	qty_table[q].last_reg = new;
      reg_eqv_table[lastr].next = new;
      reg_eqv_table[new].prev = lastr;
    }
}

/* Remove REG from its equivalence class.  */

static void
delete_reg_equiv (reg)
     register int reg;
{
  register struct qty_table_elem *ent;
  register int q = REG_QTY (reg);
  register int p, n;

  /* If invalid, do nothing.  */
  if (q == reg)
    return;

  ent = &qty_table[q];

  p = reg_eqv_table[reg].prev;
  n = reg_eqv_table[reg].next;

  if (n != -1)
    reg_eqv_table[n].prev = p;
  else
    ent->last_reg = p;
  if (p != -1)
    reg_eqv_table[p].next = n;
  else
    ent->first_reg = n;

  REG_QTY (reg) = reg;
}

/* Remove any invalid expressions from the hash table
   that refer to any of the registers contained in expression X.

   Make sure that newly inserted references to those registers
   as subexpressions will be considered valid.

   mention_regs is not called when a register itself
   is being stored in the table.

   Return 1 if we have done something that may have changed the hash code
   of X.  */

static int
mention_regs (x)
     rtx x;
{
  register enum rtx_code code;
  register int i, j;
  register const char *fmt;
  register int changed = 0;

  if (x == 0)
    return 0;

  code = GET_CODE (x);
  if (code == REG)
    {
      register int regno = REGNO (x);
      register int endregno
	= regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
		   : HARD_REGNO_NREGS (regno, GET_MODE (x)));
      int i;

      for (i = regno; i < endregno; i++)
	{
	  if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
	    remove_invalid_refs (i);

	  REG_IN_TABLE (i) = REG_TICK (i);
	}

      return 0;
    }

  /* If this is a SUBREG, we don't want to discard other SUBREGs of the same
     pseudo if they don't use overlapping words.  We handle only pseudos
     here for simplicity.  */
  if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == REG
      && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER)
    {
      int i = REGNO (SUBREG_REG (x));

      if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
	{
	  /* If reg_tick has been incremented more than once since
	     reg_in_table was last set, that means that the entire
	     register has been set before, so discard anything memorized
	     for the entire register, including all SUBREG expressions.  */
	  if (REG_IN_TABLE (i) != REG_TICK (i) - 1)
	    remove_invalid_refs (i);
	  else
	    remove_invalid_subreg_refs (i, SUBREG_WORD (x), GET_MODE (x));
	}

      REG_IN_TABLE (i) = REG_TICK (i);
      return 0;
    }

  /* If X is a comparison or a COMPARE and either operand is a register
     that does not have a quantity, give it one.  This is so that a later
     call to record_jump_equiv won't cause X to be assigned a different
     hash code and not found in the table after that call.

     It is not necessary to do this here, since rehash_using_reg can
     fix up the table later, but doing this here eliminates the need to
     call that expensive function in the most common case where the only
     use of the register is in the comparison.  */

  if (code == COMPARE || GET_RTX_CLASS (code) == '<')
    {
      if (GET_CODE (XEXP (x, 0)) == REG
	  && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
	if (insert_regs (XEXP (x, 0), NULL_PTR, 0))
	  {
	    rehash_using_reg (XEXP (x, 0));
	    changed = 1;
	  }

      if (GET_CODE (XEXP (x, 1)) == REG
	  && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
	if (insert_regs (XEXP (x, 1), NULL_PTR, 0))
	  {
	    rehash_using_reg (XEXP (x, 1));
	    changed = 1;
	  }
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      changed |= mention_regs (XEXP (x, i));
    else if (fmt[i] == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
	changed |= mention_regs (XVECEXP (x, i, j));

  return changed;
}

/* Update the register quantities for inserting X into the hash table
   with a value equivalent to CLASSP.
   (If the class does not contain a REG, it is irrelevant.)
   If MODIFIED is nonzero, X is a destination; it is being modified.
   Note that delete_reg_equiv should be called on a register
   before insert_regs is done on that register with MODIFIED != 0.

   Nonzero value means that elements of reg_qty have changed
   so X's hash code may be different.  */

static int
insert_regs (x, classp, modified)
     rtx x;
     struct table_elt *classp;
     int modified;
{
  if (GET_CODE (x) == REG)
    {
      register int regno = REGNO (x);
      register int qty_valid;

      /* If REGNO is in the equivalence table already but is of the
	 wrong mode for that equivalence, don't do anything here.  */

      qty_valid = REGNO_QTY_VALID_P (regno);
      if (qty_valid)
	{
	  struct qty_table_elem *ent = &qty_table[REG_QTY (regno)];

	  if (ent->mode != GET_MODE (x))
	    return 0;
	}

      if (modified || ! qty_valid)
	{
	  if (classp)
	    for (classp = classp->first_same_value;
		 classp != 0;
		 classp = classp->next_same_value)
	      if (GET_CODE (classp->exp) == REG
		  && GET_MODE (classp->exp) == GET_MODE (x))
		{
		  make_regs_eqv (regno, REGNO (classp->exp));
		  return 1;
		}

	  make_new_qty (regno, GET_MODE (x));
	  return 1;
	}

      return 0;
    }

  /* If X is a SUBREG, we will likely be inserting the inner register in the
     table.  If that register doesn't have an assigned quantity number at
     this point but does later, the insertion that we will be doing now will
     not be accessible because its hash code will have changed.  So assign
     a quantity number now.  */

  else if (GET_CODE (x) == SUBREG && GET_CODE (SUBREG_REG (x)) == REG
	   && ! REGNO_QTY_VALID_P (REGNO (SUBREG_REG (x))))
    {
      int regno = REGNO (SUBREG_REG (x));

      insert_regs (SUBREG_REG (x), NULL_PTR, 0);
      /* Mention_regs checks if REG_TICK is exactly one larger than
	 REG_IN_TABLE to find out if there was only a single preceding
	 invalidation - for the SUBREG - or another one, which would be
	 for the full register.  Since we don't invalidate the SUBREG
	 here first, we might have to bump up REG_TICK so that mention_regs
	 will do the right thing.  */
      if (REG_IN_TABLE (regno) >= 0
	  && REG_TICK (regno) == REG_IN_TABLE (regno) + 1)
	REG_TICK (regno)++;
      mention_regs (x);
      return 1;
    }

  else
    return mention_regs (x);
}

/* Look in or update the hash table.  */

/* Remove table element ELT from use in the table.
   HASH is its hash code, made using the HASH macro.
   It's an argument because often that is known in advance
   and we save much time not recomputing it.  */

static void
remove_from_table (elt, hash)
     register struct table_elt *elt;
     unsigned hash;
{
  if (elt == 0)
    return;

  /* Mark this element as removed.  See cse_insn.  */
  elt->first_same_value = 0;

  /* Remove the table element from its equivalence class.  */

  {
    register struct table_elt *prev = elt->prev_same_value;
    register struct table_elt *next = elt->next_same_value;

    if (next)
      next->prev_same_value = prev;

    if (prev)
      prev->next_same_value = next;
    else
      {
	register struct table_elt *newfirst = next;
	while (next)
	  {
	    next->first_same_value = newfirst;
	    next = next->next_same_value;
	  }
      }
  }

  /* Remove the table element from its hash bucket.  */

  {
    register struct table_elt *prev = elt->prev_same_hash;
    register struct table_elt *next = elt->next_same_hash;

    if (next)
      next->prev_same_hash = prev;

    if (prev)
      prev->next_same_hash = next;
    else if (table[hash] == elt)
      table[hash] = next;
    else
      {
	/* This entry is not in the proper hash bucket.  This can happen
	   when two classes were merged by `merge_equiv_classes'.  Search
	   for the hash bucket that it heads.  This happens only very
	   rarely, so the cost is acceptable.  */
	for (hash = 0; hash < HASH_SIZE; hash++)
	  if (table[hash] == elt)
	    table[hash] = next;
      }
  }

  /* Remove the table element from its related-value circular chain.  */

  if (elt->related_value != 0 && elt->related_value != elt)
    {
      register struct table_elt *p = elt->related_value;
      while (p->related_value != elt)
	p = p->related_value;
      p->related_value = elt->related_value;
      if (p->related_value == p)
	p->related_value = 0;
    }

  /* Now add it to the free element chain.  */
  elt->next_same_hash = free_element_chain;
  free_element_chain = elt;
}

/* Look up X in the hash table and return its table element,
   or 0 if X is not in the table.

   MODE is the machine-mode of X, or if X is an integer constant
   with VOIDmode then MODE is the mode with which X will be used.

   Here we are satisfied to find an expression whose tree structure
   looks like X.  */

static struct table_elt *
lookup (x, hash, mode)
     rtx x;
     unsigned hash;
     enum machine_mode mode;
{
  register struct table_elt *p;

  for (p = table[hash]; p; p = p->next_same_hash)
    if (mode == p->mode && ((x == p->exp && GET_CODE (x) == REG)
			    || exp_equiv_p (x, p->exp, GET_CODE (x) != REG, 0)))
      return p;

  return 0;
}

/* Like `lookup' but don't care whether the table element uses invalid regs.
   Also ignore discrepancies in the machine mode of a register.  */

static struct table_elt *
lookup_for_remove (x, hash, mode)
     rtx x;
     unsigned hash;
     enum machine_mode mode;
{
  register struct table_elt *p;

  if (GET_CODE (x) == REG)
    {
      int regno = REGNO (x);

      /* Don't check the machine mode when comparing registers;
	 invalidating (REG:SI 0) also invalidates (REG:DF 0).  */
      for (p = table[hash]; p; p = p->next_same_hash)
	if (GET_CODE (p->exp) == REG
	    && REGNO (p->exp) == regno)
	  return p;
    }
  else
    {
      for (p = table[hash]; p; p = p->next_same_hash)
	if (mode == p->mode && (x == p->exp || exp_equiv_p (x, p->exp, 0, 0)))
	  return p;
    }

  return 0;
}

/* Look for an expression equivalent to X and with code CODE.
   If one is found, return that expression.  */

static rtx
lookup_as_function (x, code)
     rtx x;
     enum rtx_code code;
{
  register struct table_elt *p = lookup (x, safe_hash (x, VOIDmode) & HASH_MASK,
					 GET_MODE (x));

  /* If we are looking for a CONST_INT, the mode doesn't really matter, as
     long as we are narrowing.  So if we looked in vain for a mode narrower
     than word_mode before, look for word_mode now.  */
  if (p == 0 && code == CONST_INT
      && GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (word_mode))
    {
      x = copy_rtx (x);
      PUT_MODE (x, word_mode);
      p = lookup (x, safe_hash (x, VOIDmode) & HASH_MASK, word_mode);
    }

  if (p == 0)
    return 0;

  for (p = p->first_same_value; p; p = p->next_same_value)
    {
      if (GET_CODE (p->exp) == code
	  /* Make sure this is a valid entry in the table.  */
	  && exp_equiv_p (p->exp, p->exp, 1, 0))
	return p->exp;
    }

  return 0;
}

/* Insert X in the hash table, assuming HASH is its hash code
   and CLASSP is an element of the class it should go in
   (or 0 if a new class should be made).
   It is inserted at the proper position to keep the class in
   the order cheapest first.

   MODE is the machine-mode of X, or if X is an integer constant
   with VOIDmode then MODE is the mode with which X will be used.

   For elements of equal cheapness, the most recent one
   goes in front, except that the first element in the list
   remains first unless a cheaper element is added.  The order of
   pseudo-registers does not matter, as canon_reg will be called to
   find the cheapest when a register is retrieved from the table.

   The in_memory field in the hash table element is set to 0.
   The caller must set it nonzero if appropriate.

   You should call insert_regs (X, CLASSP, MODIFY) before calling here,
   and if insert_regs returns a nonzero value
   you must then recompute its hash code before calling here.

   If necessary, update table showing constant values of quantities.  */

#define CHEAPER(X,Y)   ((X)->cost < (Y)->cost)

static struct table_elt *
insert (x, classp, hash, mode)
     register rtx x;
     register struct table_elt *classp;
     unsigned hash;
     enum machine_mode mode;
{
  register struct table_elt *elt;

  /* If X is a register and we haven't made a quantity for it,
     something is wrong.  */
  if (GET_CODE (x) == REG && ! REGNO_QTY_VALID_P (REGNO (x)))
    abort ();

  /* If X is a hard register, show it is being put in the table.  */
  if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
    {
      int regno = REGNO (x);
      int endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
      int i;

      for (i = regno; i < endregno; i++)
	SET_HARD_REG_BIT (hard_regs_in_table, i);
    }

  /* If X is a label, show we recorded it.  */
  if (GET_CODE (x) == LABEL_REF
      || (GET_CODE (x) == CONST && GET_CODE (XEXP (x, 0)) == PLUS
	  && GET_CODE (XEXP (XEXP (x, 0), 0)) == LABEL_REF))
    recorded_label_ref = 1;

  /* Put an element for X into the right hash bucket.  */

  elt = free_element_chain;
  if (elt)
    free_element_chain = elt->next_same_hash;
  else
    {
      n_elements_made++;
      elt = (struct table_elt *) oballoc (sizeof (struct table_elt));
    }

  elt->exp = x;
  elt->cost = COST (x);
  elt->next_same_value = 0;
  elt->prev_same_value = 0;
  elt->next_same_hash = table[hash];
  elt->prev_same_hash = 0;
  elt->related_value = 0;
  elt->in_memory = 0;
  elt->mode = mode;
  elt->is_const = (CONSTANT_P (x)
		   /* GNU C++ takes advantage of this for `this'
		      (and other const values).  */
		   || (RTX_UNCHANGING_P (x)
		       && GET_CODE (x) == REG
		       && REGNO (x) >= FIRST_PSEUDO_REGISTER)
		   || FIXED_BASE_PLUS_P (x));

  if (table[hash])
    table[hash]->prev_same_hash = elt;
  table[hash] = elt;

  /* Put it into the proper value-class.  */
  if (classp)
    {
      classp = classp->first_same_value;
      if (CHEAPER (elt, classp))
	/* Insert at the head of the class.  */
	{
	  register struct table_elt *p;
	  elt->next_same_value = classp;
	  classp->prev_same_value = elt;
	  elt->first_same_value = elt;

	  for (p = classp; p; p = p->next_same_value)
	    p->first_same_value = elt;
	}
      else
	{
	  /* Insert not at head of the class.  */
	  /* Put it after the last element cheaper than X.  */
	  register struct table_elt *p, *next;

	  for (p = classp; (next = p->next_same_value) && CHEAPER (next, elt);
	       p = next);

	  /* Put it after P and before NEXT.  */
	  elt->next_same_value = next;
	  if (next)
	    next->prev_same_value = elt;
	  elt->prev_same_value = p;
	  p->next_same_value = elt;
	  elt->first_same_value = classp;
	}
    }
  else
    elt->first_same_value = elt;

  /* If this is a constant being set equivalent to a register or a register
     being set equivalent to a constant, note the constant equivalence.

     If this is a constant, it cannot be equivalent to a different constant,
     and a constant is the only thing that can be cheaper than a register.  So
     we know the register is the head of the class (before the constant was
     inserted).

     If this is a register that is not already known equivalent to a
     constant, we must check the entire class.

     If this is a register that is already known equivalent to an insn,
     update the qtys `const_insn' to show that `this_insn' is the latest
     insn making that quantity equivalent to the constant.  */

  if (elt->is_const && classp && GET_CODE (classp->exp) == REG
      && GET_CODE (x) != REG)
    {
      int exp_q = REG_QTY (REGNO (classp->exp));
      struct qty_table_elem *exp_ent = &qty_table[exp_q];

      exp_ent->const_rtx = gen_lowpart_if_possible (exp_ent->mode, x);
      exp_ent->const_insn = this_insn;
    }

  else if (GET_CODE (x) == REG
	   && classp
	   && ! qty_table[REG_QTY (REGNO (x))].const_rtx
	   && ! elt->is_const)
    {
      register struct table_elt *p;

      for (p = classp; p != 0; p = p->next_same_value)
	{
	  if (p->is_const && GET_CODE (p->exp) != REG)
	    {
	      int x_q = REG_QTY (REGNO (x));
	      struct qty_table_elem *x_ent = &qty_table[x_q];

	      x_ent->const_rtx = gen_lowpart_if_possible (GET_MODE (x), p->exp);
	      x_ent->const_insn = this_insn;
	      break;
	    }
	}
    }

  else if (GET_CODE (x) == REG
	   && qty_table[REG_QTY (REGNO (x))].const_rtx
	   && GET_MODE (x) == qty_table[REG_QTY (REGNO (x))].mode)
    qty_table[REG_QTY (REGNO (x))].const_insn = this_insn;

  /* If this is a constant with symbolic value,
     and it has a term with an explicit integer value,
     link it up with related expressions.  */
  if (GET_CODE (x) == CONST)
    {
      rtx subexp = get_related_value (x);
      unsigned subhash;
      struct table_elt *subelt, *subelt_prev;

      if (subexp != 0)
	{
	  /* Get the integer-free subexpression in the hash table.  */
	  subhash = safe_hash (subexp, mode) & HASH_MASK;
	  subelt = lookup (subexp, subhash, mode);
	  if (subelt == 0)
	    subelt = insert (subexp, NULL_PTR, subhash, mode);
	  /* Initialize SUBELT's circular chain if it has none.  */
	  if (subelt->related_value == 0)
	    subelt->related_value = subelt;
	  /* Find the element in the circular chain that precedes SUBELT.  */
	  subelt_prev = subelt;
	  while (subelt_prev->related_value != subelt)
	    subelt_prev = subelt_prev->related_value;
	  /* Put new ELT into SUBELT's circular chain just before SUBELT.
	     This way the element that follows SUBELT is the oldest one.  */
	  elt->related_value = subelt_prev->related_value;
	  subelt_prev->related_value = elt;
	}
    }

  return elt;
}

/* Given two equivalence classes, CLASS1 and CLASS2, put all the entries from
   CLASS2 into CLASS1.  This is done when we have reached an insn which makes
   the two classes equivalent.

   CLASS1 will be the surviving class; CLASS2 should not be used after this
   call.

   Any invalid entries in CLASS2 will not be copied.  */

static void
merge_equiv_classes (class1, class2)
     struct table_elt *class1, *class2;
{
  struct table_elt *elt, *next, *new;

  /* Ensure we start with the head of the classes.  */
  class1 = class1->first_same_value;
  class2 = class2->first_same_value;

  /* If they were already equal, forget it.  */
  if (class1 == class2)
    return;

  for (elt = class2; elt; elt = next)
    {
      unsigned hash;
      rtx exp = elt->exp;
      enum machine_mode mode = elt->mode;

      next = elt->next_same_value;

      /* Remove old entry, make a new one in CLASS1's class.
	 Don't do this for invalid entries as we cannot find their
	 hash code (it also isn't necessary).  */
      if (GET_CODE (exp) == REG || exp_equiv_p (exp, exp, 1, 0))
	{
	  hash_arg_in_memory = 0;
	  hash = HASH (exp, mode);

	  if (GET_CODE (exp) == REG)
	    delete_reg_equiv (REGNO (exp));

	  remove_from_table (elt, hash);

	  if (insert_regs (exp, class1, 0))
	    {
	      rehash_using_reg (exp);
	      hash = HASH (exp, mode);
	    }
	  new = insert (exp, class1, hash, mode);
	  new->in_memory = hash_arg_in_memory;
	}
    }
}

/* Flush the entire hash table.  */

static void
flush_hash_table ()
{
  int i;
  struct table_elt *p;

  for (i = 0; i < HASH_SIZE; i++)
    for (p = table[i]; p; p = table[i])
      {
	/* Note that invalidate can remove elements
	   after P in the current hash chain.  */
	if (GET_CODE (p->exp) == REG)
	  invalidate (p->exp, p->mode);
	else
	  remove_from_table (p, i);
      }
}
1714 /* Remove from the hash table, or mark as invalid, all expressions whose
1715 values could be altered by storing in X. X is a register, a subreg, or
1716 a memory reference with nonvarying address (because, when a memory
1717 reference with a varying address is stored in, all memory references are
1718 removed by invalidate_memory so specific invalidation is superfluous).
1719 FULL_MODE, if not VOIDmode, indicates that this much should be
1720 invalidated instead of just the amount indicated by the mode of X. This
1721 is only used for bitfield stores into memory.
1723 A nonvarying address may be just a register or just a symbol reference,
1724 or it may be either of those plus a numeric offset. */
1726 static void
1727 invalidate (x, full_mode)
1728 rtx x;
1729 enum machine_mode full_mode;
1731 register int i;
1732 register struct table_elt *p;
1734 switch (GET_CODE (x))
1736 case REG:
1738 /* If X is a register, dependencies on its contents are recorded
1739 through the qty number mechanism. Just change the qty number of
1740 the register, mark it as invalid for expressions that refer to it,
1741 and remove it itself. */
1742 register int regno = REGNO (x);
1743 register unsigned hash = HASH (x, GET_MODE (x));
1745 /* Remove REGNO from any quantity list it might be on and indicate
1746 that its value might have changed. If it is a pseudo, remove its
1747 entry from the hash table.
1749 For a hard register, we do the first two actions above for any
1750 additional hard registers corresponding to X. Then, if any of these
1751 registers are in the table, we must remove any REG entries that
1752 overlap these registers. */
1754 delete_reg_equiv (regno);
1755 REG_TICK (regno)++;
1757 if (regno >= FIRST_PSEUDO_REGISTER)
1759 /* Because a register can be referenced in more than one mode,
1760 we might have to remove more than one table entry. */
1761 struct table_elt *elt;
1763 while ((elt = lookup_for_remove (x, hash, GET_MODE (x))))
1764 remove_from_table (elt, hash);
1766 else
1768 HOST_WIDE_INT in_table
1769 = TEST_HARD_REG_BIT (hard_regs_in_table, regno);
1770 int endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
1771 int tregno, tendregno;
1772 register struct table_elt *p, *next;
1774 CLEAR_HARD_REG_BIT (hard_regs_in_table, regno);
1776 for (i = regno + 1; i < endregno; i++)
1778 in_table |= TEST_HARD_REG_BIT (hard_regs_in_table, i);
1779 CLEAR_HARD_REG_BIT (hard_regs_in_table, i);
1780 delete_reg_equiv (i);
1781 REG_TICK (i)++;
1784 if (in_table)
1785 for (hash = 0; hash < HASH_SIZE; hash++)
1786 for (p = table[hash]; p; p = next)
1788 next = p->next_same_hash;
1790 if (GET_CODE (p->exp) != REG
1791 || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
1792 continue;
1794 tregno = REGNO (p->exp);
1795 tendregno
1796 = tregno + HARD_REGNO_NREGS (tregno, GET_MODE (p->exp));
1797 if (tendregno > regno && tregno < endregno)
1798 remove_from_table (p, hash);
1802 return;
1804 case SUBREG:
1805 invalidate (SUBREG_REG (x), VOIDmode);
1806 return;
1808 case PARALLEL:
1809 for (i = XVECLEN (x, 0) - 1; i >= 0 ; --i)
1810 invalidate (XVECEXP (x, 0, i), VOIDmode);
1811 return;
1813 case EXPR_LIST:
1814 /* This is part of a disjoint return value; extract the location in
1815 question ignoring the offset. */
1816 invalidate (XEXP (x, 0), VOIDmode);
1817 return;
1819 case MEM:
1820 /* Remove all hash table elements that refer to overlapping pieces of
1821 memory. */
1822 if (full_mode == VOIDmode)
1823 full_mode = GET_MODE (x);
1825 for (i = 0; i < HASH_SIZE; i++)
1827 register struct table_elt *next;
1829 for (p = table[i]; p; p = next)
1831 next = p->next_same_hash;
1832 if (p->in_memory
1833 && (GET_CODE (p->exp) != MEM
1834 || true_dependence (x, full_mode, p->exp,
1835 cse_rtx_varies_p)))
1836 remove_from_table (p, i);
1839 return;
1841 default:
1842 abort ();
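/* Illustrative sketch, not part of the original file: the REG case above
   treats a multi-word hard register as the half-open range
   [REGNO, ENDREGNO), and two hard registers conflict exactly when their
   ranges intersect.  This hypothetical predicate restates the test used
   there.  */
#if 0
static int
hard_reg_ranges_overlap_p (regno, endregno, tregno, tendregno)
     int regno, endregno, tregno, tendregno;
{
  return tendregno > regno && tregno < endregno;
}
#endif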
1846 /* Remove all expressions that refer to register REGNO,
1847 since they are already invalid, and we are about to
1848 mark that register valid again and don't want the old
1849 expressions to reappear as valid. */
1851 static void
1852 remove_invalid_refs (regno)
1853 int regno;
1855 register int i;
1856 register struct table_elt *p, *next;
1858 for (i = 0; i < HASH_SIZE; i++)
1859 for (p = table[i]; p; p = next)
1861 next = p->next_same_hash;
1862 if (GET_CODE (p->exp) != REG
1863 && refers_to_regno_p (regno, regno + 1, p->exp, NULL_PTR))
1864 remove_from_table (p, i);
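/* Illustrative sketch, not part of the original file: the filter above
   asks refers_to_regno_p about the half-open range [REGNO, REGNO + 1),
   i.e. about exactly one register, and skips plain REG entries since
   those were removed when the register itself was invalidated.  A
   hypothetical restatement of the per-entry test:  */
#if 0
static int
mentions_dead_reg_p (x, regno)
     rtx x;
     int regno;
{
  return (GET_CODE (x) != REG
	  && refers_to_regno_p (regno, regno + 1, x, NULL_PTR));
}
#endif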
1868 /* Likewise for a subreg whose SUBREG_REG is register number REGNO, with word offset WORD and mode MODE. */
1869 static void
1870 remove_invalid_subreg_refs (regno, word, mode)
1871 int regno;
1872 int word;
1873 enum machine_mode mode;
1875 register int i;
1876 register struct table_elt *p, *next;
1877 int end = word + (GET_MODE_SIZE (mode) - 1) / UNITS_PER_WORD;
1879 for (i = 0; i < HASH_SIZE; i++)
1880 for (p = table[i]; p; p = next)
1882 rtx exp;
1883 next = p->next_same_hash;
1885 exp = p->exp;
1886 if (GET_CODE (p->exp) != REG
1887 && (GET_CODE (exp) != SUBREG
1888 || GET_CODE (SUBREG_REG (exp)) != REG
1889 || REGNO (SUBREG_REG (exp)) != regno
1890 || (((SUBREG_WORD (exp)
1891 + (GET_MODE_SIZE (GET_MODE (exp)) - 1) / UNITS_PER_WORD)
1892 >= word)
1893 && SUBREG_WORD (exp) <= end))
1894 && refers_to_regno_p (regno, regno + 1, p->exp, NULL_PTR))
1895 remove_from_table (p, i);
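/* Illustrative sketch, not part of the original file: a SUBREG of mode M
   starting at word W occupies the words W through W plus the size of M
   in words, minus one.  The overlap test above compares that range with
   [WORD, END]; this hypothetical helper computes its upper bound.  */
#if 0
static int
last_word_touched (word, mode)
     int word;
     enum machine_mode mode;
{
  return word + (GET_MODE_SIZE (mode) - 1) / UNITS_PER_WORD;
}
#endif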
1899 /* Recompute the hash codes of any valid entries in the hash table that
1900 reference X, if X is a register, or SUBREG_REG (X) if X is a SUBREG.
1902 This is called when we make a jump equivalence. */
1904 static void
1905 rehash_using_reg (x)
1906 rtx x;
1908 unsigned int i;
1909 struct table_elt *p, *next;
1910 unsigned hash;
1912 if (GET_CODE (x) == SUBREG)
1913 x = SUBREG_REG (x);
1915 /* If X is not a register or if the register is known not to be in any
1916 valid entries in the table, we have no work to do. */
1918 if (GET_CODE (x) != REG
1919 || REG_IN_TABLE (REGNO (x)) < 0
1920 || REG_IN_TABLE (REGNO (x)) != REG_TICK (REGNO (x)))
1921 return;
1923 /* Scan all hash chains looking for valid entries that mention X.
1924 If we find one and it is in the wrong hash chain, move it. We can skip
1925 objects that are registers, since they are handled specially. */
1927 for (i = 0; i < HASH_SIZE; i++)
1928 for (p = table[i]; p; p = next)
1930 next = p->next_same_hash;
1931 if (GET_CODE (p->exp) != REG && reg_mentioned_p (x, p->exp)
1932 && exp_equiv_p (p->exp, p->exp, 1, 0)
1933 && i != (hash = safe_hash (p->exp, p->mode) & HASH_MASK))
1935 if (p->next_same_hash)
1936 p->next_same_hash->prev_same_hash = p->prev_same_hash;
1938 if (p->prev_same_hash)
1939 p->prev_same_hash->next_same_hash = p->next_same_hash;
1940 else
1941 table[i] = p->next_same_hash;
1943 p->next_same_hash = table[hash];
1944 p->prev_same_hash = 0;
1945 if (table[hash])
1946 table[hash]->prev_same_hash = p;
1947 table[hash] = p;
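/* Illustrative sketch, not part of the original file: the loop above
   moves an entry between buckets by splicing it out of one doubly-linked
   hash chain and pushing it onto the front of another.  The same
   operation, as a hypothetical stand-alone helper:  */
#if 0
static void
move_to_bucket (p, from, to)
     struct table_elt *p;
     unsigned from, to;
{
  /* Unlink P from its current chain.  */
  if (p->next_same_hash)
    p->next_same_hash->prev_same_hash = p->prev_same_hash;
  if (p->prev_same_hash)
    p->prev_same_hash->next_same_hash = p->next_same_hash;
  else
    table[from] = p->next_same_hash;

  /* Push P onto the front of the target chain.  */
  p->next_same_hash = table[to];
  p->prev_same_hash = 0;
  if (table[to])
    table[to]->prev_same_hash = p;
  table[to] = p;
}
#endif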
1952 /* Remove from the hash table any expressions that are call-clobbered
1953 registers. Also update their TICK values. */
1955 static void
1956 invalidate_for_call ()
1958 int regno, endregno;
1959 int i;
1960 unsigned hash;
1961 struct table_elt *p, *next;
1962 int in_table = 0;
1964 /* Go through all the hard registers. For each that is clobbered in
1965 a CALL_INSN, remove the register from quantity chains and update
1966 reg_tick if defined. Also see if any of these registers is currently
1967 in the table. */
1969 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1970 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
1972 delete_reg_equiv (regno);
1973 if (REG_TICK (regno) >= 0)
1974 REG_TICK (regno)++;
1976 in_table |= (TEST_HARD_REG_BIT (hard_regs_in_table, regno) != 0);
1979 /* In the case where we have no call-clobbered hard registers in the
1980 table, we are done. Otherwise, scan the table and remove any
1981 entry that overlaps a call-clobbered register. */
1983 if (in_table)
1984 for (hash = 0; hash < HASH_SIZE; hash++)
1985 for (p = table[hash]; p; p = next)
1987 next = p->next_same_hash;
1989 if (GET_CODE (p->exp) != REG
1990 || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
1991 continue;
1993 regno = REGNO (p->exp);
1994 endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (p->exp));
1996 for (i = regno; i < endregno; i++)
1997 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
1999 remove_from_table (p, hash);
2000 break;
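/* Illustrative sketch, not part of the original file: the scan above
   removes an entry as soon as any hard register it occupies is clobbered
   by a call.  This hypothetical predicate restates the per-entry test.  */
#if 0
static int
entry_call_clobbered_p (p)
     struct table_elt *p;
{
  int regno, endregno, i;

  if (GET_CODE (p->exp) != REG || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
    return 0;

  regno = REGNO (p->exp);
  endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (p->exp));
  for (i = regno; i < endregno; i++)
    if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
      return 1;

  return 0;
}
#endif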
2005 /* Given an expression X of type CONST,
2006 and ELT which is its table entry (or 0 if it
2007 is not in the hash table),
2008 return an alternate expression for X as a register plus integer.
2009 If none can be found, return 0. */
2011 static rtx
2012 use_related_value (x, elt)
2013 rtx x;
2014 struct table_elt *elt;
2016 register struct table_elt *relt = 0;
2017 register struct table_elt *p, *q;
2018 HOST_WIDE_INT offset;
2020 /* First, is there anything related known?
2021 If we have a table element, we can tell from that.
2022 Otherwise, must look it up. */
2024 if (elt != 0 && elt->related_value != 0)
2025 relt = elt;
2026 else if (elt == 0 && GET_CODE (x) == CONST)
2028 rtx subexp = get_related_value (x);
2029 if (subexp != 0)
2030 relt = lookup (subexp,
2031 safe_hash (subexp, GET_MODE (subexp)) & HASH_MASK,
2032 GET_MODE (subexp));
2035 if (relt == 0)
2036 return 0;
2038 /* Search all related table entries for one that has an
2039 equivalent register. */
2041 p = relt;
2042 while (1)
2044 /* This loop is strange in that it is executed in two different cases.
2045 The first is when X is already in the table. Then it is searching
2046 the RELATED_VALUE list of X's class (RELT). The second case is when
2047 X is not in the table. Then RELT points to a class for the related
2048 value.
2050 Ensure that, whatever case we are in, we ignore classes that have
2051 the same value as X. */
2053 if (rtx_equal_p (x, p->exp))
2054 q = 0;
2055 else
2056 for (q = p->first_same_value; q; q = q->next_same_value)
2057 if (GET_CODE (q->exp) == REG)
2058 break;
2060 if (q)
2061 break;
2063 p = p->related_value;
2065 /* We went all the way around, so there is nothing to be found.
2066 Alternatively, perhaps RELT was in the table for some other reason
2067 and it has no related values recorded. */
2068 if (p == relt || p == 0)
2069 break;
2072 if (q == 0)
2073 return 0;
2075 offset = (get_integer_term (x) - get_integer_term (p->exp));
2076 /* Note: OFFSET may be 0 if P->exp and X are related by commutativity. */
2077 return plus_constant (q->exp, offset);
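/* Illustrative sketch, not part of the original file: if X is
   (const (plus SYM 8)), P->exp is (const (plus SYM 4)), and register R is
   known equivalent to P->exp, the code above rewrites X as (plus R 4),
   since get_integer_term gives 8 and 4 respectively.  The arithmetic, as
   a hypothetical helper:  */
#if 0
static rtx
related_form (equiv_reg, x, related_exp)
     rtx equiv_reg, x, related_exp;
{
  return plus_constant (equiv_reg,
			get_integer_term (x) - get_integer_term (related_exp));
}
#endif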
2080 /* Hash an rtx. We are careful to make sure the value is never negative.
2081 Equivalent registers hash identically.
2082 MODE is used in hashing for CONST_INTs only;
2083 otherwise the mode of X is used.
2085 Store 1 in do_not_record if any subexpression is volatile.
2087 Store 1 in hash_arg_in_memory if X contains a MEM rtx
2088 which does not have the RTX_UNCHANGING_P bit set.
2090 Note that cse_insn knows that the hash code of a MEM expression
2091 is just (int) MEM plus the hash code of the address. */
2093 static unsigned
2094 canon_hash (x, mode)
2095 rtx x;
2096 enum machine_mode mode;
2098 register int i, j;
2099 register unsigned hash = 0;
2100 register enum rtx_code code;
2101 register const char *fmt;
2103 /* repeat is used to turn tail-recursion into iteration. */
2104 repeat:
2105 if (x == 0)
2106 return hash;
2108 code = GET_CODE (x);
2109 switch (code)
2111 case REG:
2113 register int regno = REGNO (x);
2115 /* On some machines, we can't record any non-fixed hard register,
2116 because extending its life will cause reload problems. We
2117 consider ap, fp, and sp to be fixed for this purpose.
2119 We also consider CCmode registers to be fixed for this purpose;
2120 failure to do so leads to failure to simplify 0<100 type of
2121 conditionals.
2123 On all machines, we can't record any global registers. */
2125 if (regno < FIRST_PSEUDO_REGISTER
2126 && (global_regs[regno]
2127 || (SMALL_REGISTER_CLASSES
2128 && ! fixed_regs[regno]
2129 && regno != FRAME_POINTER_REGNUM
2130 && regno != HARD_FRAME_POINTER_REGNUM
2131 && regno != ARG_POINTER_REGNUM
2132 && regno != STACK_POINTER_REGNUM
2133 && GET_MODE_CLASS (GET_MODE (x)) != MODE_CC)))
2135 do_not_record = 1;
2136 return 0;
2138 hash += ((unsigned) REG << 7) + (unsigned) REG_QTY (regno);
2139 return hash;
2142 /* We handle SUBREG of a REG specially because the underlying
2143 reg changes its hash value with every value change; we don't
2144 want to have to forget unrelated subregs when one subreg changes. */
2145 case SUBREG:
2147 if (GET_CODE (SUBREG_REG (x)) == REG)
2149 hash += (((unsigned) SUBREG << 7)
2150 + REGNO (SUBREG_REG (x)) + SUBREG_WORD (x));
2151 return hash;
2153 break;
2156 case CONST_INT:
2158 unsigned HOST_WIDE_INT tem = INTVAL (x);
2159 hash += ((unsigned) CONST_INT << 7) + (unsigned) mode + tem;
2160 return hash;
2163 case CONST_DOUBLE:
2164 /* This is like the general case, except that it only counts
2165 the integers representing the constant. */
2166 hash += (unsigned) code + (unsigned) GET_MODE (x);
2167 if (GET_MODE (x) != VOIDmode)
2168 for (i = 2; i < GET_RTX_LENGTH (CONST_DOUBLE); i++)
2170 unsigned HOST_WIDE_INT tem = XWINT (x, i);
2171 hash += tem;
2173 else
2174 hash += ((unsigned) CONST_DOUBLE_LOW (x)
2175 + (unsigned) CONST_DOUBLE_HIGH (x));
2176 return hash;
2178 /* Assume there is only one rtx object for any given label. */
2179 case LABEL_REF:
2180 hash
2181 += ((unsigned) LABEL_REF << 7) + (unsigned long) XEXP (x, 0);
2182 return hash;
2184 case SYMBOL_REF:
2185 hash
2186 += ((unsigned) SYMBOL_REF << 7) + (unsigned long) XSTR (x, 0);
2187 return hash;
2189 case MEM:
2190 /* We don't record if marked volatile or if BLKmode since we don't
2191 know the size of the move. */
2192 if (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode)
2194 do_not_record = 1;
2195 return 0;
2197 if (! RTX_UNCHANGING_P (x) || FIXED_BASE_PLUS_P (XEXP (x, 0)))
2199 hash_arg_in_memory = 1;
2201 /* Now that we have already found this special case,
2202 might as well speed it up as much as possible. */
2203 hash += (unsigned) MEM;
2204 x = XEXP (x, 0);
2205 goto repeat;
2207 case PRE_DEC:
2208 case PRE_INC:
2209 case POST_DEC:
2210 case POST_INC:
2211 case PC:
2212 case CC0:
2213 case CALL:
2214 case UNSPEC_VOLATILE:
2215 do_not_record = 1;
2216 return 0;
2218 case ASM_OPERANDS:
2219 if (MEM_VOLATILE_P (x))
2221 do_not_record = 1;
2222 return 0;
2224 break;
2226 default:
2227 break;
2230 i = GET_RTX_LENGTH (code) - 1;
2231 hash += (unsigned) code + (unsigned) GET_MODE (x);
2232 fmt = GET_RTX_FORMAT (code);
2233 for (; i >= 0; i--)
2235 if (fmt[i] == 'e')
2237 rtx tem = XEXP (x, i);
2239 /* If we are about to do the last recursive call
2240 needed at this level, change it into iteration.
2241 This function is called enough to be worth it. */
2242 if (i == 0)
2244 x = tem;
2245 goto repeat;
2247 hash += canon_hash (tem, 0);
2249 else if (fmt[i] == 'E')
2250 for (j = 0; j < XVECLEN (x, i); j++)
2251 hash += canon_hash (XVECEXP (x, i, j), 0);
2252 else if (fmt[i] == 's')
2254 register unsigned char *p = (unsigned char *) XSTR (x, i);
2255 if (p)
2256 while (*p)
2257 hash += *p++;
2259 else if (fmt[i] == 'i')
2261 register unsigned tem = XINT (x, i);
2262 hash += tem;
2264 else if (fmt[i] == '0' || fmt[i] == 't')
2265 /* unused */;
2266 else
2267 abort ();
2269 return hash;
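/* Illustrative sketch, not part of the original file: the `repeat' label
   above converts the final recursive call at each level into iteration.
   The same transformation, shown on a hypothetical two-child tree:  */
#if 0
struct two_node { unsigned value; struct two_node *left, *right; };

static unsigned
hash_two_tree (n, h)
     struct two_node *n;
     unsigned h;
{
 repeat:
  if (n == 0)
    return h;

  h += n->value;		   /* Combine this node into the hash.  */
  h += hash_two_tree (n->left, 0); /* Genuine recursion for early operands.  */
  n = n->right;			   /* The tail call becomes a jump.  */
  goto repeat;
}
#endif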
2272 /* Like canon_hash but with no side effects. */
2274 static unsigned
2275 safe_hash (x, mode)
2276 rtx x;
2277 enum machine_mode mode;
2279 int save_do_not_record = do_not_record;
2280 int save_hash_arg_in_memory = hash_arg_in_memory;
2281 unsigned hash = canon_hash (x, mode);
2282 hash_arg_in_memory = save_hash_arg_in_memory;
2283 do_not_record = save_do_not_record;
2284 return hash;
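/* Illustrative sketch, not part of the original file: safe_hash lets a
   caller probe the table without disturbing do_not_record or
   hash_arg_in_memory, e.g. when only a lookup is wanted.  */
#if 0
static struct table_elt *
lookup_quietly (x, mode)
     rtx x;
     enum machine_mode mode;
{
  return lookup (x, safe_hash (x, mode) & HASH_MASK, mode);
}
#endif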
2287 /* Return 1 iff X and Y would canonicalize into the same thing,
2288 without actually constructing the canonicalization of either one.
2289 If VALIDATE is nonzero,
2290 we assume X is an expression being processed from the rtl
2291 and Y was found in the hash table. We check register refs
2292 in Y for being marked as valid.
2294 If EQUAL_VALUES is nonzero, we allow a register to match a constant value
2295 that is known to be in the register. Ordinarily, we don't allow them
2296 to match, because letting them match would cause unpredictable results
2297 in all the places that search a hash table chain for an equivalent
2298 for a given value. A possible equivalent that has different structure
2299 has its hash code computed from different data. Whether the hash code
2300 is the same as that of the given value is pure luck. */
2302 static int
2303 exp_equiv_p (x, y, validate, equal_values)
2304 rtx x, y;
2305 int validate;
2306 int equal_values;
2308 register int i, j;
2309 register enum rtx_code code;
2310 register const char *fmt;
2312 /* Note: it is incorrect to assume an expression is equivalent to itself
2313 if VALIDATE is nonzero. */
2314 if (x == y && !validate)
2315 return 1;
2316 if (x == 0 || y == 0)
2317 return x == y;
2319 code = GET_CODE (x);
2320 if (code != GET_CODE (y))
2322 if (!equal_values)
2323 return 0;
2325 /* If X is a constant and Y is a register or vice versa, they may be
2326 equivalent. We only have to validate if Y is a register. */
2327 if (CONSTANT_P (x) && GET_CODE (y) == REG
2328 && REGNO_QTY_VALID_P (REGNO (y)))
2330 int y_q = REG_QTY (REGNO (y));
2331 struct qty_table_elem *y_ent = &qty_table[y_q];
2333 if (GET_MODE (y) == y_ent->mode
2334 && rtx_equal_p (x, y_ent->const_rtx)
2335 && (! validate || REG_IN_TABLE (REGNO (y)) == REG_TICK (REGNO (y))))
2336 return 1;
2339 if (CONSTANT_P (y) && code == REG
2340 && REGNO_QTY_VALID_P (REGNO (x)))
2342 int x_q = REG_QTY (REGNO (x));
2343 struct qty_table_elem *x_ent = &qty_table[x_q];
2345 if (GET_MODE (x) == x_ent->mode
2346 && rtx_equal_p (y, x_ent->const_rtx))
2347 return 1;
2350 return 0;
2353 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
2354 if (GET_MODE (x) != GET_MODE (y))
2355 return 0;
2357 switch (code)
2359 case PC:
2360 case CC0:
2361 return x == y;
2363 case CONST_INT:
2364 return INTVAL (x) == INTVAL (y);
2366 case LABEL_REF:
2367 return XEXP (x, 0) == XEXP (y, 0);
2369 case SYMBOL_REF:
2370 return XSTR (x, 0) == XSTR (y, 0);
2372 case REG:
2374 int regno = REGNO (y);
2375 int endregno
2376 = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
2377 : HARD_REGNO_NREGS (regno, GET_MODE (y)));
2378 int i;
2380 /* If the quantities are not the same, the expressions are not
2381 equivalent. If they are and we are not to validate, they
2382 are equivalent. Otherwise, ensure all regs are up-to-date. */
2384 if (REG_QTY (REGNO (x)) != REG_QTY (regno))
2385 return 0;
2387 if (! validate)
2388 return 1;
2390 for (i = regno; i < endregno; i++)
2391 if (REG_IN_TABLE (i) != REG_TICK (i))
2392 return 0;
2394 return 1;
2397 /* For commutative operations, check both orders. */
2398 case PLUS:
2399 case MULT:
2400 case AND:
2401 case IOR:
2402 case XOR:
2403 case NE:
2404 case EQ:
2405 return ((exp_equiv_p (XEXP (x, 0), XEXP (y, 0), validate, equal_values)
2406 && exp_equiv_p (XEXP (x, 1), XEXP (y, 1),
2407 validate, equal_values))
2408 || (exp_equiv_p (XEXP (x, 0), XEXP (y, 1),
2409 validate, equal_values)
2410 && exp_equiv_p (XEXP (x, 1), XEXP (y, 0),
2411 validate, equal_values)));
2413 default:
2414 break;
2417 /* Compare the elements. If any pair of corresponding elements
2418 fails to match, return 0 for the whole thing. */
2420 fmt = GET_RTX_FORMAT (code);
2421 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2423 switch (fmt[i])
2425 case 'e':
2426 if (! exp_equiv_p (XEXP (x, i), XEXP (y, i), validate, equal_values))
2427 return 0;
2428 break;
2430 case 'E':
2431 if (XVECLEN (x, i) != XVECLEN (y, i))
2432 return 0;
2433 for (j = 0; j < XVECLEN (x, i); j++)
2434 if (! exp_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j),
2435 validate, equal_values))
2436 return 0;
2437 break;
2439 case 's':
2440 if (strcmp (XSTR (x, i), XSTR (y, i)))
2441 return 0;
2442 break;
2444 case 'i':
2445 if (XINT (x, i) != XINT (y, i))
2446 return 0;
2447 break;
2449 case 'w':
2450 if (XWINT (x, i) != XWINT (y, i))
2451 return 0;
2452 break;
2454 case '0':
2455 case 't':
2456 break;
2458 default:
2459 abort ();
2463 return 1;
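/* Illustrative sketch, not part of the original file: for the commutative
   codes above, either pairing of operands may establish equivalence, so
   (plus A B) matches (plus B A).  The shape of that test, as a
   hypothetical helper over already-extracted operands:  */
#if 0
static int
commutative_match_p (x0, x1, y0, y1)
     rtx x0, x1, y0, y1;
{
  return ((exp_equiv_p (x0, y0, 0, 0) && exp_equiv_p (x1, y1, 0, 0))
	  || (exp_equiv_p (x0, y1, 0, 0) && exp_equiv_p (x1, y0, 0, 0)));
}
#endif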
2466 /* Return 1 if X has a value that can vary even between two
2467 executions of the program. 0 means X can be compared reliably
2468 against certain constants or near-constants. */
2470 static int
2471 cse_rtx_varies_p (x)
2472 register rtx x;
2474 /* We need not check for X and the equivalence class being of the same
2475 mode because if X is equivalent to a constant in some mode, it
2476 doesn't vary in any mode. */
2478 if (GET_CODE (x) == REG
2479 && REGNO_QTY_VALID_P (REGNO (x)))
2481 int x_q = REG_QTY (REGNO (x));
2482 struct qty_table_elem *x_ent = &qty_table[x_q];
2484 if (GET_MODE (x) == x_ent->mode
2485 && x_ent->const_rtx != NULL_RTX)
2486 return 0;
2489 if (GET_CODE (x) == PLUS
2490 && GET_CODE (XEXP (x, 1)) == CONST_INT
2491 && GET_CODE (XEXP (x, 0)) == REG
2492 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
2494 int x0_q = REG_QTY (REGNO (XEXP (x, 0)));
2495 struct qty_table_elem *x0_ent = &qty_table[x0_q];
2497 if ((GET_MODE (XEXP (x, 0)) == x0_ent->mode)
2498 && x0_ent->const_rtx != NULL_RTX)
2499 return 0;
2502 /* This can happen as the result of virtual register instantiation, if
2503 the initial constant is too large to be a valid address. This gives
2504 us a three-instruction sequence: load the large offset into a register,
2505 load fp minus a constant into a register, then form a MEM which is the
2506 sum of the two `constant' registers. */
2507 if (GET_CODE (x) == PLUS
2508 && GET_CODE (XEXP (x, 0)) == REG
2509 && GET_CODE (XEXP (x, 1)) == REG
2510 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0)))
2511 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
2513 int x0_q = REG_QTY (REGNO (XEXP (x, 0)));
2514 int x1_q = REG_QTY (REGNO (XEXP (x, 1)));
2515 struct qty_table_elem *x0_ent = &qty_table[x0_q];
2516 struct qty_table_elem *x1_ent = &qty_table[x1_q];
2518 if ((GET_MODE (XEXP (x, 0)) == x0_ent->mode)
2519 && x0_ent->const_rtx != NULL_RTX
2520 && (GET_MODE (XEXP (x, 1)) == x1_ent->mode)
2521 && x1_ent->const_rtx != NULL_RTX)
2522 return 0;
2525 return rtx_varies_p (x);
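/* Illustrative sketch, not part of the original file: every arm above
   reduces to the same question, asked of one or two registers -- does the
   quantity table record a constant for this register in this mode?  */
#if 0
static int
reg_known_constant_p (x)
     rtx x;
{
  if (GET_CODE (x) == REG && REGNO_QTY_VALID_P (REGNO (x)))
    {
      struct qty_table_elem *ent = &qty_table[REG_QTY (REGNO (x))];

      return GET_MODE (x) == ent->mode && ent->const_rtx != NULL_RTX;
    }

  return 0;
}
#endif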
2528 /* Canonicalize an expression:
2529 replace each register reference inside it
2530 with the "oldest" equivalent register.
2532 If INSN is non-zero and we are replacing a pseudo with a hard register
2533 or vice versa, validate_change is used to ensure that INSN remains valid
2534 after we make our substitution. The calls are made with IN_GROUP non-zero
2535 so apply_change_group must be called upon the outermost return from this
2536 function (unless INSN is zero). The result of apply_change_group can
2537 generally be discarded since the changes we are making are optional. */
2539 static rtx
2540 canon_reg (x, insn)
2541 rtx x;
2542 rtx insn;
2544 register int i;
2545 register enum rtx_code code;
2546 register const char *fmt;
2548 if (x == 0)
2549 return x;
2551 code = GET_CODE (x);
2552 switch (code)
2554 case PC:
2555 case CC0:
2556 case CONST:
2557 case CONST_INT:
2558 case CONST_DOUBLE:
2559 case SYMBOL_REF:
2560 case LABEL_REF:
2561 case ADDR_VEC:
2562 case ADDR_DIFF_VEC:
2563 return x;
2565 case REG:
2567 register int first;
2568 register int q;
2569 register struct qty_table_elem *ent;
2571 /* Never replace a hard reg, because hard regs can appear
2572 in more than one machine mode, and we must preserve the mode
2573 of each occurrence. Also, some hard regs appear in
2574 MEMs that are shared and mustn't be altered. Don't try to
2575 replace any reg that maps to a reg of class NO_REGS. */
2576 if (REGNO (x) < FIRST_PSEUDO_REGISTER
2577 || ! REGNO_QTY_VALID_P (REGNO (x)))
2578 return x;
2580 q = REG_QTY (REGNO(x));
2581 ent = &qty_table[q];
2582 first = ent->first_reg;
2583 return (first >= FIRST_PSEUDO_REGISTER ? regno_reg_rtx[first]
2584 : REGNO_REG_CLASS (first) == NO_REGS ? x
2585 : gen_rtx_REG (ent->mode, first));
2588 default:
2589 break;
2592 fmt = GET_RTX_FORMAT (code);
2593 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2595 register int j;
2597 if (fmt[i] == 'e')
2599 rtx new = canon_reg (XEXP (x, i), insn);
2600 int insn_code;
2602 /* If replacing pseudo with hard reg or vice versa, ensure the
2603 insn remains valid. Likewise if the insn has MATCH_DUPs. */
2604 if (insn != 0 && new != 0
2605 && GET_CODE (new) == REG && GET_CODE (XEXP (x, i)) == REG
2606 && (((REGNO (new) < FIRST_PSEUDO_REGISTER)
2607 != (REGNO (XEXP (x, i)) < FIRST_PSEUDO_REGISTER))
2608 || (insn_code = recog_memoized (insn)) < 0
2609 || insn_data[insn_code].n_dups > 0))
2610 validate_change (insn, &XEXP (x, i), new, 1);
2611 else
2612 XEXP (x, i) = new;
2614 else if (fmt[i] == 'E')
2615 for (j = 0; j < XVECLEN (x, i); j++)
2616 XVECEXP (x, i, j) = canon_reg (XVECEXP (x, i, j), insn);
2619 return x;
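/* Illustrative sketch, not part of the original file: if pseudo 101 was
   copied from pseudo 100, both share a quantity whose first_reg is 100,
   so canon_reg rewrites (reg 101) as (reg 100) wherever it appears.  A
   hypothetical query for the canonical pseudo (the real code above also
   handles hard registers and the NO_REGS class):  */
#if 0
static rtx
oldest_equivalent_pseudo (x)
     rtx x;
{
  if (GET_CODE (x) == REG
      && REGNO (x) >= FIRST_PSEUDO_REGISTER
      && REGNO_QTY_VALID_P (REGNO (x)))
    {
      int first = qty_table[REG_QTY (REGNO (x))].first_reg;

      if (first >= FIRST_PSEUDO_REGISTER)
	return regno_reg_rtx[first];
    }

  return x;
}
#endif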
2622 /* LOC is a location within INSN that is an operand address (the contents of
2623 a MEM). Find the best equivalent address to use that is valid for this
2624 insn.
2626 On most CISC machines, complicated address modes are costly, and rtx_cost
2627 is a good approximation for that cost. However, most RISC machines have
2628 only a few (usually only one) memory reference formats. If an address is
2629 valid at all, it is often just as cheap as any other address. Hence, for
2630 RISC machines, we use the configuration macro `ADDRESS_COST' to compare the
2631 costs of various addresses. For two addresses of equal cost, choose the one
2632 with the highest `rtx_cost' value as that has the potential of eliminating
2633 the most insns. For equal costs, we choose the first in the equivalence
2634 class. Note that we ignore the fact that pseudo registers are cheaper
2635 than hard registers here because we would also prefer the pseudo registers. */
2638 static void
2639 find_best_addr (insn, loc)
2640 rtx insn;
2641 rtx *loc;
2643 struct table_elt *elt;
2644 rtx addr = *loc;
2645 #ifdef ADDRESS_COST
2646 struct table_elt *p;
2647 int found_better = 1;
2648 #endif
2649 int save_do_not_record = do_not_record;
2650 int save_hash_arg_in_memory = hash_arg_in_memory;
2651 int addr_volatile;
2652 int regno;
2653 unsigned hash;
2655 /* Do not try to replace constant addresses or addresses of local and
2656 argument slots. These MEM expressions are made only once and inserted
2657 in many instructions, as well as being used to control symbol table
2658 output. It is not safe to clobber them.
2660 There are some uncommon cases where the address is already in a register
2661 for some reason, but we cannot take advantage of that because we have
2662 no easy way to unshare the MEM. In addition, looking up all stack
2663 addresses is costly. */
2664 if ((GET_CODE (addr) == PLUS
2665 && GET_CODE (XEXP (addr, 0)) == REG
2666 && GET_CODE (XEXP (addr, 1)) == CONST_INT
2667 && (regno = REGNO (XEXP (addr, 0)),
2668 regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM
2669 || regno == ARG_POINTER_REGNUM))
2670 || (GET_CODE (addr) == REG
2671 && (regno = REGNO (addr), regno == FRAME_POINTER_REGNUM
2672 || regno == HARD_FRAME_POINTER_REGNUM
2673 || regno == ARG_POINTER_REGNUM))
2674 || GET_CODE (addr) == ADDRESSOF
2675 || CONSTANT_ADDRESS_P (addr))
2676 return;
2678 /* If this address is not simply a register, try to fold it. This will
2679 sometimes simplify the expression. Many simplifications
2680 will not be valid, but some, usually applying the associative rule, will
2681 be valid and produce better code. */
2682 if (GET_CODE (addr) != REG)
2684 rtx folded = fold_rtx (copy_rtx (addr), NULL_RTX);
2686 if (1
2687 #ifdef ADDRESS_COST
2688 && (CSE_ADDRESS_COST (folded) < CSE_ADDRESS_COST (addr)
2689 || (CSE_ADDRESS_COST (folded) == CSE_ADDRESS_COST (addr)
2690 && rtx_cost (folded, MEM) > rtx_cost (addr, MEM)))
2691 #else
2692 && rtx_cost (folded, MEM) < rtx_cost (addr, MEM)
2693 #endif
2694 && validate_change (insn, loc, folded, 0))
2695 addr = folded;
2698 /* If this address is not in the hash table, we can't look for equivalences
2699 of the whole address. Also, ignore if volatile. */
2701 do_not_record = 0;
2702 hash = HASH (addr, Pmode);
2703 addr_volatile = do_not_record;
2704 do_not_record = save_do_not_record;
2705 hash_arg_in_memory = save_hash_arg_in_memory;
2707 if (addr_volatile)
2708 return;
2710 elt = lookup (addr, hash, Pmode);
2712 #ifndef ADDRESS_COST
2713 if (elt)
2715 int our_cost = elt->cost;
2717 /* Find the lowest cost below ours that works. */
2718 for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
2719 if (elt->cost < our_cost
2720 && (GET_CODE (elt->exp) == REG
2721 || exp_equiv_p (elt->exp, elt->exp, 1, 0))
2722 && validate_change (insn, loc,
2723 canon_reg (copy_rtx (elt->exp), NULL_RTX), 0))
2724 return;
2726 #else
2728 if (elt)
2730 /* We need to find the best (under the criteria documented above) entry
2731 in the class that is valid. We use the `flag' field to indicate
2732 choices that were invalid and iterate until we can't find a better
2733 one that hasn't already been tried. */
2735 for (p = elt->first_same_value; p; p = p->next_same_value)
2736 p->flag = 0;
2738 while (found_better)
2740 int best_addr_cost = CSE_ADDRESS_COST (*loc);
2741 int best_rtx_cost = (elt->cost + 1) >> 1;
2742 struct table_elt *best_elt = elt;
2744 found_better = 0;
2745 for (p = elt->first_same_value; p; p = p->next_same_value)
2746 if (! p->flag)
2748 if ((GET_CODE (p->exp) == REG
2749 || exp_equiv_p (p->exp, p->exp, 1, 0))
2750 && (CSE_ADDRESS_COST (p->exp) < best_addr_cost
2751 || (CSE_ADDRESS_COST (p->exp) == best_addr_cost
2752 && (p->cost + 1) >> 1 > best_rtx_cost)))
2754 found_better = 1;
2755 best_addr_cost = CSE_ADDRESS_COST (p->exp);
2756 best_rtx_cost = (p->cost + 1) >> 1;
2757 best_elt = p;
2761 if (found_better)
2763 if (validate_change (insn, loc,
2764 canon_reg (copy_rtx (best_elt->exp),
2765 NULL_RTX), 0))
2766 return;
2767 else
2768 best_elt->flag = 1;
2773 /* If the address is a binary operation with the first operand a register
2774 and the second a constant, do the same as above, but looking for
2775 equivalences of the register. Then try to simplify before checking for
2776 the best address to use. This catches a few cases: First is when we
2777 have REG+const and the register is another REG+const. We can often merge
2778 the constants and eliminate one insn and one register. It may also be
2779 that a machine has a cheap REG+REG+const. Finally, this improves the
2780 code on the Alpha for unaligned byte stores. */
2782 if (flag_expensive_optimizations
2783 && (GET_RTX_CLASS (GET_CODE (*loc)) == '2'
2784 || GET_RTX_CLASS (GET_CODE (*loc)) == 'c')
2785 && GET_CODE (XEXP (*loc, 0)) == REG
2786 && GET_CODE (XEXP (*loc, 1)) == CONST_INT)
2788 rtx c = XEXP (*loc, 1);
2790 do_not_record = 0;
2791 hash = HASH (XEXP (*loc, 0), Pmode);
2792 do_not_record = save_do_not_record;
2793 hash_arg_in_memory = save_hash_arg_in_memory;
2795 elt = lookup (XEXP (*loc, 0), hash, Pmode);
2796 if (elt == 0)
2797 return;
2799 /* We need to find the best (under the criteria documented above) entry
2800 in the class that is valid. We use the `flag' field to indicate
2801 choices that were invalid and iterate until we can't find a better
2802 one that hasn't already been tried. */
2804 for (p = elt->first_same_value; p; p = p->next_same_value)
2805 p->flag = 0;
2807 while (found_better)
2809 int best_addr_cost = CSE_ADDRESS_COST (*loc);
2810 int best_rtx_cost = (COST (*loc) + 1) >> 1;
2811 struct table_elt *best_elt = elt;
2812 rtx best_rtx = *loc;
2813 int count;
2815 /* This is at worst an O(n^2) algorithm, so limit our search
2816 to the first 32 elements on the list. This avoids trouble
2817 compiling code with very long basic blocks that can easily
2818 call simplify_gen_binary so many times that we run out of
2819 memory. */
2821 found_better = 0;
2822 for (p = elt->first_same_value, count = 0;
2823 p && count < 32;
2824 p = p->next_same_value, count++)
2825 if (! p->flag
2826 && (GET_CODE (p->exp) == REG
2827 || exp_equiv_p (p->exp, p->exp, 1, 0)))
2829 rtx new = simplify_gen_binary (GET_CODE (*loc), Pmode,
2830 p->exp, c);
2832 if ((CSE_ADDRESS_COST (new) < best_addr_cost
2833 || (CSE_ADDRESS_COST (new) == best_addr_cost
2834 && (COST (new) + 1) >> 1 > best_rtx_cost)))
2836 found_better = 1;
2837 best_addr_cost = CSE_ADDRESS_COST (new);
2838 best_rtx_cost = (COST (new) + 1) >> 1;
2839 best_elt = p;
2840 best_rtx = new;
2844 if (found_better)
2846 if (validate_change (insn, loc,
2847 canon_reg (copy_rtx (best_rtx),
2848 NULL_RTX), 0))
2849 return;
2850 else
2851 best_elt->flag = 1;
2855 #endif
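/* Illustrative sketch, not part of the original file: both searches above
   share one discipline -- clear every candidate's `flag', then repeatedly
   take the cheapest unflagged candidate and, if validate_change rejects
   it, flag it and try again, so each candidate is offered at most once.
   The selection step, as a hypothetical helper (assuming ADDRESS_COST is
   defined):  */
#if 0
static struct table_elt *
cheapest_untried (head)
     struct table_elt *head;
{
  struct table_elt *p, *best = 0;

  for (p = head->first_same_value; p; p = p->next_same_value)
    if (! p->flag
	&& (GET_CODE (p->exp) == REG || exp_equiv_p (p->exp, p->exp, 1, 0))
	&& (best == 0
	    || CSE_ADDRESS_COST (p->exp) < CSE_ADDRESS_COST (best->exp)))
      best = p;

  return best;
}
#endif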
2858 /* Given an operation (CODE, *PARG1, *PARG2), where code is a comparison
2859 operation (EQ, NE, GT, etc.), follow it back through the hash table to
2860 find what values are actually being compared.
2862 *PARG1 and *PARG2 are updated to contain the rtx representing the values
2863 actually being compared. For example, if *PARG1 was (cc0) and *PARG2
2864 was (const_int 0), *PARG1 and *PARG2 will be set to the objects that were
2865 compared to produce cc0.
2867 The return value is the comparison operator: either CODE itself or the
2868 code corresponding to the inverse of the comparison. */
2870 static enum rtx_code
2871 find_comparison_args (code, parg1, parg2, pmode1, pmode2)
2872 enum rtx_code code;
2873 rtx *parg1, *parg2;
2874 enum machine_mode *pmode1, *pmode2;
2876 rtx arg1, arg2;
2878 arg1 = *parg1, arg2 = *parg2;
2880 /* If ARG2 is const0_rtx, see what ARG1 is equivalent to. */
2882 while (arg2 == CONST0_RTX (GET_MODE (arg1)))
2884 /* Set non-zero when we find something of interest. */
2885 rtx x = 0;
2886 int reverse_code = 0;
2887 struct table_elt *p = 0;
2889 /* If arg1 is a COMPARE, extract the comparison arguments from it.
2890 On machines with CC0, this is the only case that can occur, since
2891 fold_rtx will return the COMPARE or item being compared with zero
2892 when given CC0. */
2894 if (GET_CODE (arg1) == COMPARE && arg2 == const0_rtx)
2895 x = arg1;
2897 /* If ARG1 is a comparison operator and CODE is testing for
2898 STORE_FLAG_VALUE, get the inner arguments. */
2900 else if (GET_RTX_CLASS (GET_CODE (arg1)) == '<')
2902 if (code == NE
2903 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
2904 && code == LT && STORE_FLAG_VALUE == -1)
2905 #ifdef FLOAT_STORE_FLAG_VALUE
2906 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_FLOAT
2907 && (REAL_VALUE_NEGATIVE
2908 (FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)))))
2909 #endif
2911 x = arg1;
2912 else if (code == EQ
2913 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
2914 && code == GE && STORE_FLAG_VALUE == -1)
2915 #ifdef FLOAT_STORE_FLAG_VALUE
2916 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_FLOAT
2917 && (REAL_VALUE_NEGATIVE
2918 (FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)))))
2919 #endif
2921 x = arg1, reverse_code = 1;
2924 /* ??? We could also check for
2926 (ne (and (eq (...) (const_int 1))) (const_int 0))
2928 and related forms, but let's wait until we see them occurring. */
2930 if (x == 0)
2931 /* Look up ARG1 in the hash table and see if it has an equivalence
2932 that lets us see what is being compared. */
2933 p = lookup (arg1, safe_hash (arg1, GET_MODE (arg1)) & HASH_MASK,
2934 GET_MODE (arg1));
2935 if (p) p = p->first_same_value;
2937 for (; p; p = p->next_same_value)
2939 enum machine_mode inner_mode = GET_MODE (p->exp);
2941 /* If the entry isn't valid, skip it. */
2942 if (! exp_equiv_p (p->exp, p->exp, 1, 0))
2943 continue;
2945 if (GET_CODE (p->exp) == COMPARE
2946 /* Another possibility is that this machine has a compare insn
2947 that includes the comparison code. In that case, ARG1 would
2948 be equivalent to a comparison operation that would set ARG1 to
2949 either STORE_FLAG_VALUE or zero. If this is an NE operation,
2950 ORIG_CODE is the actual comparison being done; if it is an EQ,
2951 we must reverse ORIG_CODE. On machines with a negative value
2952 for STORE_FLAG_VALUE, also look at LT and GE operations. */
2953 || ((code == NE
2954 || (code == LT
2955 && GET_MODE_CLASS (inner_mode) == MODE_INT
2956 && (GET_MODE_BITSIZE (inner_mode)
2957 <= HOST_BITS_PER_WIDE_INT)
2958 && (STORE_FLAG_VALUE
2959 & ((HOST_WIDE_INT) 1
2960 << (GET_MODE_BITSIZE (inner_mode) - 1))))
2961 #ifdef FLOAT_STORE_FLAG_VALUE
2962 || (code == LT
2963 && GET_MODE_CLASS (inner_mode) == MODE_FLOAT
2964 && (REAL_VALUE_NEGATIVE
2965 (FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)))))
2966 #endif
2968 && GET_RTX_CLASS (GET_CODE (p->exp)) == '<'))
2970 x = p->exp;
2971 break;
2973 else if ((code == EQ
2974 || (code == GE
2975 && GET_MODE_CLASS (inner_mode) == MODE_INT
2976 && (GET_MODE_BITSIZE (inner_mode)
2977 <= HOST_BITS_PER_WIDE_INT)
2978 && (STORE_FLAG_VALUE
2979 & ((HOST_WIDE_INT) 1
2980 << (GET_MODE_BITSIZE (inner_mode) - 1))))
2981 #ifdef FLOAT_STORE_FLAG_VALUE
2982 || (code == GE
2983 && GET_MODE_CLASS (inner_mode) == MODE_FLOAT
2984 && (REAL_VALUE_NEGATIVE
2985 (FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)))))
2986 #endif
2988 && GET_RTX_CLASS (GET_CODE (p->exp)) == '<')
2990 reverse_code = 1;
2991 x = p->exp;
2992 break;
2995 /* If this is fp + constant, the equivalent is a better operand since
2996 it may let us predict the value of the comparison. */
2997 else if (NONZERO_BASE_PLUS_P (p->exp))
2999 arg1 = p->exp;
3000 continue;
3004 /* If we didn't find a useful equivalence for ARG1, we are done.
3005 Otherwise, set up for the next iteration. */
3006 if (x == 0)
3007 break;
3009 arg1 = XEXP (x, 0), arg2 = XEXP (x, 1);
3010 if (GET_RTX_CLASS (GET_CODE (x)) == '<')
3011 code = GET_CODE (x);
3013 if (reverse_code)
3014 code = reverse_condition (code);
3017 /* Return our results. Return the modes from before fold_rtx
3018 because fold_rtx might produce const_int, and then it's too late. */
3019 *pmode1 = GET_MODE (arg1), *pmode2 = GET_MODE (arg2);
3020 *parg1 = fold_rtx (arg1, 0), *parg2 = fold_rtx (arg2, 0);
3022 return code;
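/* Illustrative sketch, not part of the original file: given

       (set (reg 100) (compare (reg 101) (reg 102)))
       ... (eq (reg 100) (const_int 0)) ...

   the loop above follows reg 100 back to the COMPARE, so *PARG1 and
   *PARG2 become regs 101 and 102 and the returned code is the comparison
   (reversed when appropriate).  A hypothetical caller:  */
#if 0
static enum rtx_code
example_comparison (arg1, arg2)
     rtx arg1, arg2;
{
  enum machine_mode mode1, mode2;

  /* ARG1 and ARG2 are updated in place to the values actually compared.  */
  return find_comparison_args (EQ, &arg1, &arg2, &mode1, &mode2);
}
#endif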
3025 /* If X is a nontrivial arithmetic operation on an argument
3026 for which a constant value can be determined, return
3027 the result of operating on that value, as a constant.
3028 Otherwise, return X, possibly with one or more operands
3029 modified by recursive calls to this function.
3031 If X is a register whose contents are known, we do NOT
3032 return those contents here. equiv_constant is called to
3033 perform that task.
3035 INSN is the insn that we may be modifying. If it is 0, make a copy
3036 of X before modifying it. */
3038 static rtx
3039 fold_rtx (x, insn)
3040 rtx x;
3041 rtx insn;
3043 register enum rtx_code code;
3044 register enum machine_mode mode;
3045 register const char *fmt;
3046 register int i;
3047 rtx new = 0;
3048 int copied = 0;
3049 int must_swap = 0;
3051 /* Folded equivalents of first two operands of X. */
3052 rtx folded_arg0;
3053 rtx folded_arg1;
3055 /* Constant equivalents of first three operands of X;
3056 0 when no such equivalent is known. */
3057 rtx const_arg0;
3058 rtx const_arg1;
3059 rtx const_arg2;
3061 /* The mode of the first operand of X. We need this for sign and zero
3062 extends. */
3063 enum machine_mode mode_arg0;
3065 if (x == 0)
3066 return x;
3068 mode = GET_MODE (x);
3069 code = GET_CODE (x);
3070 switch (code)
3072 case CONST:
3073 case CONST_INT:
3074 case CONST_DOUBLE:
3075 case SYMBOL_REF:
3076 case LABEL_REF:
3077 case REG:
3078 /* No use simplifying an EXPR_LIST
3079 since they are used only for lists of args
3080 in a function call's REG_EQUAL note. */
3081 case EXPR_LIST:
3082 /* Changing anything inside an ADDRESSOF is incorrect; we don't
3083 want to (e.g.,) make (addressof (const_int 0)) just because
3084 the location is known to be zero. */
3085 case ADDRESSOF:
3086 return x;
3088 #ifdef HAVE_cc0
3089 case CC0:
3090 return prev_insn_cc0;
3091 #endif
3093 case PC:
3094 /* If the next insn is a CODE_LABEL followed by a jump table,
3095 PC's value is a LABEL_REF pointing to that label. That
3096 lets us fold switch statements on the Vax. */
3097 if (insn && GET_CODE (insn) == JUMP_INSN)
3099 rtx next = next_nonnote_insn (insn);
3101 if (next && GET_CODE (next) == CODE_LABEL
3102 && NEXT_INSN (next) != 0
3103 && GET_CODE (NEXT_INSN (next)) == JUMP_INSN
3104 && (GET_CODE (PATTERN (NEXT_INSN (next))) == ADDR_VEC
3105 || GET_CODE (PATTERN (NEXT_INSN (next))) == ADDR_DIFF_VEC))
3106 return gen_rtx_LABEL_REF (Pmode, next);
3108 break;
3110 case SUBREG:
3111 /* See if we previously assigned a constant value to this SUBREG. */
3112 if ((new = lookup_as_function (x, CONST_INT)) != 0
3113 || (new = lookup_as_function (x, CONST_DOUBLE)) != 0)
3114 return new;
3116 /* If this is a paradoxical SUBREG, we have no idea what value the
3117 extra bits would have. However, if the operand is equivalent
3118 to a SUBREG whose operand is the same as our mode, and all the
3119 modes are within a word, we can just use the inner operand
3120 because these SUBREGs just say how to treat the register.
3122 Similarly if we find an integer constant. */
3124 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3126 enum machine_mode imode = GET_MODE (SUBREG_REG (x));
3127 struct table_elt *elt;
3129 if (GET_MODE_SIZE (mode) <= UNITS_PER_WORD
3130 && GET_MODE_SIZE (imode) <= UNITS_PER_WORD
3131 && (elt = lookup (SUBREG_REG (x), HASH (SUBREG_REG (x), imode),
3132 imode)) != 0)
3133 for (elt = elt->first_same_value;
3134 elt; elt = elt->next_same_value)
3136 if (CONSTANT_P (elt->exp)
3137 && GET_MODE (elt->exp) == VOIDmode)
3138 return elt->exp;
3140 if (GET_CODE (elt->exp) == SUBREG
3141 && GET_MODE (SUBREG_REG (elt->exp)) == mode
3142 && exp_equiv_p (elt->exp, elt->exp, 1, 0))
3143 return copy_rtx (SUBREG_REG (elt->exp));
3146 return x;
3149 /* Fold SUBREG_REG. If it changed, see if we can simplify the SUBREG.
3150 We might be able to if the SUBREG is extracting a single word in an
3151 integral mode or extracting the low part. */
3153 folded_arg0 = fold_rtx (SUBREG_REG (x), insn);
3154 const_arg0 = equiv_constant (folded_arg0);
3155 if (const_arg0)
3156 folded_arg0 = const_arg0;
3158 if (folded_arg0 != SUBREG_REG (x))
3160 new = 0;
3162 if (GET_MODE_CLASS (mode) == MODE_INT
3163 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
3164 && GET_MODE (SUBREG_REG (x)) != VOIDmode)
3165 new = operand_subword (folded_arg0, SUBREG_WORD (x), 0,
3166 GET_MODE (SUBREG_REG (x)));
3167 if (new == 0 && subreg_lowpart_p (x))
3168 new = gen_lowpart_if_possible (mode, folded_arg0);
3169 if (new)
3170 return new;
3173 /* If this is a narrowing SUBREG and our operand is a REG, see if
3174 we can find an equivalence for REG that is an arithmetic operation
3175 in a wider mode where both operands are paradoxical SUBREGs
3176 from objects of our result mode. In that case, we couldn't report
3177 an equivalent value for that operation, since we don't know what the
3178 extra bits will be. But we can find an equivalence for this SUBREG
3179 by folding that operation in the narrow mode. This allows us to
3180 fold arithmetic in narrow modes when the machine only supports
3181 word-sized arithmetic.
3183 Also look for a case where we have a SUBREG whose operand is the
3184 same as our result. If both modes are smaller than a word, we
3185 are simply interpreting a register in different modes and we
3186 can use the inner value. */
3188 if (GET_CODE (folded_arg0) == REG
3189 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (folded_arg0))
3190 && subreg_lowpart_p (x))
3192 struct table_elt *elt;
3194 /* We can use HASH here since we know that canon_hash won't be
3195 called. */
3196 elt = lookup (folded_arg0,
3197 HASH (folded_arg0, GET_MODE (folded_arg0)),
3198 GET_MODE (folded_arg0));
3200 if (elt)
3201 elt = elt->first_same_value;
3203 for (; elt; elt = elt->next_same_value)
3205 enum rtx_code eltcode = GET_CODE (elt->exp);
3207 /* Just check for unary and binary operations. */
3208 if (GET_RTX_CLASS (GET_CODE (elt->exp)) == '1'
3209 && GET_CODE (elt->exp) != SIGN_EXTEND
3210 && GET_CODE (elt->exp) != ZERO_EXTEND
3211 && GET_CODE (XEXP (elt->exp, 0)) == SUBREG
3212 && GET_MODE (SUBREG_REG (XEXP (elt->exp, 0))) == mode)
3214 rtx op0 = SUBREG_REG (XEXP (elt->exp, 0));
3216 if (GET_CODE (op0) != REG && ! CONSTANT_P (op0))
3217 op0 = fold_rtx (op0, NULL_RTX);
3219 op0 = equiv_constant (op0);
3220 if (op0)
3221 new = simplify_unary_operation (GET_CODE (elt->exp), mode,
3222 op0, mode);
3224 else if ((GET_RTX_CLASS (GET_CODE (elt->exp)) == '2'
3225 || GET_RTX_CLASS (GET_CODE (elt->exp)) == 'c')
3226 && eltcode != DIV && eltcode != MOD
3227 && eltcode != UDIV && eltcode != UMOD
3228 && eltcode != ASHIFTRT && eltcode != LSHIFTRT
3229 && eltcode != ROTATE && eltcode != ROTATERT
3230 && ((GET_CODE (XEXP (elt->exp, 0)) == SUBREG
3231 && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 0)))
3232 == mode))
3233 || CONSTANT_P (XEXP (elt->exp, 0)))
3234 && ((GET_CODE (XEXP (elt->exp, 1)) == SUBREG
3235 && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 1)))
3236 == mode))
3237 || CONSTANT_P (XEXP (elt->exp, 1))))
3239 rtx op0 = gen_lowpart_common (mode, XEXP (elt->exp, 0));
3240 rtx op1 = gen_lowpart_common (mode, XEXP (elt->exp, 1));
3242 if (op0 && GET_CODE (op0) != REG && ! CONSTANT_P (op0))
3243 op0 = fold_rtx (op0, NULL_RTX);
3245 if (op0)
3246 op0 = equiv_constant (op0);
3248 if (op1 && GET_CODE (op1) != REG && ! CONSTANT_P (op1))
3249 op1 = fold_rtx (op1, NULL_RTX);
3251 if (op1)
3252 op1 = equiv_constant (op1);
3254 /* If we are looking for the low SImode part of
3255 (ashift:DI c (const_int 32)), it doesn't work
3256 to compute that in SImode, because a 32-bit shift
3257 in SImode is unpredictable. We know the value is 0. */
3258 if (op0 && op1
3259 && GET_CODE (elt->exp) == ASHIFT
3260 && GET_CODE (op1) == CONST_INT
3261 && INTVAL (op1) >= GET_MODE_BITSIZE (mode))
3263 if (INTVAL (op1) < GET_MODE_BITSIZE (GET_MODE (elt->exp)))
3265 /* If the count fits in the inner mode's width,
3266 but exceeds the outer mode's width,
3267 the value will get truncated to 0
3268 by the subreg. */
3269 new = const0_rtx;
3270 else
3271 /* If the count exceeds even the inner mode's width,
3272 don't fold this expression. */
3273 new = 0;
3275 else if (op0 && op1)
3276 new = simplify_binary_operation (GET_CODE (elt->exp), mode,
3277 op0, op1);
3280 else if (GET_CODE (elt->exp) == SUBREG
3281 && GET_MODE (SUBREG_REG (elt->exp)) == mode
3282 && (GET_MODE_SIZE (GET_MODE (folded_arg0))
3283 <= UNITS_PER_WORD)
3284 && exp_equiv_p (elt->exp, elt->exp, 1, 0))
3285 new = copy_rtx (SUBREG_REG (elt->exp));
3287 if (new)
3288 return new;
3292 return x;
3294 case NOT:
3295 case NEG:
3296 /* If we have (NOT Y), see if Y is known to be (NOT Z).
3297 If so, (NOT Y) simplifies to Z. Similarly for NEG. */
3298 new = lookup_as_function (XEXP (x, 0), code);
3299 if (new)
3300 return fold_rtx (copy_rtx (XEXP (new, 0)), insn);
3301 break;
3303 case MEM:
3304 /* If we are not actually processing an insn, don't try to find the
3305 best address. Not only don't we care, but we could modify the
3306 MEM in an invalid way since we have no insn to validate against. */
3307 if (insn != 0)
3308 find_best_addr (insn, &XEXP (x, 0));
3311 /* Even if we don't fold in the insn itself,
3312 we can safely do so here, in hopes of getting a constant. */
3313 rtx addr = fold_rtx (XEXP (x, 0), NULL_RTX);
3314 rtx base = 0;
3315 HOST_WIDE_INT offset = 0;
3317 if (GET_CODE (addr) == REG
3318 && REGNO_QTY_VALID_P (REGNO (addr)))
3320 int addr_q = REG_QTY (REGNO (addr));
3321 struct qty_table_elem *addr_ent = &qty_table[addr_q];
3323 if (GET_MODE (addr) == addr_ent->mode
3324 && addr_ent->const_rtx != NULL_RTX)
3325 addr = addr_ent->const_rtx;
3328 /* If address is constant, split it into a base and integer offset. */
3329 if (GET_CODE (addr) == SYMBOL_REF || GET_CODE (addr) == LABEL_REF)
3330 base = addr;
3331 else if (GET_CODE (addr) == CONST && GET_CODE (XEXP (addr, 0)) == PLUS
3332 && GET_CODE (XEXP (XEXP (addr, 0), 1)) == CONST_INT)
3334 base = XEXP (XEXP (addr, 0), 0);
3335 offset = INTVAL (XEXP (XEXP (addr, 0), 1));
3337 else if (GET_CODE (addr) == LO_SUM
3338 && GET_CODE (XEXP (addr, 1)) == SYMBOL_REF)
3339 base = XEXP (addr, 1);
3340 else if (GET_CODE (addr) == ADDRESSOF)
3341 return change_address (x, VOIDmode, addr);
3343 /* If this is a constant pool reference, we can fold it into its
3344 constant to allow better value tracking. */
3345 if (base && GET_CODE (base) == SYMBOL_REF
3346 && CONSTANT_POOL_ADDRESS_P (base))
3348 rtx constant = get_pool_constant (base);
3349 enum machine_mode const_mode = get_pool_mode (base);
3350 rtx new;
3352 if (CONSTANT_P (constant) && GET_CODE (constant) != CONST_INT)
3353 constant_pool_entries_cost = COST (constant);
3355 /* If we are loading the full constant, we have an equivalence. */
3356 if (offset == 0 && mode == const_mode)
3357 return constant;
3359 /* If this actually isn't a constant (weird!), we can't do
3360 anything. Otherwise, handle the two most common cases:
3361 extracting a word from a multi-word constant, and extracting
3362 the low-order bits. Other cases don't seem common enough to
3363 worry about. */
3364 if (! CONSTANT_P (constant))
3365 return x;
3367 if (GET_MODE_CLASS (mode) == MODE_INT
3368 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
3369 && offset % UNITS_PER_WORD == 0
3370 && (new = operand_subword (constant,
3371 offset / UNITS_PER_WORD,
3372 0, const_mode)) != 0)
3373 return new;
3375 if (((BYTES_BIG_ENDIAN
3376 && offset == GET_MODE_SIZE (GET_MODE (constant)) - 1)
3377 || (! BYTES_BIG_ENDIAN && offset == 0))
3378 && (new = gen_lowpart_if_possible (mode, constant)) != 0)
3379 return new;
3382 /* If this is a reference to a label at a known position in a jump
3383 table, we also know its value. */
3384 if (base && GET_CODE (base) == LABEL_REF)
3386 rtx label = XEXP (base, 0);
3387 rtx table_insn = NEXT_INSN (label);
3389 if (table_insn && GET_CODE (table_insn) == JUMP_INSN
3390 && GET_CODE (PATTERN (table_insn)) == ADDR_VEC)
3392 rtx table = PATTERN (table_insn);
3394 if (offset >= 0
3395 && (offset / GET_MODE_SIZE (GET_MODE (table))
3396 < XVECLEN (table, 0)))
3397 return XVECEXP (table, 0,
3398 offset / GET_MODE_SIZE (GET_MODE (table)));
3400 if (table_insn && GET_CODE (table_insn) == JUMP_INSN
3401 && GET_CODE (PATTERN (table_insn)) == ADDR_DIFF_VEC)
3403 rtx table = PATTERN (table_insn);
3405 if (offset >= 0
3406 && (offset / GET_MODE_SIZE (GET_MODE (table))
3407 < XVECLEN (table, 1)))
3409 offset /= GET_MODE_SIZE (GET_MODE (table));
3410 new = gen_rtx_MINUS (Pmode, XVECEXP (table, 1, offset),
3411 XEXP (table, 0));
3413 if (GET_MODE (table) != Pmode)
3414 new = gen_rtx_TRUNCATE (GET_MODE (table), new);
3416 /* Indicate this is a constant. This isn't a
3417 valid form of CONST, but it will only be used
3418 to fold the next insns and then discarded, so
3419 it should be safe.
3421 Note this expression must be explicitly discarded,
3422 by cse_insn, else it may end up in a REG_EQUAL note
3423 and "escape" to cause problems elsewhere. */
3424 return gen_rtx_CONST (GET_MODE (new), new);
3429 return x;
3432 case ASM_OPERANDS:
3433 for (i = XVECLEN (x, 3) - 1; i >= 0; i--)
3434 validate_change (insn, &XVECEXP (x, 3, i),
3435 fold_rtx (XVECEXP (x, 3, i), insn), 0);
3436 break;
3438 default:
3439 break;
3442 const_arg0 = 0;
3443 const_arg1 = 0;
3444 const_arg2 = 0;
3445 mode_arg0 = VOIDmode;
3447 /* Try folding our operands.
3448 Then see which ones have constant values known. */
3450 fmt = GET_RTX_FORMAT (code);
3451 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3452 if (fmt[i] == 'e')
3454 rtx arg = XEXP (x, i);
3455 rtx folded_arg = arg, const_arg = 0;
3456 enum machine_mode mode_arg = GET_MODE (arg);
3457 rtx cheap_arg, expensive_arg;
3458 rtx replacements[2];
3459 int j;
3461 /* Most arguments are cheap, so handle them specially. */
3462 switch (GET_CODE (arg))
3464 case REG:
3465 /* This is the same as calling equiv_constant; it is duplicated
3466 here for speed. */
3467 if (REGNO_QTY_VALID_P (REGNO (arg)))
3469 int arg_q = REG_QTY (REGNO (arg));
3470 struct qty_table_elem *arg_ent = &qty_table[arg_q];
3472 if (arg_ent->const_rtx != NULL_RTX
3473 && GET_CODE (arg_ent->const_rtx) != REG
3474 && GET_CODE (arg_ent->const_rtx) != PLUS)
3475 const_arg
3476 = gen_lowpart_if_possible (GET_MODE (arg),
3477 arg_ent->const_rtx);
3479 break;
3481 case CONST:
3482 case CONST_INT:
3483 case SYMBOL_REF:
3484 case LABEL_REF:
3485 case CONST_DOUBLE:
3486 const_arg = arg;
3487 break;
3489 #ifdef HAVE_cc0
3490 case CC0:
3491 folded_arg = prev_insn_cc0;
3492 mode_arg = prev_insn_cc0_mode;
3493 const_arg = equiv_constant (folded_arg);
3494 break;
3495 #endif
3497 default:
3498 folded_arg = fold_rtx (arg, insn);
3499 const_arg = equiv_constant (folded_arg);
3502 /* For the first three operands, see if the operand
3503 is constant or equivalent to a constant. */
3504 switch (i)
3506 case 0:
3507 folded_arg0 = folded_arg;
3508 const_arg0 = const_arg;
3509 mode_arg0 = mode_arg;
3510 break;
3511 case 1:
3512 folded_arg1 = folded_arg;
3513 const_arg1 = const_arg;
3514 break;
3515 case 2:
3516 const_arg2 = const_arg;
3517 break;
3520 /* Pick the least expensive of the folded argument and an
3521 equivalent constant argument. */
3522 if (const_arg == 0 || const_arg == folded_arg
3523 || COST (const_arg) > COST (folded_arg))
3524 cheap_arg = folded_arg, expensive_arg = const_arg;
3525 else
3526 cheap_arg = const_arg, expensive_arg = folded_arg;
3528 /* Try to replace the operand with the cheapest of the two
3529 possibilities. If it doesn't work and this is either of the first
3530 two operands of a commutative operation, try swapping them.
3531 If THAT fails, try the more expensive, provided it is cheaper
3532 than what is already there. */
3534 if (cheap_arg == XEXP (x, i))
3535 continue;
3537 if (insn == 0 && ! copied)
3539 x = copy_rtx (x);
3540 copied = 1;
3543 replacements[0] = cheap_arg, replacements[1] = expensive_arg;
3544 for (j = 0;
3545 j < 2 && replacements[j]
3546 && COST (replacements[j]) < COST (XEXP (x, i));
3547 j++)
3549 if (validate_change (insn, &XEXP (x, i), replacements[j], 0))
3550 break;
3552 if (code == NE || code == EQ || GET_RTX_CLASS (code) == 'c')
3554 validate_change (insn, &XEXP (x, i), XEXP (x, 1 - i), 1);
3555 validate_change (insn, &XEXP (x, 1 - i), replacements[j], 1);
3557 if (apply_change_group ())
3559 /* Swap them back to be invalid so that this loop can
3560 continue and flag them to be swapped back later. */
3561 rtx tem;
3563 tem = XEXP (x, 0); XEXP (x, 0) = XEXP (x, 1);
3564 XEXP (x, 1) = tem;
3565 must_swap = 1;
3566 break;
3572 else
3574 if (fmt[i] == 'E')
3575 /* Don't try to fold inside of a vector of expressions.
3576 Doing nothing is harmless. */
3577 {;}
3580 /* If a commutative operation, place a constant integer as the second
3581 operand unless the first operand is also a constant integer. Otherwise,
3582 place any constant second unless the first operand is also a constant. */
3584 if (code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
3586 if (must_swap || (const_arg0
3587 && (const_arg1 == 0
3588 || (GET_CODE (const_arg0) == CONST_INT
3589 && GET_CODE (const_arg1) != CONST_INT))))
3591 register rtx tem = XEXP (x, 0);
3593 if (insn == 0 && ! copied)
3595 x = copy_rtx (x);
3596 copied = 1;
3599 validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
3600 validate_change (insn, &XEXP (x, 1), tem, 1);
3601 if (apply_change_group ())
3603 tem = const_arg0, const_arg0 = const_arg1, const_arg1 = tem;
3604 tem = folded_arg0, folded_arg0 = folded_arg1, folded_arg1 = tem;
3609 /* If X is an arithmetic operation, see if we can simplify it. */
3611 switch (GET_RTX_CLASS (code))
3613 case '1':
3615 int is_const = 0;
3617 /* We can't simplify extension ops unless we know the
3618 original mode. */
3619 if ((code == ZERO_EXTEND || code == SIGN_EXTEND)
3620 && mode_arg0 == VOIDmode)
3621 break;
3623 /* If we had a CONST, strip it off and put it back later if we
3624 fold. */
3625 if (const_arg0 != 0 && GET_CODE (const_arg0) == CONST)
3626 is_const = 1, const_arg0 = XEXP (const_arg0, 0);
3628 new = simplify_unary_operation (code, mode,
3629 const_arg0 ? const_arg0 : folded_arg0,
3630 mode_arg0);
3631 if (new != 0 && is_const)
3632 new = gen_rtx_CONST (mode, new);
3634 break;
3636 case '<':
3637 /* See what items are actually being compared and set FOLDED_ARG[01]
3638 to those values and CODE to the actual comparison code. If any are
3639 constant, set CONST_ARG0 and CONST_ARG1 appropriately. We needn't
3640 do anything if both operands are already known to be constant. */
3642 if (const_arg0 == 0 || const_arg1 == 0)
3644 struct table_elt *p0, *p1;
3645 rtx true = const_true_rtx, false = const0_rtx;
3646 enum machine_mode mode_arg1;
3648 #ifdef FLOAT_STORE_FLAG_VALUE
3649 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
3651 true = (CONST_DOUBLE_FROM_REAL_VALUE
3652 (FLOAT_STORE_FLAG_VALUE (mode), mode));
3653 false = CONST0_RTX (mode);
3655 #endif
3657 code = find_comparison_args (code, &folded_arg0, &folded_arg1,
3658 &mode_arg0, &mode_arg1);
3659 const_arg0 = equiv_constant (folded_arg0);
3660 const_arg1 = equiv_constant (folded_arg1);
3662 /* If the mode is VOIDmode or a MODE_CC mode, we don't know
3663 what kinds of things are being compared, so we can't do
3664 anything with this comparison. */
3666 if (mode_arg0 == VOIDmode || GET_MODE_CLASS (mode_arg0) == MODE_CC)
3667 break;
3669 /* If we do not now have two constants being compared, see
3670 if we can nevertheless deduce some things about the
3671 comparison. */
3672 if (const_arg0 == 0 || const_arg1 == 0)
3674 /* Is FOLDED_ARG0 frame-pointer plus a constant? Or
3675 non-explicit constant? These aren't zero, but we
3676 don't know their sign. */
3677 if (const_arg1 == const0_rtx
3678 && (NONZERO_BASE_PLUS_P (folded_arg0)
3679 #if 0 /* Sad to say, on sysvr4, #pragma weak can make a symbol address
3680 come out as 0. */
3681 || GET_CODE (folded_arg0) == SYMBOL_REF
3682 #endif
3683 || GET_CODE (folded_arg0) == LABEL_REF
3684 || GET_CODE (folded_arg0) == CONST))
3686 if (code == EQ)
3687 return false;
3688 else if (code == NE)
3689 return true;
3692 /* See if the two operands are the same. We don't do this
3693 for IEEE floating-point since we can't assume x == x
3694 when x might be a NaN. */
3696 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3697 || ! FLOAT_MODE_P (mode_arg0) || flag_fast_math)
3698 && (folded_arg0 == folded_arg1
3699 || (GET_CODE (folded_arg0) == REG
3700 && GET_CODE (folded_arg1) == REG
3701 && (REG_QTY (REGNO (folded_arg0))
3702 == REG_QTY (REGNO (folded_arg1))))
3703 || ((p0 = lookup (folded_arg0,
3704 (safe_hash (folded_arg0, mode_arg0)
3705 & HASH_MASK), mode_arg0))
3706 && (p1 = lookup (folded_arg1,
3707 (safe_hash (folded_arg1, mode_arg0)
3708 & HASH_MASK), mode_arg0))
3709 && p0->first_same_value == p1->first_same_value)))
3710 return ((code == EQ || code == LE || code == GE
3711 || code == LEU || code == GEU)
3712 ? true : false);
3714 /* If FOLDED_ARG0 is a register, see if the comparison we are
3715 doing now is either the same as we did before or the reverse
3716 (we only check the reverse if not floating-point). */
3717 else if (GET_CODE (folded_arg0) == REG)
3719 int qty = REG_QTY (REGNO (folded_arg0));
3721 if (REGNO_QTY_VALID_P (REGNO (folded_arg0)))
3723 struct qty_table_elem *ent = &qty_table[qty];
3725 if ((comparison_dominates_p (ent->comparison_code, code)
3726 || (! FLOAT_MODE_P (mode_arg0)
3727 && comparison_dominates_p (ent->comparison_code,
3728 reverse_condition (code))))
3729 && (rtx_equal_p (ent->comparison_const, folded_arg1)
3730 || (const_arg1
3731 && rtx_equal_p (ent->comparison_const,
3732 const_arg1))
3733 || (GET_CODE (folded_arg1) == REG
3734 && (REG_QTY (REGNO (folded_arg1)) == ent->comparison_qty))))
3735 return (comparison_dominates_p (ent->comparison_code, code)
3736 ? true : false);
3742 /* If we are comparing against zero, see if the first operand is
3743 equivalent to an IOR with a constant. If so, we may be able to
3744 determine the result of this comparison. */
3746 if (const_arg1 == const0_rtx)
3748 rtx y = lookup_as_function (folded_arg0, IOR);
3749 rtx inner_const;
3751 if (y != 0
3752 && (inner_const = equiv_constant (XEXP (y, 1))) != 0
3753 && GET_CODE (inner_const) == CONST_INT
3754 && INTVAL (inner_const) != 0)
3756 int sign_bitnum = GET_MODE_BITSIZE (mode_arg0) - 1;
3757 int has_sign = (HOST_BITS_PER_WIDE_INT >= sign_bitnum
3758 && (INTVAL (inner_const)
3759 & ((HOST_WIDE_INT) 1 << sign_bitnum)));
3760 rtx true = const_true_rtx, false = const0_rtx;
3762 #ifdef FLOAT_STORE_FLAG_VALUE
3763 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
3765 true = (CONST_DOUBLE_FROM_REAL_VALUE
3766 (FLOAT_STORE_FLAG_VALUE (mode), mode));
3767 false = CONST0_RTX (mode);
3769 #endif
3771 switch (code)
3773 case EQ:
3774 return false;
3775 case NE:
3776 return true;
3777 case LT: case LE:
3778 if (has_sign)
3779 return true;
3780 break;
3781 case GT: case GE:
3782 if (has_sign)
3783 return false;
3784 break;
3785 default:
3786 break;
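/* Illustrative example (editor's note): with mode_arg0 == SImode,
   sign_bitnum is 31.  If the first operand is known equivalent to
   (ior X (const_int 0x80000000)), the sign bit is forced on, so the
   value is both nonzero and negative: EQ against zero folds to
   false, NE to true, and LT/LE to true.  Without the sign bit we
   only know the value is nonzero, so just EQ and NE can be decided.  */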
3791 new = simplify_relational_operation (code, mode_arg0,
3792 const_arg0 ? const_arg0 : folded_arg0,
3793 const_arg1 ? const_arg1 : folded_arg1);
3794 #ifdef FLOAT_STORE_FLAG_VALUE
3795 if (new != 0 && GET_MODE_CLASS (mode) == MODE_FLOAT)
3797 if (new == const0_rtx)
3798 new = CONST0_RTX (mode);
3799 else
3800 new = (CONST_DOUBLE_FROM_REAL_VALUE
3801 (FLOAT_STORE_FLAG_VALUE (mode), mode));
3803 #endif
3804 break;
3806 case '2':
3807 case 'c':
3808 switch (code)
3810 case PLUS:
3811 /* If the second operand is a LABEL_REF, see if the first is a MINUS
3812 with that LABEL_REF as its second operand. If so, the result is
3813 the first operand of that MINUS. This handles switches with an
3814 ADDR_DIFF_VEC table. */
3815 if (const_arg1 && GET_CODE (const_arg1) == LABEL_REF)
3817 rtx y
3818 = GET_CODE (folded_arg0) == MINUS ? folded_arg0
3819 : lookup_as_function (folded_arg0, MINUS);
3821 if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
3822 && XEXP (XEXP (y, 1), 0) == XEXP (const_arg1, 0))
3823 return XEXP (y, 0);
3825 /* Now try for a CONST of a MINUS like the above. */
3826 if ((y = (GET_CODE (folded_arg0) == CONST ? folded_arg0
3827 : lookup_as_function (folded_arg0, CONST))) != 0
3828 && GET_CODE (XEXP (y, 0)) == MINUS
3829 && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
3830 && XEXP (XEXP (XEXP (y, 0),1), 0) == XEXP (const_arg1, 0))
3831 return XEXP (XEXP (y, 0), 0);
3834 /* Likewise if the operands are in the other order. */
3835 if (const_arg0 && GET_CODE (const_arg0) == LABEL_REF)
3837 rtx y
3838 = GET_CODE (folded_arg1) == MINUS ? folded_arg1
3839 : lookup_as_function (folded_arg1, MINUS);
3841 if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
3842 && XEXP (XEXP (y, 1), 0) == XEXP (const_arg0, 0))
3843 return XEXP (y, 0);
3845 /* Now try for a CONST of a MINUS like the above. */
3846 if ((y = (GET_CODE (folded_arg1) == CONST ? folded_arg1
3847 : lookup_as_function (folded_arg1, CONST))) != 0
3848 && GET_CODE (XEXP (y, 0)) == MINUS
3849 && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
3850 && XEXP (XEXP (XEXP (y, 0),1), 0) == XEXP (const_arg0, 0))
3851 return XEXP (XEXP (y, 0), 0);
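/* Illustrative example (editor's note; the label numbers are made up):
   for a switch compiled via an ADDR_DIFF_VEC, the dispatch address is
   formed as

     (plus X (label_ref L1)), where X == (minus (label_ref L3) (label_ref L1))

   and the code above folds the sum directly to (label_ref L3).  */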
3854 /* If second operand is a register equivalent to a negative
3855 CONST_INT, see if we can find a register equivalent to the
3856 positive constant. Make a MINUS if so. Don't do this for
3857 a non-negative constant since we might then alternate between
3858 choosing positive and negative constants. Having the positive
3859 constant previously-used is the more common case. Be sure
3860 the resulting constant is non-negative; if const_arg1 were
3861 the smallest negative number this would overflow: depending
3862 on the mode, this would either just be the same value (and
3863 hence not save anything) or be incorrect. */
3864 if (const_arg1 != 0 && GET_CODE (const_arg1) == CONST_INT
3865 && INTVAL (const_arg1) < 0
3866 /* This used to test
3868 - INTVAL (const_arg1) >= 0
3870 but the Sun V5.0 compilers mis-compiled that test. So
3871 instead we test for the problematic value in a more direct
3872 manner and hope the Sun compilers get it correct. */
3873 && INTVAL (const_arg1) !=
3874 ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1))
3875 && GET_CODE (folded_arg1) == REG)
3877 rtx new_const = GEN_INT (- INTVAL (const_arg1));
3878 struct table_elt *p
3879 = lookup (new_const, safe_hash (new_const, mode) & HASH_MASK,
3880 mode);
3882 if (p)
3883 for (p = p->first_same_value; p; p = p->next_same_value)
3884 if (GET_CODE (p->exp) == REG)
3885 return simplify_gen_binary (MINUS, mode, folded_arg0,
3886 canon_reg (p->exp, NULL_RTX));
3888 goto from_plus;
3890 case MINUS:
3891 /* If we have (MINUS Y C), see if Y is known to be (PLUS Z C2).
3892 If so, produce (PLUS Z C2-C). */
3893 if (const_arg1 != 0 && GET_CODE (const_arg1) == CONST_INT)
3895 rtx y = lookup_as_function (XEXP (x, 0), PLUS);
3896 if (y && GET_CODE (XEXP (y, 1)) == CONST_INT)
3897 return fold_rtx (plus_constant (copy_rtx (y),
3898 -INTVAL (const_arg1)),
3899 NULL_RTX);
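/* Illustrative example (editor's note; the register numbers are made up):
   if (reg 66) is recorded as (plus (reg 65) (const_int 10)), then

     (minus (reg 66) (const_int 3))

   folds to (plus (reg 65) (const_int 7)); that is, (PLUS Z C2-C) with
   C2 == 10 and C == 3.  */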
3902 /* ... fall through ... */
3904 from_plus:
3905 case SMIN: case SMAX: case UMIN: case UMAX:
3906 case IOR: case AND: case XOR:
3907 case MULT: case DIV: case UDIV:
3908 case ASHIFT: case LSHIFTRT: case ASHIFTRT:
3909 /* If we have (<op> <reg> <const_int>) for an associative OP and REG
3910 is known to be of similar form, we may be able to replace the
3911 operation with a combined operation. This may eliminate the
3912 intermediate operation if every use is simplified in this way.
3913 Note that the similar optimization done by combine.c only works
3914 if the intermediate operation's result has only one reference. */
3916 if (GET_CODE (folded_arg0) == REG
3917 && const_arg1 && GET_CODE (const_arg1) == CONST_INT)
3919 int is_shift
3920 = (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT);
3921 rtx y = lookup_as_function (folded_arg0, code);
3922 rtx inner_const;
3923 enum rtx_code associate_code;
3924 rtx new_const;
3926 if (y == 0
3927 || 0 == (inner_const
3928 = equiv_constant (fold_rtx (XEXP (y, 1), 0)))
3929 || GET_CODE (inner_const) != CONST_INT
3930 /* If we have compiled a statement like
3931 "if (x == (x & mask1))", and now are looking at
3932 "x & mask2", we will have a case where the first operand
3933 of Y is the same as our first operand. Unless we detect
3934 this case, an infinite loop will result. */
3935 || XEXP (y, 0) == folded_arg0)
3936 break;
3938 /* Don't associate these operations if they are a PLUS with the
3939 same constant and it is a power of two. These might be doable
3940 with a pre- or post-increment. Similarly for a PLUS of a negative
3941 power of two, which might be doable with a pre- or post-decrement. */
3943 if (code == PLUS && INTVAL (const_arg1) == INTVAL (inner_const)
3944 && ((HAVE_PRE_INCREMENT
3945 && exact_log2 (INTVAL (const_arg1)) >= 0)
3946 || (HAVE_POST_INCREMENT
3947 && exact_log2 (INTVAL (const_arg1)) >= 0)
3948 || (HAVE_PRE_DECREMENT
3949 && exact_log2 (- INTVAL (const_arg1)) >= 0)
3950 || (HAVE_POST_DECREMENT
3951 && exact_log2 (- INTVAL (const_arg1)) >= 0)))
3952 break;
3954 /* Compute the code used to compose the constants. For example,
3955 A/C1/C2 is A/(C1 * C2), so if CODE == DIV, we want MULT. */
3957 associate_code
3958 = (code == MULT || code == DIV || code == UDIV ? MULT
3959 : is_shift || code == PLUS || code == MINUS ? PLUS : code);
3961 new_const = simplify_binary_operation (associate_code, mode,
3962 const_arg1, inner_const);
3964 if (new_const == 0)
3965 break;
3967 /* If we are associating shift operations, don't let this
3968 produce a shift of the size of the object or larger.
3969 This could occur when we follow a sign-extend by a right
3970 shift on a machine that does a sign-extend as a pair
3971 of shifts. */
3973 if (is_shift && GET_CODE (new_const) == CONST_INT
3974 && INTVAL (new_const) >= GET_MODE_BITSIZE (mode))
3976 /* As an exception, we can turn an ASHIFTRT of this
3977 form into a shift of the number of bits - 1. */
3978 if (code == ASHIFTRT)
3979 new_const = GEN_INT (GET_MODE_BITSIZE (mode) - 1);
3980 else
3981 break;
3984 y = copy_rtx (XEXP (y, 0));
3986 /* If Y contains our first operand (the most common way this
3987 can happen is if Y is a MEM), we would go into an infinite
3988 loop if we tried to fold it. So don't in that case. */
3990 if (! reg_mentioned_p (folded_arg0, y))
3991 y = fold_rtx (y, insn);
3993 return simplify_gen_binary (code, mode, y, new_const);
3995 break;
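/* Illustrative example (editor's note; the register numbers are made up):
   if (reg 70) is recorded as (ashift (reg 65) (const_int 2)), then
   (ashift (reg 70) (const_int 3)) combines to

     (ashift (reg 65) (const_int 5))

   since shift counts compose with PLUS; for MULT/DIV the constants
   compose with MULT instead, e.g. (X/5)/7 becomes X/35.  A combined
   shift count of the mode width or more is rejected, except that an
   over-wide ASHIFTRT is clamped to width - 1.  */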
3997 default:
3998 break;
4001 new = simplify_binary_operation (code, mode,
4002 const_arg0 ? const_arg0 : folded_arg0,
4003 const_arg1 ? const_arg1 : folded_arg1);
4004 break;
4006 case 'o':
4007 /* (lo_sum (high X) X) is simply X. */
4008 if (code == LO_SUM && const_arg0 != 0
4009 && GET_CODE (const_arg0) == HIGH
4010 && rtx_equal_p (XEXP (const_arg0, 0), const_arg1))
4011 return const_arg1;
4012 break;
4014 case '3':
4015 case 'b':
4016 new = simplify_ternary_operation (code, mode, mode_arg0,
4017 const_arg0 ? const_arg0 : folded_arg0,
4018 const_arg1 ? const_arg1 : folded_arg1,
4019 const_arg2 ? const_arg2 : XEXP (x, 2));
4020 break;
4022 case 'x':
4023 /* Always eliminate CONSTANT_P_RTX at this stage. */
4024 if (code == CONSTANT_P_RTX)
4025 return (const_arg0 ? const1_rtx : const0_rtx);
4026 break;
4029 return new ? new : x;
4032 /* Return a constant value currently equivalent to X.
4033 Return 0 if we don't know one. */
4035 static rtx
4036 equiv_constant (x)
4037 rtx x;
4039 if (GET_CODE (x) == REG
4040 && REGNO_QTY_VALID_P (REGNO (x)))
4042 int x_q = REG_QTY (REGNO (x));
4043 struct qty_table_elem *x_ent = &qty_table[x_q];
4045 if (x_ent->const_rtx)
4046 x = gen_lowpart_if_possible (GET_MODE (x), x_ent->const_rtx);
4049 if (x == 0 || CONSTANT_P (x))
4050 return x;
4052 /* If X is a MEM, try to fold it outside the context of any insn to see if
4053 it might be equivalent to a constant. That handles the case where it
4054 is a constant-pool reference. Then try to look it up in the hash table
4055 in case it is something whose value we have seen before. */
4057 if (GET_CODE (x) == MEM)
4059 struct table_elt *elt;
4061 x = fold_rtx (x, NULL_RTX);
4062 if (CONSTANT_P (x))
4063 return x;
4065 elt = lookup (x, safe_hash (x, GET_MODE (x)) & HASH_MASK, GET_MODE (x));
4066 if (elt == 0)
4067 return 0;
4069 for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
4070 if (elt->is_const && CONSTANT_P (elt->exp))
4071 return elt->exp;
4074 return 0;
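/* Illustrative example (editor's note; the register number is made up):
   if (reg 70) lies in a quantity whose const_rtx is (const_int 12),
   equiv_constant returns (const_int 12), taking a lowpart first if the
   modes differ.  For a MEM, the expression is first folded (catching
   constant-pool references) and then the hash table is searched for a
   constant member of its equivalence class.  */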
4077 /* Assuming that X is an rtx (e.g., MEM, REG or SUBREG) for a fixed-point
4078 number, return an rtx (MEM, SUBREG, or CONST_INT) that refers to the
4079 least-significant part of X.
4080 MODE specifies how big a part of X to return.
4082 If the requested operation cannot be done, 0 is returned.
4084 This is similar to gen_lowpart in emit-rtl.c. */
4086 rtx
4087 gen_lowpart_if_possible (mode, x)
4088 enum machine_mode mode;
4089 register rtx x;
4091 rtx result = gen_lowpart_common (mode, x);
4093 if (result)
4094 return result;
4095 else if (GET_CODE (x) == MEM)
4097 /* This is the only other case we handle. */
4098 register int offset = 0;
4099 rtx new;
4101 if (WORDS_BIG_ENDIAN)
4102 offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
4103 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
4104 if (BYTES_BIG_ENDIAN)
4105 /* Adjust the address so that the address-after-the-data is
4106 unchanged. */
4107 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
4108 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
4109 new = gen_rtx_MEM (mode, plus_constant (XEXP (x, 0), offset));
4110 if (! memory_address_p (mode, XEXP (new, 0)))
4111 return 0;
4112 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
4113 MEM_COPY_ATTRIBUTES (new, x);
4114 return new;
4116 else
4117 return 0;
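/* Worked example of the offset arithmetic above (editor's note):
   taking the QImode lowpart of a DImode MEM at address A on a target
   with UNITS_PER_WORD == 4 and both WORDS_BIG_ENDIAN and
   BYTES_BIG_ENDIAN:

     word adjustment:  MAX (8, 4) - MAX (1, 4) = 4
     byte adjustment:  4 - (MIN (4, 1) - MIN (4, 8)) = 4 + 3 = 7

   giving (mem:QI (plus A 7)), the low-order byte of a big-endian
   8-byte value, so the address after the data is unchanged.  */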
4120 /* Given INSN, a jump insn, TAKEN indicates if we are following the "taken"
4121 branch. It will be zero if not.
4123 In certain cases, this can cause us to add an equivalence. For example,
4124 if we are following the taken case of
4125 if (i == 2)
4126 we can add the fact that `i' and `2' are now equivalent.
4128 In any case, we can record that this comparison was passed. If the same
4129 comparison is seen later, we will know its value. */
4131 static void
4132 record_jump_equiv (insn, taken)
4133 rtx insn;
4134 int taken;
4136 int cond_known_true;
4137 rtx op0, op1;
4138 enum machine_mode mode, mode0, mode1;
4139 int reversed_nonequality = 0;
4140 enum rtx_code code;
4142 /* Ensure this is the right kind of insn. */
4143 if (! condjump_p (insn) || simplejump_p (insn))
4144 return;
4146 /* See if this jump condition is known true or false. */
4147 if (taken)
4148 cond_known_true = (XEXP (SET_SRC (PATTERN (insn)), 2) == pc_rtx);
4149 else
4150 cond_known_true = (XEXP (SET_SRC (PATTERN (insn)), 1) == pc_rtx);
4152 /* Get the type of comparison being done and the operands being compared.
4153 If we had to reverse a non-equality condition, record that fact so we
4154 know that it isn't valid for floating-point. */
4155 code = GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 0));
4156 op0 = fold_rtx (XEXP (XEXP (SET_SRC (PATTERN (insn)), 0), 0), insn);
4157 op1 = fold_rtx (XEXP (XEXP (SET_SRC (PATTERN (insn)), 0), 1), insn);
4159 code = find_comparison_args (code, &op0, &op1, &mode0, &mode1);
4160 if (! cond_known_true)
4162 reversed_nonequality = (code != EQ && code != NE);
4163 code = reverse_condition (code);
4165 /* Don't remember if we can't find the inverse. */
4166 if (code == UNKNOWN)
4167 return;
4170 /* The mode is the mode of the non-constant. */
4171 mode = mode0;
4172 if (mode1 != VOIDmode)
4173 mode = mode1;
4175 record_jump_cond (code, mode, op0, op1, reversed_nonequality);
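/* Illustrative example (editor's note; the register and label numbers
   are made up): for the conditional jump

     (set (pc) (if_then_else (eq (reg 65) (const_int 2))
                             (label_ref L1)
                             (pc)))

   the "else" arm is pc_rtx, so with TAKEN nonzero the condition is
   known true and (reg 65) == (const_int 2) is recorded.  On the
   fall-through edge the condition is known false, EQ is reversed to
   NE, and only the comparison itself is remembered.  */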
4178 /* We know that comparison CODE applied to OP0 and OP1 in MODE is true.
4179 REVERSED_NONEQUALITY is nonzero if CODE had to be swapped.
4180 Make any useful entries we can with that information. Called from
4181 above function and called recursively. */
4183 static void
4184 record_jump_cond (code, mode, op0, op1, reversed_nonequality)
4185 enum rtx_code code;
4186 enum machine_mode mode;
4187 rtx op0, op1;
4188 int reversed_nonequality;
4190 unsigned op0_hash, op1_hash;
4191 int op0_in_memory, op1_in_memory;
4192 struct table_elt *op0_elt, *op1_elt;
4194 /* If OP0 and OP1 are known equal, and either is a paradoxical SUBREG,
4195 we know that they are also equal in the smaller mode (this is also
4196 true for all smaller modes whether or not there is a SUBREG, but
4197 is not worth testing for with no SUBREG). */
4199 /* Note that GET_MODE (op0) may not equal MODE. */
4200 if (code == EQ && GET_CODE (op0) == SUBREG
4201 && (GET_MODE_SIZE (GET_MODE (op0))
4202 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
4204 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
4205 rtx tem = gen_lowpart_if_possible (inner_mode, op1);
4207 record_jump_cond (code, mode, SUBREG_REG (op0),
4208 tem ? tem : gen_rtx_SUBREG (inner_mode, op1, 0),
4209 reversed_nonequality);
4212 if (code == EQ && GET_CODE (op1) == SUBREG
4213 && (GET_MODE_SIZE (GET_MODE (op1))
4214 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
4216 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
4217 rtx tem = gen_lowpart_if_possible (inner_mode, op0);
4219 record_jump_cond (code, mode, SUBREG_REG (op1),
4220 tem ? tem : gen_rtx_SUBREG (inner_mode, op0, 0),
4221 reversed_nonequality);
4224 /* Similarly, if this is an NE comparison, and either is a SUBREG
4225 making a smaller mode, we know the whole thing is also NE. */
4227 /* Note that GET_MODE (op0) may not equal MODE;
4228 if we test MODE instead, we can get an infinite recursion
4229 alternating between two modes each wider than MODE. */
4231 if (code == NE && GET_CODE (op0) == SUBREG
4232 && subreg_lowpart_p (op0)
4233 && (GET_MODE_SIZE (GET_MODE (op0))
4234 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
4236 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
4237 rtx tem = gen_lowpart_if_possible (inner_mode, op1);
4239 record_jump_cond (code, mode, SUBREG_REG (op0),
4240 tem ? tem : gen_rtx_SUBREG (inner_mode, op1, 0),
4241 reversed_nonequality);
4244 if (code == NE && GET_CODE (op1) == SUBREG
4245 && subreg_lowpart_p (op1)
4246 && (GET_MODE_SIZE (GET_MODE (op1))
4247 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
4249 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
4250 rtx tem = gen_lowpart_if_possible (inner_mode, op0);
4252 record_jump_cond (code, mode, SUBREG_REG (op1),
4253 tem ? tem : gen_rtx_SUBREG (inner_mode, op0, 0),
4254 reversed_nonequality);
4257 /* Hash both operands. */
4259 do_not_record = 0;
4260 hash_arg_in_memory = 0;
4261 op0_hash = HASH (op0, mode);
4262 op0_in_memory = hash_arg_in_memory;
4264 if (do_not_record)
4265 return;
4267 do_not_record = 0;
4268 hash_arg_in_memory = 0;
4269 op1_hash = HASH (op1, mode);
4270 op1_in_memory = hash_arg_in_memory;
4272 if (do_not_record)
4273 return;
4275 /* Look up both operands. */
4276 op0_elt = lookup (op0, op0_hash, mode);
4277 op1_elt = lookup (op1, op1_hash, mode);
4279 /* If both operands are already equivalent or if they are not in the
4280 table but are identical, do nothing. */
4281 if ((op0_elt != 0 && op1_elt != 0
4282 && op0_elt->first_same_value == op1_elt->first_same_value)
4283 || op0 == op1 || rtx_equal_p (op0, op1))
4284 return;
4286 /* If we aren't setting two things equal, all we can do is save this
4287 comparison. Similarly if this is floating-point. In the latter
4288 case, OP1 might be zero and both -0.0 and 0.0 are equal to it.
4289 If we record the equality, we might inadvertently delete code
4290 whose intent was to change -0 to +0. */
4292 if (code != EQ || FLOAT_MODE_P (GET_MODE (op0)))
4294 struct qty_table_elem *ent;
4295 int qty;
4297 /* If we reversed a floating-point comparison, if OP0 is not a
4298 register, or if OP1 is neither a register nor a constant, we can't
4299 do anything. */
4301 if (GET_CODE (op1) != REG)
4302 op1 = equiv_constant (op1);
4304 if ((reversed_nonequality && FLOAT_MODE_P (mode))
4305 || GET_CODE (op0) != REG || op1 == 0)
4306 return;
4308 /* Put OP0 in the hash table if it isn't already. This gives it a
4309 new quantity number. */
4310 if (op0_elt == 0)
4312 if (insert_regs (op0, NULL_PTR, 0))
4314 rehash_using_reg (op0);
4315 op0_hash = HASH (op0, mode);
4317 /* If OP0 is contained in OP1, this changes its hash code
4318 as well. Faster to rehash than to check, except
4319 for the simple case of a constant. */
4320 if (! CONSTANT_P (op1))
4321 op1_hash = HASH (op1,mode);
4324 op0_elt = insert (op0, NULL_PTR, op0_hash, mode);
4325 op0_elt->in_memory = op0_in_memory;
4328 qty = REG_QTY (REGNO (op0));
4329 ent = &qty_table[qty];
4331 ent->comparison_code = code;
4332 if (GET_CODE (op1) == REG)
4334 /* Look it up again--in case op0 and op1 are the same. */
4335 op1_elt = lookup (op1, op1_hash, mode);
4337 /* Put OP1 in the hash table so it gets a new quantity number. */
4338 if (op1_elt == 0)
4340 if (insert_regs (op1, NULL_PTR, 0))
4342 rehash_using_reg (op1);
4343 op1_hash = HASH (op1, mode);
4346 op1_elt = insert (op1, NULL_PTR, op1_hash, mode);
4347 op1_elt->in_memory = op1_in_memory;
4350 ent->comparison_const = NULL_RTX;
4351 ent->comparison_qty = REG_QTY (REGNO (op1));
4353 else
4355 ent->comparison_const = op1;
4356 ent->comparison_qty = -1;
4359 return;
4362 /* If either side is still missing an equivalence, make it now,
4363 then merge the equivalences. */
4365 if (op0_elt == 0)
4367 if (insert_regs (op0, NULL_PTR, 0))
4369 rehash_using_reg (op0);
4370 op0_hash = HASH (op0, mode);
4373 op0_elt = insert (op0, NULL_PTR, op0_hash, mode);
4374 op0_elt->in_memory = op0_in_memory;
4377 if (op1_elt == 0)
4379 if (insert_regs (op1, NULL_PTR, 0))
4381 rehash_using_reg (op1);
4382 op1_hash = HASH (op1, mode);
4385 op1_elt = insert (op1, NULL_PTR, op1_hash, mode);
4386 op1_elt->in_memory = op1_in_memory;
4389 merge_equiv_classes (op0_elt, op1_elt);
4390 last_jump_equiv_class = op0_elt;
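/* Illustrative example (editor's note; the register numbers are made
   up): once (eq (reg:SI 65) (reg:SI 66)) is known true on the edge
   being followed, both registers are looked up, inserted if missing,
   and their equivalence classes merged, so later uses of (reg 66) can
   be replaced by (reg 65).  When (lt (reg 65) (reg 66)) is known true
   instead, only the comparison is stored in reg 65's qty_table entry,
   where comparison_dominates_p can later decide dominated tests such
   as LE on the same operands.  */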
4393 /* CSE processing for one instruction.
4394 First simplify sources and addresses of all assignments
4395 in the instruction, using previously-computed equivalent values.
4396 Then install the new sources and destinations in the table
4397 of available values.
4399 If LIBCALL_INSN is nonzero, don't record any equivalence made in
4400 the insn. It means that INSN is inside a libcall block. In this
4401 case LIBCALL_INSN is the corresponding insn with REG_LIBCALL. */
4403 /* Data on one SET contained in the instruction. */
4405 struct set
4407 /* The SET rtx itself. */
4408 rtx rtl;
4409 /* The SET_SRC of the rtx (the original value, if it is changing). */
4410 rtx src;
4411 /* The hash-table element for the SET_SRC of the SET. */
4412 struct table_elt *src_elt;
4413 /* Hash value for the SET_SRC. */
4414 unsigned src_hash;
4415 /* Hash value for the SET_DEST. */
4416 unsigned dest_hash;
4417 /* The SET_DEST, with SUBREG, etc., stripped. */
4418 rtx inner_dest;
4419 /* Nonzero if the SET_SRC is in memory. */
4420 char src_in_memory;
4421 /* Nonzero if the SET_SRC contains something
4422 whose value cannot be predicted and understood. */
4423 char src_volatile;
4424 /* Original machine mode, in case it becomes a CONST_INT. */
4425 enum machine_mode mode;
4426 /* A constant equivalent for SET_SRC, if any. */
4427 rtx src_const;
4428 /* Original SET_SRC value used for libcall notes. */
4429 rtx orig_src;
4430 /* Hash value of constant equivalent for SET_SRC. */
4431 unsigned src_const_hash;
4432 /* Table entry for constant equivalent for SET_SRC, if any. */
4433 struct table_elt *src_const_elt;
4436 static void
4437 cse_insn (insn, libcall_insn)
4438 rtx insn;
4439 rtx libcall_insn;
4441 register rtx x = PATTERN (insn);
4442 register int i;
4443 rtx tem;
4444 register int n_sets = 0;
4446 #ifdef HAVE_cc0
4447 /* Records what this insn does to set CC0. */
4448 rtx this_insn_cc0 = 0;
4449 enum machine_mode this_insn_cc0_mode = VOIDmode;
4450 #endif
4452 rtx src_eqv = 0;
4453 struct table_elt *src_eqv_elt = 0;
4454 int src_eqv_volatile = 0;
4455 int src_eqv_in_memory = 0;
4456 unsigned src_eqv_hash = 0;
4458 struct set *sets = (struct set *) NULL_PTR;
4460 this_insn = insn;
4462 /* Find all the SETs and CLOBBERs in this instruction.
4463 Record all the SETs in the array `set' and count them.
4464 Also determine whether there is a CLOBBER that invalidates
4465 all memory references, or all references at varying addresses. */
4467 if (GET_CODE (insn) == CALL_INSN)
4469 for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
4470 if (GET_CODE (XEXP (tem, 0)) == CLOBBER)
4471 invalidate (SET_DEST (XEXP (tem, 0)), VOIDmode);
4474 if (GET_CODE (x) == SET)
4476 sets = (struct set *) alloca (sizeof (struct set));
4477 sets[0].rtl = x;
4479 /* Ignore SETs that are unconditional jumps.
4480 They never need cse processing, so this does not hurt.
4481 The reason is not efficiency but rather
4482 so that we can test at the end for instructions
4483 that have been simplified to unconditional jumps
4484 and not be misled by unchanged instructions
4485 that were unconditional jumps to begin with. */
4486 if (SET_DEST (x) == pc_rtx
4487 && GET_CODE (SET_SRC (x)) == LABEL_REF)
4490 /* Don't count call-insns, (set (reg 0) (call ...)), as a set.
4491 The hard function value register is used only once, to copy to
4492 someplace else, so it isn't worth cse'ing (and on 80386 is unsafe)!
4493 Ensure we invalidate the destination register. On the 80386 no
4494 other code would invalidate it since it is a fixed_reg.
4495 We need not check the return of apply_change_group; see canon_reg. */
4497 else if (GET_CODE (SET_SRC (x)) == CALL)
4499 canon_reg (SET_SRC (x), insn);
4500 apply_change_group ();
4501 fold_rtx (SET_SRC (x), insn);
4502 invalidate (SET_DEST (x), VOIDmode);
4504 else
4505 n_sets = 1;
4507 else if (GET_CODE (x) == PARALLEL)
4509 register int lim = XVECLEN (x, 0);
4511 sets = (struct set *) alloca (lim * sizeof (struct set));
4513 /* Find all regs explicitly clobbered in this insn,
4514 and ensure they are not replaced with any other regs
4515 elsewhere in this insn.
4516 When a reg that is clobbered is also used for input,
4517 we should presume that that is for a reason,
4518 and we should not substitute some other register
4519 which is not supposed to be clobbered.
4520 Therefore, this loop cannot be merged into the one below
4521 because a CALL may precede a CLOBBER and refer to the
4522 value clobbered. We must not let a canonicalization do
4523 anything in that case. */
4524 for (i = 0; i < lim; i++)
4526 register rtx y = XVECEXP (x, 0, i);
4527 if (GET_CODE (y) == CLOBBER)
4529 rtx clobbered = XEXP (y, 0);
4531 if (GET_CODE (clobbered) == REG
4532 || GET_CODE (clobbered) == SUBREG)
4533 invalidate (clobbered, VOIDmode);
4534 else if (GET_CODE (clobbered) == STRICT_LOW_PART
4535 || GET_CODE (clobbered) == ZERO_EXTRACT)
4536 invalidate (XEXP (clobbered, 0), GET_MODE (clobbered));
4540 for (i = 0; i < lim; i++)
4542 register rtx y = XVECEXP (x, 0, i);
4543 if (GET_CODE (y) == SET)
4545 /* As above, we ignore unconditional jumps and call-insns and
4546 ignore the result of apply_change_group. */
4547 if (GET_CODE (SET_SRC (y)) == CALL)
4549 canon_reg (SET_SRC (y), insn);
4550 apply_change_group ();
4551 fold_rtx (SET_SRC (y), insn);
4552 invalidate (SET_DEST (y), VOIDmode);
4554 else if (SET_DEST (y) == pc_rtx
4555 && GET_CODE (SET_SRC (y)) == LABEL_REF)
4557 else
4558 sets[n_sets++].rtl = y;
4560 else if (GET_CODE (y) == CLOBBER)
4562 /* If we clobber memory, canon the address.
4563 This does nothing when a register is clobbered
4564 because we have already invalidated the reg. */
4565 if (GET_CODE (XEXP (y, 0)) == MEM)
4566 canon_reg (XEXP (y, 0), NULL_RTX);
4568 else if (GET_CODE (y) == USE
4569 && ! (GET_CODE (XEXP (y, 0)) == REG
4570 && REGNO (XEXP (y, 0)) < FIRST_PSEUDO_REGISTER))
4571 canon_reg (y, NULL_RTX);
4572 else if (GET_CODE (y) == CALL)
4574 /* The result of apply_change_group can be ignored; see
4575 canon_reg. */
4576 canon_reg (y, insn);
4577 apply_change_group ();
4578 fold_rtx (y, insn);
4582 else if (GET_CODE (x) == CLOBBER)
4584 if (GET_CODE (XEXP (x, 0)) == MEM)
4585 canon_reg (XEXP (x, 0), NULL_RTX);
4588 /* Canonicalize a USE of a pseudo register or memory location. */
4589 else if (GET_CODE (x) == USE
4590 && ! (GET_CODE (XEXP (x, 0)) == REG
4591 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER))
4592 canon_reg (XEXP (x, 0), NULL_RTX);
4593 else if (GET_CODE (x) == CALL)
4595 /* The result of apply_change_group can be ignored; see canon_reg. */
4596 canon_reg (x, insn);
4597 apply_change_group ();
4598 fold_rtx (x, insn);
4601 /* Store the equivalent value in SRC_EQV, if different, or if the DEST
4602 is a STRICT_LOW_PART. The latter condition is necessary because SRC_EQV
4603 is handled specially for this case, and if it isn't set, then there will
4604 be no equivalence for the destination. */
4605 if (n_sets == 1 && REG_NOTES (insn) != 0
4606 && (tem = find_reg_note (insn, REG_EQUAL, NULL_RTX)) != 0
4607 && (! rtx_equal_p (XEXP (tem, 0), SET_SRC (sets[0].rtl))
4608 || GET_CODE (SET_DEST (sets[0].rtl)) == STRICT_LOW_PART))
4609 src_eqv = canon_reg (XEXP (tem, 0), NULL_RTX);
4611 /* Canonicalize sources and addresses of destinations.
4612 We do this in a separate pass to avoid problems when a MATCH_DUP is
4613 present in the insn pattern. In that case, we want to ensure that
4614 we don't break the duplicate nature of the pattern. So we will replace
4615 both operands at the same time. Otherwise, we would fail to find an
4616 equivalent substitution in the loop calling validate_change below.
4618 We used to suppress canonicalization of DEST if it appears in SRC,
4619 but we don't do this any more. */
4621 for (i = 0; i < n_sets; i++)
4623 rtx dest = SET_DEST (sets[i].rtl);
4624 rtx src = SET_SRC (sets[i].rtl);
4625 rtx new = canon_reg (src, insn);
4626 int insn_code;
4628 sets[i].orig_src = src;
4629 if ((GET_CODE (new) == REG && GET_CODE (src) == REG
4630 && ((REGNO (new) < FIRST_PSEUDO_REGISTER)
4631 != (REGNO (src) < FIRST_PSEUDO_REGISTER)))
4632 || (insn_code = recog_memoized (insn)) < 0
4633 || insn_data[insn_code].n_dups > 0)
4634 validate_change (insn, &SET_SRC (sets[i].rtl), new, 1);
4635 else
4636 SET_SRC (sets[i].rtl) = new;
4638 if (GET_CODE (dest) == ZERO_EXTRACT || GET_CODE (dest) == SIGN_EXTRACT)
4640 validate_change (insn, &XEXP (dest, 1),
4641 canon_reg (XEXP (dest, 1), insn), 1);
4642 validate_change (insn, &XEXP (dest, 2),
4643 canon_reg (XEXP (dest, 2), insn), 1);
4646 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
4647 || GET_CODE (dest) == ZERO_EXTRACT
4648 || GET_CODE (dest) == SIGN_EXTRACT)
4649 dest = XEXP (dest, 0);
4651 if (GET_CODE (dest) == MEM)
4652 canon_reg (dest, insn);
4655 /* Now that we have done all the replacements, we can apply the change
4656 group and see if they all work. Note that this will cause some
4657 canonicalizations that would have worked individually not to be applied
4658 because some other canonicalization didn't work, but this should not
4659 occur often.
4661 The result of apply_change_group can be ignored; see canon_reg. */
4663 apply_change_group ();
4665 /* Set sets[i].src_elt to the class each source belongs to.
4666 Detect assignments from or to volatile things
4667 and set sets[i] to zero so they will be ignored
4668 in the rest of this function.
4670 Nothing in this loop changes the hash table or the register chains. */
4672 for (i = 0; i < n_sets; i++)
4674 register rtx src, dest;
4675 register rtx src_folded;
4676 register struct table_elt *elt = 0, *p;
4677 enum machine_mode mode;
4678 rtx src_eqv_here;
4679 rtx src_const = 0;
4680 rtx src_related = 0;
4681 struct table_elt *src_const_elt = 0;
4682 int src_cost = 10000, src_eqv_cost = 10000, src_folded_cost = 10000;
4683 int src_related_cost = 10000, src_elt_cost = 10000;
4684 /* Set non-zero if we need to call force_const_mem on the
4685 contents of src_folded before using it. */
4686 int src_folded_force_flag = 0;
4688 dest = SET_DEST (sets[i].rtl);
4689 src = SET_SRC (sets[i].rtl);
4691 /* If SRC is a constant that has no machine mode,
4692 hash it with the destination's machine mode.
4693 This way we can keep different modes separate. */
4695 mode = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
4696 sets[i].mode = mode;
4698 if (src_eqv)
4700 enum machine_mode eqvmode = mode;
4701 if (GET_CODE (dest) == STRICT_LOW_PART)
4702 eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
4703 do_not_record = 0;
4704 hash_arg_in_memory = 0;
4705 src_eqv = fold_rtx (src_eqv, insn);
4706 src_eqv_hash = HASH (src_eqv, eqvmode);
4708 /* Find the equivalence class for the equivalent expression. */
4710 if (!do_not_record)
4711 src_eqv_elt = lookup (src_eqv, src_eqv_hash, eqvmode);
4713 src_eqv_volatile = do_not_record;
4714 src_eqv_in_memory = hash_arg_in_memory;
4717 /* If this is a STRICT_LOW_PART assignment, src_eqv corresponds to the
4718 value of the INNER register, not the destination. So it is not
4719 a valid substitution for the source. But save it for later. */
4720 if (GET_CODE (dest) == STRICT_LOW_PART)
4721 src_eqv_here = 0;
4722 else
4723 src_eqv_here = src_eqv;
4725 /* Simplify any foldable subexpressions in SRC. Then get the fully-
4726 simplified result, which may not necessarily be valid. */
4727 src_folded = fold_rtx (src, insn);
4729 #if 0
4730 /* ??? This caused bad code to be generated for the m68k port with -O2.
4731 Suppose src is (CONST_INT -1), and that after truncation src_folded
4732 is (CONST_INT 3). Suppose src_folded is then used for src_const.
4733 At the end we will add src and src_const to the same equivalence
4734 class. We now have 3 and -1 on the same equivalence class. This
4735 causes later instructions to be mis-optimized. */
4736 /* If storing a constant in a bitfield, pre-truncate the constant
4737 so we will be able to record it later. */
4738 if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
4739 || GET_CODE (SET_DEST (sets[i].rtl)) == SIGN_EXTRACT)
4741 rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
4743 if (GET_CODE (src) == CONST_INT
4744 && GET_CODE (width) == CONST_INT
4745 && INTVAL (width) < HOST_BITS_PER_WIDE_INT
4746 && (INTVAL (src) & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
4747 src_folded
4748 = GEN_INT (INTVAL (src) & (((HOST_WIDE_INT) 1
4749 << INTVAL (width)) - 1));
4751 #endif
4753 /* Compute SRC's hash code, and also notice if it
4754 should not be recorded at all. In that case,
4755 prevent any further processing of this assignment. */
4756 do_not_record = 0;
4757 hash_arg_in_memory = 0;
4759 sets[i].src = src;
4760 sets[i].src_hash = HASH (src, mode);
4761 sets[i].src_volatile = do_not_record;
4762 sets[i].src_in_memory = hash_arg_in_memory;
4764 /* If SRC is a MEM, there is a REG_EQUIV note for SRC, and DEST is
4765 a pseudo that is set more than once, do not record SRC. Using
4766 SRC as a replacement for anything else will be incorrect in that
4767 situation. Note that this usually occurs only for stack slots,
4768 in which case all the RTL would be referring to SRC, so we don't
4769 lose any optimization opportunities by not having SRC in the
4770 hash table. */
4772 if (GET_CODE (src) == MEM
4773 && find_reg_note (insn, REG_EQUIV, src) != 0
4774 && GET_CODE (dest) == REG
4775 && REGNO (dest) >= FIRST_PSEUDO_REGISTER
4776 && REG_N_SETS (REGNO (dest)) != 1)
4777 sets[i].src_volatile = 1;
4779 #if 0
4780 /* It is no longer clear why we used to do this, but it doesn't
4781 appear to still be needed. So let's try without it since this
4782 code hurts cse'ing widened ops. */
4783 /* If source is a perverse subreg (such as QI treated as an SI),
4784 treat it as volatile. It may do the work of an SI in one context
4785 where the extra bits are not being used, but cannot replace an SI
4786 in general. */
4787 if (GET_CODE (src) == SUBREG
4788 && (GET_MODE_SIZE (GET_MODE (src))
4789 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))))
4790 sets[i].src_volatile = 1;
4791 #endif
4793 /* Locate all possible equivalent forms for SRC. Try to replace
4794 SRC in the insn with each cheaper equivalent.
4796 We have the following types of equivalents: SRC itself, a folded
4797 version, a value given in a REG_EQUAL note, or a value related
4798 to a constant.
4800 Each of these equivalents may be part of an additional class
4801 of equivalents (if more than one is in the table, they must be in
4802 the same class; we check for this).
4804 If the source is volatile, we don't do any table lookups.
4806 We note any constant equivalent for possible later use in a
4807 REG_NOTE. */
4809 if (!sets[i].src_volatile)
4810 elt = lookup (src, sets[i].src_hash, mode);
4812 sets[i].src_elt = elt;
4814 if (elt && src_eqv_here && src_eqv_elt)
4816 if (elt->first_same_value != src_eqv_elt->first_same_value)
4818 /* The REG_EQUAL is indicating that two formerly distinct
4819 classes are now equivalent. So merge them. */
4820 merge_equiv_classes (elt, src_eqv_elt);
4821 src_eqv_hash = HASH (src_eqv, elt->mode);
4822 src_eqv_elt = lookup (src_eqv, src_eqv_hash, elt->mode);
4825 src_eqv_here = 0;
4828 else if (src_eqv_elt)
4829 elt = src_eqv_elt;
4831 /* Try to find a constant somewhere and record it in `src_const'.
4832 Record its table element, if any, in `src_const_elt'. Look in
4833 any known equivalences first. (If the constant is not in the
4834 table, also set `sets[i].src_const_hash'). */
4835 if (elt)
4836 for (p = elt->first_same_value; p; p = p->next_same_value)
4837 if (p->is_const)
4839 src_const = p->exp;
4840 src_const_elt = elt;
4841 break;
4844 if (src_const == 0
4845 && (CONSTANT_P (src_folded)
4846 /* Consider (minus (label_ref L1) (label_ref L2)) as
4847 "constant" here so we will record it. This allows us
4848 to fold switch statements when an ADDR_DIFF_VEC is used. */
4849 || (GET_CODE (src_folded) == MINUS
4850 && GET_CODE (XEXP (src_folded, 0)) == LABEL_REF
4851 && GET_CODE (XEXP (src_folded, 1)) == LABEL_REF)))
4852 src_const = src_folded, src_const_elt = elt;
4853 else if (src_const == 0 && src_eqv_here && CONSTANT_P (src_eqv_here))
4854 src_const = src_eqv_here, src_const_elt = src_eqv_elt;
4856 /* If we don't know if the constant is in the table, get its
4857 hash code and look it up. */
4858 if (src_const && src_const_elt == 0)
4860 sets[i].src_const_hash = HASH (src_const, mode);
4861 src_const_elt = lookup (src_const, sets[i].src_const_hash, mode);
4864 sets[i].src_const = src_const;
4865 sets[i].src_const_elt = src_const_elt;
4867 /* If the constant and our source are both in the table, mark them as
4868 equivalent. Otherwise, if a constant is in the table but the source
4869 isn't, set ELT to it. */
4870 if (src_const_elt && elt
4871 && src_const_elt->first_same_value != elt->first_same_value)
4872 merge_equiv_classes (elt, src_const_elt);
4873 else if (src_const_elt && elt == 0)
4874 elt = src_const_elt;
4876 /* See if there is a register linearly related to a constant
4877 equivalent of SRC. */
4878 if (src_const
4879 && (GET_CODE (src_const) == CONST
4880 || (src_const_elt && src_const_elt->related_value != 0)))
4882 src_related = use_related_value (src_const, src_const_elt);
4883 if (src_related)
4885 struct table_elt *src_related_elt
4886 = lookup (src_related, HASH (src_related, mode), mode);
4887 if (src_related_elt && elt)
4889 if (elt->first_same_value
4890 != src_related_elt->first_same_value)
4891 /* This can occur when we previously saw a CONST
4892 involving a SYMBOL_REF and then see the SYMBOL_REF
4893 twice. Merge the involved classes. */
4894 merge_equiv_classes (elt, src_related_elt);
4896 src_related = 0;
4897 src_related_elt = 0;
4899 else if (src_related_elt && elt == 0)
4900 elt = src_related_elt;
4904 /* See if we have a CONST_INT that is already in a register in a
4905 wider mode. */
4907 if (src_const && src_related == 0 && GET_CODE (src_const) == CONST_INT
4908 && GET_MODE_CLASS (mode) == MODE_INT
4909 && GET_MODE_BITSIZE (mode) < BITS_PER_WORD)
4911 enum machine_mode wider_mode;
4913 for (wider_mode = GET_MODE_WIDER_MODE (mode);
4914 GET_MODE_BITSIZE (wider_mode) <= BITS_PER_WORD
4915 && src_related == 0;
4916 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
4918 struct table_elt *const_elt
4919 = lookup (src_const, HASH (src_const, wider_mode), wider_mode);
4921 if (const_elt == 0)
4922 continue;
4924 for (const_elt = const_elt->first_same_value;
4925 const_elt; const_elt = const_elt->next_same_value)
4926 if (GET_CODE (const_elt->exp) == REG)
4928 src_related = gen_lowpart_if_possible (mode,
4929 const_elt->exp);
4930 break;
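/* Illustrative example (editor's note; the register number is made
   up): needing (const_int 7) in QImode when (reg:SI 80) is already
   known to hold 7, the loop above finds the constant's SImode class
   and sets src_related to a QImode lowpart of (reg:SI 80), which may
   be cheaper than materializing the immediate again.  */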
4935 /* Another possibility is that we have an AND with a constant in
4936 a mode narrower than a word. If so, it might have been generated
4937 as part of an "if" which would narrow the AND. If we already
4938 have done the AND in a wider mode, we can use a SUBREG of that
4939 value. */
4941 if (flag_expensive_optimizations && ! src_related
4942 && GET_CODE (src) == AND && GET_CODE (XEXP (src, 1)) == CONST_INT
4943 && GET_MODE_SIZE (mode) < UNITS_PER_WORD)
4945 enum machine_mode tmode;
4946 rtx new_and = gen_rtx_AND (VOIDmode, NULL_RTX, XEXP (src, 1));
4948 for (tmode = GET_MODE_WIDER_MODE (mode);
4949 GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
4950 tmode = GET_MODE_WIDER_MODE (tmode))
4952 rtx inner = gen_lowpart_if_possible (tmode, XEXP (src, 0));
4953 struct table_elt *larger_elt;
4955 if (inner)
4957 PUT_MODE (new_and, tmode);
4958 XEXP (new_and, 0) = inner;
4959 larger_elt = lookup (new_and, HASH (new_and, tmode), tmode);
4960 if (larger_elt == 0)
4961 continue;
4963 for (larger_elt = larger_elt->first_same_value;
4964 larger_elt; larger_elt = larger_elt->next_same_value)
4965 if (GET_CODE (larger_elt->exp) == REG)
4967 src_related
4968 = gen_lowpart_if_possible (mode, larger_elt->exp);
4969 break;
4972 if (src_related)
4973 break;
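/* Illustrative example (editor's note; the register numbers are made
   up): if SRC is (and:QI (subreg:QI (reg:SI 66) 0) (const_int 15))
   and (and:SI (reg:SI 66) (const_int 15)) was already computed into
   (reg:SI 90), the loop above rebuilds the AND in SImode, finds that
   class, and offers the QImode lowpart of (reg:SI 90) as
   src_related.  */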
4978 #ifdef LOAD_EXTEND_OP
4979 /* See if a MEM has already been loaded with a widening operation;
4980 if it has, we can use a subreg of that. Many CISC machines
4981 also have such operations, but this is only likely to be
4982 beneficial on these machines. */
4984 if (flag_expensive_optimizations && src_related == 0
4985 && (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
4986 && GET_MODE_CLASS (mode) == MODE_INT
4987 && GET_CODE (src) == MEM && ! do_not_record
4988 && LOAD_EXTEND_OP (mode) != NIL)
4990 enum machine_mode tmode;
4992 /* Set what we are trying to extend and the operation it might
4993 have been extended with. */
4994 PUT_CODE (memory_extend_rtx, LOAD_EXTEND_OP (mode));
4995 XEXP (memory_extend_rtx, 0) = src;
4997 for (tmode = GET_MODE_WIDER_MODE (mode);
4998 GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
4999 tmode = GET_MODE_WIDER_MODE (tmode))
5001 struct table_elt *larger_elt;
5003 PUT_MODE (memory_extend_rtx, tmode);
5004 larger_elt = lookup (memory_extend_rtx,
5005 HASH (memory_extend_rtx, tmode), tmode);
5006 if (larger_elt == 0)
5007 continue;
5009 for (larger_elt = larger_elt->first_same_value;
5010 larger_elt; larger_elt = larger_elt->next_same_value)
5011 if (GET_CODE (larger_elt->exp) == REG)
5013 src_related = gen_lowpart_if_possible (mode,
5014 larger_elt->exp);
5015 break;
5018 if (src_related)
5019 break;
5022 #endif /* LOAD_EXTEND_OP */
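/* Illustrative example (editor's note; the register number is made
   up): on a target where LOAD_EXTEND_OP (QImode) == ZERO_EXTEND, if
   (zero_extend:SI (mem:QI addr)) was already loaded into (reg:SI 81),
   a later plain (mem:QI addr) can use the QImode lowpart of
   (reg:SI 81) instead of performing a second memory load.  */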
5024 if (src == src_folded)
5025 src_folded = 0;
5027 /* At this point, ELT, if non-zero, points to a class of expressions
5028 equivalent to the source of this SET; SRC, SRC_EQV, SRC_FOLDED,
5029 and SRC_RELATED, if non-zero, each contain additional equivalent
5030 expressions. Prune these latter expressions by deleting expressions
5031 already in the equivalence class.
5033 Check for an equivalent identical to the destination. If found,
5034 this is the preferred equivalent since it will likely lead to
5035 elimination of the insn. Indicate this by placing it in
5036 `src_related'. */
5038 if (elt) elt = elt->first_same_value;
5039 for (p = elt; p; p = p->next_same_value)
5041 enum rtx_code code = GET_CODE (p->exp);
5043 /* If the expression is not valid, ignore it. Then we do not
5044 have to check for validity below. In most cases, we can use
5045 `rtx_equal_p', since canonicalization has already been done. */
5046 if (code != REG && ! exp_equiv_p (p->exp, p->exp, 1, 0))
5047 continue;
5049 /* Also skip paradoxical subregs, unless that's what we're
5050 looking for. */
5051 if (code == SUBREG
5052 && (GET_MODE_SIZE (GET_MODE (p->exp))
5053 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))
5054 && ! (src != 0
5055 && GET_CODE (src) == SUBREG
5056 && GET_MODE (src) == GET_MODE (p->exp)
5057 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
5058 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))))
5059 continue;
5061 if (src && GET_CODE (src) == code && rtx_equal_p (src, p->exp))
5062 src = 0;
5063 else if (src_folded && GET_CODE (src_folded) == code
5064 && rtx_equal_p (src_folded, p->exp))
5065 src_folded = 0;
5066 else if (src_eqv_here && GET_CODE (src_eqv_here) == code
5067 && rtx_equal_p (src_eqv_here, p->exp))
5068 src_eqv_here = 0;
5069 else if (src_related && GET_CODE (src_related) == code
5070 && rtx_equal_p (src_related, p->exp))
5071 src_related = 0;
5073 /* If this is the same as the destination of the insn, we want
5074 to prefer it. Copy it to src_related. The code below will
5075 then give it a negative cost. */
5076 if (GET_CODE (dest) == code && rtx_equal_p (p->exp, dest))
5077 src_related = dest;
5081 /* Find the cheapest valid equivalent, trying all the available
5082 possibilities. Prefer items not in the hash table to ones
5083 that are when they are equal cost. Note that we can never
5084 worsen an insn as the current contents will also succeed.
5085 If we find an equivalent identical to the destination, use it as best,
5086 since this insn will probably be eliminated in that case. */
5087 if (src)
5089 if (rtx_equal_p (src, dest))
5090 src_cost = -1;
5091 else
5092 src_cost = COST (src);
5095 if (src_eqv_here)
5097 if (rtx_equal_p (src_eqv_here, dest))
5098 src_eqv_cost = -1;
5099 else
5100 src_eqv_cost = COST (src_eqv_here);
5103 if (src_folded)
5105 if (rtx_equal_p (src_folded, dest))
5106 src_folded_cost = -1;
5107 else
5108 src_folded_cost = COST (src_folded);
5111 if (src_related)
5113 if (rtx_equal_p (src_related, dest))
5114 src_related_cost = -1;
5115 else
5116 src_related_cost = COST (src_related);
5119 /* If this was an indirect jump insn, a known label will really be
5120 cheaper even though it looks more expensive. */
5121 if (dest == pc_rtx && src_const && GET_CODE (src_const) == LABEL_REF)
5122 src_folded = src_const, src_folded_cost = -1;
5124 /* Terminate loop when replacement made. This must terminate since
5125 the current contents will be tested and will always be valid. */
5126 while (1)
5128 rtx trial;
5130 /* Skip invalid entries. */
5131 while (elt && GET_CODE (elt->exp) != REG
5132 && ! exp_equiv_p (elt->exp, elt->exp, 1, 0))
5133 elt = elt->next_same_value;
5135 /* A paradoxical subreg would be bad here: it'll be the right
5136 size, but later may be adjusted so that the upper bits aren't
5137 what we want. So reject it. */
5138 if (elt != 0
5139 && GET_CODE (elt->exp) == SUBREG
5140 && (GET_MODE_SIZE (GET_MODE (elt->exp))
5141 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))
5142 /* It is okay, though, if the rtx we're trying to match
5143 will ignore any of the bits we can't predict. */
5144 && ! (src != 0
5145 && GET_CODE (src) == SUBREG
5146 && GET_MODE (src) == GET_MODE (elt->exp)
5147 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
5148 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))))
5150 elt = elt->next_same_value;
5151 continue;
5154 if (elt) src_elt_cost = elt->cost;
5156 /* Find cheapest and skip it for the next time. For items
5157 of equal cost, use this order:
5158 src_folded, src, src_eqv, src_related and hash table entry. */
5159 if (src_folded_cost <= src_cost
5160 && src_folded_cost <= src_eqv_cost
5161 && src_folded_cost <= src_related_cost
5162 && src_folded_cost <= src_elt_cost)
5164 trial = src_folded, src_folded_cost = 10000;
5165 if (src_folded_force_flag)
5166 trial = force_const_mem (mode, trial);
5168 else if (src_cost <= src_eqv_cost
5169 && src_cost <= src_related_cost
5170 && src_cost <= src_elt_cost)
5171 trial = src, src_cost = 10000;
5172 else if (src_eqv_cost <= src_related_cost
5173 && src_eqv_cost <= src_elt_cost)
5174 trial = copy_rtx (src_eqv_here), src_eqv_cost = 10000;
5175 else if (src_related_cost <= src_elt_cost)
5176 trial = copy_rtx (src_related), src_related_cost = 10000;
5177 else
5179 trial = copy_rtx (elt->exp);
5180 elt = elt->next_same_value;
5181 src_elt_cost = 10000;
5184 /* We don't normally have an insn matching (set (pc) (pc)), so
5185 check for this separately here. We will delete such an
5186 insn below.
5188 Tablejump insns contain a USE of the table, so simply replacing
5189 the operand with the constant won't match. This is simply an
5190 unconditional branch, however, and is therefore valid. Just
5191 insert the substitution here and we will delete and re-emit
5192 the insn later. */
5194 if (n_sets == 1 && dest == pc_rtx
5195 && (trial == pc_rtx
5196 || (GET_CODE (trial) == LABEL_REF
5197 && ! condjump_p (insn))))
5199 /* If TRIAL is a label in front of a jump table, we are
5200 really falling through the switch (this is how casesi
5201 insns work), so we must branch around the table. */
5202 if (GET_CODE (trial) == CODE_LABEL
5203 && NEXT_INSN (trial) != 0
5204 && GET_CODE (NEXT_INSN (trial)) == JUMP_INSN
5205 && (GET_CODE (PATTERN (NEXT_INSN (trial))) == ADDR_DIFF_VEC
5206 || GET_CODE (PATTERN (NEXT_INSN (trial))) == ADDR_VEC))
5208 trial = gen_rtx_LABEL_REF (Pmode, get_label_after (trial));
5210 if (trial == pc_rtx)
5212 SET_SRC (sets[i].rtl) = trial;
5213 cse_jumps_altered = 1;
5214 break;
5217 /* We must actually validate the change. Consider a target
5218 where unconditional jumps are more complex than
5219 (set (pc) (label_ref)) such as the fr30. */
5220 if (validate_change (insn, &SET_SRC (sets[i].rtl), trial, 0))
5221 cse_jumps_altered = 1;
5222 break;
5225 /* Look for a substitution that makes a valid insn. */
5226 else if (validate_change (insn, &SET_SRC (sets[i].rtl), trial, 0))
5228 /* If we just made a substitution inside a libcall, then we
5229 need to make the same substitution in any notes attached
5230 to the RETVAL insn. */
5231 if (libcall_insn
5232 && (GET_CODE (sets[i].orig_src) == REG
5233 || GET_CODE (sets[i].orig_src) == SUBREG
5234 || GET_CODE (sets[i].orig_src) == MEM))
5235 replace_rtx (REG_NOTES (libcall_insn), sets[i].orig_src,
5236 canon_reg (SET_SRC (sets[i].rtl), insn));
5238 /* The result of apply_change_group can be ignored; see
5239 canon_reg. */
5241 validate_change (insn, &SET_SRC (sets[i].rtl),
5242 canon_reg (SET_SRC (sets[i].rtl), insn),
5244 apply_change_group ();
5245 break;
5248 /* If we previously found constant pool entries for
5249 constants and this is a constant, try making a
5250 pool entry. Put it in src_folded unless we have already done
5251 this since that is where it likely came from. */
5253 else if (constant_pool_entries_cost
5254 && CONSTANT_P (trial)
5255 && ! (GET_CODE (trial) == CONST
5256 && GET_CODE (XEXP (trial, 0)) == TRUNCATE)
5257 && (src_folded == 0
5258 || (GET_CODE (src_folded) != MEM
5259 && ! src_folded_force_flag))
5260 && GET_MODE_CLASS (mode) != MODE_CC
5261 && mode != VOIDmode)
5263 src_folded_force_flag = 1;
5264 src_folded = trial;
5265 src_folded_cost = constant_pool_entries_cost;
5269 src = SET_SRC (sets[i].rtl);
5271 /* In general, it is good to have a SET with SET_SRC == SET_DEST.
5272 However, there is an important exception: If both are registers
5273 that are not the head of their equivalence class, replace SET_SRC
5274 with the head of the class. If we do not do this, we will have
5275 both registers live over a portion of the basic block. This way,
5276 their lifetimes will likely abut instead of overlapping. */
5277 if (GET_CODE (dest) == REG
5278 && REGNO_QTY_VALID_P (REGNO (dest)))
5280 int dest_q = REG_QTY (REGNO (dest));
5281 struct qty_table_elem *dest_ent = &qty_table[dest_q];
5283 if (dest_ent->mode == GET_MODE (dest)
5284 && dest_ent->first_reg != REGNO (dest)
5285 && GET_CODE (src) == REG && REGNO (src) == REGNO (dest)
5286 /* Don't do this if the original insn had a hard reg as
5287 SET_SRC or SET_DEST. */
5288 && (GET_CODE (sets[i].src) != REG
5289 || REGNO (sets[i].src) >= FIRST_PSEUDO_REGISTER)
5290 && (GET_CODE (dest) != REG || REGNO (dest) >= FIRST_PSEUDO_REGISTER))
5291 /* We can't call canon_reg here because it won't do anything if
5292 SRC is a hard register. */
5294 int src_q = REG_QTY (REGNO (src));
5295 struct qty_table_elem *src_ent = &qty_table[src_q];
5296 int first = src_ent->first_reg;
5297 rtx new_src
5298 = (first >= FIRST_PSEUDO_REGISTER
5299 ? regno_reg_rtx[first] : gen_rtx_REG (GET_MODE (src), first));
5301 /* We must use validate_change even for this, because this
5302 might be a special no-op instruction, suitable only to
5303 tag notes onto. */
5304 if (validate_change (insn, &SET_SRC (sets[i].rtl), new_src, 0))
5306 src = new_src;
5307 /* If we had a constant that is cheaper than what we are now
5308 setting SRC to, use that constant. We ignored it when we
5309 thought we could make this into a no-op. */
5310 if (src_const && COST (src_const) < COST (src)
5311 && validate_change (insn, &SET_SRC (sets[i].rtl), src_const,
5313 src = src_const;
5318 /* If we made a change, recompute SRC values. */
5319 if (src != sets[i].src)
5321 cse_altered = 1;
5322 do_not_record = 0;
5323 hash_arg_in_memory = 0;
5324 sets[i].src = src;
5325 sets[i].src_hash = HASH (src, mode);
5326 sets[i].src_volatile = do_not_record;
5327 sets[i].src_in_memory = hash_arg_in_memory;
5328 sets[i].src_elt = lookup (src, sets[i].src_hash, mode);
5331 /* If this is a single SET, we are setting a register, and we have an
5332 equivalent constant, we want to add a REG_NOTE. We don't want
5333 to write a REG_EQUAL note for a constant pseudo since verifying that
5334 that pseudo hasn't been eliminated is a pain. Such a note also
5335 won't help anything.
5337 Avoid a REG_EQUAL note for (CONST (MINUS (LABEL_REF) (LABEL_REF)))
5338 which can be created for a reference to a compile time computable
5339 entry in a jump table. */
5341 if (n_sets == 1 && src_const && GET_CODE (dest) == REG
5342 && GET_CODE (src_const) != REG
5343 && ! (GET_CODE (src_const) == CONST
5344 && GET_CODE (XEXP (src_const, 0)) == MINUS
5345 && GET_CODE (XEXP (XEXP (src_const, 0), 0)) == LABEL_REF
5346 && GET_CODE (XEXP (XEXP (src_const, 0), 1)) == LABEL_REF))
5348 tem = find_reg_note (insn, REG_EQUAL, NULL_RTX);
5350 /* Make sure that the rtx is not shared with any other insn. */
5351 src_const = copy_rtx (src_const);
5353 /* Record the actual constant value in a REG_EQUAL note, making
5354 a new one if one does not already exist. */
5355 if (tem)
5356 XEXP (tem, 0) = src_const;
5357 else
5358 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_EQUAL,
5359 src_const, REG_NOTES (insn));
5361 /* If storing a constant value in a register that
5362 previously held the constant value 0,
5363 record this fact with a REG_WAS_0 note on this insn.
5365 Note that the *register* is required to have previously held 0,
5366 not just any register in the quantity, and we must point to the
5367 insn that set that register to zero.
5369 Rather than track each register individually, we just see if
5370 the last set for this quantity was for this register. */
5372 if (REGNO_QTY_VALID_P (REGNO (dest)))
5374 int dest_q = REG_QTY (REGNO (dest));
5375 struct qty_table_elem *dest_ent = &qty_table[dest_q];
5377 if (dest_ent->const_rtx == const0_rtx)
5379 /* See if we previously had a REG_WAS_0 note. */
5380 rtx note = find_reg_note (insn, REG_WAS_0, NULL_RTX);
5381 rtx const_insn = dest_ent->const_insn;
5383 if ((tem = single_set (const_insn)) != 0
5384 && rtx_equal_p (SET_DEST (tem), dest))
5386 if (note)
5387 XEXP (note, 0) = const_insn;
5388 else
5389 REG_NOTES (insn)
5390 = gen_rtx_INSN_LIST (REG_WAS_0, const_insn,
5391 REG_NOTES (insn));
5397 /* Now deal with the destination. */
5398 do_not_record = 0;
5400 /* Look within any SIGN_EXTRACT or ZERO_EXTRACT
5401 to the MEM or REG within it. */
5402 while (GET_CODE (dest) == SIGN_EXTRACT
5403 || GET_CODE (dest) == ZERO_EXTRACT
5404 || GET_CODE (dest) == SUBREG
5405 || GET_CODE (dest) == STRICT_LOW_PART)
5406 dest = XEXP (dest, 0);
5408 sets[i].inner_dest = dest;
5410 if (GET_CODE (dest) == MEM)
5412 #ifdef PUSH_ROUNDING
5413 /* Stack pushes invalidate the stack pointer. */
5414 rtx addr = XEXP (dest, 0);
5415 if ((GET_CODE (addr) == PRE_DEC || GET_CODE (addr) == PRE_INC
5416 || GET_CODE (addr) == POST_DEC || GET_CODE (addr) == POST_INC)
5417 && XEXP (addr, 0) == stack_pointer_rtx)
5418 invalidate (stack_pointer_rtx, Pmode);
5419 #endif
5420 dest = fold_rtx (dest, insn);
5423 /* Compute the hash code of the destination now,
5424 before the effects of this instruction are recorded,
5425 since the register values used in the address computation
5426 are those before this instruction. */
5427 sets[i].dest_hash = HASH (dest, mode);
5429 /* Don't enter a bit-field in the hash table
5430 because the value in it after the store
5431 may not equal what was stored, due to truncation. */
5433 if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
5434 || GET_CODE (SET_DEST (sets[i].rtl)) == SIGN_EXTRACT)
5436 rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
5438 if (src_const != 0 && GET_CODE (src_const) == CONST_INT
5439 && GET_CODE (width) == CONST_INT
5440 && INTVAL (width) < HOST_BITS_PER_WIDE_INT
5441 && ! (INTVAL (src_const)
5442 & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
5443 /* Exception: if the value is constant,
5444 and it won't be truncated, record it. */
5446 else
5448 /* This is chosen so that the destination will be invalidated
5449 but no new value will be recorded.
5450 We must invalidate because sometimes constant
5451 values can be recorded for bitfields. */
5452 sets[i].src_elt = 0;
5453 sets[i].src_volatile = 1;
5454 src_eqv = 0;
5455 src_eqv_elt = 0;
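/* To illustrate the exception above: storing (const_int 7) in a 3-bit
   ZERO_EXTRACT is exact and may be recorded, while storing
   (const_int 9) would read back as 1 and so must not be. */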
5459 /* If only one set in a JUMP_INSN and it is now a no-op, we can delete
5460 the insn. */
5461 else if (n_sets == 1 && dest == pc_rtx && src == pc_rtx)
5463 /* One less use of the label this insn used to jump to. */
5464 if (JUMP_LABEL (insn) != 0)
5465 --LABEL_NUSES (JUMP_LABEL (insn));
5466 PUT_CODE (insn, NOTE);
5467 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
5468 NOTE_SOURCE_FILE (insn) = 0;
5469 cse_jumps_altered = 1;
5470 /* No more processing for this set. */
5471 sets[i].rtl = 0;
5474 /* If this SET is now setting PC to a label, we know it used to
5475 be a conditional or computed branch. So we see if we can follow
5476 it. If it was a computed branch, delete it and re-emit. */
5477 else if (dest == pc_rtx && GET_CODE (src) == LABEL_REF)
5479 /* If this is not in the format for a simple branch and
5480 this is the only SET in the insn, re-emit it. */
5481 if (! simplejump_p (insn) && n_sets == 1)
5483 rtx new = emit_jump_insn_before (gen_jump (XEXP (src, 0)), insn);
5484 JUMP_LABEL (new) = XEXP (src, 0);
5485 LABEL_NUSES (XEXP (src, 0))++;
5486 insn = new;
5488 else
5489 /* Otherwise, force rerecognition, since it probably had
5490 a different pattern before.
5491 This shouldn't really be necessary, since whatever
5492 changed the source value above should have done this.
5493 Until the right place is found, might as well do this here. */
5494 INSN_CODE (insn) = -1;
5496 never_reached_warning (insn);
5498 /* Now emit a BARRIER after the unconditional jump. Do not bother
5499 deleting any unreachable code, let jump/flow do that. */
5500 if (NEXT_INSN (insn) != 0
5501 && GET_CODE (NEXT_INSN (insn)) != BARRIER)
5502 emit_barrier_after (insn);
5504 cse_jumps_altered = 1;
5505 sets[i].rtl = 0;
5508 /* If destination is volatile, invalidate it and then do no further
5509 processing for this assignment. */
5511 else if (do_not_record)
5513 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG
5514 || GET_CODE (dest) == MEM)
5515 invalidate (dest, VOIDmode);
5516 else if (GET_CODE (dest) == STRICT_LOW_PART
5517 || GET_CODE (dest) == ZERO_EXTRACT)
5518 invalidate (XEXP (dest, 0), GET_MODE (dest));
5519 sets[i].rtl = 0;
5522 if (sets[i].rtl != 0 && dest != SET_DEST (sets[i].rtl))
5523 sets[i].dest_hash = HASH (SET_DEST (sets[i].rtl), mode);
5525 #ifdef HAVE_cc0
5526 /* If setting CC0, record what it was set to, or a constant, if it
5527 is equivalent to a constant. If it is being set to a floating-point
5528 value, make a COMPARE with the appropriate constant of 0. If we
5529 don't do this, later code can interpret this as a test against
5530 const0_rtx, which can cause problems if we try to put it into an
5531 insn as a floating-point operand. */
5532 if (dest == cc0_rtx)
5534 this_insn_cc0 = src_const && mode != VOIDmode ? src_const : src;
5535 this_insn_cc0_mode = mode;
5536 if (FLOAT_MODE_P (mode))
5537 this_insn_cc0 = gen_rtx_COMPARE (VOIDmode, this_insn_cc0,
5538 CONST0_RTX (mode));
5540 #endif
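/* For example, for a hypothetical (set (cc0) (reg:SF 70)) we record
   (compare (reg:SF 70) (const_double 0.0)) rather than the bare
   register, so no later user can mistake it for a test against
   const0_rtx. */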
5543 /* Now enter all non-volatile source expressions in the hash table
5544 if they are not already present.
5545 Record their equivalence classes in src_elt.
5546 This way we can insert the corresponding destinations into
5547 the same classes even if the actual sources are no longer in them
5548 (having been invalidated). */
5550 if (src_eqv && src_eqv_elt == 0 && sets[0].rtl != 0 && ! src_eqv_volatile
5551 && ! rtx_equal_p (src_eqv, SET_DEST (sets[0].rtl)))
5553 register struct table_elt *elt;
5554 register struct table_elt *classp = sets[0].src_elt;
5555 rtx dest = SET_DEST (sets[0].rtl);
5556 enum machine_mode eqvmode = GET_MODE (dest);
5558 if (GET_CODE (dest) == STRICT_LOW_PART)
5560 eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
5561 classp = 0;
5563 if (insert_regs (src_eqv, classp, 0))
5565 rehash_using_reg (src_eqv);
5566 src_eqv_hash = HASH (src_eqv, eqvmode);
5568 elt = insert (src_eqv, classp, src_eqv_hash, eqvmode);
5569 elt->in_memory = src_eqv_in_memory;
5570 src_eqv_elt = elt;
5572 /* Check to see if src_eqv_elt is the same as a set source which
5573 does not yet have an elt, and if so set the elt of the set source
5574 to src_eqv_elt. */
5575 for (i = 0; i < n_sets; i++)
5576 if (sets[i].rtl && sets[i].src_elt == 0
5577 && rtx_equal_p (SET_SRC (sets[i].rtl), src_eqv))
5578 sets[i].src_elt = src_eqv_elt;
5581 for (i = 0; i < n_sets; i++)
5582 if (sets[i].rtl && ! sets[i].src_volatile
5583 && ! rtx_equal_p (SET_SRC (sets[i].rtl), SET_DEST (sets[i].rtl)))
5585 if (GET_CODE (SET_DEST (sets[i].rtl)) == STRICT_LOW_PART)
5587 /* REG_EQUAL in setting a STRICT_LOW_PART
5588 gives an equivalent for the entire destination register,
5589 not just for the subreg being stored in now.
5590 This is a more interesting equivalence, so we arrange later
5591 to treat the entire reg as the destination. */
5592 sets[i].src_elt = src_eqv_elt;
5593 sets[i].src_hash = src_eqv_hash;
5595 else
5597 /* Insert source and constant equivalent into hash table, if not
5598 already present. */
5599 register struct table_elt *classp = src_eqv_elt;
5600 register rtx src = sets[i].src;
5601 register rtx dest = SET_DEST (sets[i].rtl);
5602 enum machine_mode mode
5603 = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
5605 if (sets[i].src_elt == 0)
5607 /* Don't put a hard register source into the table if this is
5608 the last insn of a libcall. In this case, we only need
5609 to put src_eqv_elt in src_elt. */
5610 if (GET_CODE (src) != REG
5611 || REGNO (src) >= FIRST_PSEUDO_REGISTER
5612 || ! find_reg_note (insn, REG_RETVAL, NULL_RTX))
5614 register struct table_elt *elt;
5616 /* Note that these insert_regs calls cannot remove
5617 any of the src_elt's, because they would have failed to
5618 match if not still valid. */
5619 if (insert_regs (src, classp, 0))
5621 rehash_using_reg (src);
5622 sets[i].src_hash = HASH (src, mode);
5624 elt = insert (src, classp, sets[i].src_hash, mode);
5625 elt->in_memory = sets[i].src_in_memory;
5626 sets[i].src_elt = classp = elt;
5628 else
5629 sets[i].src_elt = classp;
5631 if (sets[i].src_const && sets[i].src_const_elt == 0
5632 && src != sets[i].src_const
5633 && ! rtx_equal_p (sets[i].src_const, src))
5634 sets[i].src_elt = insert (sets[i].src_const, classp,
5635 sets[i].src_const_hash, mode);
5638 else if (sets[i].src_elt == 0)
5639 /* If we did not insert the source into the hash table (e.g., it was
5640 volatile), note the equivalence class for the REG_EQUAL value, if any,
5641 so that the destination goes into that class. */
5642 sets[i].src_elt = src_eqv_elt;
5644 invalidate_from_clobbers (x);
5646 /* Some registers are invalidated by subroutine calls. Memory is
5647 invalidated by non-constant calls. */
5649 if (GET_CODE (insn) == CALL_INSN)
5651 if (! CONST_CALL_P (insn))
5652 invalidate_memory ();
5653 invalidate_for_call ();
5656 /* Now invalidate everything set by this instruction.
5657 If a SUBREG or other funny destination is being set,
5658 sets[i].rtl is still nonzero, so here we invalidate the reg
5659 a part of which is being set. */
5661 for (i = 0; i < n_sets; i++)
5662 if (sets[i].rtl)
5664 /* We can't use the inner dest, because the mode associated with
5665 a ZERO_EXTRACT is significant. */
5666 register rtx dest = SET_DEST (sets[i].rtl);
5668 /* Needed for registers to remove the register from its
5669 previous quantity's chain.
5670 Needed for memory if this is a nonvarying address, unless
5671 we have just done an invalidate_memory that covers even those. */
5672 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG
5673 || GET_CODE (dest) == MEM)
5674 invalidate (dest, VOIDmode);
5675 else if (GET_CODE (dest) == STRICT_LOW_PART
5676 || GET_CODE (dest) == ZERO_EXTRACT)
5677 invalidate (XEXP (dest, 0), GET_MODE (dest));
5680 /* A volatile ASM invalidates everything. */
5681 if (GET_CODE (insn) == INSN
5682 && GET_CODE (PATTERN (insn)) == ASM_OPERANDS
5683 && MEM_VOLATILE_P (PATTERN (insn)))
5684 flush_hash_table ();
5686 /* Make sure registers mentioned in destinations
5687 are safe for use in an expression to be inserted.
5688 This removes from the hash table
5689 any invalid entry that refers to one of these registers.
5691 We don't care about the return value from mention_regs because
5692 we are going to hash the SET_DEST values unconditionally. */
5694 for (i = 0; i < n_sets; i++)
5696 if (sets[i].rtl)
5698 rtx x = SET_DEST (sets[i].rtl);
5700 if (GET_CODE (x) != REG)
5701 mention_regs (x);
5702 else
5704 /* We used to rely on all references to a register becoming
5705 inaccessible when a register changes to a new quantity,
5706 since that changes the hash code. However, that is not
5707 safe, since after HASH_SIZE new quantities we get a
5708 hash 'collision' of a register with its own invalid
5709 entries. And since SUBREGs have been changed not to
5710 change their hash code with the hash code of the register,
5711 it wouldn't work any longer at all. So we have to check
5712 for any invalid references lying around now.
5713 This code is similar to the REG case in mention_regs,
5714 but it knows that reg_tick has been incremented, and
5715 it leaves reg_in_table as -1. */
5716 register int regno = REGNO (x);
5717 register int endregno
5718 = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
5719 : HARD_REGNO_NREGS (regno, GET_MODE (x)));
5720 int i;
5722 for (i = regno; i < endregno; i++)
5724 if (REG_IN_TABLE (i) >= 0)
5726 remove_invalid_refs (i);
5727 REG_IN_TABLE (i) = -1;
5734 /* We may have just removed some of the src_elt's from the hash table.
5735 So replace each one with the current head of the same class. */
5737 for (i = 0; i < n_sets; i++)
5738 if (sets[i].rtl)
5740 if (sets[i].src_elt && sets[i].src_elt->first_same_value == 0)
5741 /* If elt was removed, find current head of same class,
5742 or 0 if nothing remains of that class. */
5744 register struct table_elt *elt = sets[i].src_elt;
5746 while (elt && elt->prev_same_value)
5747 elt = elt->prev_same_value;
5749 while (elt && elt->first_same_value == 0)
5750 elt = elt->next_same_value;
5751 sets[i].src_elt = elt ? elt->first_same_value : 0;
5755 /* Now insert the destinations into their equivalence classes. */
5757 for (i = 0; i < n_sets; i++)
5758 if (sets[i].rtl)
5760 register rtx dest = SET_DEST (sets[i].rtl);
5761 rtx inner_dest = sets[i].inner_dest;
5762 register struct table_elt *elt;
5764 /* Don't record value if we are not supposed to risk allocating
5765 floating-point values in registers that might be wider than
5766 memory. */
5767 if ((flag_float_store
5768 && GET_CODE (dest) == MEM
5769 && FLOAT_MODE_P (GET_MODE (dest)))
5770 /* Don't record BLKmode values, because we don't know the
5771 size of it, and can't be sure that other BLKmode values
5772 have the same or smaller size. */
5773 || GET_MODE (dest) == BLKmode
5774 /* Don't record values of destinations set inside a libcall block
5775 since we might delete the libcall. Things should have been set
5776 up so we won't want to reuse such a value, but we play it safe
5777 here. */
5778 || libcall_insn
5779 /* If we didn't put a REG_EQUAL value or a source into the hash
5780 table, there is no point in recording DEST. */
5781 || sets[i].src_elt == 0
5782 /* If DEST is a paradoxical SUBREG and SRC is a ZERO_EXTEND
5783 or SIGN_EXTEND, don't record DEST since it can cause
5784 some tracking to be wrong.
5786 ??? Think about this more later. */
5787 || (GET_CODE (dest) == SUBREG
5788 && (GET_MODE_SIZE (GET_MODE (dest))
5789 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
5790 && (GET_CODE (sets[i].src) == SIGN_EXTEND
5791 || GET_CODE (sets[i].src) == ZERO_EXTEND)))
5792 continue;
5794 /* STRICT_LOW_PART isn't part of the value BEING set,
5795 and neither is the SUBREG inside it.
5796 Note that in this case SETS[I].SRC_ELT is really SRC_EQV_ELT. */
5797 if (GET_CODE (dest) == STRICT_LOW_PART)
5798 dest = SUBREG_REG (XEXP (dest, 0));
5800 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG)
5801 /* Registers must also be inserted into chains for quantities. */
5802 if (insert_regs (dest, sets[i].src_elt, 1))
5804 /* If `insert_regs' changes something, the hash code must be
5805 recalculated. */
5806 rehash_using_reg (dest);
5807 sets[i].dest_hash = HASH (dest, GET_MODE (dest));
5810 if (GET_CODE (inner_dest) == MEM
5811 && GET_CODE (XEXP (inner_dest, 0)) == ADDRESSOF)
5812 /* Given (SET (MEM (ADDRESSOF (X))) Y) we don't want to say
5813 that (MEM (ADDRESSOF (X))) is equivalent to Y.
5814 Consider the case in which the address of the MEM is
5815 passed to a function, which alters the MEM. Then, if we
5816 later use Y instead of the MEM we'll miss the update. */
5817 elt = insert (dest, 0, sets[i].dest_hash, GET_MODE (dest));
5818 else
5819 elt = insert (dest, sets[i].src_elt,
5820 sets[i].dest_hash, GET_MODE (dest));
5822 elt->in_memory = (GET_CODE (sets[i].inner_dest) == MEM
5823 && (! RTX_UNCHANGING_P (sets[i].inner_dest)
5824 || FIXED_BASE_PLUS_P (XEXP (sets[i].inner_dest,
5825 0))));
5827 /* If we have (set (subreg:m1 (reg:m2 foo) 0) (bar:m1)), M1 is no
5828 narrower than M2, and both M1 and M2 are the same number of words,
5829 we are also doing (set (reg:m2 foo) (subreg:m2 (bar:m1) 0)) so
5830 make that equivalence as well.
5832 However, BAR may have equivalences for which gen_lowpart_if_possible
5833 will produce a simpler value than gen_lowpart_if_possible applied to
5834 BAR itself (e.g., if BAR was ZERO_EXTENDed from M2), so we will scan all
5835 BAR's equivalences. If we don't get a simplified form, make
5836 the SUBREG. It will not be used in an equivalence, but will
5837 cause two similar assignments to be detected.
5839 Note the loop below will find SUBREG_REG (DEST) since we have
5840 already entered SRC and DEST of the SET in the table. */
5842 if (GET_CODE (dest) == SUBREG
5843 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) - 1)
5844 / UNITS_PER_WORD)
5845 == (GET_MODE_SIZE (GET_MODE (dest)) - 1)/ UNITS_PER_WORD)
5846 && (GET_MODE_SIZE (GET_MODE (dest))
5847 >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
5848 && sets[i].src_elt != 0)
5850 enum machine_mode new_mode = GET_MODE (SUBREG_REG (dest));
5851 struct table_elt *elt, *classp = 0;
5853 for (elt = sets[i].src_elt->first_same_value; elt;
5854 elt = elt->next_same_value)
5856 rtx new_src = 0;
5857 unsigned src_hash;
5858 struct table_elt *src_elt;
5860 /* Ignore invalid entries. */
5861 if (GET_CODE (elt->exp) != REG
5862 && ! exp_equiv_p (elt->exp, elt->exp, 1, 0))
5863 continue;
5865 new_src = gen_lowpart_if_possible (new_mode, elt->exp);
5866 if (new_src == 0)
5867 new_src = gen_rtx_SUBREG (new_mode, elt->exp, 0);
5869 src_hash = HASH (new_src, new_mode);
5870 src_elt = lookup (new_src, src_hash, new_mode);
5872 /* Put the new source in the hash table if it isn't
5873 already there. */
5874 if (src_elt == 0)
5876 if (insert_regs (new_src, classp, 0))
5878 rehash_using_reg (new_src);
5879 src_hash = HASH (new_src, new_mode);
5881 src_elt = insert (new_src, classp, src_hash, new_mode);
5882 src_elt->in_memory = elt->in_memory;
5884 else if (classp && classp != src_elt->first_same_value)
5885 /* Show that two things that we've seen before are
5886 actually the same. */
5887 merge_equiv_classes (src_elt, classp);
5889 classp = src_elt->first_same_value;
5890 /* Ignore invalid entries. */
5891 while (classp
5892 && GET_CODE (classp->exp) != REG
5893 && ! exp_equiv_p (classp->exp, classp->exp, 1, 0))
5894 classp = classp->next_same_value;
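/* A concrete instance, with hypothetical registers on a 32-bit
   target: (set (subreg:SI (reg:HI 70) 0) (reg:SI 71)) also implies
   (set (reg:HI 70) (subreg:HI (reg:SI 71) 0)), since SImode and
   HImode occupy the same single word there. */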
5899 /* Special handling for (set REG0 REG1)
5900 where REG0 is the "cheapest", cheaper than REG1.
5901 After cse, REG1 will probably not be used in the sequel,
5902 so (if easily done) change this insn to (set REG1 REG0) and
5903 replace REG1 with REG0 in the previous insn that computed their value.
5904 Then REG1 will become a dead store and won't cloud the situation
5905 for later optimizations.
5907 Do not make this change if REG1 is a hard register, because it will
5908 then be used in the sequel and we may be changing a two-operand insn
5909 into a three-operand insn.
5911 Also do not do this if we are operating on a copy of INSN.
5913 Also don't do this if INSN ends a libcall; this would cause an unrelated
5914 register to be set in the middle of a libcall, and we then get bad code
5915 if the libcall is deleted. */
5917 if (n_sets == 1 && sets[0].rtl && GET_CODE (SET_DEST (sets[0].rtl)) == REG
5918 && NEXT_INSN (PREV_INSN (insn)) == insn
5919 && GET_CODE (SET_SRC (sets[0].rtl)) == REG
5920 && REGNO (SET_SRC (sets[0].rtl)) >= FIRST_PSEUDO_REGISTER
5921 && REGNO_QTY_VALID_P (REGNO (SET_SRC (sets[0].rtl))))
5923 int src_q = REG_QTY (REGNO (SET_SRC (sets[0].rtl)));
5924 struct qty_table_elem *src_ent = &qty_table[src_q];
5926 if ((src_ent->first_reg == REGNO (SET_DEST (sets[0].rtl)))
5927 && ! find_reg_note (insn, REG_RETVAL, NULL_RTX))
5929 rtx prev = PREV_INSN (insn);
5930 while (prev && GET_CODE (prev) == NOTE)
5931 prev = PREV_INSN (prev);
5933 if (prev && GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SET
5934 && SET_DEST (PATTERN (prev)) == SET_SRC (sets[0].rtl))
5936 rtx dest = SET_DEST (sets[0].rtl);
5937 rtx note = find_reg_note (prev, REG_EQUIV, NULL_RTX);
5939 validate_change (prev, & SET_DEST (PATTERN (prev)), dest, 1);
5940 validate_change (insn, & SET_DEST (sets[0].rtl),
5941 SET_SRC (sets[0].rtl), 1);
5942 validate_change (insn, & SET_SRC (sets[0].rtl), dest, 1);
5943 apply_change_group ();
5945 /* If REG1 was equivalent to a constant, REG0 is not. */
5946 if (note)
5947 PUT_REG_NOTE_KIND (note, REG_EQUAL);
5949 /* If there was a REG_WAS_0 note on PREV, remove it. Move
5950 any REG_WAS_0 note on INSN to PREV. */
5951 note = find_reg_note (prev, REG_WAS_0, NULL_RTX);
5952 if (note)
5953 remove_note (prev, note);
5955 note = find_reg_note (insn, REG_WAS_0, NULL_RTX);
5956 if (note)
5958 remove_note (insn, note);
5959 XEXP (note, 1) = REG_NOTES (prev);
5960 REG_NOTES (prev) = note;
5963 /* If INSN has a REG_EQUAL note, and this note mentions REG0,
5964 then we must delete it, because the value in REG0 has changed. */
5965 note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
5966 if (note && reg_mentioned_p (dest, XEXP (note, 0)))
5967 remove_note (insn, note);
5972 /* If this is a conditional jump insn, record any known equivalences due to
5973 the condition being tested. */
5975 last_jump_equiv_class = 0;
5976 if (GET_CODE (insn) == JUMP_INSN
5977 && n_sets == 1 && GET_CODE (x) == SET
5978 && GET_CODE (SET_SRC (x)) == IF_THEN_ELSE)
5979 record_jump_equiv (insn, 0);
5981 #ifdef HAVE_cc0
5982 /* If the previous insn set CC0 and this insn no longer references CC0,
5983 delete the previous insn. Here we use the fact that nothing expects CC0
5984 to be valid over an insn, which is true until the final pass. */
5985 if (prev_insn && GET_CODE (prev_insn) == INSN
5986 && (tem = single_set (prev_insn)) != 0
5987 && SET_DEST (tem) == cc0_rtx
5988 && ! reg_mentioned_p (cc0_rtx, x))
5990 PUT_CODE (prev_insn, NOTE);
5991 NOTE_LINE_NUMBER (prev_insn) = NOTE_INSN_DELETED;
5992 NOTE_SOURCE_FILE (prev_insn) = 0;
5995 prev_insn_cc0 = this_insn_cc0;
5996 prev_insn_cc0_mode = this_insn_cc0_mode;
5997 #endif
5999 prev_insn = insn;
6002 /* Remove from the hash table all expressions that reference memory. */
6004 static void
6005 invalidate_memory ()
6007 register int i;
6008 register struct table_elt *p, *next;
6010 for (i = 0; i < HASH_SIZE; i++)
6011 for (p = table[i]; p; p = next)
6013 next = p->next_same_hash;
6014 if (p->in_memory)
6015 remove_from_table (p, i);
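/* The typical trigger is a call that is not CONST_CALL_P, as in
   cse_insn above:

       if (! CONST_CALL_P (insn))
         invalidate_memory ();

   since such a call may have stored into any location. */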
6019 /* If ADDR is an address that implicitly affects the stack pointer, return
6020 1 and update the register tables to show the effect. Else, return 0. */
6022 static int
6023 addr_affects_sp_p (addr)
6024 register rtx addr;
6026 if ((GET_CODE (addr) == PRE_DEC || GET_CODE (addr) == PRE_INC
6027 || GET_CODE (addr) == POST_DEC || GET_CODE (addr) == POST_INC)
6028 && GET_CODE (XEXP (addr, 0)) == REG
6029 && REGNO (XEXP (addr, 0)) == STACK_POINTER_REGNUM)
6031 if (REG_TICK (STACK_POINTER_REGNUM) >= 0)
6032 REG_TICK (STACK_POINTER_REGNUM)++;
6034 /* This should be *very* rare. */
6035 if (TEST_HARD_REG_BIT (hard_regs_in_table, STACK_POINTER_REGNUM))
6036 invalidate (stack_pointer_rtx, VOIDmode);
6038 return 1;
6041 return 0;
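/* For example, a push (set (mem:SI (pre_dec:SI (reg sp))) (reg:SI 70))
   presents the address (pre_dec:SI (reg sp)) here; we bump REG_TICK
   for the stack pointer so stale equivalences involving it die.  The
   pseudo register number is hypothetical. */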
6044 /* Perform invalidation on the basis of everything about an insn
6045 except for invalidating the actual places that are SET in it.
6046 This includes the places CLOBBERed, and anything that might
6047 alias with something that is SET or CLOBBERed.
6049 X is the pattern of the insn. */
6051 static void
6052 invalidate_from_clobbers (x)
6053 rtx x;
6055 if (GET_CODE (x) == CLOBBER)
6057 rtx ref = XEXP (x, 0);
6058 if (ref)
6060 if (GET_CODE (ref) == REG || GET_CODE (ref) == SUBREG
6061 || GET_CODE (ref) == MEM)
6062 invalidate (ref, VOIDmode);
6063 else if (GET_CODE (ref) == STRICT_LOW_PART
6064 || GET_CODE (ref) == ZERO_EXTRACT)
6065 invalidate (XEXP (ref, 0), GET_MODE (ref));
6068 else if (GET_CODE (x) == PARALLEL)
6070 register int i;
6071 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
6073 register rtx y = XVECEXP (x, 0, i);
6074 if (GET_CODE (y) == CLOBBER)
6076 rtx ref = XEXP (y, 0);
6077 if (GET_CODE (ref) == REG || GET_CODE (ref) == SUBREG
6078 || GET_CODE (ref) == MEM)
6079 invalidate (ref, VOIDmode);
6080 else if (GET_CODE (ref) == STRICT_LOW_PART
6081 || GET_CODE (ref) == ZERO_EXTRACT)
6082 invalidate (XEXP (ref, 0), GET_MODE (ref));
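/* For instance, a hypothetical pattern
       (parallel [(set (reg 70) (plus (reg 71) (reg 72)))
                  (clobber (reg 73))])
   gets its CLOBBERed register invalidated here, while the SET
   destination is invalidated by cse_insn itself. */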
6088 /* Process X, part of the REG_NOTES of an insn. Look at any REG_EQUAL notes
6089 and replace any registers in them with either an equivalent constant
6090 or the canonical form of the register. If we are inside an address,
6091 only do this if the address remains valid.
6093 OBJECT is 0 except when within a MEM in which case it is the MEM.
6095 Return the replacement for X. */
6097 static rtx
6098 cse_process_notes (x, object)
6099 rtx x;
6100 rtx object;
6102 enum rtx_code code = GET_CODE (x);
6103 const char *fmt = GET_RTX_FORMAT (code);
6104 int i;
6106 switch (code)
6108 case CONST_INT:
6109 case CONST:
6110 case SYMBOL_REF:
6111 case LABEL_REF:
6112 case CONST_DOUBLE:
6113 case PC:
6114 case CC0:
6115 case LO_SUM:
6116 return x;
6118 case MEM:
6119 XEXP (x, 0) = cse_process_notes (XEXP (x, 0), x);
6120 return x;
6122 case EXPR_LIST:
6123 case INSN_LIST:
6124 if (REG_NOTE_KIND (x) == REG_EQUAL)
6125 XEXP (x, 0) = cse_process_notes (XEXP (x, 0), NULL_RTX);
6126 if (XEXP (x, 1))
6127 XEXP (x, 1) = cse_process_notes (XEXP (x, 1), NULL_RTX);
6128 return x;
6130 case SIGN_EXTEND:
6131 case ZERO_EXTEND:
6132 case SUBREG:
6134 rtx new = cse_process_notes (XEXP (x, 0), object);
6135 /* We don't substitute VOIDmode constants into these rtx,
6136 since they would impede folding. */
6137 if (GET_MODE (new) != VOIDmode)
6138 validate_change (object, &XEXP (x, 0), new, 0);
6139 return x;
6142 case REG:
6143 i = REG_QTY (REGNO (x));
6145 /* Return a constant or a constant register. */
6146 if (REGNO_QTY_VALID_P (REGNO (x)))
6148 struct qty_table_elem *ent = &qty_table[i];
6150 if (ent->const_rtx != NULL_RTX
6151 && (CONSTANT_P (ent->const_rtx)
6152 || GET_CODE (ent->const_rtx) == REG))
6154 rtx new = gen_lowpart_if_possible (GET_MODE (x), ent->const_rtx);
6155 if (new)
6156 return new;
6160 /* Otherwise, canonicalize this register. */
6161 return canon_reg (x, NULL_RTX);
6163 default:
6164 break;
6167 for (i = 0; i < GET_RTX_LENGTH (code); i++)
6168 if (fmt[i] == 'e')
6169 validate_change (object, &XEXP (x, i),
6170 cse_process_notes (XEXP (x, i), object), 0);
6172 return x;
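/* An illustration with a hypothetical register: if (reg:SI 70)
   currently holds (const_int 8), a note whose expression is
   (plus:SI (reg:SI 70) (const_int 4)) is rewritten to read
   (plus:SI (const_int 8) (const_int 4)), which later folding can
   collapse to (const_int 12). */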
6175 /* Find common subexpressions between the end test of a loop and the beginning
6176 of the loop. LOOP_START is the CODE_LABEL at the start of a loop.
6178 Often we have a loop where an expression in the exit test is used
6179 in the body of the loop. For example "while (*p) *q++ = *p++;".
6180 Because of the way we duplicate the loop exit test in front of the loop,
6181 however, we don't detect that common subexpression. This will be caught
6182 when global cse is implemented, but this is a quite common case.
6184 This function handles the most common cases of these common expressions.
6185 It is called after we have processed the basic block ending with the
6186 NOTE_INSN_LOOP_END note that ends a loop, when the previous JUMP_INSN
6187 jumps to a label used only once. */
6189 static void
6190 cse_around_loop (loop_start)
6191 rtx loop_start;
6193 rtx insn;
6194 int i;
6195 struct table_elt *p;
6197 /* If the jump at the end of the loop doesn't go to the start, we don't
6198 do anything. */
6199 for (insn = PREV_INSN (loop_start);
6200 insn && (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) >= 0);
6201 insn = PREV_INSN (insn))
6204 if (insn == 0
6205 || GET_CODE (insn) != NOTE
6206 || NOTE_LINE_NUMBER (insn) != NOTE_INSN_LOOP_BEG)
6207 return;
6209 /* If the last insn of the loop (the end test) was an NE comparison,
6210 we will interpret it as an EQ comparison, since we fell through
6211 the loop. Any equivalences resulting from that comparison are
6212 therefore not valid and must be invalidated. */
6213 if (last_jump_equiv_class)
6214 for (p = last_jump_equiv_class->first_same_value; p;
6215 p = p->next_same_value)
6217 if (GET_CODE (p->exp) == MEM || GET_CODE (p->exp) == REG
6218 || (GET_CODE (p->exp) == SUBREG
6219 && GET_CODE (SUBREG_REG (p->exp)) == REG))
6220 invalidate (p->exp, VOIDmode);
6221 else if (GET_CODE (p->exp) == STRICT_LOW_PART
6222 || GET_CODE (p->exp) == ZERO_EXTRACT)
6223 invalidate (XEXP (p->exp, 0), GET_MODE (p->exp));
6226 /* Process insns starting after LOOP_START until we hit a CALL_INSN or
6227 a CODE_LABEL (we could handle a CALL_INSN, but it isn't worth it).
6229 The only thing we do with SET_DEST is invalidate entries, so we
6230 can safely process each SET in order. It is slightly less efficient
6231 to do so, but we only want to handle the most common cases.
6233 The gen_move_insn call in cse_set_around_loop may create new pseudos.
6234 These pseudos won't have valid entries in any of the tables indexed
6235 by register number, such as reg_qty. We avoid out-of-range array
6236 accesses by not processing any instructions created after cse started. */
6238 for (insn = NEXT_INSN (loop_start);
6239 GET_CODE (insn) != CALL_INSN && GET_CODE (insn) != CODE_LABEL
6240 && INSN_UID (insn) < max_insn_uid
6241 && ! (GET_CODE (insn) == NOTE
6242 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END);
6243 insn = NEXT_INSN (insn))
6245 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
6246 && (GET_CODE (PATTERN (insn)) == SET
6247 || GET_CODE (PATTERN (insn)) == CLOBBER))
6248 cse_set_around_loop (PATTERN (insn), insn, loop_start);
6249 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
6250 && GET_CODE (PATTERN (insn)) == PARALLEL)
6251 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
6252 if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET
6253 || GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == CLOBBER)
6254 cse_set_around_loop (XVECEXP (PATTERN (insn), 0, i), insn,
6255 loop_start);
6259 /* Process one SET of an insn that was skipped. We ignore CLOBBERs
6260 since they are done elsewhere. This function is called via note_stores. */
6262 static void
6263 invalidate_skipped_set (dest, set, data)
6264 rtx set;
6265 rtx dest;
6266 void *data ATTRIBUTE_UNUSED;
6268 enum rtx_code code = GET_CODE (dest);
6270 if (code == MEM
6271 && ! addr_affects_sp_p (dest) /* If this is not a stack push ... */
6272 /* There are times when an address can appear varying and be a PLUS
6273 during this scan when it would be a fixed address were we to know
6274 the proper equivalences. So invalidate all memory if there is
6275 a BLKmode or nonscalar memory reference or a reference to a
6276 variable address. */
6277 && (MEM_IN_STRUCT_P (dest) || GET_MODE (dest) == BLKmode
6278 || cse_rtx_varies_p (XEXP (dest, 0))))
6280 invalidate_memory ();
6281 return;
6284 if (GET_CODE (set) == CLOBBER
6285 #ifdef HAVE_cc0
6286 || dest == cc0_rtx
6287 #endif
6288 || dest == pc_rtx)
6289 return;
6291 if (code == STRICT_LOW_PART || code == ZERO_EXTRACT)
6292 invalidate (XEXP (dest, 0), GET_MODE (dest));
6293 else if (code == REG || code == SUBREG || code == MEM)
6294 invalidate (dest, VOIDmode);
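/* This function is handed to note_stores, which invokes it once per
   SET or CLOBBER destination; see invalidate_skipped_block below:

       note_stores (PATTERN (insn), invalidate_skipped_set, NULL);  */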
6297 /* Invalidate all insns from START up to the end of the function or the
6298 next label. This is called when we wish to CSE around a block that is
6299 conditionally executed. */
6301 static void
6302 invalidate_skipped_block (start)
6303 rtx start;
6305 rtx insn;
6307 for (insn = start; insn && GET_CODE (insn) != CODE_LABEL;
6308 insn = NEXT_INSN (insn))
6310 if (GET_RTX_CLASS (GET_CODE (insn)) != 'i')
6311 continue;
6313 if (GET_CODE (insn) == CALL_INSN)
6315 if (! CONST_CALL_P (insn))
6316 invalidate_memory ();
6317 invalidate_for_call ();
6320 invalidate_from_clobbers (PATTERN (insn));
6321 note_stores (PATTERN (insn), invalidate_skipped_set, NULL);
6325 /* If modifying X will modify the value in *DATA (which is really an
6326 `rtx *'), indicate that fact by setting the pointed to value to
6327 NULL_RTX. */
6329 static void
6330 cse_check_loop_start (x, set, data)
6331 rtx x;
6332 rtx set ATTRIBUTE_UNUSED;
6333 void *data;
6335 rtx *cse_check_loop_start_value = (rtx *) data;
6337 if (*cse_check_loop_start_value == NULL_RTX
6338 || GET_CODE (x) == CC0 || GET_CODE (x) == PC)
6339 return;
6341 if ((GET_CODE (x) == MEM && GET_CODE (*cse_check_loop_start_value) == MEM)
6342 || reg_overlap_mentioned_p (x, *cse_check_loop_start_value))
6343 *cse_check_loop_start_value = NULL_RTX;
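/* cse_set_around_loop below drives this via note_stores, passing the
   address of the expression being watched:

       rtx cse_check_loop_start_value = SET_SRC (x);
       ...
       note_stores (PATTERN (q), cse_check_loop_start,
                    &cse_check_loop_start_value);

   A NULL_RTX value afterwards means some store clobbered SET_SRC (x). */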
6346 /* X is a SET or CLOBBER contained in INSN that was found near the start of
6347 a loop that starts with the label at LOOP_START.
6349 If X is a SET, we see if its SET_SRC is currently in our hash table.
6350 If so, we see if it has a value equal to some register used only in the
6351 loop exit code (as marked by jump.c).
6353 If those two conditions are true, we search backwards from the start of
6354 the loop to see if that same value was loaded into a register that still
6355 retains its value at the start of the loop.
6357 If so, we insert an insn after the load to copy the destination of that
6358 load into the equivalent register and (try to) replace our SET_SRC with that
6359 register.
6361 In any event, we invalidate whatever this SET or CLOBBER modifies. */
6363 static void
6364 cse_set_around_loop (x, insn, loop_start)
6365 rtx x;
6366 rtx insn;
6367 rtx loop_start;
6369 struct table_elt *src_elt;
6371 /* If this is a SET, see if we can replace SET_SRC, but ignore SETs that
6372 are setting PC or CC0 or whose SET_SRC is already a register. */
6373 if (GET_CODE (x) == SET
6374 && GET_CODE (SET_DEST (x)) != PC && GET_CODE (SET_DEST (x)) != CC0
6375 && GET_CODE (SET_SRC (x)) != REG)
6377 src_elt = lookup (SET_SRC (x),
6378 HASH (SET_SRC (x), GET_MODE (SET_DEST (x))),
6379 GET_MODE (SET_DEST (x)));
6381 if (src_elt)
6382 for (src_elt = src_elt->first_same_value; src_elt;
6383 src_elt = src_elt->next_same_value)
6384 if (GET_CODE (src_elt->exp) == REG && REG_LOOP_TEST_P (src_elt->exp)
6385 && COST (src_elt->exp) < COST (SET_SRC (x)))
6387 rtx p, set;
6389 /* Look for an insn in front of LOOP_START that sets
6390 something in the desired mode to SET_SRC (x) before we hit
6391 a label or CALL_INSN. */
6393 for (p = prev_nonnote_insn (loop_start);
6394 p && GET_CODE (p) != CALL_INSN
6395 && GET_CODE (p) != CODE_LABEL;
6396 p = prev_nonnote_insn (p))
6397 if ((set = single_set (p)) != 0
6398 && GET_CODE (SET_DEST (set)) == REG
6399 && GET_MODE (SET_DEST (set)) == src_elt->mode
6400 && rtx_equal_p (SET_SRC (set), SET_SRC (x)))
6402 /* We now have to ensure that nothing between P
6403 and LOOP_START modified anything referenced in
6404 SET_SRC (x). We know that nothing within the loop
6405 can modify it, or we would have invalidated it in
6406 the hash table. */
6407 rtx q;
6408 rtx cse_check_loop_start_value = SET_SRC (x);
6409 for (q = p; q != loop_start; q = NEXT_INSN (q))
6410 if (GET_RTX_CLASS (GET_CODE (q)) == 'i')
6411 note_stores (PATTERN (q),
6412 cse_check_loop_start,
6413 &cse_check_loop_start_value);
6415 /* If nothing was changed and we can replace our
6416 SET_SRC, add an insn after P to copy its destination
6417 to what we will be replacing SET_SRC with. */
6418 if (cse_check_loop_start_value
6419 && validate_change (insn, &SET_SRC (x),
6420 src_elt->exp, 0))
6422 /* If this creates new pseudos, this is unsafe,
6423 because the regno of a new pseudo is unsuitable
6424 to index into reg_qty when cse_insn processes
6425 the new insn. Therefore, if a new pseudo was
6426 created, discard this optimization. */
6427 int nregs = max_reg_num ();
6428 rtx move
6429 = gen_move_insn (src_elt->exp, SET_DEST (set));
6430 if (nregs != max_reg_num ())
6432 if (! validate_change (insn, &SET_SRC (x),
6433 SET_SRC (set), 0))
6434 abort ();
6436 else
6437 emit_insn_after (move, p);
6439 break;
6444 /* Deal with the destination of X affecting the stack pointer. */
6445 addr_affects_sp_p (SET_DEST (x));
6447 /* See comment on similar code in cse_insn for explanation of these
6448 tests. */
6449 if (GET_CODE (SET_DEST (x)) == REG || GET_CODE (SET_DEST (x)) == SUBREG
6450 || GET_CODE (SET_DEST (x)) == MEM)
6451 invalidate (SET_DEST (x), VOIDmode);
6452 else if (GET_CODE (SET_DEST (x)) == STRICT_LOW_PART
6453 || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
6454 invalidate (XEXP (SET_DEST (x), 0), GET_MODE (SET_DEST (x)));
6457 /* Find the end of INSN's basic block and return its range,
6458 the total number of SETs in all the insns of the block, the last insn of the
6459 block, and the branch path.
6461 The branch path indicates which branches should be followed. If a non-zero
6462 path size is specified, the block should be rescanned and a different set
6463 of branches will be taken. The branch path is only used if
6464 FLAG_CSE_FOLLOW_JUMPS or FLAG_CSE_SKIP_BLOCKS is non-zero.
6466 DATA is a pointer to a struct cse_basic_block_data, defined below, that is
6467 used to describe the block. It is filled in with the information about
6468 the current block. The incoming structure's branch path, if any, is used
6469 to construct the output branch path. */
6471 void
6472 cse_end_of_basic_block (insn, data, follow_jumps, after_loop, skip_blocks)
6473 rtx insn;
6474 struct cse_basic_block_data *data;
6475 int follow_jumps;
6476 int after_loop;
6477 int skip_blocks;
6479 rtx p = insn, q;
6480 int nsets = 0;
6481 int low_cuid = INSN_CUID (insn), high_cuid = INSN_CUID (insn);
6482 rtx next = GET_RTX_CLASS (GET_CODE (insn)) == 'i' ? insn : next_real_insn (insn);
6483 int path_size = data->path_size;
6484 int path_entry = 0;
6485 int i;
6487 /* Update the previous branch path, if any. If the last branch was
6488 previously TAKEN, mark it NOT_TAKEN. If it was previously NOT_TAKEN,
6489 shorten the path by one and look at the previous branch. We know that
6490 at least one branch must have been taken if PATH_SIZE is non-zero. */
6491 while (path_size > 0)
6493 if (data->path[path_size - 1].status != NOT_TAKEN)
6495 data->path[path_size - 1].status = NOT_TAKEN;
6496 break;
6498 else
6499 path_size--;
6502 /* If the first instruction is marked with QImode, that means we've
6503 already processed this block. Our caller will look at DATA->LAST
6504 to figure out where to go next. We want to return the next block
6505 in the instruction stream, not some branched-to block somewhere
6506 else. We accomplish this by pretending our caller forbade us to
6507 follow jumps or skip blocks.
6508 if (GET_MODE (insn) == QImode)
6509 follow_jumps = skip_blocks = 0;
6511 /* Scan to end of this basic block. */
6512 while (p && GET_CODE (p) != CODE_LABEL)
6514 /* Don't cse out the end of a loop. This makes a difference
6515 only for the unusual loops that always execute at least once;
6516 all other loops have labels there so we will stop in any case.
6517 Cse'ing out the end of the loop is dangerous because it
6518 might cause an invariant expression inside the loop
6519 to be reused after the end of the loop. This would make it
6520 hard to move the expression out of the loop in loop.c,
6521 especially if it is one of several equivalent expressions
6522 and loop.c would like to eliminate it.
6524 If we are running after loop.c has finished, we can ignore
6525 the NOTE_INSN_LOOP_END. */
6527 if (! after_loop && GET_CODE (p) == NOTE
6528 && NOTE_LINE_NUMBER (p) == NOTE_INSN_LOOP_END)
6529 break;
6531 /* Don't cse over a call to setjmp; on some machines (eg vax)
6532 the regs restored by the longjmp come from
6533 a later time than the setjmp. */
6534 if (GET_CODE (p) == NOTE
6535 && NOTE_LINE_NUMBER (p) == NOTE_INSN_SETJMP)
6536 break;
6538 /* A PARALLEL can have lots of SETs in it,
6539 especially if it is really an ASM_OPERANDS. */
6540 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
6541 && GET_CODE (PATTERN (p)) == PARALLEL)
6542 nsets += XVECLEN (PATTERN (p), 0);
6543 else if (GET_CODE (p) != NOTE)
6544 nsets += 1;
6546 /* Ignore insns made by CSE; they cannot affect the boundaries of
6547 the basic block. */
6549 if (INSN_UID (p) <= max_uid && INSN_CUID (p) > high_cuid)
6550 high_cuid = INSN_CUID (p);
6551 if (INSN_UID (p) <= max_uid && INSN_CUID (p) < low_cuid)
6552 low_cuid = INSN_CUID (p);
6554 /* See if this insn is in our branch path. If it is and we are to
6555 take it, do so. */
6556 if (path_entry < path_size && data->path[path_entry].branch == p)
6558 if (data->path[path_entry].status != NOT_TAKEN)
6559 p = JUMP_LABEL (p);
6561 /* Point to next entry in path, if any. */
6562 path_entry++;
6565 /* If this is a conditional jump, we can follow it if -fcse-follow-jumps
6566 was specified, we haven't reached our maximum path length, there are
6567 insns following the target of the jump, this is the only use of the
6568 jump label, and the target label is preceded by a BARRIER.
6570 Alternatively, we can follow the jump if it branches around a
6571 block of code and there are no other branches into the block.
6572 In this case invalidate_skipped_block will be called to invalidate any
6573 registers set in the block when following the jump. */
6575 else if ((follow_jumps || skip_blocks) && path_size < PATHLENGTH - 1
6576 && GET_CODE (p) == JUMP_INSN
6577 && GET_CODE (PATTERN (p)) == SET
6578 && GET_CODE (SET_SRC (PATTERN (p))) == IF_THEN_ELSE
6579 && JUMP_LABEL (p) != 0
6580 && LABEL_NUSES (JUMP_LABEL (p)) == 1
6581 && NEXT_INSN (JUMP_LABEL (p)) != 0)
6583 for (q = PREV_INSN (JUMP_LABEL (p)); q; q = PREV_INSN (q))
6584 if ((GET_CODE (q) != NOTE
6585 || NOTE_LINE_NUMBER (q) == NOTE_INSN_LOOP_END
6586 || NOTE_LINE_NUMBER (q) == NOTE_INSN_SETJMP)
6587 && (GET_CODE (q) != CODE_LABEL || LABEL_NUSES (q) != 0))
6588 break;
6590 /* If we ran into a BARRIER, this code is an extension of the
6591 basic block when the branch is taken. */
6592 if (follow_jumps && q != 0 && GET_CODE (q) == BARRIER)
6594 /* Don't allow ourselves to keep walking around an
6595 always-executed loop. */
6596 if (next_real_insn (q) == next)
6598 p = NEXT_INSN (p);
6599 continue;
6602 /* Similarly, don't put a branch in our path more than once. */
6603 for (i = 0; i < path_entry; i++)
6604 if (data->path[i].branch == p)
6605 break;
6607 if (i != path_entry)
6608 break;
6610 data->path[path_entry].branch = p;
6611 data->path[path_entry++].status = TAKEN;
6613 /* This branch now ends our path. It was possible that we
6614 didn't see this branch the last time around (when the
6615 insn in front of the target was a JUMP_INSN that was
6616 turned into a no-op). */
6617 path_size = path_entry;
6619 p = JUMP_LABEL (p);
6620 /* Mark block so we won't scan it again later. */
6621 PUT_MODE (NEXT_INSN (p), QImode);
6623 /* Detect a branch around a block of code. */
6624 else if (skip_blocks && q != 0 && GET_CODE (q) != CODE_LABEL)
6626 register rtx tmp;
6628 if (next_real_insn (q) == next)
6630 p = NEXT_INSN (p);
6631 continue;
6634 for (i = 0; i < path_entry; i++)
6635 if (data->path[i].branch == p)
6636 break;
6638 if (i != path_entry)
6639 break;
6641 /* This is no_labels_between_p (p, q) with an added check for
6642 reaching the end of a function (in case Q precedes P). */
6643 for (tmp = NEXT_INSN (p); tmp && tmp != q; tmp = NEXT_INSN (tmp))
6644 if (GET_CODE (tmp) == CODE_LABEL)
6645 break;
6647 if (tmp == q)
6649 data->path[path_entry].branch = p;
6650 data->path[path_entry++].status = AROUND;
6652 path_size = path_entry;
6654 p = JUMP_LABEL (p);
6655 /* Mark block so we won't scan it again later. */
6656 PUT_MODE (NEXT_INSN (p), QImode);
6660 p = NEXT_INSN (p);
6663 data->low_cuid = low_cuid;
6664 data->high_cuid = high_cuid;
6665 data->nsets = nsets;
6666 data->last = p;
6668 /* If all jumps in the path are not taken, set our path length to zero
6669 so a rescan won't be done. */
6670 for (i = path_size - 1; i >= 0; i--)
6671 if (data->path[i].status != NOT_TAKEN)
6672 break;
6674 if (i == -1)
6675 data->path_size = 0;
6676 else
6677 data->path_size = path_size;
6679 /* End the current branch path. */
6680 data->path[path_size].branch = 0;
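/* The path update at the top of this function behaves like binary
   counting over the recorded branches: a path that ended
   {TAKEN, TAKEN} is retried as {TAKEN, NOT_TAKEN} and then shortened,
   so successive rescans explore a fresh combination, most recent
   branch first, until every entry is NOT_TAKEN and the path length
   drops to zero. */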
6683 /* Perform cse on the instructions of a function.
6684 F is the first instruction.
6685 NREGS is one plus the highest pseudo-reg number used in the function.
6687 AFTER_LOOP is 1 if this is the cse call done after loop optimization
6688 (only if -frerun-cse-after-loop).
6690 Returns 1 if jump_optimize should be redone due to simplifications
6691 in conditional jump instructions. */
6693 int
6694 cse_main (f, nregs, after_loop, file)
6695 rtx f;
6696 int nregs;
6697 int after_loop;
6698 FILE *file;
6700 struct cse_basic_block_data val;
6701 register rtx insn = f;
6702 register int i;
6704 cse_jumps_altered = 0;
6705 recorded_label_ref = 0;
6706 constant_pool_entries_cost = 0;
6707 val.path_size = 0;
6709 init_recog ();
6710 init_alias_analysis ();
6712 max_reg = nregs;
6714 max_insn_uid = get_max_uid ();
6716 reg_eqv_table = (struct reg_eqv_elem *)
6717 xmalloc (nregs * sizeof (struct reg_eqv_elem));
6719 #ifdef LOAD_EXTEND_OP
6721 /* Allocate scratch rtl here. cse_insn will fill in the memory reference
6722 and change the code and mode as appropriate. */
6723 memory_extend_rtx = gen_rtx_ZERO_EXTEND (VOIDmode, NULL_RTX);
6724 #endif
6726 /* Discard all the free elements of the previous function
6727 since they are allocated on the temporary obstack. */
6728 bzero ((char *) table, sizeof table);
6729 free_element_chain = 0;
6730 n_elements_made = 0;
6732 /* Find the largest uid. */
6734 max_uid = get_max_uid ();
6735 uid_cuid = (int *) xcalloc (max_uid + 1, sizeof (int));
6737 /* Compute the mapping from uids to cuids.
6738 CUIDs are numbers assigned to insns, like uids,
6739 except that cuids increase monotonically through the code.
6740 Don't assign cuids to line-number NOTEs, so that the distance in cuids
6741 between two insns is not affected by -g. */
6743 for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
6745 if (GET_CODE (insn) != NOTE
6746 || NOTE_LINE_NUMBER (insn) < 0)
6747 INSN_CUID (insn) = ++i;
6748 else
6749 /* Give a line number note the same cuid as preceding insn. */
6750 INSN_CUID (insn) = i;
6753 /* Initialize which registers are clobbered by calls. */
6755 CLEAR_HARD_REG_SET (regs_invalidated_by_call);
6757 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
6758 if ((call_used_regs[i]
6759 /* Used to check !fixed_regs[i] here, but that isn't safe;
6760 fixed regs are still call-clobbered, and sched can get
6761 confused if they can "live across calls".
6763 The frame pointer is always preserved across calls. The arg
6764 pointer is if it is fixed. The stack pointer usually is, unless
6765 RETURN_POPS_ARGS, in which case an explicit CLOBBER
6766 will be present. If we are generating PIC code, the PIC offset
6767 table register is preserved across calls. */
6769 && i != STACK_POINTER_REGNUM
6770 && i != FRAME_POINTER_REGNUM
6771 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
6772 && i != HARD_FRAME_POINTER_REGNUM
6773 #endif
6774 #if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
6775 && ! (i == ARG_POINTER_REGNUM && fixed_regs[i])
6776 #endif
6777 #if defined (PIC_OFFSET_TABLE_REGNUM) && !defined (PIC_OFFSET_TABLE_REG_CALL_CLOBBERED)
6778 && ! (i == PIC_OFFSET_TABLE_REGNUM && flag_pic)
6779 #endif
6781 || global_regs[i])
6782 SET_HARD_REG_BIT (regs_invalidated_by_call, i);
6784 if (ggc_p)
6785 ggc_push_context ();
6787 /* Loop over basic blocks.
6788 Compute the maximum number of qty's needed for each basic block
6789 (which is 2 for each SET). */
6790 insn = f;
6791 while (insn)
6793 cse_altered = 0;
6794 cse_end_of_basic_block (insn, &val, flag_cse_follow_jumps, after_loop,
6795 flag_cse_skip_blocks);
6797 /* If this basic block was already processed or has no sets, skip it. */
6798 if (val.nsets == 0 || GET_MODE (insn) == QImode)
6800 PUT_MODE (insn, VOIDmode);
6801 insn = (val.last ? NEXT_INSN (val.last) : 0);
6802 val.path_size = 0;
6803 continue;
6806 cse_basic_block_start = val.low_cuid;
6807 cse_basic_block_end = val.high_cuid;
6808 max_qty = val.nsets * 2;
6810 if (file)
6811 fnotice (file, ";; Processing block from %d to %d, %d sets.\n",
6812 INSN_UID (insn), val.last ? INSN_UID (val.last) : 0,
6813 val.nsets);
6815 /* Make MAX_QTY bigger to give us room to optimize
6816 past the end of this basic block, if that should prove useful. */
6817 if (max_qty < 500)
6818 max_qty = 500;
6820 max_qty += max_reg;
6822 /* If this basic block is being extended by following certain jumps,
6823 (see `cse_end_of_basic_block'), we reprocess the code from the start.
6824 Otherwise, we start after this basic block. */
6825 if (val.path_size > 0)
6826 cse_basic_block (insn, val.last, val.path, 0);
6827 else
6829 int old_cse_jumps_altered = cse_jumps_altered;
6830 rtx temp;
6832 /* When cse changes a conditional jump to an unconditional
6833 jump, we want to reprocess the block, since it will give
6834 us a new branch path to investigate. */
6835 cse_jumps_altered = 0;
6836 temp = cse_basic_block (insn, val.last, val.path, ! after_loop);
6837 if (cse_jumps_altered == 0
6838 || (flag_cse_follow_jumps == 0 && flag_cse_skip_blocks == 0))
6839 insn = temp;
6841 cse_jumps_altered |= old_cse_jumps_altered;
6844 if (ggc_p && cse_altered)
6845 ggc_collect ();
6847 #ifdef USE_C_ALLOCA
6848 alloca (0);
6849 #endif
6852 if (ggc_p)
6853 ggc_pop_context ();
6855 if (max_elements_made < n_elements_made)
6856 max_elements_made = n_elements_made;
6858 /* Clean up. */
6859 end_alias_analysis ();
6860 free (uid_cuid);
6861 free (reg_eqv_table);
6863 return cse_jumps_altered || recorded_label_ref;
6866 /* Process a single basic block. FROM and TO are the limits of the basic
6867 block. NEXT_BRANCH points to the branch path when following jumps or
6868 a null path when not following jumps.
6870 AROUND_LOOP is non-zero if we are to try to cse around to the start of a
6871 loop. This is true when we are being called for the last time on a
6872 block and this CSE pass is before loop.c. */
6874 static rtx
6875 cse_basic_block (from, to, next_branch, around_loop)
6876 register rtx from, to;
6877 struct branch_path *next_branch;
6878 int around_loop;
6880 register rtx insn;
6881 int to_usage = 0;
6882 rtx libcall_insn = NULL_RTX;
6883 int num_insns = 0;
6885 /* This array is undefined before max_reg, so only allocate
6886 the space actually needed and adjust the start. */
6888 qty_table
6889 = (struct qty_table_elem *) xmalloc ((max_qty - max_reg)
6890 * sizeof (struct qty_table_elem));
6891 qty_table -= max_reg;
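/* After this offset adjustment, qty_table[q] is in bounds exactly for
   max_reg <= q < max_qty; the matching calls below undo the offset
   with free (qty_table + max_reg). */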
6893 new_basic_block ();
6895 /* TO might be a label. If so, protect it from being deleted. */
6896 if (to != 0 && GET_CODE (to) == CODE_LABEL)
6897 ++LABEL_NUSES (to);
6899 for (insn = from; insn != to; insn = NEXT_INSN (insn))
6901 register enum rtx_code code = GET_CODE (insn);
6903 /* If we have processed 1,000 insns, flush the hash table to
6904 avoid extreme quadratic behavior. We must not include NOTEs
6905 in the count since there may be more of them when generating
6906 debugging information. If we clear the table at different
6907 times, code generated with -g -O might be different than code
6908 generated with -O but not -g.
6910 ??? This is a real kludge and needs to be done some other way.
6911 Perhaps for 2.9. */
6912 if (code != NOTE && num_insns++ > 1000)
6914 flush_hash_table ();
6915 num_insns = 0;
6918 /* See if this is a branch that is part of the path. If so, and it is
6919 to be taken, do so. */
6920 if (next_branch->branch == insn)
6922 enum taken status = next_branch++->status;
6923 if (status != NOT_TAKEN)
6925 if (status == TAKEN)
6926 record_jump_equiv (insn, 1);
6927 else
6928 invalidate_skipped_block (NEXT_INSN (insn));
6930 /* Set the last insn as the jump insn; it doesn't affect cc0.
6931 Then follow this branch. */
6932 #ifdef HAVE_cc0
6933 prev_insn_cc0 = 0;
6934 #endif
6935 prev_insn = insn;
6936 insn = JUMP_LABEL (insn);
6937 continue;
6941 if (GET_MODE (insn) == QImode)
6942 PUT_MODE (insn, VOIDmode);
6944 if (GET_RTX_CLASS (code) == 'i')
6946 rtx p;
6948 /* Process notes first so we have all notes in canonical forms when
6949 looking for duplicate operations. */
6951 if (REG_NOTES (insn))
6952 REG_NOTES (insn) = cse_process_notes (REG_NOTES (insn), NULL_RTX);
6954 /* Track when we are inside a LIBCALL block. Inside such a block,
6955 we do not want to record destinations. The last insn of a
6956 LIBCALL block is not considered to be part of the block, since
6957 its destination is the result of the block and hence should be
6958 recorded. */
6960 if ((p = find_reg_note (insn, REG_LIBCALL, NULL_RTX)))
6961 libcall_insn = XEXP (p, 0);
6962 else if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
6963 libcall_insn = NULL_RTX;
6965 cse_insn (insn, libcall_insn);
6968 /* If INSN is now an unconditional jump, skip to the end of our
6969 basic block by pretending that we just did the last insn in the
6970 basic block. If we are jumping to the end of our block, show
6971 that we can have one usage of TO. */
6973 if (simplejump_p (insn))
6975 if (to == 0)
6977 free (qty_table + max_reg);
6978 return 0;
6981 if (JUMP_LABEL (insn) == to)
6982 to_usage = 1;
6984 /* Maybe TO was deleted because the jump is unconditional.
6985 If so, there is nothing left in this basic block. */
6986 /* ??? Perhaps it would be smarter to set TO
6987 to whatever follows this insn,
6988 and pretend the basic block had always ended here. */
6989 if (INSN_DELETED_P (to))
6990 break;
6992 insn = PREV_INSN (to);
6995 /* See if it is ok to keep on going past the label
6996 which used to end our basic block. Remember that we incremented
6997 the count of that label, so we decrement it here. If we made
6998 a jump unconditional, TO_USAGE will be one; in that case, we don't
6999 want to count the use in that jump. */
7001 if (to != 0 && NEXT_INSN (insn) == to
7002 && GET_CODE (to) == CODE_LABEL && --LABEL_NUSES (to) == to_usage)
7004 struct cse_basic_block_data val;
7005 rtx prev;
7007 insn = NEXT_INSN (to);
7009 /* If TO was the last insn in the function, we are done. */
7010 if (insn == 0)
7012 free (qty_table + max_reg);
7013 return 0;
7016 /* If TO was preceded by a BARRIER we are done with this block
7017 because it has no continuation. */
7018 prev = prev_nonnote_insn (to);
7019 if (prev && GET_CODE (prev) == BARRIER)
7021 free (qty_table + max_reg);
7022 return insn;
7025 /* Find the end of the following block. Note that we won't be
7026 following branches in this case. */
7027 to_usage = 0;
7028 val.path_size = 0;
7029 cse_end_of_basic_block (insn, &val, 0, 0, 0);
7031 /* If the tables we allocated have enough space left
7032 to handle all the SETs in the next basic block,
7033 continue through it. Otherwise, return,
7034 and that block will be scanned individually. */
7035 if (val.nsets * 2 + next_qty > max_qty)
7036 break;
7038 cse_basic_block_start = val.low_cuid;
7039 cse_basic_block_end = val.high_cuid;
7040 to = val.last;
7042 /* Prevent TO from being deleted if it is a label. */
7043 if (to != 0 && GET_CODE (to) == CODE_LABEL)
7044 ++LABEL_NUSES (to);
7046 /* Back up so we process the first insn in the extension. */
7047 insn = PREV_INSN (insn);
7051 if (next_qty > max_qty)
7052 abort ();
7054 /* If we are running before loop.c, we stopped on a NOTE_INSN_LOOP_END, and
7055 the previous insn is the only insn that branches to the head of a loop,
7056 we can cse into the loop. Don't do this if we changed the jump
7057 structure of a loop unless we aren't going to be following jumps. */
7059 if ((cse_jumps_altered == 0
7060 || (flag_cse_follow_jumps == 0 && flag_cse_skip_blocks == 0))
7061 && around_loop && to != 0
7062 && GET_CODE (to) == NOTE && NOTE_LINE_NUMBER (to) == NOTE_INSN_LOOP_END
7063 && GET_CODE (PREV_INSN (to)) == JUMP_INSN
7064 && JUMP_LABEL (PREV_INSN (to)) != 0
7065 && LABEL_NUSES (JUMP_LABEL (PREV_INSN (to))) == 1)
7066 cse_around_loop (JUMP_LABEL (PREV_INSN (to)));
7068 free (qty_table + max_reg);
7070 return to ? NEXT_INSN (to) : 0;
7073 /* Count the number of times registers are used (not set) in X.
7074 COUNTS is an array in which we accumulate the count, INCR is how much
7075 we count each register usage.
7077 Don't count a usage of DEST, which is the SET_DEST of a SET which
7078 contains X in its SET_SRC. This is because such a SET does not
7079 modify the liveness of DEST. */
static void
count_reg_usage (x, counts, dest, incr)
     rtx x;
     int *counts;
     rtx dest;
     int incr;
{
  enum rtx_code code;
  const char *fmt;
  int i, j;

  if (x == 0)
    return;

  switch (code = GET_CODE (x))
    {
    case REG:
      if (x != dest)
	counts[REGNO (x)] += incr;
      return;

    case PC:
    case CC0:
    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case SYMBOL_REF:
    case LABEL_REF:
      return;

    case CLOBBER:
      /* If we are clobbering a MEM, mark any registers inside the address
	 as being used.  */
      if (GET_CODE (XEXP (x, 0)) == MEM)
	count_reg_usage (XEXP (XEXP (x, 0), 0), counts, NULL_RTX, incr);
      return;

    case SET:
      /* Unless we are setting a REG, count everything in SET_DEST.  */
      if (GET_CODE (SET_DEST (x)) != REG)
	count_reg_usage (SET_DEST (x), counts, NULL_RTX, incr);

      /* If SRC has side-effects, then we can't delete this insn, so the
	 usage of SET_DEST inside SRC counts.

	 ??? Strictly speaking, we might be preserving this insn
	 because some other SET has side-effects, but that's hard
	 to do and can't happen now.  */
      count_reg_usage (SET_SRC (x), counts,
		       side_effects_p (SET_SRC (x)) ? NULL_RTX : SET_DEST (x),
		       incr);
      return;

    case CALL_INSN:
      count_reg_usage (CALL_INSN_FUNCTION_USAGE (x), counts, NULL_RTX, incr);

      /* ... falls through ...  */
    case INSN:
    case JUMP_INSN:
      count_reg_usage (PATTERN (x), counts, NULL_RTX, incr);

      /* Things used in a REG_EQUAL note aren't dead since loop may try to
	 use them.  */

      count_reg_usage (REG_NOTES (x), counts, NULL_RTX, incr);
      return;

    case EXPR_LIST:
    case INSN_LIST:
      if (REG_NOTE_KIND (x) == REG_EQUAL
	  || (REG_NOTE_KIND (x) != REG_NONNEG
	      && GET_CODE (XEXP (x, 0)) == USE))
	count_reg_usage (XEXP (x, 0), counts, NULL_RTX, incr);
      count_reg_usage (XEXP (x, 1), counts, NULL_RTX, incr);
      return;

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	count_reg_usage (XEXP (x, i), counts, dest, incr);
      else if (fmt[i] == 'E')
	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	  count_reg_usage (XVECEXP (x, i, j), counts, dest, incr);
    }
}
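/* Editorial sketch of a typical caller (this mirrors the use in
   delete_trivially_dead_insns below; FIRST is a hypothetical insn):

	int *counts = (int *) xcalloc (max_reg_num (), sizeof (int));
	rtx insn;

	for (insn = next_real_insn (first); insn;
	     insn = next_real_insn (insn))
	  count_reg_usage (insn, counts, NULL_RTX, 1);

   Calling it again with INCR == -1 subtracts the same uses, which is
   how the counts are kept current when an insn is deleted.  */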
/* Scan all the insns and delete any that are dead; i.e., they store a
   register that is never used or they copy a register to itself.

   This is used to remove insns made obviously dead by cse, loop or other
   optimizations.  It improves the heuristics in loop since it won't try to
   move dead invariants out of loops or make givs for dead quantities.  The
   remaining passes of the compilation are also sped up.  */
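/* Editorial example (hypothetical pseudo registers): given

	(insn ... (set (reg 101) (reg 101)))
	(insn ... (set (reg 100) (plus (reg 102) (const_int 4))))

   the first insn is a self-copy and the second is dead once (reg 100)
   has a use count of zero; the backward scan below deletes both and
   decrements the count of (reg 102) as the second one goes away.  */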
void
delete_trivially_dead_insns (insns, nreg)
     rtx insns;
     int nreg;
{
  int *counts;
  rtx insn, prev;
#ifdef HAVE_cc0
  rtx tem;
#endif
  int i;
  int in_libcall = 0, dead_libcall = 0;

  /* First count the number of times each register is used.  */
  counts = (int *) xcalloc (nreg, sizeof (int));
  for (insn = next_real_insn (insns); insn; insn = next_real_insn (insn))
    count_reg_usage (insn, counts, NULL_RTX, 1);

  /* Go from the last insn to the first and delete insns that only set unused
     registers or copy a register to itself.  As we delete an insn, remove
     usage counts for registers it uses.

     The first jump optimization pass may leave a real insn as the last
     insn in the function.  We must not skip that insn or we may end
     up deleting code that is not really dead.  */
  insn = get_last_insn ();
  if (GET_RTX_CLASS (GET_CODE (insn)) != 'i')
    insn = prev_real_insn (insn);

  for ( ; insn; insn = prev)
    {
      int live_insn = 0;
      rtx note;

      prev = prev_real_insn (insn);

      /* Don't delete any insns that are part of a libcall block unless
	 we can delete the whole libcall block.

	 Flow or loop might get confused if we did that.  Remember
	 that we are scanning backwards.  */
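      /* Editorial sketch (hypothetical registers): a libcall block looks
	 roughly like

	    (insn ... (REG_LIBCALL -> last insn))
	    ... insns computing the value ...
	    (insn (set (reg 110) (reg 109))
		  (REG_RETVAL -> first insn) (REG_EQUAL <expr>))

	 Since the scan runs backwards, REG_RETVAL marks entry into such a
	 block and REG_LIBCALL marks leaving it, which is why in_libcall
	 and dead_libcall are toggled where they are.  */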
      if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
	{
	  in_libcall = 1;
	  live_insn = 1;
	  dead_libcall = 0;

	  /* See if there's a REG_EQUAL note on this insn and try to
	     replace the source with the REG_EQUAL expression.

	     We assume that insns with REG_RETVALs can only be reg->reg
	     copies at this point.  */
	  note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
	  if (note)
	    {
	      rtx set = single_set (insn);
	      rtx new = simplify_rtx (XEXP (note, 0));

	      if (!new)
		new = XEXP (note, 0);

	      /* If the replacement is valid, the rest of the libcall
		 block is no longer needed.  */
	      if (set && validate_change (insn, &SET_SRC (set), new, 0))
		{
		  remove_note (insn,
			       find_reg_note (insn, REG_RETVAL, NULL_RTX));
		  dead_libcall = 1;
		}
	    }
	}
      else if (in_libcall)
	live_insn = ! dead_libcall;
      else if (GET_CODE (PATTERN (insn)) == SET)
	{
	  if ((GET_CODE (SET_DEST (PATTERN (insn))) == REG
	       || GET_CODE (SET_DEST (PATTERN (insn))) == SUBREG)
	      && rtx_equal_p (SET_DEST (PATTERN (insn)),
			      SET_SRC (PATTERN (insn))))
	    ;			/* Self-copy: leave live_insn zero.  */

#ifdef HAVE_cc0
	  else if (GET_CODE (SET_DEST (PATTERN (insn))) == CC0
		   && ! side_effects_p (SET_SRC (PATTERN (insn)))
		   && ((tem = next_nonnote_insn (insn)) == 0
		       || GET_RTX_CLASS (GET_CODE (tem)) != 'i'
		       || ! reg_referenced_p (cc0_rtx, PATTERN (tem))))
	    ;			/* Unused cc0 setter is dead.  */
#endif
	  else if (GET_CODE (SET_DEST (PATTERN (insn))) != REG
		   || REGNO (SET_DEST (PATTERN (insn))) < FIRST_PSEUDO_REGISTER
		   || counts[REGNO (SET_DEST (PATTERN (insn)))] != 0
		   || side_effects_p (SET_SRC (PATTERN (insn)))
		   /* An ADDRESSOF expression can turn into a use of the
		      internal arg pointer, so always consider the
		      internal arg pointer live.  If it is truly dead,
		      flow will delete the initializing insn.  */
		   || (SET_DEST (PATTERN (insn))
		       == current_function_internal_arg_pointer))
	    live_insn = 1;
	}
      else if (GET_CODE (PATTERN (insn)) == PARALLEL)
	for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
	  {
	    rtx elt = XVECEXP (PATTERN (insn), 0, i);

	    if (GET_CODE (elt) == SET)
	      {
		if ((GET_CODE (SET_DEST (elt)) == REG
		     || GET_CODE (SET_DEST (elt)) == SUBREG)
		    && rtx_equal_p (SET_DEST (elt), SET_SRC (elt)))
		  ;		/* Self-copy: leave live_insn zero.  */

#ifdef HAVE_cc0
		else if (GET_CODE (SET_DEST (elt)) == CC0
			 && ! side_effects_p (SET_SRC (elt))
			 && ((tem = next_nonnote_insn (insn)) == 0
			     || GET_RTX_CLASS (GET_CODE (tem)) != 'i'
			     || ! reg_referenced_p (cc0_rtx, PATTERN (tem))))
		  ;		/* Unused cc0 setter is dead.  */
#endif
		else if (GET_CODE (SET_DEST (elt)) != REG
			 || REGNO (SET_DEST (elt)) < FIRST_PSEUDO_REGISTER
			 || counts[REGNO (SET_DEST (elt))] != 0
			 || side_effects_p (SET_SRC (elt))
			 /* An ADDRESSOF expression can turn into a use of the
			    internal arg pointer, so always consider the
			    internal arg pointer live.  If it is truly dead,
			    flow will delete the initializing insn.  */
			 || (SET_DEST (elt)
			     == current_function_internal_arg_pointer))
		  live_insn = 1;
	      }
	    else if (GET_CODE (elt) != CLOBBER && GET_CODE (elt) != USE)
	      live_insn = 1;
	  }
      else
	live_insn = 1;

      /* If this is a dead insn, delete it and mark the registers in it
	 as no longer used.  */

      if (! live_insn)
	{
	  count_reg_usage (insn, counts, NULL_RTX, -1);
	  delete_insn (insn);
	}

      if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
	{
	  in_libcall = 0;
	  dead_libcall = 0;
	}
    }
  /* Clean up.  */
  free (counts);
}
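/* Editorial usage sketch (mirrors how the compiler driver invokes this
   pass; both entry points are the standard ones from emit-rtl.c):

	delete_trivially_dead_insns (get_insns (), max_reg_num ());

   get_insns returns the first insn of the current function and
   max_reg_num bounds the register numbers seen, sizing COUNTS.  */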