/* Common subexpression elimination for GNU compiler.
   Copyright (C) 1987, 88, 89, 92-99, 2000 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
#include "config.h"
/* stdio.h must precede rtl.h for FFS.  */
#include "system.h"
#include <setjmp.h>

#include "rtl.h"
#include "tm_p.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "flags.h"
#include "real.h"
#include "insn-config.h"
#include "recog.h"
#include "function.h"
#include "expr.h"
#include "toplev.h"
#include "output.h"
#include "ggc.h"
/* The basic idea of common subexpression elimination is to go
   through the code, keeping a record of expressions that would
   have the same value at the current scan point, and replacing
   expressions encountered with the cheapest equivalent expression.

   It is too complicated to keep track of the different possibilities
   when control paths merge in this code; so, at each label, we forget all
   that is known and start fresh.  This can be described as processing each
   extended basic block separately.  We have a separate pass to perform
   global CSE.

   Note CSE can turn a conditional or computed jump into a nop or
   an unconditional jump.  When this occurs we arrange to run the jump
   optimizer after CSE to delete the unreachable code.

   We use two data structures to record the equivalent expressions:
   a hash table for most expressions, and a vector of "quantity
   numbers" to record equivalent (pseudo) registers.

   The use of the special data structure for registers is desirable
   because it is faster.  It is possible because register references
   contain a fairly small number, the register number, taken from
   a contiguously allocated series, and two register references are
   identical if they have the same number.  General expressions
   do not have any such thing, so the only way to retrieve the
   information recorded on an expression other than a register
   is to keep it in a hash table.

Registers and "quantity numbers":

   At the start of each basic block, all of the (hardware and pseudo)
   registers used in the function are given distinct quantity
   numbers to indicate their contents.  During scan, when the code
   copies one register into another, we copy the quantity number.
   When a register is loaded in any other way, we allocate a new
   quantity number to describe the value generated by this operation.
   `reg_qty' records what quantity a register is currently thought
   of as containing.

   All real quantity numbers are greater than or equal to `max_reg'.
   If register N has not been assigned a quantity, reg_qty[N] will equal N.

   Quantity numbers below `max_reg' do not exist and none of the `qty_table'
   entries should be referenced with an index below `max_reg'.

   We also maintain a bidirectional chain of registers for each
   quantity number.  The `qty_table' members `first_reg' and `last_reg',
   and `reg_eqv_table' members `next' and `prev' hold these chains.

   The first register in a chain is the one whose lifespan is least local.
   Among equals, it is the one that was seen first.
   We replace any equivalent register with that one.

   If two registers have the same quantity number, it must be true that
   REG expressions with the qty_table `mode' are in the hash table for both
   registers and are in the same class.

   The converse is not true.  Since hard registers may be referenced in
   any mode, two REG expressions might be equivalent in the hash table
   but not have the same quantity number if the quantity number of one
   of the registers is not the same mode as those expressions.

Constants and quantity numbers

   When a quantity has a known constant value, that value is stored
   in the appropriate qty_table `const_rtx'.  This is in addition to
   putting the constant in the hash table as is usual for non-regs.

   Whether a reg or a constant is preferred is determined by the configuration
   macro CONST_COSTS and will often depend on the constant value.  In any
   event, expressions containing constants can be simplified, by fold_rtx.

   When a quantity has a known nearly constant value (such as an address
   of a stack slot), that value is stored in the appropriate qty_table
   `const_rtx'.

   Integer constants don't have a machine mode.  However, cse
   determines the intended machine mode from the destination
   of the instruction that moves the constant.  The machine mode
   is recorded in the hash table along with the actual RTL
   constant expression so that different modes are kept separate.

Other expressions:

   To record known equivalences among expressions in general
   we use a hash table called `table'.  It has a fixed number of buckets
   that contain chains of `struct table_elt' elements for expressions.
   These chains connect the elements whose expressions have the same
   hash codes.

   Other chains through the same elements connect the elements which
   currently have equivalent values.

   Register references in an expression are canonicalized before hashing
   the expression.  This is done using `reg_qty' and qty_table `first_reg'.
   The hash code of a register reference is computed using the quantity
   number, not the register number.

   When the value of an expression changes, it is necessary to remove from the
   hash table not just that expression but all expressions whose values
   could be different as a result.

     1. If the value changing is in memory, except in special cases
     ANYTHING referring to memory could be changed.  That is because
     nobody knows where a pointer does not point.
     The function `invalidate_memory' removes what is necessary.

     The special cases are when the address is constant or is
     a constant plus a fixed register such as the frame pointer
     or a static chain pointer.  When such addresses are stored in,
     we can tell exactly which other such addresses must be invalidated
     due to overlap.  `invalidate' does this.
     All expressions that refer to non-constant
     memory addresses are also invalidated.  `invalidate_memory' does this.

     2. If the value changing is a register, all expressions
     containing references to that register, and only those,
     must be removed.

   Because searching the entire hash table for expressions that contain
   a register is very slow, we try to figure out when it isn't necessary.
   Precisely, this is necessary only when expressions have been
   entered in the hash table using this register, and then the value has
   changed, and then another expression wants to be added to refer to
   the register's new value.  This sequence of circumstances is rare
   within any one basic block.

   The vectors `reg_tick' and `reg_in_table' are used to detect this case.
   reg_tick[i] is incremented whenever a value is stored in register i.
   reg_in_table[i] holds -1 if no references to register i have been
   entered in the table; otherwise, it contains the value reg_tick[i] had
   when the references were entered.  If we want to enter a reference
   and reg_in_table[i] != reg_tick[i], we must scan and remove old references.
   Until we want to enter a new entry, the mere fact that the two vectors
   don't match makes the entries be ignored if anyone tries to match them.

   Registers themselves are entered in the hash table as well as in
   the equivalent-register chains.  However, the vectors `reg_tick'
   and `reg_in_table' do not apply to expressions which are simple
   register references.  These expressions are removed from the table
   immediately when they become invalid, and this can be done even if
   we do not immediately search for all the expressions that refer to
   the register.

   A CLOBBER rtx in an instruction invalidates its operand for further
   reuse.  A CLOBBER or SET rtx whose operand is a MEM:BLK
   invalidates everything that resides in memory.

Related expressions:

   Constant expressions that differ only by an additive integer
   are called related.  When a constant expression is put in
   the table, the related expression with no constant term
   is also entered.  These are made to point at each other
   so that it is possible to find out if there exists any
   register equivalent to an expression related to a given expression.  */
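
/* Illustrative sketch (not part of the original file): a minimal model of
   the quantity-number scheme described above, under simplified,
   hypothetical names.  Registers start with no quantity (reg_qty[N] == N);
   a register-to-register copy shares the source's quantity, while any
   other store allocates a fresh one.  The real pass additionally maintains
   the per-quantity register chains and the hash table.  */
#if 0
#define MODEL_N_REGS 8

static int model_reg_qty[MODEL_N_REGS];
static int model_next_qty = MODEL_N_REGS;

static void
model_start_block ()
{
  int i;

  for (i = 0; i < MODEL_N_REGS; i++)
    model_reg_qty[i] = i;       /* no quantity assigned yet */
}

static void
model_copy_reg (dest, src)
     int dest, src;
{
  model_reg_qty[dest] = model_reg_qty[src];   /* share the value's qty */
}

static void
model_set_reg (dest)
     int dest;
{
  model_reg_qty[dest] = model_next_qty++;     /* brand-new value */
}
#endif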
/* One plus largest register number used in this function.  */

static int max_reg;

/* One plus largest instruction UID used in this function at time of
   cse_main call.  */

static int max_insn_uid;

/* Length of qty_table vector.  We know in advance we will not need
   a quantity number this big.  */

static int max_qty;

/* Next quantity number to be allocated.
   This is 1 + the largest number needed so far.  */

static int next_qty;

/* Per-qty information tracking.

   `first_reg' and `last_reg' track the head and tail of the
   chain of registers which currently contain this quantity.

   `mode' contains the machine mode of this quantity.

   `const_rtx' holds the rtx of the constant value of this
   quantity, if known.  A summation of the frame/arg pointer
   and a constant can also be entered here.  When this holds
   a known value, `const_insn' is the insn which stored the
   constant value.

   `comparison_{code,const,qty}' are used to track when a
   comparison between a quantity and some constant or register has
   been passed.  In such a case, we know the results of the comparison
   in case we see it again.  These members record a comparison that
   is known to be true.  `comparison_code' holds the rtx code of such
   a comparison, else it is set to UNKNOWN and the other two
   comparison members are undefined.  `comparison_const' holds
   the constant being compared against, or zero if the comparison
   is not against a constant.  `comparison_qty' holds the quantity
   being compared against when the result is known.  If the comparison
   is not with a register, `comparison_qty' is -1.  */

struct qty_table_elem
{
  rtx const_rtx;
  rtx const_insn;
  rtx comparison_const;
  int comparison_qty;
  int first_reg, last_reg;
  enum machine_mode mode;
  enum rtx_code comparison_code;
};

/* The table of all qtys, indexed by qty number.  */
static struct qty_table_elem *qty_table;
#ifdef HAVE_cc0
/* For machines that have a CC0, we do not record its value in the hash
   table since its use is guaranteed to be the insn immediately following
   its definition and any other insn is presumed to invalidate it.

   Instead, we store below the value last assigned to CC0.  If it should
   happen to be a constant, it is stored in preference to the actual
   assigned value.  In case it is a constant, we store the mode in which
   the constant should be interpreted.  */

static rtx prev_insn_cc0;
static enum machine_mode prev_insn_cc0_mode;
#endif

/* Previous actual insn.  0 if at first insn of basic block.  */

static rtx prev_insn;

/* Insn being scanned.  */

static rtx this_insn;
/* Indexed by register number, gives the number of the next (or
   previous) register in the chain of registers sharing the same
   value.

   Or -1 if this register is at the end of the chain.

   If reg_qty[N] == N, reg_eqv_table[N].next is undefined.  */

/* Per-register equivalence chain.  */
struct reg_eqv_elem
{
  int next, prev;
};

/* The table of all register equivalence chains.  */
static struct reg_eqv_elem *reg_eqv_table;
struct cse_reg_info
{
  /* Next in hash chain.  */
  struct cse_reg_info *hash_next;

  /* The next cse_reg_info structure in the free or used list.  */
  struct cse_reg_info *next;

  /* Search key.  */
  int regno;

  /* The quantity number of the register's current contents.  */
  int reg_qty;

  /* The number of times the register has been altered in the current
     basic block.  */
  int reg_tick;

  /* The REG_TICK value at which rtx's containing this register are
     valid in the hash table.  If this does not equal the current
     reg_tick value, such expressions existing in the hash table are
     invalid.  */
  int reg_in_table;
};

/* A free list of cse_reg_info entries.  */
static struct cse_reg_info *cse_reg_info_free_list;

/* A used list of cse_reg_info entries.  */
static struct cse_reg_info *cse_reg_info_used_list;
static struct cse_reg_info *cse_reg_info_used_list_end;
/* A mapping from registers to cse_reg_info data structures.  */
#define REGHASH_SHIFT 7
#define REGHASH_SIZE (1 << REGHASH_SHIFT)
#define REGHASH_MASK (REGHASH_SIZE - 1)

static struct cse_reg_info *reg_hash[REGHASH_SIZE];

#define REGHASH_FN(REGNO) \
  (((REGNO) ^ ((REGNO) >> REGHASH_SHIFT)) & REGHASH_MASK)
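
/* Illustrative note (not in the original): with REGHASH_SHIFT == 7,
   REGHASH_FN folds the high bits of the register number into the low
   seven before masking; e.g. REGHASH_FN (130) == (130 ^ (130 >> 7)) & 127
   == (130 ^ 1) & 127 == 3, so large pseudo numbers spread across the
   128 buckets instead of colliding in blocks.  */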
/* The last lookup we did into the cse_reg_info hash table.  This allows us
   to cache repeated lookups.  */
static int cached_regno;
static struct cse_reg_info *cached_cse_reg_info;

/* A HARD_REG_SET containing all the hard registers for which there is
   currently a REG expression in the hash table.  Note the difference
   from the above variables, which indicate if the REG is mentioned in some
   expression in the table.  */

static HARD_REG_SET hard_regs_in_table;

/* A HARD_REG_SET containing all the hard registers that are invalidated
   by a CALL_INSN.  */

static HARD_REG_SET regs_invalidated_by_call;

/* CUID of insn that starts the basic block currently being cse-processed.  */

static int cse_basic_block_start;

/* CUID of insn that ends the basic block currently being cse-processed.  */

static int cse_basic_block_end;

/* Vector mapping INSN_UIDs to cuids.
   The cuids are like uids but increase monotonically always.
   We use them to see whether a reg is used outside a given basic block.  */

static int *uid_cuid;

/* Highest UID in UID_CUID.  */
static int max_uid;

/* Get the cuid of an insn.  */

#define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])

/* Nonzero if cse has altered conditional jump insns
   in such a way that jump optimization should be redone.  */

static int cse_jumps_altered;

/* Nonzero if we put a LABEL_REF into the hash table.  Since we may have put
   it into an INSN without a REG_LABEL, we have to rerun jump after CSE
   to put in the note.  */
static int recorded_label_ref;

/* canon_hash stores 1 in do_not_record
   if it notices a reference to CC0, PC, or some other volatile
   subexpression.  */

static int do_not_record;
#ifdef LOAD_EXTEND_OP

/* Scratch rtl used when looking for load-extended copy of a MEM.  */
static rtx memory_extend_rtx;
#endif

/* canon_hash stores 1 in hash_arg_in_memory
   if it notices a reference to memory within the expression being hashed.  */

static int hash_arg_in_memory;

/* The hash table contains buckets which are chains of `struct table_elt's,
   each recording one expression's information.
   That expression is in the `exp' field.

   Those elements with the same hash code are chained in both directions
   through the `next_same_hash' and `prev_same_hash' fields.

   Each set of expressions with equivalent values
   is on a two-way chain through the `next_same_value'
   and `prev_same_value' fields, and all point with
   the `first_same_value' field at the first element in
   that chain.  The chain is in order of increasing cost.
   Each element's cost value is in its `cost' field.

   The `in_memory' field is nonzero for elements that
   involve any reference to memory.  These elements are removed
   whenever a write is done to an unidentified location in memory.
   To be safe, we assume that a memory address is unidentified unless
   the address is either a symbol constant or a constant plus
   the frame pointer or argument pointer.

   The `related_value' field is used to connect related expressions
   (that differ by adding an integer).
   The related expressions are chained in a circular fashion.
   `related_value' is zero for expressions for which this
   chain is not useful.

   The `cost' field stores the cost of this element's expression.

   The `is_const' flag is set if the element is a constant (including
   a fixed address).

   The `flag' field is used as a temporary during some search routines.

   The `mode' field is usually the same as GET_MODE (`exp'), but
   if `exp' is a CONST_INT and has no machine mode then the `mode'
   field is the mode it was being used as.  Each constant is
   recorded separately for each mode it is used with.  */
struct table_elt
{
  rtx exp;
  struct table_elt *next_same_hash;
  struct table_elt *prev_same_hash;
  struct table_elt *next_same_value;
  struct table_elt *prev_same_value;
  struct table_elt *first_same_value;
  struct table_elt *related_value;
  int cost;
  enum machine_mode mode;
  char in_memory;
  char is_const;
  char flag;
};

/* We don't want a lot of buckets, because we rarely have very many
   things stored in the hash table, and a lot of buckets slows
   down a lot of loops that happen frequently.  */
#define HASH_SHIFT 5
#define HASH_SIZE (1 << HASH_SHIFT)
#define HASH_MASK (HASH_SIZE - 1)
/* Compute hash code of X in mode M.  Special-case the case where X is a
   pseudo register (hard registers may require `do_not_record' to be set).  */

#define HASH(X, M) \
 ((GET_CODE (X) == REG && REGNO (X) >= FIRST_PSEUDO_REGISTER \
   ? (((unsigned) REG << 7) + (unsigned) REG_QTY (REGNO (X))) \
   : canon_hash (X, M)) & HASH_MASK)
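
/* Illustrative note (not in the original): for a pseudo register, HASH
   mixes the REG code with the register's quantity number rather than its
   register number, so two pseudos currently known to hold the same value
   hash to the same bucket.  For example, if REG_QTY returns quantity 100
   for both pseudo 100 and pseudo 101, HASH yields
   (((unsigned) REG << 7) + 100) & HASH_MASK for either of them.  */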
/* Determine whether register number N is considered a fixed register for CSE.
   It is desirable to replace other regs with fixed regs, to reduce need for
   non-fixed hard regs.
   A reg wins if it is either the frame pointer or designated as fixed.  */
#define FIXED_REGNO_P(N)  \
  ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
   || fixed_regs[N] || global_regs[N])

/* Compute cost of X, as stored in the `cost' field of a table_elt.  Fixed
   hard registers and pointers into the frame are the cheapest with a cost
   of 0.  Next come pseudos with a cost of one and other hard registers with
   a cost of 2.  Aside from these special cases, call `rtx_cost'.  */

#define CHEAP_REGNO(N) \
  ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
   || (N) == STACK_POINTER_REGNUM || (N) == ARG_POINTER_REGNUM \
   || ((N) >= FIRST_VIRTUAL_REGISTER && (N) <= LAST_VIRTUAL_REGISTER) \
   || ((N) < FIRST_PSEUDO_REGISTER \
       && FIXED_REGNO_P (N) && REGNO_REG_CLASS (N) != NO_REGS))

/* A register is cheap if it is a user variable assigned to the register
   or if its register number always corresponds to a cheap register.  */

#define CHEAP_REG(N) \
  ((REG_USERVAR_P (N) && REGNO (N) < FIRST_PSEUDO_REGISTER) \
   || CHEAP_REGNO (REGNO (N)))

#define COST(X) \
  (GET_CODE (X) == REG \
   ? (CHEAP_REG (X) ? 0 \
      : REGNO (X) >= FIRST_PSEUDO_REGISTER ? 1 \
      : 2) \
   : notreg_cost (X))
/* Get the info associated with register N.  */

#define GET_CSE_REG_INFO(N) \
  (((N) == cached_regno && cached_cse_reg_info) \
   ? cached_cse_reg_info : get_cse_reg_info ((N)))

/* Get the number of times this register has been updated in this
   basic block.  */

#define REG_TICK(N) ((GET_CSE_REG_INFO (N))->reg_tick)

/* Get the point at which REG was recorded in the table.  */

#define REG_IN_TABLE(N) ((GET_CSE_REG_INFO (N))->reg_in_table)

/* Get the quantity number for REG.  */

#define REG_QTY(N) ((GET_CSE_REG_INFO (N))->reg_qty)

/* Determine if the quantity number for register N represents a valid index
   into the qty_table.  */

#define REGNO_QTY_VALID_P(N) (REG_QTY (N) != (N))
#ifdef ADDRESS_COST
/* The ADDRESS_COST macro does not deal with ADDRESSOF nodes.  But,
   during CSE, such nodes are present.  Using an ADDRESSOF node which
   refers to the address of a REG is a good thing because we can then
   turn (MEM (ADDRESSOF (REG))) into just plain REG.  */
#define CSE_ADDRESS_COST(RTX) \
  ((GET_CODE (RTX) == ADDRESSOF && REG_P (XEXP ((RTX), 0))) \
   ? -1 : ADDRESS_COST (RTX))
#endif
static struct table_elt *table[HASH_SIZE];

/* Chain of `struct table_elt's made so far for this function
   but currently removed from the table.  */

static struct table_elt *free_element_chain;

/* Number of `struct table_elt' structures made so far for this function.  */

static int n_elements_made;

/* Maximum value `n_elements_made' has had so far in this compilation
   for functions previously processed.  */

static int max_elements_made;

/* Surviving equivalence class when two equivalence classes are merged
   by recording the effects of a jump in the last insn.  Zero if the
   last insn was not a conditional jump.  */

static struct table_elt *last_jump_equiv_class;

/* Set to the cost of a constant pool reference if one was found for a
   symbolic constant.  If this was found, it means we should try to
   convert constants into constant pool entries if they don't fit in
   the insn.  */

static int constant_pool_entries_cost;

/* Define maximum length of a branch path.  */

#define PATHLENGTH 10
/* This data describes a block that will be processed by cse_basic_block.  */

struct cse_basic_block_data
{
  /* Lowest CUID value of insns in block.  */
  int low_cuid;
  /* Highest CUID value of insns in block.  */
  int high_cuid;
  /* Total number of SETs in block.  */
  int nsets;
  /* Last insn in the block.  */
  rtx last;
  /* Size of current branch path, if any.  */
  int path_size;
  /* Current branch path, indicating which branches will be taken.  */
  struct branch_path
    {
      /* The branch insn.  */
      rtx branch;
      /* Whether it should be taken or not.  AROUND is the same as taken
	 except that it is used when the destination label is not preceded
	 by a BARRIER.  */
      enum taken {TAKEN, NOT_TAKEN, AROUND} status;
    } path[PATHLENGTH];
};
/* Nonzero if X has the form (PLUS frame-pointer integer).  We check for
   virtual regs here because the simplify_*_operation routines are called
   by integrate.c, which is called before virtual register instantiation.

   ?!? FIXED_BASE_PLUS_P and NONZERO_BASE_PLUS_P need to move into
   a header file so that their definitions can be shared with the
   simplification routines in simplify-rtx.c.  Until then, do not
   change these macros without also changing the copy in simplify-rtx.c.  */

#define FIXED_BASE_PLUS_P(X) \
  ((X) == frame_pointer_rtx || (X) == hard_frame_pointer_rtx \
   || ((X) == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]) \
   || (X) == virtual_stack_vars_rtx \
   || (X) == virtual_incoming_args_rtx \
   || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
       && (XEXP (X, 0) == frame_pointer_rtx \
	   || XEXP (X, 0) == hard_frame_pointer_rtx \
	   || ((X) == arg_pointer_rtx \
	       && fixed_regs[ARG_POINTER_REGNUM]) \
	   || XEXP (X, 0) == virtual_stack_vars_rtx \
	   || XEXP (X, 0) == virtual_incoming_args_rtx)) \
   || GET_CODE (X) == ADDRESSOF)

/* Similar, but also allows reference to the stack pointer.

   This used to include FIXED_BASE_PLUS_P, however, we can't assume that
   arg_pointer_rtx by itself is nonzero, because on at least one machine,
   the i960, the arg pointer is zero when it is unused.  */

#define NONZERO_BASE_PLUS_P(X) \
  ((X) == frame_pointer_rtx || (X) == hard_frame_pointer_rtx \
   || (X) == virtual_stack_vars_rtx \
   || (X) == virtual_incoming_args_rtx \
   || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
       && (XEXP (X, 0) == frame_pointer_rtx \
	   || XEXP (X, 0) == hard_frame_pointer_rtx \
	   || ((X) == arg_pointer_rtx \
	       && fixed_regs[ARG_POINTER_REGNUM]) \
	   || XEXP (X, 0) == virtual_stack_vars_rtx \
	   || XEXP (X, 0) == virtual_incoming_args_rtx)) \
   || (X) == stack_pointer_rtx \
   || (X) == virtual_stack_dynamic_rtx \
   || (X) == virtual_outgoing_args_rtx \
   || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
       && (XEXP (X, 0) == stack_pointer_rtx \
	   || XEXP (X, 0) == virtual_stack_dynamic_rtx \
	   || XEXP (X, 0) == virtual_outgoing_args_rtx)) \
   || GET_CODE (X) == ADDRESSOF)
static int notreg_cost PROTO((rtx));
static void new_basic_block PROTO((void));
static void make_new_qty PROTO((int, enum machine_mode));
static void make_regs_eqv PROTO((int, int));
static void delete_reg_equiv PROTO((int));
static int mention_regs PROTO((rtx));
static int insert_regs PROTO((rtx, struct table_elt *, int));
static void remove_from_table PROTO((struct table_elt *, unsigned));
static struct table_elt *lookup PROTO((rtx, unsigned, enum machine_mode)),
       *lookup_for_remove PROTO((rtx, unsigned, enum machine_mode));
static rtx lookup_as_function PROTO((rtx, enum rtx_code));
static struct table_elt *insert PROTO((rtx, struct table_elt *, unsigned,
				       enum machine_mode));
static void merge_equiv_classes PROTO((struct table_elt *,
				       struct table_elt *));
static void invalidate PROTO((rtx, enum machine_mode));
static int cse_rtx_varies_p PROTO((rtx));
static void remove_invalid_refs PROTO((int));
static void remove_invalid_subreg_refs PROTO((int, int, enum machine_mode));
static void rehash_using_reg PROTO((rtx));
static void invalidate_memory PROTO((void));
static void invalidate_for_call PROTO((void));
static rtx use_related_value PROTO((rtx, struct table_elt *));
static unsigned canon_hash PROTO((rtx, enum machine_mode));
static unsigned safe_hash PROTO((rtx, enum machine_mode));
static int exp_equiv_p PROTO((rtx, rtx, int, int));
static rtx canon_reg PROTO((rtx, rtx));
static void find_best_addr PROTO((rtx, rtx *));
static enum rtx_code find_comparison_args PROTO((enum rtx_code, rtx *, rtx *,
						 enum machine_mode *,
						 enum machine_mode *));
static rtx fold_rtx PROTO((rtx, rtx));
static rtx equiv_constant PROTO((rtx));
static void record_jump_equiv PROTO((rtx, int));
static void record_jump_cond PROTO((enum rtx_code, enum machine_mode,
				    rtx, rtx, int));
static void cse_insn PROTO((rtx, rtx));
static int addr_affects_sp_p PROTO((rtx));
static void invalidate_from_clobbers PROTO((rtx));
static rtx cse_process_notes PROTO((rtx, rtx));
static void cse_around_loop PROTO((rtx));
static void invalidate_skipped_set PROTO((rtx, rtx, void *));
static void invalidate_skipped_block PROTO((rtx));
static void cse_check_loop_start PROTO((rtx, rtx, void *));
static void cse_set_around_loop PROTO((rtx, rtx, rtx));
static rtx cse_basic_block PROTO((rtx, rtx, struct branch_path *, int));
static void count_reg_usage PROTO((rtx, int *, rtx, int));
extern void dump_class PROTO((struct table_elt *));
static struct cse_reg_info *get_cse_reg_info PROTO((int));

static void flush_hash_table PROTO((void));
/* Dump the expressions in the equivalence class indicated by CLASSP.
   This function is used only for debugging.  */
void
dump_class (classp)
     struct table_elt *classp;
{
  struct table_elt *elt;

  fprintf (stderr, "Equivalence chain for ");
  print_rtl (stderr, classp->exp);
  fprintf (stderr, ": \n");

  for (elt = classp->first_same_value; elt; elt = elt->next_same_value)
    {
      print_rtl (stderr, elt->exp);
      fprintf (stderr, "\n");
    }
}
/* Internal function, to compute cost when X is not a register; called
   from COST macro to keep it simple.  */

static int
notreg_cost (x)
     rtx x;
{
  return ((GET_CODE (x) == SUBREG
	   && GET_CODE (SUBREG_REG (x)) == REG
	   && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
	   && GET_MODE_CLASS (GET_MODE (SUBREG_REG (x))) == MODE_INT
	   && (GET_MODE_SIZE (GET_MODE (x))
	       < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
	   && subreg_lowpart_p (x)
	   && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (GET_MODE (x)),
				     GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))))
	  ? (CHEAP_REG (SUBREG_REG (x)) ? 0
	     : (REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER ? 1
		: 2))
	  : rtx_cost (x, SET) * 2);
}
/* Return the right cost to give to an operation
   to make the cost of the corresponding register-to-register instruction
   N times that of a fast register-to-register instruction.  */

#define COSTS_N_INSNS(N) ((N) * 4 - 2)
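
/* Illustrative note (not in the original): COSTS_N_INSNS (1) == 2, the
   cost of a single fast register-to-register insn, so the defaults below
   (COSTS_N_INSNS (5) == 18 for MULT, COSTS_N_INSNS (7) == 26 for the
   division and modulus codes) weigh those operations as five and seven
   such insns respectively.  */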
/* Return an estimate of the cost of computing rtx X.
   One use is in cse, to decide which expression to keep in the hash table.
   Another is in rtl generation, to pick the cheapest way to multiply.
   Other uses like the latter are expected in the future.  */

int
rtx_cost (x, outer_code)
     rtx x;
     enum rtx_code outer_code ATTRIBUTE_UNUSED;
{
  register int i, j;
  register enum rtx_code code;
  register const char *fmt;
  register int total;

  if (x == 0)
    return 0;

  /* Compute the default costs of certain things.
     Note that RTX_COSTS can override the defaults.  */

  code = GET_CODE (x);
  switch (code)
    {
    case MULT:
      /* Count multiplication by 2**n as a shift,
	 because if we are considering it, we would output it as a shift.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
	  && exact_log2 (INTVAL (XEXP (x, 1))) >= 0)
	total = 2;
      else
	total = COSTS_N_INSNS (5);
      break;
    case DIV:
    case UDIV:
    case MOD:
    case UMOD:
      total = COSTS_N_INSNS (7);
      break;
    case USE:
      /* Used in loop.c and combine.c as a marker.  */
      total = 0;
      break;
    case ASM_OPERANDS:
      /* We don't want these to be used in substitutions because
	 we have no way of validating the resulting insn.  So assign
	 anything containing an ASM_OPERANDS a very high cost.  */
      total = 1000;
      break;
    default:
      total = 2;
    }

  switch (code)
    {
    case REG:
      return ! CHEAP_REG (x);

    case SUBREG:
      /* If we can't tie these modes, make this expensive.  The larger
	 the mode, the more expensive it is.  */
      if (! MODES_TIEABLE_P (GET_MODE (x), GET_MODE (SUBREG_REG (x))))
	return COSTS_N_INSNS (2
			      + GET_MODE_SIZE (GET_MODE (x)) / UNITS_PER_WORD);
      return 2;
#ifdef RTX_COSTS
      RTX_COSTS (x, code, outer_code);
#endif
#ifdef CONST_COSTS
      CONST_COSTS (x, code, outer_code);
#endif

    default:
#ifdef DEFAULT_RTX_COSTS
      DEFAULT_RTX_COSTS (x, code, outer_code);
#endif
      break;
    }

  /* Sum the costs of the sub-rtx's, plus cost of this operation,
     which is already in total.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      total += rtx_cost (XEXP (x, i), code);
    else if (fmt[i] == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
	total += rtx_cost (XVECEXP (x, i, j), code);

  return total;
}
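
/* Illustrative sketch (not part of the original file): how a caller might
   use rtx_cost to pick between two equivalent expressions, as the pass
   does when ordering an equivalence class.  The helper name is
   hypothetical.  */
#if 0
static rtx
cheaper_of (a, b)
     rtx a, b;
{
  /* SET as the outer code asks for the cost of computing the value
     into a register.  */
  return rtx_cost (a, SET) <= rtx_cost (b, SET) ? a : b;
}
#endif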
static struct cse_reg_info *
get_cse_reg_info (regno)
     int regno;
{
  struct cse_reg_info **hash_head = &reg_hash[REGHASH_FN (regno)];
  struct cse_reg_info *p;

  for (p = *hash_head; p != NULL; p = p->hash_next)
    if (p->regno == regno)
      break;

  if (p == NULL)
    {
      /* Get a new cse_reg_info structure.  */
      if (cse_reg_info_free_list)
	{
	  p = cse_reg_info_free_list;
	  cse_reg_info_free_list = p->next;
	}
      else
	p = (struct cse_reg_info *) xmalloc (sizeof (struct cse_reg_info));

      /* Insert into hash table.  */
      p->hash_next = *hash_head;
      *hash_head = p;

      /* Initialize it.  */
      p->reg_tick = 1;
      p->reg_in_table = -1;
      p->reg_qty = regno;
      p->regno = regno;
      p->next = cse_reg_info_used_list;
      cse_reg_info_used_list = p;
      if (!cse_reg_info_used_list_end)
	cse_reg_info_used_list_end = p;
    }

  /* Cache this lookup; we tend to be looking up information about the
     same register several times in a row.  */
  cached_regno = regno;
  cached_cse_reg_info = p;

  return p;
}
/* Clear the hash table and initialize each register with its own quantity,
   for a new basic block.  */

static void
new_basic_block ()
{
  register int i;

  next_qty = max_reg;

  /* Clear out hash table state for this pass.  */

  bzero ((char *) reg_hash, sizeof reg_hash);

  if (cse_reg_info_used_list)
    {
      cse_reg_info_used_list_end->next = cse_reg_info_free_list;
      cse_reg_info_free_list = cse_reg_info_used_list;
      cse_reg_info_used_list = cse_reg_info_used_list_end = 0;
    }
  cached_cse_reg_info = 0;

  CLEAR_HARD_REG_SET (hard_regs_in_table);

  /* The per-quantity values used to be initialized here, but it is
     much faster to initialize each as it is made in `make_new_qty'.  */

  for (i = 0; i < HASH_SIZE; i++)
    {
      struct table_elt *first;

      first = table[i];
      if (first != NULL)
	{
	  struct table_elt *last = first;

	  table[i] = NULL;

	  while (last->next_same_hash != NULL)
	    last = last->next_same_hash;

	  /* Now relink this entire hash chain into
	     the free element list.  */

	  last->next_same_hash = free_element_chain;
	  free_element_chain = first;
	}
    }

  prev_insn = 0;

#ifdef HAVE_cc0
  prev_insn_cc0 = 0;
#endif
}
/* Say that register REG contains a quantity in mode MODE not in any
   register before and initialize that quantity.  */

static void
make_new_qty (reg, mode)
     register int reg;
     register enum machine_mode mode;
{
  register int q;
  register struct qty_table_elem *ent;
  register struct reg_eqv_elem *eqv;

  if (next_qty >= max_qty)
    abort ();

  q = REG_QTY (reg) = next_qty++;
  ent = &qty_table[q];
  ent->first_reg = reg;
  ent->last_reg = reg;
  ent->mode = mode;
  ent->const_rtx = ent->const_insn = NULL_RTX;
  ent->comparison_code = UNKNOWN;

  eqv = &reg_eqv_table[reg];
  eqv->next = eqv->prev = -1;
}
/* Make reg NEW equivalent to reg OLD.
   OLD is not changing; NEW is.  */

static void
make_regs_eqv (new, old)
     register int new, old;
{
  register int lastr, firstr;
  register int q = REG_QTY (old);
  register struct qty_table_elem *ent;

  ent = &qty_table[q];

  /* Nothing should become eqv until it has a "non-invalid" qty number.  */
  if (! REGNO_QTY_VALID_P (old))
    abort ();

  REG_QTY (new) = q;
  firstr = ent->first_reg;
  lastr = ent->last_reg;

  /* Prefer fixed hard registers to anything.  Prefer pseudo regs to other
     hard regs.  Among pseudos, if NEW will live longer than any other reg
     of the same qty, and that is beyond the current basic block,
     make it the new canonical replacement for this qty.  */
  if (! (firstr < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (firstr))
      /* Certain fixed registers might be of the class NO_REGS.  This means
	 that not only can they not be allocated by the compiler, but
	 they cannot be used in substitutions or canonicalizations
	 either.  */
      && (new >= FIRST_PSEUDO_REGISTER || REGNO_REG_CLASS (new) != NO_REGS)
      && ((new < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (new))
	  || (new >= FIRST_PSEUDO_REGISTER
	      && (firstr < FIRST_PSEUDO_REGISTER
		  || ((uid_cuid[REGNO_LAST_UID (new)] > cse_basic_block_end
		       || (uid_cuid[REGNO_FIRST_UID (new)]
			   < cse_basic_block_start))
		      && (uid_cuid[REGNO_LAST_UID (new)]
			  > uid_cuid[REGNO_LAST_UID (firstr)]))))))
    {
      reg_eqv_table[firstr].prev = new;
      reg_eqv_table[new].next = firstr;
      reg_eqv_table[new].prev = -1;
      ent->first_reg = new;
    }
  else
    {
      /* If NEW is a hard reg (known to be non-fixed), insert at end.
	 Otherwise, insert before any non-fixed hard regs that are at the
	 end.  Registers of class NO_REGS cannot be used as an
	 equivalent for anything.  */
      while (lastr < FIRST_PSEUDO_REGISTER && reg_eqv_table[lastr].prev >= 0
	     && (REGNO_REG_CLASS (lastr) == NO_REGS || ! FIXED_REGNO_P (lastr))
	     && new >= FIRST_PSEUDO_REGISTER)
	lastr = reg_eqv_table[lastr].prev;
      reg_eqv_table[new].next = reg_eqv_table[lastr].next;
      if (reg_eqv_table[lastr].next >= 0)
	reg_eqv_table[reg_eqv_table[lastr].next].prev = new;
      else
	qty_table[q].last_reg = new;
      reg_eqv_table[lastr].next = new;
      reg_eqv_table[new].prev = lastr;
    }
}
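
/* Illustrative sketch (not part of the original file): after
   make_regs_eqv, every register currently holding quantity Q can be
   visited by walking the chain rooted at qty_table[Q].first_reg, with
   the preferred replacement (fixed hard reg, then long-lived pseudo)
   first.  The debugging helper below is hypothetical.  */
#if 0
static void
debug_qty_regs (q)
     int q;
{
  int r;

  for (r = qty_table[q].first_reg; r != -1; r = reg_eqv_table[r].next)
    fprintf (stderr, " %d", r);
  fprintf (stderr, "\n");
}
#endif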
/* Remove REG from its equivalence class.  */

static void
delete_reg_equiv (reg)
     register int reg;
{
  register struct qty_table_elem *ent;
  register int q = REG_QTY (reg);
  register int p, n;

  /* If invalid, do nothing.  */
  if (q == reg)
    return;

  ent = &qty_table[q];

  p = reg_eqv_table[reg].prev;
  n = reg_eqv_table[reg].next;

  if (n != -1)
    reg_eqv_table[n].prev = p;
  else
    ent->last_reg = p;
  if (p != -1)
    reg_eqv_table[p].next = n;
  else
    ent->first_reg = n;

  REG_QTY (reg) = reg;
}
/* Remove any invalid expressions from the hash table
   that refer to any of the registers contained in expression X.

   Make sure that newly inserted references to those registers
   as subexpressions will be considered valid.

   mention_regs is not called when a register itself
   is being stored in the table.

   Return 1 if we have done something that may have changed the hash code
   of X.  */

static int
mention_regs (x)
     rtx x;
{
  register enum rtx_code code;
  register int i, j;
  register const char *fmt;
  register int changed = 0;

  if (x == 0)
    return 0;

  code = GET_CODE (x);
  if (code == REG)
    {
      register int regno = REGNO (x);
      register int endregno
	= regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
		   : HARD_REGNO_NREGS (regno, GET_MODE (x)));
      int i;

      for (i = regno; i < endregno; i++)
	{
	  if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
	    remove_invalid_refs (i);

	  REG_IN_TABLE (i) = REG_TICK (i);
	}

      return 0;
    }

  /* If this is a SUBREG, we don't want to discard other SUBREGs of the same
     pseudo if they don't use overlapping words.  We handle only pseudos
     here for simplicity.  */
  if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == REG
      && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER)
    {
      int i = REGNO (SUBREG_REG (x));

      if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
	{
	  /* If reg_tick has been incremented more than once since
	     reg_in_table was last set, that means that the entire
	     register has been set before, so discard anything memorized
	     for the entire register, including all SUBREG expressions.  */
	  if (REG_IN_TABLE (i) != REG_TICK (i) - 1)
	    remove_invalid_refs (i);
	  else
	    remove_invalid_subreg_refs (i, SUBREG_WORD (x), GET_MODE (x));
	}

      REG_IN_TABLE (i) = REG_TICK (i);
      return 0;
    }

  /* If X is a comparison or a COMPARE and either operand is a register
     that does not have a quantity, give it one.  This is so that a later
     call to record_jump_equiv won't cause X to be assigned a different
     hash code and not found in the table after that call.

     It is not necessary to do this here, since rehash_using_reg can
     fix up the table later, but doing this here eliminates the need to
     call that expensive function in the most common case where the only
     use of the register is in the comparison.  */

  if (code == COMPARE || GET_RTX_CLASS (code) == '<')
    {
      if (GET_CODE (XEXP (x, 0)) == REG
	  && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
	if (insert_regs (XEXP (x, 0), NULL_PTR, 0))
	  {
	    rehash_using_reg (XEXP (x, 0));
	    changed = 1;
	  }

      if (GET_CODE (XEXP (x, 1)) == REG
	  && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
	if (insert_regs (XEXP (x, 1), NULL_PTR, 0))
	  {
	    rehash_using_reg (XEXP (x, 1));
	    changed = 1;
	  }
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      changed |= mention_regs (XEXP (x, i));
    else if (fmt[i] == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
	changed |= mention_regs (XVECEXP (x, i, j));

  return changed;
}
/* Update the register quantities for inserting X into the hash table
   with a value equivalent to CLASSP.
   (If the class does not contain a REG, it is irrelevant.)
   If MODIFIED is nonzero, X is a destination; it is being modified.
   Note that delete_reg_equiv should be called on a register
   before insert_regs is done on that register with MODIFIED != 0.

   Nonzero value means that elements of reg_qty have changed
   so X's hash code may be different.  */

static int
insert_regs (x, classp, modified)
     rtx x;
     struct table_elt *classp;
     int modified;
{
  if (GET_CODE (x) == REG)
    {
      register int regno = REGNO (x);
      register int qty_valid;

      /* If REGNO is in the equivalence table already but is of the
	 wrong mode for that equivalence, don't do anything here.  */

      qty_valid = REGNO_QTY_VALID_P (regno);
      if (qty_valid)
	{
	  struct qty_table_elem *ent = &qty_table[REG_QTY (regno)];

	  if (ent->mode != GET_MODE (x))
	    return 0;
	}

      if (modified || ! qty_valid)
	{
	  if (classp)
	    for (classp = classp->first_same_value;
		 classp != 0;
		 classp = classp->next_same_value)
	      if (GET_CODE (classp->exp) == REG
		  && GET_MODE (classp->exp) == GET_MODE (x))
		{
		  make_regs_eqv (regno, REGNO (classp->exp));
		  return 1;
		}

	  make_new_qty (regno, GET_MODE (x));
	  return 1;
	}

      return 0;
    }

  /* If X is a SUBREG, we will likely be inserting the inner register in the
     table.  If that register doesn't have an assigned quantity number at
     this point but does later, the insertion that we will be doing now will
     not be accessible because its hash code will have changed.  So assign
     a quantity number now.  */

  else if (GET_CODE (x) == SUBREG && GET_CODE (SUBREG_REG (x)) == REG
	   && ! REGNO_QTY_VALID_P (REGNO (SUBREG_REG (x))))
    {
      int regno = REGNO (SUBREG_REG (x));

      insert_regs (SUBREG_REG (x), NULL_PTR, 0);
      /* Mention_regs checks if REG_TICK is exactly one larger than
	 REG_IN_TABLE to find out if there was only a single preceding
	 invalidation - for the SUBREG - or another one, which would be
	 for the full register.  Since we don't invalidate the SUBREG
	 here first, we might have to bump up REG_TICK so that mention_regs
	 will do the right thing.  */
      if (REG_IN_TABLE (regno) >= 0
	  && REG_TICK (regno) == REG_IN_TABLE (regno) + 1)
	REG_TICK (regno)++;
      mention_regs (x);
      return 1;
    }
  else
    return mention_regs (x);
}
/* Look in or update the hash table.  */

/* Remove table element ELT from use in the table.
   HASH is its hash code, made using the HASH macro.
   It's an argument because often that is known in advance
   and we save much time not recomputing it.  */

static void
remove_from_table (elt, hash)
     register struct table_elt *elt;
     unsigned hash;
{
  if (elt == 0)
    return;

  /* Mark this element as removed.  See cse_insn.  */
  elt->first_same_value = 0;

  /* Remove the table element from its equivalence class.  */

  {
    register struct table_elt *prev = elt->prev_same_value;
    register struct table_elt *next = elt->next_same_value;

    if (next) next->prev_same_value = prev;

    if (prev)
      prev->next_same_value = next;
    else
      {
	register struct table_elt *newfirst = next;
	while (next)
	  {
	    next->first_same_value = newfirst;
	    next = next->next_same_value;
	  }
      }
  }

  /* Remove the table element from its hash bucket.  */

  {
    register struct table_elt *prev = elt->prev_same_hash;
    register struct table_elt *next = elt->next_same_hash;

    if (next) next->prev_same_hash = prev;

    if (prev)
      prev->next_same_hash = next;
    else if (table[hash] == elt)
      table[hash] = next;
    else
      {
	/* This entry is not in the proper hash bucket.  This can happen
	   when two classes were merged by `merge_equiv_classes'.  Search
	   for the hash bucket that it heads.  This happens only very
	   rarely, so the cost is acceptable.  */
	for (hash = 0; hash < HASH_SIZE; hash++)
	  if (table[hash] == elt)
	    table[hash] = next;
      }
  }

  /* Remove the table element from its related-value circular chain.  */

  if (elt->related_value != 0 && elt->related_value != elt)
    {
      register struct table_elt *p = elt->related_value;
      while (p->related_value != elt)
	p = p->related_value;
      p->related_value = elt->related_value;
      if (p->related_value == p)
	p->related_value = 0;
    }

  /* Now add it to the free element chain.  */
  elt->next_same_hash = free_element_chain;
  free_element_chain = elt;
}
/* Look up X in the hash table and return its table element,
   or 0 if X is not in the table.

   MODE is the machine-mode of X, or if X is an integer constant
   with VOIDmode then MODE is the mode with which X will be used.

   Here we are satisfied to find an expression whose tree structure
   looks like X.  */

static struct table_elt *
lookup (x, hash, mode)
     rtx x;
     unsigned hash;
     enum machine_mode mode;
{
  register struct table_elt *p;

  for (p = table[hash]; p; p = p->next_same_hash)
    if (mode == p->mode && ((x == p->exp && GET_CODE (x) == REG)
			    || exp_equiv_p (x, p->exp, GET_CODE (x) != REG, 0)))
      return p;

  return 0;
}
/* Like `lookup' but don't care whether the table element uses invalid regs.
   Also ignore discrepancies in the machine mode of a register.  */

static struct table_elt *
lookup_for_remove (x, hash, mode)
     rtx x;
     unsigned hash;
     enum machine_mode mode;
{
  register struct table_elt *p;

  if (GET_CODE (x) == REG)
    {
      int regno = REGNO (x);

      /* Don't check the machine mode when comparing registers;
	 invalidating (REG:SI 0) also invalidates (REG:DF 0).  */
      for (p = table[hash]; p; p = p->next_same_hash)
	if (GET_CODE (p->exp) == REG
	    && REGNO (p->exp) == regno)
	  return p;
    }
  else
    {
      for (p = table[hash]; p; p = p->next_same_hash)
	if (mode == p->mode && (x == p->exp || exp_equiv_p (x, p->exp, 0, 0)))
	  return p;
    }

  return 0;
}
/* Look for an expression equivalent to X and with code CODE.
   If one is found, return that expression.  */

static rtx
lookup_as_function (x, code)
     rtx x;
     enum rtx_code code;
{
  register struct table_elt *p = lookup (x, safe_hash (x, VOIDmode) & HASH_MASK,
					 GET_MODE (x));

  /* If we are looking for a CONST_INT, the mode doesn't really matter, as
     long as we are narrowing.  So if we looked in vain for a mode narrower
     than word_mode before, look for word_mode now.  */
  if (p == 0 && code == CONST_INT
      && GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (word_mode))
    {
      x = copy_rtx (x);
      PUT_MODE (x, word_mode);
      p = lookup (x, safe_hash (x, VOIDmode) & HASH_MASK, word_mode);
    }

  if (p == 0)
    return 0;

  for (p = p->first_same_value; p; p = p->next_same_value)
    {
      if (GET_CODE (p->exp) == code
	  /* Make sure this is a valid entry in the table.  */
	  && exp_equiv_p (p->exp, p->exp, 1, 0))
	return p->exp;
    }

  return 0;
}
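
/* Illustrative sketch (not part of the original file): a typical use of
   lookup_as_function is to ask whether some rtx is known to be equivalent
   to an expression with a particular code, e.g. whether REG currently
   holds a known PLUS.  The helper name is hypothetical.  */
#if 0
static rtx
known_plus_form (reg)
     rtx reg;
{
  /* Returns e.g. (plus (reg A) (const_int 4)) if such an equivalence
     is recorded, or 0.  */
  return lookup_as_function (reg, PLUS);
}
#endif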
/* Insert X in the hash table, assuming HASH is its hash code
   and CLASSP is an element of the class it should go in
   (or 0 if a new class should be made).
   It is inserted at the proper position to keep the class in
   the order cheapest first.

   MODE is the machine-mode of X, or if X is an integer constant
   with VOIDmode then MODE is the mode with which X will be used.

   For elements of equal cheapness, the most recent one
   goes in front, except that the first element in the list
   remains first unless a cheaper element is added.  The order of
   pseudo-registers does not matter, as canon_reg will be called to
   find the cheapest when a register is retrieved from the table.

   The in_memory field in the hash table element is set to 0.
   The caller must set it nonzero if appropriate.

   You should call insert_regs (X, CLASSP, MODIFY) before calling here,
   and if insert_regs returns a nonzero value
   you must then recompute its hash code before calling here.

   If necessary, update table showing constant values of quantities.  */

#define CHEAPER(X, Y) ((X)->cost < (Y)->cost)

static struct table_elt *
insert (x, classp, hash, mode)
     register rtx x;
     register struct table_elt *classp;
     unsigned hash;
     enum machine_mode mode;
{
  register struct table_elt *elt;

  /* If X is a register and we haven't made a quantity for it,
     something is wrong.  */
  if (GET_CODE (x) == REG && ! REGNO_QTY_VALID_P (REGNO (x)))
    abort ();

  /* If X is a hard register, show it is being put in the table.  */
  if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
    {
      int regno = REGNO (x);
      int endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
      int i;

      for (i = regno; i < endregno; i++)
	SET_HARD_REG_BIT (hard_regs_in_table, i);
    }

  /* If X is a label, show we recorded it.  */
  if (GET_CODE (x) == LABEL_REF
      || (GET_CODE (x) == CONST && GET_CODE (XEXP (x, 0)) == PLUS
	  && GET_CODE (XEXP (XEXP (x, 0), 0)) == LABEL_REF))
    recorded_label_ref = 1;

  /* Put an element for X into the right hash bucket.  */

  elt = free_element_chain;
  if (elt)
    free_element_chain = elt->next_same_hash;
  else
    {
      n_elements_made++;
      elt = (struct table_elt *) oballoc (sizeof (struct table_elt));
    }

  elt->exp = x;
  elt->cost = COST (x);
  elt->next_same_value = 0;
  elt->prev_same_value = 0;
  elt->next_same_hash = table[hash];
  elt->prev_same_hash = 0;
  elt->related_value = 0;
  elt->in_memory = 0;
  elt->mode = mode;
  elt->is_const = (CONSTANT_P (x)
		   /* GNU C++ takes advantage of this for `this'
		      (and other const values).  */
		   || (RTX_UNCHANGING_P (x)
		       && GET_CODE (x) == REG
		       && REGNO (x) >= FIRST_PSEUDO_REGISTER)
		   || FIXED_BASE_PLUS_P (x));

  if (table[hash])
    table[hash]->prev_same_hash = elt;
  table[hash] = elt;

  /* Put it into the proper value-class.  */
  if (classp)
    {
      classp = classp->first_same_value;
      if (CHEAPER (elt, classp))
	/* Insert at the head of the class.  */
	{
	  register struct table_elt *p;
	  elt->next_same_value = classp;
	  classp->prev_same_value = elt;
	  elt->first_same_value = elt;

	  for (p = classp; p; p = p->next_same_value)
	    p->first_same_value = elt;
	}
      else
	{
	  /* Insert not at head of the class.  */
	  /* Put it after the last element cheaper than X.  */
	  register struct table_elt *p, *next;
	  for (p = classp; (next = p->next_same_value) && CHEAPER (next, elt);
	       p = next)
	    ;
	  /* Put it after P and before NEXT.  */
	  elt->next_same_value = next;
	  if (next)
	    next->prev_same_value = elt;
	  elt->prev_same_value = p;
	  p->next_same_value = elt;
	  elt->first_same_value = classp;
	}
    }
  else
    elt->first_same_value = elt;

  /* If this is a constant being set equivalent to a register or a register
     being set equivalent to a constant, note the constant equivalence.

     If this is a constant, it cannot be equivalent to a different constant,
     and a constant is the only thing that can be cheaper than a register.  So
     we know the register is the head of the class (before the constant was
     inserted).

     If this is a register that is not already known equivalent to a
     constant, we must check the entire class.

     If this is a register that is already known equivalent to an insn,
     update the qtys `const_insn' to show that `this_insn' is the latest
     insn making that quantity equivalent to the constant.  */

  if (elt->is_const && classp && GET_CODE (classp->exp) == REG
      && GET_CODE (x) != REG)
    {
      int exp_q = REG_QTY (REGNO (classp->exp));
      struct qty_table_elem *exp_ent = &qty_table[exp_q];

      exp_ent->const_rtx = gen_lowpart_if_possible (exp_ent->mode, x);
      exp_ent->const_insn = this_insn;
    }

  else if (GET_CODE (x) == REG
	   && classp
	   && ! qty_table[REG_QTY (REGNO (x))].const_rtx
	   && ! elt->is_const)
    {
      register struct table_elt *p;

      for (p = classp; p != 0; p = p->next_same_value)
	{
	  if (p->is_const && GET_CODE (p->exp) != REG)
	    {
	      int x_q = REG_QTY (REGNO (x));
	      struct qty_table_elem *x_ent = &qty_table[x_q];

	      x_ent->const_rtx = gen_lowpart_if_possible (GET_MODE (x), p->exp);
	      x_ent->const_insn = this_insn;
	      break;
	    }
	}
    }

  else if (GET_CODE (x) == REG
	   && qty_table[REG_QTY (REGNO (x))].const_rtx
	   && GET_MODE (x) == qty_table[REG_QTY (REGNO (x))].mode)
    qty_table[REG_QTY (REGNO (x))].const_insn = this_insn;

  /* If this is a constant with symbolic value,
     and it has a term with an explicit integer value,
     link it up with related expressions.  */
  if (GET_CODE (x) == CONST)
    {
      rtx subexp = get_related_value (x);
      unsigned subhash;
      struct table_elt *subelt, *subelt_prev;

      if (subexp != 0)
	{
	  /* Get the integer-free subexpression in the hash table.  */
	  subhash = safe_hash (subexp, mode) & HASH_MASK;
	  subelt = lookup (subexp, subhash, mode);
	  if (subelt == 0)
	    subelt = insert (subexp, NULL_PTR, subhash, mode);
	  /* Initialize SUBELT's circular chain if it has none.  */
	  if (subelt->related_value == 0)
	    subelt->related_value = subelt;
	  /* Find the element in the circular chain that precedes SUBELT.  */
	  subelt_prev = subelt;
	  while (subelt_prev->related_value != subelt)
	    subelt_prev = subelt_prev->related_value;
	  /* Put new ELT into SUBELT's circular chain just before SUBELT.
	     This way the element that follows SUBELT is the oldest one.  */
	  elt->related_value = subelt_prev->related_value;
	  subelt_prev->related_value = elt;
	}
    }

  return elt;
}
/* Given two equivalence classes, CLASS1 and CLASS2, put all the entries from
   CLASS2 into CLASS1.  This is done when we have reached an insn which makes
   the two classes equivalent.

   CLASS1 will be the surviving class; CLASS2 should not be used after this
   call.

   Any invalid entries in CLASS2 will not be copied.  */

static void
merge_equiv_classes (class1, class2)
     struct table_elt *class1, *class2;
{
  struct table_elt *elt, *next, *new;

  /* Ensure we start with the head of the classes.  */
  class1 = class1->first_same_value;
  class2 = class2->first_same_value;

  /* If they were already equal, forget it.  */
  if (class1 == class2)
    return;

  for (elt = class2; elt; elt = next)
    {
      unsigned hash;
      rtx exp = elt->exp;
      enum machine_mode mode = elt->mode;

      next = elt->next_same_value;

      /* Remove old entry, make a new one in CLASS1's class.
	 Don't do this for invalid entries as we cannot find their
	 hash code (it also isn't necessary).  */
      if (GET_CODE (exp) == REG || exp_equiv_p (exp, exp, 1, 0))
	{
	  hash_arg_in_memory = 0;
	  hash = HASH (exp, mode);

	  if (GET_CODE (exp) == REG)
	    delete_reg_equiv (REGNO (exp));

	  remove_from_table (elt, hash);

	  if (insert_regs (exp, class1, 0))
	    {
	      rehash_using_reg (exp);
	      hash = HASH (exp, mode);
	    }
	  new = insert (exp, class1, hash, mode);
	  new->in_memory = hash_arg_in_memory;
	}
    }
}
/* Flush the entire hash table.  */

static void
flush_hash_table ()
{
  int i;
  struct table_elt *p;

  for (i = 0; i < HASH_SIZE; i++)
    for (p = table[i]; p; p = table[i])
      {
	/* Note that invalidate can remove elements
	   after P in the current hash chain.  */
	if (GET_CODE (p->exp) == REG)
	  invalidate (p->exp, p->mode);
	else
	  remove_from_table (p, i);
      }
}
1709 /* Remove from the hash table, or mark as invalid, all expressions whose
1710 values could be altered by storing in X. X is a register, a subreg, or
1711 a memory reference with nonvarying address (because, when a memory
1712 reference with a varying address is stored in, all memory references are
1713 removed by invalidate_memory so specific invalidation is superfluous).
1714 FULL_MODE, if not VOIDmode, indicates that this much should be
1715 invalidated instead of just the amount indicated by the mode of X. This
1716 is only used for bitfield stores into memory.
1718 A nonvarying address may be just a register or just a symbol reference,
1719 or it may be either of those plus a numeric offset. */
1721 static void
1722 invalidate (x, full_mode)
1723 rtx x;
1724 enum machine_mode full_mode;
1726 register int i;
1727 register struct table_elt *p;
1729 switch (GET_CODE (x))
1731 case REG:
1733 /* If X is a register, dependencies on its contents are recorded
1734 through the qty number mechanism. Just change the qty number of
1735 the register, mark it as invalid for expressions that refer to it,
1736 and remove it itself. */
1737 register int regno = REGNO (x);
1738 register unsigned hash = HASH (x, GET_MODE (x));
1740 /* Remove REGNO from any quantity list it might be on and indicate
1741 that its value might have changed. If it is a pseudo, remove its
1742 entry from the hash table.
1744 For a hard register, we do the first two actions above for any
1745 additional hard registers corresponding to X. Then, if any of these
1746 registers are in the table, we must remove any REG entries that
1747 overlap these registers. */
1749 delete_reg_equiv (regno);
1750 REG_TICK (regno)++;
1752 if (regno >= FIRST_PSEUDO_REGISTER)
1754 /* Because a register can be referenced in more than one mode,
1755 we might have to remove more than one table entry. */
1756 struct table_elt *elt;
1758 while ((elt = lookup_for_remove (x, hash, GET_MODE (x))))
1759 remove_from_table (elt, hash);
1761 else
1763 HOST_WIDE_INT in_table
1764 = TEST_HARD_REG_BIT (hard_regs_in_table, regno);
1765 int endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
1766 int tregno, tendregno;
1767 register struct table_elt *p, *next;
1769 CLEAR_HARD_REG_BIT (hard_regs_in_table, regno);
1771 for (i = regno + 1; i < endregno; i++)
1773 in_table |= TEST_HARD_REG_BIT (hard_regs_in_table, i);
1774 CLEAR_HARD_REG_BIT (hard_regs_in_table, i);
1775 delete_reg_equiv (i);
1776 REG_TICK (i)++;
1779 if (in_table)
1780 for (hash = 0; hash < HASH_SIZE; hash++)
1781 for (p = table[hash]; p; p = next)
1783 next = p->next_same_hash;
1785 if (GET_CODE (p->exp) != REG
1786 || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
1787 continue;
1789 tregno = REGNO (p->exp);
1790 tendregno
1791 = tregno + HARD_REGNO_NREGS (tregno, GET_MODE (p->exp));
1792 if (tendregno > regno && tregno < endregno)
1793 remove_from_table (p, hash);
1797 return;
1799 case SUBREG:
1800 invalidate (SUBREG_REG (x), VOIDmode);
1801 return;
1803 case PARALLEL:
1804 for (i = XVECLEN (x, 0) - 1; i >= 0 ; --i)
1805 invalidate (XVECEXP (x, 0, i), VOIDmode);
1806 return;
1808 case EXPR_LIST:
1809 /* This is part of a disjoint return value; extract the location in
1810 question ignoring the offset. */
1811 invalidate (XEXP (x, 0), VOIDmode);
1812 return;
1814 case MEM:
1815 /* Remove all hash table elements that refer to overlapping pieces of
1816 memory. */
1817 if (full_mode == VOIDmode)
1818 full_mode = GET_MODE (x);
1820 for (i = 0; i < HASH_SIZE; i++)
1822 register struct table_elt *next;
1824 for (p = table[i]; p; p = next)
1826 next = p->next_same_hash;
1827 if (p->in_memory
1828 && (GET_CODE (p->exp) != MEM
1829 || true_dependence (x, full_mode, p->exp,
1830 cse_rtx_varies_p)))
1831 remove_from_table (p, i);
1834 return;
1836 default:
1837 abort ();
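/* An illustrative, standalone sketch (hypothetical, not part of GCC) of
   the overlap test used in the REG case above.  Hard registers occupy
   half-open ranges of register numbers; two ranges [REGNO, ENDREGNO)
   and [TREGNO, TENDREGNO) intersect exactly when each one starts below
   the other's end.  For example, [2, 4) and [3, 5) overlap, while
   [2, 4) and [4, 6) do not.  */
#if 0
static int
hard_reg_ranges_overlap_p (regno, endregno, tregno, tendregno)
     int regno, endregno, tregno, tendregno;
{
  return tendregno > regno && tregno < endregno;
}
#endif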
1841 /* Remove all expressions that refer to register REGNO,
1842 since they are already invalid, and we are about to
1843 mark that register valid again and don't want the old
1844 expressions to reappear as valid. */
1846 static void
1847 remove_invalid_refs (regno)
1848 int regno;
1850 register int i;
1851 register struct table_elt *p, *next;
1853 for (i = 0; i < HASH_SIZE; i++)
1854 for (p = table[i]; p; p = next)
1856 next = p->next_same_hash;
1857 if (GET_CODE (p->exp) != REG
1858 && refers_to_regno_p (regno, regno + 1, p->exp, NULL_PTR))
1859 remove_from_table (p, i);
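/* An illustrative, standalone sketch (hypothetical types and names, not
   part of GCC) of the scan-and-filter idiom above.  In contrast with
   flush_hash_table, which restarts from the bucket head because
   invalidate may delete other elements, remove_from_table removes only
   its argument, so caching NEXT before the removal is safe here.  */
#if 0
#define NBUCKETS 32
struct chain_elt { struct chain_elt *next_same_hash; };
extern struct chain_elt *buckets[NBUCKETS];
extern int doomed_p ();		/* Nonzero if the entry should go.  */
extern void remove_one ();	/* Removes exactly its argument.  */

static void
remove_matching ()
{
  int i;
  struct chain_elt *p, *next;

  for (i = 0; i < NBUCKETS; i++)
    for (p = buckets[i]; p; p = next)
      {
	next = p->next_same_hash;	/* Safe: remove_one frees only P.  */
	if (doomed_p (p))
	  remove_one (p);
      }
}
#endif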
1863 /* Likewise for a SUBREG of register REGNO, with SUBREG_WORD WORD and mode MODE. */
1864 static void
1865 remove_invalid_subreg_refs (regno, word, mode)
1866 int regno;
1867 int word;
1868 enum machine_mode mode;
1870 register int i;
1871 register struct table_elt *p, *next;
1872 int end = word + (GET_MODE_SIZE (mode) - 1) / UNITS_PER_WORD;
1874 for (i = 0; i < HASH_SIZE; i++)
1875 for (p = table[i]; p; p = next)
1877 rtx exp;
1878 next = p->next_same_hash;
1880 exp = p->exp;
1881 if (GET_CODE (p->exp) != REG
1882 && (GET_CODE (exp) != SUBREG
1883 || GET_CODE (SUBREG_REG (exp)) != REG
1884 || REGNO (SUBREG_REG (exp)) != regno
1885 || (((SUBREG_WORD (exp)
1886 + (GET_MODE_SIZE (GET_MODE (exp)) - 1) / UNITS_PER_WORD)
1887 >= word)
1888 && SUBREG_WORD (exp) <= end))
1889 && refers_to_regno_p (regno, regno + 1, p->exp, NULL_PTR))
1890 remove_from_table (p, i);
1894 /* Recompute the hash codes of any valid entries in the hash table that
1895 reference X, if X is a register, or SUBREG_REG (X) if X is a SUBREG.
1897 This is called when we make a jump equivalence. */
1899 static void
1900 rehash_using_reg (x)
1901 rtx x;
1903 unsigned int i;
1904 struct table_elt *p, *next;
1905 unsigned hash;
1907 if (GET_CODE (x) == SUBREG)
1908 x = SUBREG_REG (x);
1910 /* If X is not a register or if the register is known not to be in any
1911 valid entries in the table, we have no work to do. */
1913 if (GET_CODE (x) != REG
1914 || REG_IN_TABLE (REGNO (x)) < 0
1915 || REG_IN_TABLE (REGNO (x)) != REG_TICK (REGNO (x)))
1916 return;
1918 /* Scan all hash chains looking for valid entries that mention X.
1919 If we find one and it is in the wrong hash chain, move it. We can skip
1920 objects that are registers, since they are handled specially. */
1922 for (i = 0; i < HASH_SIZE; i++)
1923 for (p = table[i]; p; p = next)
1925 next = p->next_same_hash;
1926 if (GET_CODE (p->exp) != REG && reg_mentioned_p (x, p->exp)
1927 && exp_equiv_p (p->exp, p->exp, 1, 0)
1928 && i != (hash = safe_hash (p->exp, p->mode) & HASH_MASK))
1930 if (p->next_same_hash)
1931 p->next_same_hash->prev_same_hash = p->prev_same_hash;
1933 if (p->prev_same_hash)
1934 p->prev_same_hash->next_same_hash = p->next_same_hash;
1935 else
1936 table[i] = p->next_same_hash;
1938 p->next_same_hash = table[hash];
1939 p->prev_same_hash = 0;
1940 if (table[hash])
1941 table[hash]->prev_same_hash = p;
1942 table[hash] = p;
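/* An illustrative, standalone sketch (hypothetical, not part of GCC) of
   the relinking done above: unlink P from a doubly linked hash chain
   and push it on the front of another.  The only subtlety is updating
   the bucket head when P was the first element of its old chain.  */
#if 0
struct dchain_elt
{
  struct dchain_elt *next_same_hash, *prev_same_hash;
};

static void
move_between_buckets (p, from_bucket, to_bucket)
     struct dchain_elt *p;
     struct dchain_elt **from_bucket, **to_bucket;
{
  /* Unlink P from its current chain.  */
  if (p->next_same_hash)
    p->next_same_hash->prev_same_hash = p->prev_same_hash;
  if (p->prev_same_hash)
    p->prev_same_hash->next_same_hash = p->next_same_hash;
  else
    *from_bucket = p->next_same_hash;

  /* Push P on the front of the destination chain.  */
  p->next_same_hash = *to_bucket;
  p->prev_same_hash = 0;
  if (*to_bucket)
    (*to_bucket)->prev_same_hash = p;
  *to_bucket = p;
}
#endif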
1947 /* Remove from the hash table any expressions that are call-clobbered
1948 registers. Also update their TICK values. */
1950 static void
1951 invalidate_for_call ()
1953 int regno, endregno;
1954 int i;
1955 unsigned hash;
1956 struct table_elt *p, *next;
1957 int in_table = 0;
1959 /* Go through all the hard registers. For each that is clobbered in
1960 a CALL_INSN, remove the register from quantity chains and update
1961 reg_tick if defined. Also see if any of these registers is currently
1962 in the table. */
1964 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1965 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
1967 delete_reg_equiv (regno);
1968 if (REG_TICK (regno) >= 0)
1969 REG_TICK (regno)++;
1971 in_table |= (TEST_HARD_REG_BIT (hard_regs_in_table, regno) != 0);
1974 /* In the case where we have no call-clobbered hard registers in the
1975 table, we are done. Otherwise, scan the table and remove any
1976 entry that overlaps a call-clobbered register. */
1978 if (in_table)
1979 for (hash = 0; hash < HASH_SIZE; hash++)
1980 for (p = table[hash]; p; p = next)
1982 next = p->next_same_hash;
1984 if (GET_CODE (p->exp) != REG
1985 || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
1986 continue;
1988 regno = REGNO (p->exp);
1989 endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (p->exp));
1991 for (i = regno; i < endregno; i++)
1992 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
1994 remove_from_table (p, hash);
1995 break;
2000 /* Given an expression X of type CONST,
2001 and ELT which is its table entry (or 0 if it
2002 is not in the hash table),
2003 return an alternate expression for X as a register plus integer.
2004 If none can be found, return 0. */
2006 static rtx
2007 use_related_value (x, elt)
2008 rtx x;
2009 struct table_elt *elt;
2011 register struct table_elt *relt = 0;
2012 register struct table_elt *p, *q;
2013 HOST_WIDE_INT offset;
2015 /* First, is there anything related known?
2016 If we have a table element, we can tell from that.
2017 Otherwise, must look it up. */
2019 if (elt != 0 && elt->related_value != 0)
2020 relt = elt;
2021 else if (elt == 0 && GET_CODE (x) == CONST)
2023 rtx subexp = get_related_value (x);
2024 if (subexp != 0)
2025 relt = lookup (subexp,
2026 safe_hash (subexp, GET_MODE (subexp)) & HASH_MASK,
2027 GET_MODE (subexp));
2030 if (relt == 0)
2031 return 0;
2033 /* Search all related table entries for one that has an
2034 equivalent register. */
2036 p = relt;
2037 while (1)
2039 /* This loop is strange in that it is executed in two different cases.
2040 The first is when X is already in the table. Then it is searching
2041 the RELATED_VALUE list of X's class (RELT). The second case is when
2042 X is not in the table. Then RELT points to a class for the related
2043 value.
2045 Ensure that, whatever case we are in, we ignore classes that have
2046 the same value as X. */
2048 if (rtx_equal_p (x, p->exp))
2049 q = 0;
2050 else
2051 for (q = p->first_same_value; q; q = q->next_same_value)
2052 if (GET_CODE (q->exp) == REG)
2053 break;
2055 if (q)
2056 break;
2058 p = p->related_value;
2060 /* We went all the way around, so there is nothing to be found.
2061 Alternatively, perhaps RELT was in the table for some other reason
2062 and it has no related values recorded. */
2063 if (p == relt || p == 0)
2064 break;
2067 if (q == 0)
2068 return 0;
2070 offset = (get_integer_term (x) - get_integer_term (p->exp));
2071 /* Note: OFFSET may be 0 if P->exp and X are related by commutativity. */
2072 return plus_constant (q->exp, offset);
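/* A worked example of the computation above, with hypothetical RTL that
   is not taken from any real compilation: suppose X is
   (const (plus (symbol_ref "a") (const_int 8))), the related entry P has
   exp (const (plus (symbol_ref "a") (const_int 4))), and (reg 100) is in
   P's equivalence class.  Then OFFSET = 8 - 4 = 4, and the function
   returns (plus (reg 100) (const_int 4)).  */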
2075 /* Hash an rtx. We are careful to make sure the value is never negative.
2076 Equivalent registers hash identically.
2077 MODE is used in hashing for CONST_INTs only;
2078 otherwise the mode of X is used.
2080 Store 1 in do_not_record if any subexpression is volatile.
2082 Store 1 in hash_arg_in_memory if X contains a MEM rtx
2083 which does not have the RTX_UNCHANGING_P bit set.
2085 Note that cse_insn knows that the hash code of a MEM expression
2086 is just (int) MEM plus the hash code of the address. */
2088 static unsigned
2089 canon_hash (x, mode)
2090 rtx x;
2091 enum machine_mode mode;
2093 register int i, j;
2094 register unsigned hash = 0;
2095 register enum rtx_code code;
2096 register const char *fmt;
2098 /* repeat is used to turn tail-recursion into iteration. */
2099 repeat:
2100 if (x == 0)
2101 return hash;
2103 code = GET_CODE (x);
2104 switch (code)
2106 case REG:
2108 register int regno = REGNO (x);
2110 /* On some machines, we can't record any non-fixed hard register,
2111 because extending its life will cause reload problems. We
2112 consider ap, fp, and sp to be fixed for this purpose.
2114 We also consider CCmode registers to be fixed for this purpose;
2115 failure to do so leads to failure to simplify 0<100 type of
2116 conditionals.
2118 On all machines, we can't record any global registers. */
2120 if (regno < FIRST_PSEUDO_REGISTER
2121 && (global_regs[regno]
2122 || (SMALL_REGISTER_CLASSES
2123 && ! fixed_regs[regno]
2124 && regno != FRAME_POINTER_REGNUM
2125 && regno != HARD_FRAME_POINTER_REGNUM
2126 && regno != ARG_POINTER_REGNUM
2127 && regno != STACK_POINTER_REGNUM
2128 && GET_MODE_CLASS (GET_MODE (x)) != MODE_CC)))
2130 do_not_record = 1;
2131 return 0;
2133 hash += ((unsigned) REG << 7) + (unsigned) REG_QTY (regno);
2134 return hash;
2137 /* We handle SUBREG of a REG specially because the underlying
2138 reg changes its hash value with every value change; we don't
2139 want to have to forget unrelated subregs when one subreg changes. */
2140 case SUBREG:
2142 if (GET_CODE (SUBREG_REG (x)) == REG)
2144 hash += (((unsigned) SUBREG << 7)
2145 + REGNO (SUBREG_REG (x)) + SUBREG_WORD (x));
2146 return hash;
2148 break;
2151 case CONST_INT:
2153 unsigned HOST_WIDE_INT tem = INTVAL (x);
2154 hash += ((unsigned) CONST_INT << 7) + (unsigned) mode + tem;
2155 return hash;
2158 case CONST_DOUBLE:
2159 /* This is like the general case, except that it only counts
2160 the integers representing the constant. */
2161 hash += (unsigned) code + (unsigned) GET_MODE (x);
2162 if (GET_MODE (x) != VOIDmode)
2163 for (i = 2; i < GET_RTX_LENGTH (CONST_DOUBLE); i++)
2165 unsigned HOST_WIDE_INT tem = XWINT (x, i);
2166 hash += tem;
2168 else
2169 hash += ((unsigned) CONST_DOUBLE_LOW (x)
2170 + (unsigned) CONST_DOUBLE_HIGH (x));
2171 return hash;
2173 /* Assume there is only one rtx object for any given label. */
2174 case LABEL_REF:
2175 hash
2176 += ((unsigned) LABEL_REF << 7) + (unsigned long) XEXP (x, 0);
2177 return hash;
2179 case SYMBOL_REF:
2180 hash
2181 += ((unsigned) SYMBOL_REF << 7) + (unsigned long) XSTR (x, 0);
2182 return hash;
2184 case MEM:
2185 /* We don't record if marked volatile or if BLKmode since we don't
2186 know the size of the move. */
2187 if (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode)
2189 do_not_record = 1;
2190 return 0;
2192 if (! RTX_UNCHANGING_P (x) || FIXED_BASE_PLUS_P (XEXP (x, 0)))
2194 hash_arg_in_memory = 1;
2196 /* Now that we have already found this special case,
2197 might as well speed it up as much as possible. */
2198 hash += (unsigned) MEM;
2199 x = XEXP (x, 0);
2200 goto repeat;
2202 case PRE_DEC:
2203 case PRE_INC:
2204 case POST_DEC:
2205 case POST_INC:
2206 case PC:
2207 case CC0:
2208 case CALL:
2209 case UNSPEC_VOLATILE:
2210 do_not_record = 1;
2211 return 0;
2213 case ASM_OPERANDS:
2214 if (MEM_VOLATILE_P (x))
2216 do_not_record = 1;
2217 return 0;
2219 break;
2221 default:
2222 break;
2225 i = GET_RTX_LENGTH (code) - 1;
2226 hash += (unsigned) code + (unsigned) GET_MODE (x);
2227 fmt = GET_RTX_FORMAT (code);
2228 for (; i >= 0; i--)
2230 if (fmt[i] == 'e')
2232 rtx tem = XEXP (x, i);
2234 /* If we are about to do the last recursive call
2235 needed at this level, change it into iteration.
2236 This function is called enough to be worth it. */
2237 if (i == 0)
2239 x = tem;
2240 goto repeat;
2242 hash += canon_hash (tem, 0);
2244 else if (fmt[i] == 'E')
2245 for (j = 0; j < XVECLEN (x, i); j++)
2246 hash += canon_hash (XVECEXP (x, i, j), 0);
2247 else if (fmt[i] == 's')
2249 register unsigned char *p = (unsigned char *) XSTR (x, i);
2250 if (p)
2251 while (*p)
2252 hash += *p++;
2254 else if (fmt[i] == 'i')
2256 register unsigned tem = XINT (x, i);
2257 hash += tem;
2259 else if (fmt[i] == '0' || fmt[i] == 't')
2260 /* unused */;
2261 else
2262 abort ();
2264 return hash;
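/* An illustrative, standalone sketch (hypothetical tree type, not part
   of GCC) of the tail-recursion-to-iteration idiom used by canon_hash:
   the last subexpression is hashed by looping back to `repeat' rather
   than by a recursive call, so only the non-final children cost stack
   frames.  */
#if 0
struct tnode { int value; struct tnode *left, *right; };

static unsigned
tree_hash (t)
     struct tnode *t;
{
  unsigned hash = 0;

 repeat:
  if (t == 0)
    return hash;

  hash += (unsigned) t->value;
  hash += tree_hash (t->left);	/* Genuine recursion for the inner child.  */
  t = t->right;			/* Iterate on the last child.  */
  goto repeat;
}
#endif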
2267 /* Like canon_hash but with no side effects. */
2269 static unsigned
2270 safe_hash (x, mode)
2271 rtx x;
2272 enum machine_mode mode;
2274 int save_do_not_record = do_not_record;
2275 int save_hash_arg_in_memory = hash_arg_in_memory;
2276 unsigned hash = canon_hash (x, mode);
2277 hash_arg_in_memory = save_hash_arg_in_memory;
2278 do_not_record = save_do_not_record;
2279 return hash;
2282 /* Return 1 iff X and Y would canonicalize into the same thing,
2283 without actually constructing the canonicalization of either one.
2284 If VALIDATE is nonzero,
2285 we assume X is an expression being processed from the rtl
2286 and Y was found in the hash table. We check register refs
2287 in Y for being marked as valid.
2289 If EQUAL_VALUES is nonzero, we allow a register to match a constant value
2290 that is known to be in the register. Ordinarily, we don't allow them
2291 to match, because letting them match would cause unpredictable results
2292 in all the places that search a hash table chain for an equivalent
2293 for a given value. A possible equivalent that has different structure
2294 has its hash code computed from different data. Whether the hash code
2295 is the same as that of the given value is pure luck. */
2297 static int
2298 exp_equiv_p (x, y, validate, equal_values)
2299 rtx x, y;
2300 int validate;
2301 int equal_values;
2303 register int i, j;
2304 register enum rtx_code code;
2305 register const char *fmt;
2307 /* Note: it is incorrect to assume an expression is equivalent to itself
2308 if VALIDATE is nonzero. */
2309 if (x == y && !validate)
2310 return 1;
2311 if (x == 0 || y == 0)
2312 return x == y;
2314 code = GET_CODE (x);
2315 if (code != GET_CODE (y))
2317 if (!equal_values)
2318 return 0;
2320 /* If X is a constant and Y is a register or vice versa, they may be
2321 equivalent. We only have to validate if Y is a register. */
2322 if (CONSTANT_P (x) && GET_CODE (y) == REG
2323 && REGNO_QTY_VALID_P (REGNO (y)))
2325 int y_q = REG_QTY (REGNO (y));
2326 struct qty_table_elem *y_ent = &qty_table[y_q];
2328 if (GET_MODE (y) == y_ent->mode
2329 && rtx_equal_p (x, y_ent->const_rtx)
2330 && (! validate || REG_IN_TABLE (REGNO (y)) == REG_TICK (REGNO (y))))
2331 return 1;
2334 if (CONSTANT_P (y) && code == REG
2335 && REGNO_QTY_VALID_P (REGNO (x)))
2337 int x_q = REG_QTY (REGNO (x));
2338 struct qty_table_elem *x_ent = &qty_table[x_q];
2340 if (GET_MODE (x) == x_ent->mode
2341 && rtx_equal_p (y, x_ent->const_rtx))
2342 return 1;
2345 return 0;
2348 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
2349 if (GET_MODE (x) != GET_MODE (y))
2350 return 0;
2352 switch (code)
2354 case PC:
2355 case CC0:
2356 return x == y;
2358 case CONST_INT:
2359 return INTVAL (x) == INTVAL (y);
2361 case LABEL_REF:
2362 return XEXP (x, 0) == XEXP (y, 0);
2364 case SYMBOL_REF:
2365 return XSTR (x, 0) == XSTR (y, 0);
2367 case REG:
2369 int regno = REGNO (y);
2370 int endregno
2371 = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
2372 : HARD_REGNO_NREGS (regno, GET_MODE (y)));
2373 int i;
2375 /* If the quantities are not the same, the expressions are not
2376 equivalent. If they are and we are not to validate, they
2377 are equivalent. Otherwise, ensure all regs are up-to-date. */
2379 if (REG_QTY (REGNO (x)) != REG_QTY (regno))
2380 return 0;
2382 if (! validate)
2383 return 1;
2385 for (i = regno; i < endregno; i++)
2386 if (REG_IN_TABLE (i) != REG_TICK (i))
2387 return 0;
2389 return 1;
2392 /* For commutative operations, check both orders. */
2393 case PLUS:
2394 case MULT:
2395 case AND:
2396 case IOR:
2397 case XOR:
2398 case NE:
2399 case EQ:
2400 return ((exp_equiv_p (XEXP (x, 0), XEXP (y, 0), validate, equal_values)
2401 && exp_equiv_p (XEXP (x, 1), XEXP (y, 1),
2402 validate, equal_values))
2403 || (exp_equiv_p (XEXP (x, 0), XEXP (y, 1),
2404 validate, equal_values)
2405 && exp_equiv_p (XEXP (x, 1), XEXP (y, 0),
2406 validate, equal_values)));
2408 default:
2409 break;
2412 /* Compare the elements. If any pair of corresponding elements
2413 fails to match, return 0 for the whole thing. */
2415 fmt = GET_RTX_FORMAT (code);
2416 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2418 switch (fmt[i])
2420 case 'e':
2421 if (! exp_equiv_p (XEXP (x, i), XEXP (y, i), validate, equal_values))
2422 return 0;
2423 break;
2425 case 'E':
2426 if (XVECLEN (x, i) != XVECLEN (y, i))
2427 return 0;
2428 for (j = 0; j < XVECLEN (x, i); j++)
2429 if (! exp_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j),
2430 validate, equal_values))
2431 return 0;
2432 break;
2434 case 's':
2435 if (strcmp (XSTR (x, i), XSTR (y, i)))
2436 return 0;
2437 break;
2439 case 'i':
2440 if (XINT (x, i) != XINT (y, i))
2441 return 0;
2442 break;
2444 case 'w':
2445 if (XWINT (x, i) != XWINT (y, i))
2446 return 0;
2447 break;
2449 case '0':
2450 case 't':
2451 break;
2453 default:
2454 abort ();
2458 return 1;
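/* An illustrative, standalone sketch (hypothetical expression type, not
   part of GCC) of the commutative check in exp_equiv_p above: two
   binary expressions with a commutative code match if their operands
   match either in order or swapped.  */
#if 0
struct expr { int code; struct expr *op0, *op1; };
extern int subexpr_equiv_p ();

static int
commutative_equiv_p (x, y)
     struct expr *x, *y;
{
  return ((subexpr_equiv_p (x->op0, y->op0)
	   && subexpr_equiv_p (x->op1, y->op1))
	  || (subexpr_equiv_p (x->op0, y->op1)
	      && subexpr_equiv_p (x->op1, y->op0)));
}
#endif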
2461 /* Return 1 if X has a value that can vary even between two
2462 executions of the program. 0 means X can be compared reliably
2463 against certain constants or near-constants. */
2465 static int
2466 cse_rtx_varies_p (x)
2467 register rtx x;
2469 /* We need not check for X and the equivalence class being of the same
2470 mode because if X is equivalent to a constant in some mode, it
2471 doesn't vary in any mode. */
2473 if (GET_CODE (x) == REG
2474 && REGNO_QTY_VALID_P (REGNO (x)))
2476 int x_q = REG_QTY (REGNO (x));
2477 struct qty_table_elem *x_ent = &qty_table[x_q];
2479 if (GET_MODE (x) == x_ent->mode
2480 && x_ent->const_rtx != NULL_RTX)
2481 return 0;
2484 if (GET_CODE (x) == PLUS
2485 && GET_CODE (XEXP (x, 1)) == CONST_INT
2486 && GET_CODE (XEXP (x, 0)) == REG
2487 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
2489 int x0_q = REG_QTY (REGNO (XEXP (x, 0)));
2490 struct qty_table_elem *x0_ent = &qty_table[x0_q];
2492 if ((GET_MODE (XEXP (x, 0)) == x0_ent->mode)
2493 && x0_ent->const_rtx != NULL_RTX)
2494 return 0;
2497 /* This can happen as the result of virtual register instantiation, if
2498 the initial constant is too large to be a valid address. This gives
2499 us a three-instruction sequence: load the large offset into a register,
2500 load fp minus a constant into a register, then a MEM which is the
2501 sum of the two `constant' registers. */
2502 if (GET_CODE (x) == PLUS
2503 && GET_CODE (XEXP (x, 0)) == REG
2504 && GET_CODE (XEXP (x, 1)) == REG
2505 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0)))
2506 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
2508 int x0_q = REG_QTY (REGNO (XEXP (x, 0)));
2509 int x1_q = REG_QTY (REGNO (XEXP (x, 1)));
2510 struct qty_table_elem *x0_ent = &qty_table[x0_q];
2511 struct qty_table_elem *x1_ent = &qty_table[x1_q];
2513 if ((GET_MODE (XEXP (x, 0)) == x0_ent->mode)
2514 && x0_ent->const_rtx != NULL_RTX
2515 && (GET_MODE (XEXP (x, 1)) == x1_ent->mode)
2516 && x1_ent->const_rtx != NULL_RTX)
2517 return 0;
2520 return rtx_varies_p (x);
2523 /* Canonicalize an expression:
2524 replace each register reference inside it
2525 with the "oldest" equivalent register.
2527 If INSN is non-zero and we are replacing a pseudo with a hard register
2528 or vice versa, validate_change is used to ensure that INSN remains valid
2529 after we make our substitution. The calls are made with IN_GROUP non-zero
2530 so apply_change_group must be called upon the outermost return from this
2531 function (unless INSN is zero). The result of apply_change_group can
2532 generally be discarded since the changes we are making are optional. */
2534 static rtx
2535 canon_reg (x, insn)
2536 rtx x;
2537 rtx insn;
2539 register int i;
2540 register enum rtx_code code;
2541 register const char *fmt;
2543 if (x == 0)
2544 return x;
2546 code = GET_CODE (x);
2547 switch (code)
2549 case PC:
2550 case CC0:
2551 case CONST:
2552 case CONST_INT:
2553 case CONST_DOUBLE:
2554 case SYMBOL_REF:
2555 case LABEL_REF:
2556 case ADDR_VEC:
2557 case ADDR_DIFF_VEC:
2558 return x;
2560 case REG:
2562 register int first;
2563 register int q;
2564 register struct qty_table_elem *ent;
2566 /* Never replace a hard reg, because hard regs can appear
2567 in more than one machine mode, and we must preserve the mode
2568 of each occurrence. Also, some hard regs appear in
2569 MEMs that are shared and mustn't be altered. Don't try to
2570 replace any reg that maps to a reg of class NO_REGS. */
2571 if (REGNO (x) < FIRST_PSEUDO_REGISTER
2572 || ! REGNO_QTY_VALID_P (REGNO (x)))
2573 return x;
2575 q = REG_QTY (REGNO (x));
2576 ent = &qty_table[q];
2577 first = ent->first_reg;
2578 return (first >= FIRST_PSEUDO_REGISTER ? regno_reg_rtx[first]
2579 : REGNO_REG_CLASS (first) == NO_REGS ? x
2580 : gen_rtx_REG (ent->mode, first));
2583 default:
2584 break;
2587 fmt = GET_RTX_FORMAT (code);
2588 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2590 register int j;
2592 if (fmt[i] == 'e')
2594 rtx new = canon_reg (XEXP (x, i), insn);
2595 int insn_code;
2597 /* If replacing pseudo with hard reg or vice versa, ensure the
2598 insn remains valid. Likewise if the insn has MATCH_DUPs. */
2599 if (insn != 0 && new != 0
2600 && GET_CODE (new) == REG && GET_CODE (XEXP (x, i)) == REG
2601 && (((REGNO (new) < FIRST_PSEUDO_REGISTER)
2602 != (REGNO (XEXP (x, i)) < FIRST_PSEUDO_REGISTER))
2603 || (insn_code = recog_memoized (insn)) < 0
2604 || insn_data[insn_code].n_dups > 0))
2605 validate_change (insn, &XEXP (x, i), new, 1);
2606 else
2607 XEXP (x, i) = new;
2609 else if (fmt[i] == 'E')
2610 for (j = 0; j < XVECLEN (x, i); j++)
2611 XVECEXP (x, i, j) = canon_reg (XVECEXP (x, i, j), insn);
2614 return x;
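/* An illustrative usage note, not a quotation of any particular caller:
   a typical use canonicalizes a whole pattern and then commits or
   abandons all of the tentative replacements at once, e.g.

	canon_reg (PATTERN (insn), insn);
	apply_change_group ();

   Since the replacements are optional, the value returned by
   apply_change_group may be ignored.  */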
2617 /* LOC is a location within INSN that is an operand address (the contents of
2618 a MEM). Find the best equivalent address to use that is valid for this
2619 insn.
2621 On most CISC machines, complicated address modes are costly, and rtx_cost
2622 is a good approximation for that cost. However, most RISC machines have
2623 only a few (usually only one) memory reference formats. If an address is
2624 valid at all, it is often just as cheap as any other address. Hence, for
2625 RISC machines, we use the configuration macro `ADDRESS_COST' to compare the
2626 costs of various addresses. For two addresses of equal cost, choose the one
2627 with the highest `rtx_cost' value as that has the potential of eliminating
2628 the most insns. If both costs tie, we choose the first in the equivalence
2629 class. Note that we ignore the fact that pseudo registers are cheaper
2630 than hard registers here because we would also prefer the pseudo registers. */
2633 static void
2634 find_best_addr (insn, loc)
2635 rtx insn;
2636 rtx *loc;
2638 struct table_elt *elt;
2639 rtx addr = *loc;
2640 #ifdef ADDRESS_COST
2641 struct table_elt *p;
2642 int found_better = 1;
2643 #endif
2644 int save_do_not_record = do_not_record;
2645 int save_hash_arg_in_memory = hash_arg_in_memory;
2646 int addr_volatile;
2647 int regno;
2648 unsigned hash;
2650 /* Do not try to replace constant addresses or addresses of local and
2651 argument slots. These MEM expressions are made only once and inserted
2652 in many instructions, as well as being used to control symbol table
2653 output. It is not safe to clobber them.
2655 There are some uncommon cases where the address is already in a register
2656 for some reason, but we cannot take advantage of that because we have
2657 no easy way to unshare the MEM. In addition, looking up all stack
2658 addresses is costly. */
2659 if ((GET_CODE (addr) == PLUS
2660 && GET_CODE (XEXP (addr, 0)) == REG
2661 && GET_CODE (XEXP (addr, 1)) == CONST_INT
2662 && (regno = REGNO (XEXP (addr, 0)),
2663 regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM
2664 || regno == ARG_POINTER_REGNUM))
2665 || (GET_CODE (addr) == REG
2666 && (regno = REGNO (addr), regno == FRAME_POINTER_REGNUM
2667 || regno == HARD_FRAME_POINTER_REGNUM
2668 || regno == ARG_POINTER_REGNUM))
2669 || GET_CODE (addr) == ADDRESSOF
2670 || CONSTANT_ADDRESS_P (addr))
2671 return;
2673 /* If this address is not simply a register, try to fold it. This will
2674 sometimes simplify the expression. Many simplifications
2675 will not be valid, but some, usually applying the associative rule, will
2676 be valid and produce better code. */
2677 if (GET_CODE (addr) != REG)
2679 rtx folded = fold_rtx (copy_rtx (addr), NULL_RTX);
2681 if (1
2682 #ifdef ADDRESS_COST
2683 && (CSE_ADDRESS_COST (folded) < CSE_ADDRESS_COST (addr)
2684 || (CSE_ADDRESS_COST (folded) == CSE_ADDRESS_COST (addr)
2685 && rtx_cost (folded, MEM) > rtx_cost (addr, MEM)))
2686 #else
2687 && rtx_cost (folded, MEM) < rtx_cost (addr, MEM)
2688 #endif
2689 && validate_change (insn, loc, folded, 0))
2690 addr = folded;
2693 /* If this address is not in the hash table, we can't look for equivalences
2694 of the whole address. Also, ignore if volatile. */
2696 do_not_record = 0;
2697 hash = HASH (addr, Pmode);
2698 addr_volatile = do_not_record;
2699 do_not_record = save_do_not_record;
2700 hash_arg_in_memory = save_hash_arg_in_memory;
2702 if (addr_volatile)
2703 return;
2705 elt = lookup (addr, hash, Pmode);
2707 #ifndef ADDRESS_COST
2708 if (elt)
2710 int our_cost = elt->cost;
2712 /* Find the lowest cost below ours that works. */
2713 for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
2714 if (elt->cost < our_cost
2715 && (GET_CODE (elt->exp) == REG
2716 || exp_equiv_p (elt->exp, elt->exp, 1, 0))
2717 && validate_change (insn, loc,
2718 canon_reg (copy_rtx (elt->exp), NULL_RTX), 0))
2719 return;
2721 #else
2723 if (elt)
2725 /* We need to find the best (under the criteria documented above) entry
2726 in the class that is valid. We use the `flag' field to indicate
2727 choices that were invalid and iterate until we can't find a better
2728 one that hasn't already been tried. */
2730 for (p = elt->first_same_value; p; p = p->next_same_value)
2731 p->flag = 0;
2733 while (found_better)
2735 int best_addr_cost = CSE_ADDRESS_COST (*loc);
2736 int best_rtx_cost = (elt->cost + 1) >> 1;
2737 struct table_elt *best_elt = elt;
2739 found_better = 0;
2740 for (p = elt->first_same_value; p; p = p->next_same_value)
2741 if (! p->flag)
2743 if ((GET_CODE (p->exp) == REG
2744 || exp_equiv_p (p->exp, p->exp, 1, 0))
2745 && (CSE_ADDRESS_COST (p->exp) < best_addr_cost
2746 || (CSE_ADDRESS_COST (p->exp) == best_addr_cost
2747 && (p->cost + 1) >> 1 > best_rtx_cost)))
2749 found_better = 1;
2750 best_addr_cost = CSE_ADDRESS_COST (p->exp);
2751 best_rtx_cost = (p->cost + 1) >> 1;
2752 best_elt = p;
2756 if (found_better)
2758 if (validate_change (insn, loc,
2759 canon_reg (copy_rtx (best_elt->exp),
2760 NULL_RTX), 0))
2761 return;
2762 else
2763 best_elt->flag = 1;
2768 /* If the address is a binary operation with the first operand a register
2769 and the second a constant, do the same as above, but looking for
2770 equivalences of the register. Then try to simplify before checking for
2771 the best address to use. This catches a few cases: First is when we
2772 have REG+const and the register is another REG+const. We can often merge
2773 the constants and eliminate one insn and one register. It may also be
2774 that a machine has a cheap REG+REG+const. Finally, this improves the
2775 code on the Alpha for unaligned byte stores. */
2777 if (flag_expensive_optimizations
2778 && (GET_RTX_CLASS (GET_CODE (*loc)) == '2'
2779 || GET_RTX_CLASS (GET_CODE (*loc)) == 'c')
2780 && GET_CODE (XEXP (*loc, 0)) == REG
2781 && GET_CODE (XEXP (*loc, 1)) == CONST_INT)
2783 rtx c = XEXP (*loc, 1);
2785 do_not_record = 0;
2786 hash = HASH (XEXP (*loc, 0), Pmode);
2787 do_not_record = save_do_not_record;
2788 hash_arg_in_memory = save_hash_arg_in_memory;
2790 elt = lookup (XEXP (*loc, 0), hash, Pmode);
2791 if (elt == 0)
2792 return;
2794 /* We need to find the best (under the criteria documented above) entry
2795 in the class that is valid. We use the `flag' field to indicate
2796 choices that were invalid and iterate until we can't find a better
2797 one that hasn't already been tried. */
2799 for (p = elt->first_same_value; p; p = p->next_same_value)
2800 p->flag = 0;
2802 while (found_better)
2804 int best_addr_cost = CSE_ADDRESS_COST (*loc);
2805 int best_rtx_cost = (COST (*loc) + 1) >> 1;
2806 struct table_elt *best_elt = elt;
2807 rtx best_rtx = *loc;
2808 int count;
2810 /* This is, in the worst case, an O(n^2) algorithm, so limit our search
2811 to the first 32 elements on the list. This avoids trouble
2812 compiling code with very long basic blocks that can easily
2813 call simplify_gen_binary so many times that we run out of
2814 memory. */
2816 found_better = 0;
2817 for (p = elt->first_same_value, count = 0;
2818 p && count < 32;
2819 p = p->next_same_value, count++)
2820 if (! p->flag
2821 && (GET_CODE (p->exp) == REG
2822 || exp_equiv_p (p->exp, p->exp, 1, 0)))
2824 rtx new = simplify_gen_binary (GET_CODE (*loc), Pmode,
2825 p->exp, c);
2827 if ((CSE_ADDRESS_COST (new) < best_addr_cost
2828 || (CSE_ADDRESS_COST (new) == best_addr_cost
2829 && (COST (new) + 1) >> 1 > best_rtx_cost)))
2831 found_better = 1;
2832 best_addr_cost = CSE_ADDRESS_COST (new);
2833 best_rtx_cost = (COST (new) + 1) >> 1;
2834 best_elt = p;
2835 best_rtx = new;
2839 if (found_better)
2841 if (validate_change (insn, loc,
2842 canon_reg (copy_rtx (best_rtx),
2843 NULL_RTX), 0))
2844 return;
2845 else
2846 best_elt->flag = 1;
2850 #endif
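/* An illustrative, standalone sketch (hypothetical types, names and
   cost function; not part of GCC, and with the rtx_cost tie-breaking
   omitted) of the search strategy used twice above: repeatedly pick the
   cheapest candidate not yet rejected, and if it fails validation, flag
   it so it is never reconsidered.  The loop terminates when no
   unflagged candidate beats the cost of what is already in place.  */
#if 0
struct cand { int cost; char flag; struct cand *next; };
extern int try_candidate ();	/* Nonzero if the replacement is valid.  */

static void
pick_best_valid (list, current_cost)
     struct cand *list;
     int current_cost;
{
  int found_better = 1;

  while (found_better)
    {
      int best_cost = current_cost;
      struct cand *best = 0, *p;

      found_better = 0;
      for (p = list; p; p = p->next)
	if (! p->flag && p->cost < best_cost)
	  {
	    found_better = 1;
	    best_cost = p->cost;
	    best = p;
	  }

      if (found_better)
	{
	  if (try_candidate (best))
	    return;
	  best->flag = 1;	/* Rejected; skip it from now on.  */
	}
    }
}
#endif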
2853 /* Given an operation (CODE, *PARG1, *PARG2), where code is a comparison
2854 operation (EQ, NE, GT, etc.), follow it back through the hash table and
2855 find what values are actually being compared.
2857 *PARG1 and *PARG2 are updated to contain the rtx representing the values
2858 actually being compared. For example, if *PARG1 was (cc0) and *PARG2
2859 was (const_int 0), *PARG1 and *PARG2 will be set to the objects that were
2860 compared to produce cc0.
2862 The return value is the comparison operator that relates *PARG1 to *PARG2:
2863 either the code of the comparison found or the code of its inverse. */
2865 static enum rtx_code
2866 find_comparison_args (code, parg1, parg2, pmode1, pmode2)
2867 enum rtx_code code;
2868 rtx *parg1, *parg2;
2869 enum machine_mode *pmode1, *pmode2;
2871 rtx arg1, arg2;
2873 arg1 = *parg1, arg2 = *parg2;
2875 /* If ARG2 is const0_rtx, see what ARG1 is equivalent to. */
2877 while (arg2 == CONST0_RTX (GET_MODE (arg1)))
2879 /* Set non-zero when we find something of interest. */
2880 rtx x = 0;
2881 int reverse_code = 0;
2882 struct table_elt *p = 0;
2884 /* If arg1 is a COMPARE, extract the comparison arguments from it.
2885 On machines with CC0, this is the only case that can occur, since
2886 fold_rtx will return the COMPARE or item being compared with zero
2887 when given CC0. */
2889 if (GET_CODE (arg1) == COMPARE && arg2 == const0_rtx)
2890 x = arg1;
2892 /* If ARG1 is a comparison operator and CODE is testing for
2893 STORE_FLAG_VALUE, get the inner arguments. */
2895 else if (GET_RTX_CLASS (GET_CODE (arg1)) == '<')
2897 if (code == NE
2898 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
2899 && code == LT && STORE_FLAG_VALUE == -1)
2900 #ifdef FLOAT_STORE_FLAG_VALUE
2901 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_FLOAT
2902 && FLOAT_STORE_FLAG_VALUE < 0)
2903 #endif
2905 x = arg1;
2906 else if (code == EQ
2907 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
2908 && code == GE && STORE_FLAG_VALUE == -1)
2909 #ifdef FLOAT_STORE_FLAG_VALUE
2910 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_FLOAT
2911 && FLOAT_STORE_FLAG_VALUE < 0)
2912 #endif
2914 x = arg1, reverse_code = 1;
2917 /* ??? We could also check for
2919 (ne (and (eq (...) (const_int 1))) (const_int 0))
2921 and related forms, but let's wait until we see them occurring. */
2923 if (x == 0)
2924 /* Look up ARG1 in the hash table and see if it has an equivalence
2925 that lets us see what is being compared. */
2926 p = lookup (arg1, safe_hash (arg1, GET_MODE (arg1)) & HASH_MASK,
2927 GET_MODE (arg1));
2928 if (p) p = p->first_same_value;
2930 for (; p; p = p->next_same_value)
2932 enum machine_mode inner_mode = GET_MODE (p->exp);
2934 /* If the entry isn't valid, skip it. */
2935 if (! exp_equiv_p (p->exp, p->exp, 1, 0))
2936 continue;
2938 if (GET_CODE (p->exp) == COMPARE
2939 /* Another possibility is that this machine has a compare insn
2940 that includes the comparison code. In that case, ARG1 would
2941 be equivalent to a comparison operation that would set ARG1 to
2942 either STORE_FLAG_VALUE or zero. If this is an NE operation,
2943 ORIG_CODE is the actual comparison being done; if it is an EQ,
2944 we must reverse ORIG_CODE. On machines with a negative value
2945 for STORE_FLAG_VALUE, also look at LT and GE operations. */
2946 || ((code == NE
2947 || (code == LT
2948 && GET_MODE_CLASS (inner_mode) == MODE_INT
2949 && (GET_MODE_BITSIZE (inner_mode)
2950 <= HOST_BITS_PER_WIDE_INT)
2951 && (STORE_FLAG_VALUE
2952 & ((HOST_WIDE_INT) 1
2953 << (GET_MODE_BITSIZE (inner_mode) - 1))))
2954 #ifdef FLOAT_STORE_FLAG_VALUE
2955 || (code == LT
2956 && GET_MODE_CLASS (inner_mode) == MODE_FLOAT
2957 && FLOAT_STORE_FLAG_VALUE < 0)
2958 #endif
2960 && GET_RTX_CLASS (GET_CODE (p->exp)) == '<'))
2962 x = p->exp;
2963 break;
2965 else if ((code == EQ
2966 || (code == GE
2967 && GET_MODE_CLASS (inner_mode) == MODE_INT
2968 && (GET_MODE_BITSIZE (inner_mode)
2969 <= HOST_BITS_PER_WIDE_INT)
2970 && (STORE_FLAG_VALUE
2971 & ((HOST_WIDE_INT) 1
2972 << (GET_MODE_BITSIZE (inner_mode) - 1))))
2973 #ifdef FLOAT_STORE_FLAG_VALUE
2974 || (code == GE
2975 && GET_MODE_CLASS (inner_mode) == MODE_FLOAT
2976 && FLOAT_STORE_FLAG_VALUE < 0)
2977 #endif
2979 && GET_RTX_CLASS (GET_CODE (p->exp)) == '<')
2981 reverse_code = 1;
2982 x = p->exp;
2983 break;
2986 /* If this is fp + constant, the equivalent is a better operand since
2987 it may let us predict the value of the comparison. */
2988 else if (NONZERO_BASE_PLUS_P (p->exp))
2990 arg1 = p->exp;
2991 continue;
2995 /* If we didn't find a useful equivalence for ARG1, we are done.
2996 Otherwise, set up for the next iteration. */
2997 if (x == 0)
2998 break;
3000 arg1 = XEXP (x, 0), arg2 = XEXP (x, 1);
3001 if (GET_RTX_CLASS (GET_CODE (x)) == '<')
3002 code = GET_CODE (x);
3004 if (reverse_code)
3005 code = reverse_condition (code);
3008 /* Return our results. Return the modes from before fold_rtx
3009 because fold_rtx might produce const_int, and then it's too late. */
3010 *pmode1 = GET_MODE (arg1), *pmode2 = GET_MODE (arg2);
3011 *parg1 = fold_rtx (arg1, 0), *parg2 = fold_rtx (arg2, 0);
3013 return code;
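/* A worked example of the loop above, with hypothetical RTL: suppose
   ARG1 is a register known to be equivalent to
   (lt:SI (reg 100) (reg 101)), STORE_FLAG_VALUE is 1, and we are
   folding (eq ARG1 (const_int 0)).  ARG1 is zero exactly when the LT is
   false, so the EQ branch sets X to the LT expression with REVERSE_CODE
   nonzero; CODE becomes LT, is reversed to GE, and the function returns
   GE with *PARG1 = (reg 100) and *PARG2 = (reg 101).  */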
3016 /* If X is a nontrivial arithmetic operation on an argument
3017 for which a constant value can be determined, return
3018 the result of operating on that value, as a constant.
3019 Otherwise, return X, possibly with one or more operands
3020 modified by recursive calls to this function.
3022 If X is a register whose contents are known, we do NOT
3023 return those contents here. equiv_constant is called to
3024 perform that task.
3026 INSN is the insn that we may be modifying. If it is 0, make a copy
3027 of X before modifying it. */
3029 static rtx
3030 fold_rtx (x, insn)
3031 rtx x;
3032 rtx insn;
3034 register enum rtx_code code;
3035 register enum machine_mode mode;
3036 register const char *fmt;
3037 register int i;
3038 rtx new = 0;
3039 int copied = 0;
3040 int must_swap = 0;
3042 /* Folded equivalents of first two operands of X. */
3043 rtx folded_arg0;
3044 rtx folded_arg1;
3046 /* Constant equivalents of first three operands of X;
3047 0 when no such equivalent is known. */
3048 rtx const_arg0;
3049 rtx const_arg1;
3050 rtx const_arg2;
3052 /* The mode of the first operand of X. We need this for sign and zero
3053 extends. */
3054 enum machine_mode mode_arg0;
3056 if (x == 0)
3057 return x;
3059 mode = GET_MODE (x);
3060 code = GET_CODE (x);
3061 switch (code)
3063 case CONST:
3064 case CONST_INT:
3065 case CONST_DOUBLE:
3066 case SYMBOL_REF:
3067 case LABEL_REF:
3068 case REG:
3069 /* No use simplifying an EXPR_LIST
3070 since they are used only for lists of args
3071 in a function call's REG_EQUAL note. */
3072 case EXPR_LIST:
3073 /* Changing anything inside an ADDRESSOF is incorrect; we don't
3074 want to (e.g.,) make (addressof (const_int 0)) just because
3075 the location is known to be zero. */
3076 case ADDRESSOF:
3077 return x;
3079 #ifdef HAVE_cc0
3080 case CC0:
3081 return prev_insn_cc0;
3082 #endif
3084 case PC:
3085 /* If the next insn is a CODE_LABEL followed by a jump table,
3086 PC's value is a LABEL_REF pointing to that label. That
3087 lets us fold switch statements on the Vax. */
3088 if (insn && GET_CODE (insn) == JUMP_INSN)
3090 rtx next = next_nonnote_insn (insn);
3092 if (next && GET_CODE (next) == CODE_LABEL
3093 && NEXT_INSN (next) != 0
3094 && GET_CODE (NEXT_INSN (next)) == JUMP_INSN
3095 && (GET_CODE (PATTERN (NEXT_INSN (next))) == ADDR_VEC
3096 || GET_CODE (PATTERN (NEXT_INSN (next))) == ADDR_DIFF_VEC))
3097 return gen_rtx_LABEL_REF (Pmode, next);
3099 break;
3101 case SUBREG:
3102 /* See if we previously assigned a constant value to this SUBREG. */
3103 if ((new = lookup_as_function (x, CONST_INT)) != 0
3104 || (new = lookup_as_function (x, CONST_DOUBLE)) != 0)
3105 return new;
3107 /* If this is a paradoxical SUBREG, we have no idea what value the
3108 extra bits would have. However, if the operand is equivalent
3109 to a SUBREG whose operand is the same as our mode, and all the
3110 modes are within a word, we can just use the inner operand
3111 because these SUBREGs just say how to treat the register.
3113 Similarly if we find an integer constant. */
3115 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3117 enum machine_mode imode = GET_MODE (SUBREG_REG (x));
3118 struct table_elt *elt;
3120 if (GET_MODE_SIZE (mode) <= UNITS_PER_WORD
3121 && GET_MODE_SIZE (imode) <= UNITS_PER_WORD
3122 && (elt = lookup (SUBREG_REG (x), HASH (SUBREG_REG (x), imode),
3123 imode)) != 0)
3124 for (elt = elt->first_same_value;
3125 elt; elt = elt->next_same_value)
3127 if (CONSTANT_P (elt->exp)
3128 && GET_MODE (elt->exp) == VOIDmode)
3129 return elt->exp;
3131 if (GET_CODE (elt->exp) == SUBREG
3132 && GET_MODE (SUBREG_REG (elt->exp)) == mode
3133 && exp_equiv_p (elt->exp, elt->exp, 1, 0))
3134 return copy_rtx (SUBREG_REG (elt->exp));
3137 return x;
3140 /* Fold SUBREG_REG. If it changed, see if we can simplify the SUBREG.
3141 We might be able to if the SUBREG is extracting a single word in an
3142 integral mode or extracting the low part. */
3144 folded_arg0 = fold_rtx (SUBREG_REG (x), insn);
3145 const_arg0 = equiv_constant (folded_arg0);
3146 if (const_arg0)
3147 folded_arg0 = const_arg0;
3149 if (folded_arg0 != SUBREG_REG (x))
3151 new = 0;
3153 if (GET_MODE_CLASS (mode) == MODE_INT
3154 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
3155 && GET_MODE (SUBREG_REG (x)) != VOIDmode)
3156 new = operand_subword (folded_arg0, SUBREG_WORD (x), 0,
3157 GET_MODE (SUBREG_REG (x)));
3158 if (new == 0 && subreg_lowpart_p (x))
3159 new = gen_lowpart_if_possible (mode, folded_arg0);
3160 if (new)
3161 return new;
3164 /* If this is a narrowing SUBREG and our operand is a REG, see if
3165 we can find an equivalence for REG that is an arithmetic operation
3166 in a wider mode where both operands are paradoxical SUBREGs
3167 from objects of our result mode. In that case, we couldn't report
3168 an equivalent value for that operation, since we don't know what the
3169 extra bits will be. But we can find an equivalence for this SUBREG
3170 by folding that operation in the narrow mode. This allows us to
3171 fold arithmetic in narrow modes when the machine only supports
3172 word-sized arithmetic.
3174 Also look for a case where we have a SUBREG whose operand is the
3175 same as our result. If both modes are smaller than a word, we
3176 are simply interpreting a register in different modes and we
3177 can use the inner value. */
3179 if (GET_CODE (folded_arg0) == REG
3180 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (folded_arg0))
3181 && subreg_lowpart_p (x))
3183 struct table_elt *elt;
3185 /* We can use HASH here since we know that canon_hash won't be
3186 called. */
3187 elt = lookup (folded_arg0,
3188 HASH (folded_arg0, GET_MODE (folded_arg0)),
3189 GET_MODE (folded_arg0));
3191 if (elt)
3192 elt = elt->first_same_value;
3194 for (; elt; elt = elt->next_same_value)
3196 enum rtx_code eltcode = GET_CODE (elt->exp);
3198 /* Just check for unary and binary operations. */
3199 if (GET_RTX_CLASS (GET_CODE (elt->exp)) == '1'
3200 && GET_CODE (elt->exp) != SIGN_EXTEND
3201 && GET_CODE (elt->exp) != ZERO_EXTEND
3202 && GET_CODE (XEXP (elt->exp, 0)) == SUBREG
3203 && GET_MODE (SUBREG_REG (XEXP (elt->exp, 0))) == mode)
3205 rtx op0 = SUBREG_REG (XEXP (elt->exp, 0));
3207 if (GET_CODE (op0) != REG && ! CONSTANT_P (op0))
3208 op0 = fold_rtx (op0, NULL_RTX);
3210 op0 = equiv_constant (op0);
3211 if (op0)
3212 new = simplify_unary_operation (GET_CODE (elt->exp), mode,
3213 op0, mode);
3215 else if ((GET_RTX_CLASS (GET_CODE (elt->exp)) == '2'
3216 || GET_RTX_CLASS (GET_CODE (elt->exp)) == 'c')
3217 && eltcode != DIV && eltcode != MOD
3218 && eltcode != UDIV && eltcode != UMOD
3219 && eltcode != ASHIFTRT && eltcode != LSHIFTRT
3220 && eltcode != ROTATE && eltcode != ROTATERT
3221 && ((GET_CODE (XEXP (elt->exp, 0)) == SUBREG
3222 && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 0)))
3223 == mode))
3224 || CONSTANT_P (XEXP (elt->exp, 0)))
3225 && ((GET_CODE (XEXP (elt->exp, 1)) == SUBREG
3226 && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 1)))
3227 == mode))
3228 || CONSTANT_P (XEXP (elt->exp, 1))))
3230 rtx op0 = gen_lowpart_common (mode, XEXP (elt->exp, 0));
3231 rtx op1 = gen_lowpart_common (mode, XEXP (elt->exp, 1));
3233 if (op0 && GET_CODE (op0) != REG && ! CONSTANT_P (op0))
3234 op0 = fold_rtx (op0, NULL_RTX);
3236 if (op0)
3237 op0 = equiv_constant (op0);
3239 if (op1 && GET_CODE (op1) != REG && ! CONSTANT_P (op1))
3240 op1 = fold_rtx (op1, NULL_RTX);
3242 if (op1)
3243 op1 = equiv_constant (op1);
3245 /* If we are looking for the low SImode part of
3246 (ashift:DI c (const_int 32)), it doesn't work
3247 to compute that in SImode, because a 32-bit shift
3248 in SImode is unpredictable. We know the value is 0. */
3249 if (op0 && op1
3250 && GET_CODE (elt->exp) == ASHIFT
3251 && GET_CODE (op1) == CONST_INT
3252 && INTVAL (op1) >= GET_MODE_BITSIZE (mode))
3254 if (INTVAL (op1) < GET_MODE_BITSIZE (GET_MODE (elt->exp)))
3256 /* If the count fits in the inner mode's width,
3257 but exceeds the outer mode's width,
3258 the value will get truncated to 0
3259 by the subreg. */
3260 new = const0_rtx;
3261 else
3262 /* If the count exceeds even the inner mode's width,
3263 don't fold this expression. */
3264 new = 0;
3266 else if (op0 && op1)
3267 new = simplify_binary_operation (GET_CODE (elt->exp), mode,
3268 op0, op1);
3271 else if (GET_CODE (elt->exp) == SUBREG
3272 && GET_MODE (SUBREG_REG (elt->exp)) == mode
3273 && (GET_MODE_SIZE (GET_MODE (folded_arg0))
3274 <= UNITS_PER_WORD)
3275 && exp_equiv_p (elt->exp, elt->exp, 1, 0))
3276 new = copy_rtx (SUBREG_REG (elt->exp));
3278 if (new)
3279 return new;
3283 return x;
3285 case NOT:
3286 case NEG:
3287 /* If we have (NOT Y), see if Y is known to be (NOT Z).
3288 If so, (NOT Y) simplifies to Z. Similarly for NEG. */
3289 new = lookup_as_function (XEXP (x, 0), code);
3290 if (new)
3291 return fold_rtx (copy_rtx (XEXP (new, 0)), insn);
3292 break;
3294 case MEM:
3295 /* If we are not actually processing an insn, don't try to find the
3296 best address. Not only don't we care, but we could modify the
3297 MEM in an invalid way since we have no insn to validate against. */
3298 if (insn != 0)
3299 find_best_addr (insn, &XEXP (x, 0));
3302 /* Even if we don't fold in the insn itself,
3303 we can safely do so here, in hopes of getting a constant. */
3304 rtx addr = fold_rtx (XEXP (x, 0), NULL_RTX);
3305 rtx base = 0;
3306 HOST_WIDE_INT offset = 0;
3308 if (GET_CODE (addr) == REG
3309 && REGNO_QTY_VALID_P (REGNO (addr)))
3311 int addr_q = REG_QTY (REGNO (addr));
3312 struct qty_table_elem *addr_ent = &qty_table[addr_q];
3314 if (GET_MODE (addr) == addr_ent->mode
3315 && addr_ent->const_rtx != NULL_RTX)
3316 addr = addr_ent->const_rtx;
3319 /* If address is constant, split it into a base and integer offset. */
3320 if (GET_CODE (addr) == SYMBOL_REF || GET_CODE (addr) == LABEL_REF)
3321 base = addr;
3322 else if (GET_CODE (addr) == CONST && GET_CODE (XEXP (addr, 0)) == PLUS
3323 && GET_CODE (XEXP (XEXP (addr, 0), 1)) == CONST_INT)
3325 base = XEXP (XEXP (addr, 0), 0);
3326 offset = INTVAL (XEXP (XEXP (addr, 0), 1));
3328 else if (GET_CODE (addr) == LO_SUM
3329 && GET_CODE (XEXP (addr, 1)) == SYMBOL_REF)
3330 base = XEXP (addr, 1);
3331 else if (GET_CODE (addr) == ADDRESSOF)
3332 return change_address (x, VOIDmode, addr);
3334 /* If this is a constant pool reference, we can fold it into its
3335 constant to allow better value tracking. */
3336 if (base && GET_CODE (base) == SYMBOL_REF
3337 && CONSTANT_POOL_ADDRESS_P (base))
3339 rtx constant = get_pool_constant (base);
3340 enum machine_mode const_mode = get_pool_mode (base);
3341 rtx new;
3343 if (CONSTANT_P (constant) && GET_CODE (constant) != CONST_INT)
3344 constant_pool_entries_cost = COST (constant);
3346 /* If we are loading the full constant, we have an equivalence. */
3347 if (offset == 0 && mode == const_mode)
3348 return constant;
3350 /* If this actually isn't a constant (weird!), we can't do
3351 anything. Otherwise, handle the two most common cases:
3352 extracting a word from a multi-word constant, and extracting
3353 the low-order bits. Other cases don't seem common enough to
3354 worry about. */
3355 if (! CONSTANT_P (constant))
3356 return x;
3358 if (GET_MODE_CLASS (mode) == MODE_INT
3359 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
3360 && offset % UNITS_PER_WORD == 0
3361 && (new = operand_subword (constant,
3362 offset / UNITS_PER_WORD,
3363 0, const_mode)) != 0)
3364 return new;
3366 if (((BYTES_BIG_ENDIAN
3367 && offset == GET_MODE_SIZE (GET_MODE (constant)) - 1)
3368 || (! BYTES_BIG_ENDIAN && offset == 0))
3369 && (new = gen_lowpart_if_possible (mode, constant)) != 0)
3370 return new;
3373 /* If this is a reference to a label at a known position in a jump
3374 table, we also know its value. */
3375 if (base && GET_CODE (base) == LABEL_REF)
3377 rtx label = XEXP (base, 0);
3378 rtx table_insn = NEXT_INSN (label);
3380 if (table_insn && GET_CODE (table_insn) == JUMP_INSN
3381 && GET_CODE (PATTERN (table_insn)) == ADDR_VEC)
3383 rtx table = PATTERN (table_insn);
3385 if (offset >= 0
3386 && (offset / GET_MODE_SIZE (GET_MODE (table))
3387 < XVECLEN (table, 0)))
3388 return XVECEXP (table, 0,
3389 offset / GET_MODE_SIZE (GET_MODE (table)));
3391 if (table_insn && GET_CODE (table_insn) == JUMP_INSN
3392 && GET_CODE (PATTERN (table_insn)) == ADDR_DIFF_VEC)
3394 rtx table = PATTERN (table_insn);
3396 if (offset >= 0
3397 && (offset / GET_MODE_SIZE (GET_MODE (table))
3398 < XVECLEN (table, 1)))
3400 offset /= GET_MODE_SIZE (GET_MODE (table));
3401 new = gen_rtx_MINUS (Pmode, XVECEXP (table, 1, offset),
3402 XEXP (table, 0));
3404 if (GET_MODE (table) != Pmode)
3405 new = gen_rtx_TRUNCATE (GET_MODE (table), new);
3407 /* Indicate this is a constant. This isn't a
3408 valid form of CONST, but it will only be used
3409 to fold the next insns and then discarded, so
3410 it should be safe.
3412 Note this expression must be explicitly discarded,
3413 by cse_insn, else it may end up in a REG_EQUAL note
3414 and "escape" to cause problems elsewhere. */
3415 return gen_rtx_CONST (GET_MODE (new), new);
3420 return x;
3423 case ASM_OPERANDS:
3424 for (i = XVECLEN (x, 3) - 1; i >= 0; i--)
3425 validate_change (insn, &XVECEXP (x, 3, i),
3426 fold_rtx (XVECEXP (x, 3, i), insn), 0);
3427 break;
3429 default:
3430 break;
3433 const_arg0 = 0;
3434 const_arg1 = 0;
3435 const_arg2 = 0;
3436 mode_arg0 = VOIDmode;
3438 /* Try folding our operands.
3439 Then see which ones have constant values known. */
3441 fmt = GET_RTX_FORMAT (code);
3442 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3443 if (fmt[i] == 'e')
3445 rtx arg = XEXP (x, i);
3446 rtx folded_arg = arg, const_arg = 0;
3447 enum machine_mode mode_arg = GET_MODE (arg);
3448 rtx cheap_arg, expensive_arg;
3449 rtx replacements[2];
3450 int j;
3452 /* Most arguments are cheap, so handle them specially. */
3453 switch (GET_CODE (arg))
3455 case REG:
3456 /* This is the same as calling equiv_constant; it is duplicated
3457 here for speed. */
3458 if (REGNO_QTY_VALID_P (REGNO (arg)))
3460 int arg_q = REG_QTY (REGNO (arg));
3461 struct qty_table_elem *arg_ent = &qty_table[arg_q];
3463 if (arg_ent->const_rtx != NULL_RTX
3464 && GET_CODE (arg_ent->const_rtx) != REG
3465 && GET_CODE (arg_ent->const_rtx) != PLUS)
3466 const_arg
3467 = gen_lowpart_if_possible (GET_MODE (arg),
3468 arg_ent->const_rtx);
3470 break;
3472 case CONST:
3473 case CONST_INT:
3474 case SYMBOL_REF:
3475 case LABEL_REF:
3476 case CONST_DOUBLE:
3477 const_arg = arg;
3478 break;
3480 #ifdef HAVE_cc0
3481 case CC0:
3482 folded_arg = prev_insn_cc0;
3483 mode_arg = prev_insn_cc0_mode;
3484 const_arg = equiv_constant (folded_arg);
3485 break;
3486 #endif
3488 default:
3489 folded_arg = fold_rtx (arg, insn);
3490 const_arg = equiv_constant (folded_arg);
3493 /* For the first three operands, see if the operand
3494 is constant or equivalent to a constant. */
3495 switch (i)
3497 case 0:
3498 folded_arg0 = folded_arg;
3499 const_arg0 = const_arg;
3500 mode_arg0 = mode_arg;
3501 break;
3502 case 1:
3503 folded_arg1 = folded_arg;
3504 const_arg1 = const_arg;
3505 break;
3506 case 2:
3507 const_arg2 = const_arg;
3508 break;
3511 /* Pick the least expensive of the folded argument and an
3512 equivalent constant argument. */
3513 if (const_arg == 0 || const_arg == folded_arg
3514 || COST (const_arg) > COST (folded_arg))
3515 cheap_arg = folded_arg, expensive_arg = const_arg;
3516 else
3517 cheap_arg = const_arg, expensive_arg = folded_arg;
3519 /* Try to replace the operand with the cheapest of the two
3520 possibilities. If it doesn't work and this is either of the first
3521 two operands of a commutative operation, try swapping them.
3522 If THAT fails, try the more expensive, provided it is cheaper
3523 than what is already there. */
3525 if (cheap_arg == XEXP (x, i))
3526 continue;
3528 if (insn == 0 && ! copied)
3530 x = copy_rtx (x);
3531 copied = 1;
3534 replacements[0] = cheap_arg, replacements[1] = expensive_arg;
3535 for (j = 0;
3536 j < 2 && replacements[j]
3537 && COST (replacements[j]) < COST (XEXP (x, i));
3538 j++)
3540 if (validate_change (insn, &XEXP (x, i), replacements[j], 0))
3541 break;
3543 if (code == NE || code == EQ || GET_RTX_CLASS (code) == 'c')
3545 validate_change (insn, &XEXP (x, i), XEXP (x, 1 - i), 1);
3546 validate_change (insn, &XEXP (x, 1 - i), replacements[j], 1);
3548 if (apply_change_group ())
3550 /* Swap them back to be invalid so that this loop can
3551 continue and flag them to be swapped back later. */
3552 rtx tem;
3554 tem = XEXP (x, 0); XEXP (x, 0) = XEXP (x, 1);
3555 XEXP (x, 1) = tem;
3556 must_swap = 1;
3557 break;
3563 else
3565 if (fmt[i] == 'E')
3566 /* Don't try to fold inside of a vector of expressions.
3567 Doing nothing is harmless. */
3568 {;}
3571 /* If a commutative operation, place a constant integer as the second
3572 operand unless the first operand is also a constant integer. Otherwise,
3573 place any constant second unless the first operand is also a constant. */
3575 if (code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
3577 if (must_swap || (const_arg0
3578 && (const_arg1 == 0
3579 || (GET_CODE (const_arg0) == CONST_INT
3580 && GET_CODE (const_arg1) != CONST_INT))))
3582 register rtx tem = XEXP (x, 0);
3584 if (insn == 0 && ! copied)
3586 x = copy_rtx (x);
3587 copied = 1;
3590 validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
3591 validate_change (insn, &XEXP (x, 1), tem, 1);
3592 if (apply_change_group ())
3594 tem = const_arg0, const_arg0 = const_arg1, const_arg1 = tem;
3595 tem = folded_arg0, folded_arg0 = folded_arg1, folded_arg1 = tem;
3600 /* If X is an arithmetic operation, see if we can simplify it. */
3602 switch (GET_RTX_CLASS (code))
3604 case '1':
3606 int is_const = 0;
3608 /* We can't simplify extension ops unless we know the
3609 original mode. */
3610 if ((code == ZERO_EXTEND || code == SIGN_EXTEND)
3611 && mode_arg0 == VOIDmode)
3612 break;
3614 /* If we had a CONST, strip it off and put it back later if we
3615 fold. */
3616 if (const_arg0 != 0 && GET_CODE (const_arg0) == CONST)
3617 is_const = 1, const_arg0 = XEXP (const_arg0, 0);
3619 new = simplify_unary_operation (code, mode,
3620 const_arg0 ? const_arg0 : folded_arg0,
3621 mode_arg0);
3622 if (new != 0 && is_const)
3623 new = gen_rtx_CONST (mode, new);
3625 break;
3627 case '<':
3628 /* See what items are actually being compared and set FOLDED_ARG[01]
3629 to those values and CODE to the actual comparison code. If any are
3630 constant, set CONST_ARG0 and CONST_ARG1 appropriately. We needn't
3631 do anything if both operands are already known to be constant. */
3633 if (const_arg0 == 0 || const_arg1 == 0)
3635 struct table_elt *p0, *p1;
3636 rtx true = const_true_rtx, false = const0_rtx;
3637 enum machine_mode mode_arg1;
3639 #ifdef FLOAT_STORE_FLAG_VALUE
3640 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
3642 true = CONST_DOUBLE_FROM_REAL_VALUE (FLOAT_STORE_FLAG_VALUE,
3643 mode);
3644 false = CONST0_RTX (mode);
3646 #endif
3648 code = find_comparison_args (code, &folded_arg0, &folded_arg1,
3649 &mode_arg0, &mode_arg1);
3650 const_arg0 = equiv_constant (folded_arg0);
3651 const_arg1 = equiv_constant (folded_arg1);
3653 /* If the mode is VOIDmode or a MODE_CC mode, we don't know
3654 what kinds of things are being compared, so we can't do
3655 anything with this comparison. */
3657 if (mode_arg0 == VOIDmode || GET_MODE_CLASS (mode_arg0) == MODE_CC)
3658 break;
3660 /* If we do not now have two constants being compared, see
3661 if we can nevertheless deduce some things about the
3662 comparison. */
3663 if (const_arg0 == 0 || const_arg1 == 0)
3665 /* Is FOLDED_ARG0 frame-pointer plus a constant? Or
3666 non-explicit constant? These aren't zero, but we
3667 don't know their sign. */
3668 if (const_arg1 == const0_rtx
3669 && (NONZERO_BASE_PLUS_P (folded_arg0)
3670 #if 0 /* Sad to say, on sysvr4, #pragma weak can make a symbol address
3671 come out as 0. */
3672 || GET_CODE (folded_arg0) == SYMBOL_REF
3673 #endif
3674 || GET_CODE (folded_arg0) == LABEL_REF
3675 || GET_CODE (folded_arg0) == CONST))
3677 if (code == EQ)
3678 return false;
3679 else if (code == NE)
3680 return true;
/* See if the two operands are the same. We don't do this
   for IEEE floating-point, since we can't assume x == x:
   x might be a NaN. */
3687 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3688 || ! FLOAT_MODE_P (mode_arg0) || flag_fast_math)
3689 && (folded_arg0 == folded_arg1
3690 || (GET_CODE (folded_arg0) == REG
3691 && GET_CODE (folded_arg1) == REG
3692 && (REG_QTY (REGNO (folded_arg0))
3693 == REG_QTY (REGNO (folded_arg1))))
3694 || ((p0 = lookup (folded_arg0,
3695 (safe_hash (folded_arg0, mode_arg0)
3696 & HASH_MASK), mode_arg0))
3697 && (p1 = lookup (folded_arg1,
3698 (safe_hash (folded_arg1, mode_arg0)
3699 & HASH_MASK), mode_arg0))
3700 && p0->first_same_value == p1->first_same_value)))
3701 return ((code == EQ || code == LE || code == GE
3702 || code == LEU || code == GEU)
3703 ? true : false);
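/* Standalone illustration of the NaN hazard above: for IEEE floats,
   x == x is not a tautology.  */
#if 0
#include <stdio.h>
#include <math.h>

int
main (void)
{
  double x = nan ("");
  printf ("%d\n", x == x);   /* 0: a NaN compares unequal to itself.  */
  printf ("%d\n", 1 == 1);   /* 1: always holds for integers.  */
  return 0;
}
#endif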
3705 /* If FOLDED_ARG0 is a register, see if the comparison we are
3706 doing now is either the same as we did before or the reverse
3707 (we only check the reverse if not floating-point). */
3708 else if (GET_CODE (folded_arg0) == REG)
3710 int qty = REG_QTY (REGNO (folded_arg0));
3712 if (REGNO_QTY_VALID_P (REGNO (folded_arg0)))
3714 struct qty_table_elem *ent = &qty_table[qty];
3716 if ((comparison_dominates_p (ent->comparison_code, code)
3717 || (comparison_dominates_p (ent->comparison_code,
3718 reverse_condition (code))
3719 && ! FLOAT_MODE_P (mode_arg0)))
3720 && (rtx_equal_p (ent->comparison_const, folded_arg1)
3721 || (const_arg1
3722 && rtx_equal_p (ent->comparison_const,
3723 const_arg1))
3724 || (GET_CODE (folded_arg1) == REG
3725 && (REG_QTY (REGNO (folded_arg1)) == ent->comparison_qty))))
3726 return (comparison_dominates_p (ent->comparison_code, code)
3727 ? true : false);
3733 /* If we are comparing against zero, see if the first operand is
3734 equivalent to an IOR with a constant. If so, we may be able to
3735 determine the result of this comparison. */
3737 if (const_arg1 == const0_rtx)
3739 rtx y = lookup_as_function (folded_arg0, IOR);
3740 rtx inner_const;
3742 if (y != 0
3743 && (inner_const = equiv_constant (XEXP (y, 1))) != 0
3744 && GET_CODE (inner_const) == CONST_INT
3745 && INTVAL (inner_const) != 0)
3747 int sign_bitnum = GET_MODE_BITSIZE (mode_arg0) - 1;
int has_sign = (HOST_BITS_PER_WIDE_INT > sign_bitnum
3749 && (INTVAL (inner_const)
3750 & ((HOST_WIDE_INT) 1 << sign_bitnum)));
3751 rtx true = const_true_rtx, false = const0_rtx;
3753 #ifdef FLOAT_STORE_FLAG_VALUE
3754 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
3756 true = CONST_DOUBLE_FROM_REAL_VALUE (FLOAT_STORE_FLAG_VALUE,
3757 mode);
3758 false = CONST0_RTX (mode);
3760 #endif
3762 switch (code)
3764 case EQ:
3765 return false;
3766 case NE:
3767 return true;
3768 case LT: case LE:
3769 if (has_sign)
3770 return true;
3771 break;
3772 case GT: case GE:
3773 if (has_sign)
3774 return false;
3775 break;
3776 default:
3777 break;
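/* Standalone sketch of the sign-bit reasoning above: if X is known to
   be (ior Y C) with C nonzero, then X != 0; and if C has the sign bit
   set, X is negative on a two's-complement host, which decides the
   signed comparisons against zero.  */
#if 0
#include <stdio.h>
#include <stdint.h>

int
main (void)
{
  int32_t y = 12345;                  /* any value */
  int32_t c = (int32_t) 0x80000000;   /* sign bit set */
  int32_t x = y | c;
  printf ("%d %d\n", x != 0, x < 0);  /* always 1 1 */
  return 0;
}
#endif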
3782 new = simplify_relational_operation (code, mode_arg0,
3783 const_arg0 ? const_arg0 : folded_arg0,
3784 const_arg1 ? const_arg1 : folded_arg1);
3785 #ifdef FLOAT_STORE_FLAG_VALUE
3786 if (new != 0 && GET_MODE_CLASS (mode) == MODE_FLOAT)
3787 new = ((new == const0_rtx) ? CONST0_RTX (mode)
3788 : CONST_DOUBLE_FROM_REAL_VALUE (FLOAT_STORE_FLAG_VALUE, mode));
3789 #endif
3790 break;
3792 case '2':
3793 case 'c':
3794 switch (code)
3796 case PLUS:
3797 /* If the second operand is a LABEL_REF, see if the first is a MINUS
3798 with that LABEL_REF as its second operand. If so, the result is
3799 the first operand of that MINUS. This handles switches with an
3800 ADDR_DIFF_VEC table. */
3801 if (const_arg1 && GET_CODE (const_arg1) == LABEL_REF)
3803 rtx y
3804 = GET_CODE (folded_arg0) == MINUS ? folded_arg0
3805 : lookup_as_function (folded_arg0, MINUS);
3807 if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
3808 && XEXP (XEXP (y, 1), 0) == XEXP (const_arg1, 0))
3809 return XEXP (y, 0);
3811 /* Now try for a CONST of a MINUS like the above. */
3812 if ((y = (GET_CODE (folded_arg0) == CONST ? folded_arg0
3813 : lookup_as_function (folded_arg0, CONST))) != 0
3814 && GET_CODE (XEXP (y, 0)) == MINUS
3815 && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
&& XEXP (XEXP (XEXP (y, 0), 1), 0) == XEXP (const_arg1, 0))
3817 return XEXP (XEXP (y, 0), 0);
3820 /* Likewise if the operands are in the other order. */
3821 if (const_arg0 && GET_CODE (const_arg0) == LABEL_REF)
3823 rtx y
3824 = GET_CODE (folded_arg1) == MINUS ? folded_arg1
3825 : lookup_as_function (folded_arg1, MINUS);
3827 if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
3828 && XEXP (XEXP (y, 1), 0) == XEXP (const_arg0, 0))
3829 return XEXP (y, 0);
3831 /* Now try for a CONST of a MINUS like the above. */
3832 if ((y = (GET_CODE (folded_arg1) == CONST ? folded_arg1
3833 : lookup_as_function (folded_arg1, CONST))) != 0
3834 && GET_CODE (XEXP (y, 0)) == MINUS
3835 && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
&& XEXP (XEXP (XEXP (y, 0), 1), 0) == XEXP (const_arg0, 0))
3837 return XEXP (XEXP (y, 0), 0);
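/* A minimal standalone sketch (toy tree, not the compiler's rtx) of
   the fold above: (plus (minus Z (label L)) (label L)) simplifies to
   Z, the shape produced by ADDR_DIFF_VEC dispatch tables.  */
#if 0
#include <stddef.h>

enum toy_code { TOY_LEAF, TOY_LABEL, TOY_MINUS, TOY_PLUS };
struct toy { enum toy_code code; struct toy *op0, *op1; int label; };

/* Return Z when X is (plus (minus Z (label L)) (label L)), else X.  */
static struct toy *
toy_fold_plus (struct toy *x)
{
  if (x->code == TOY_PLUS
      && x->op1 && x->op1->code == TOY_LABEL
      && x->op0 && x->op0->code == TOY_MINUS
      && x->op0->op1->code == TOY_LABEL
      && x->op0->op1->label == x->op1->label)
    return x->op0->op0;
  return x;
}
#endif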
3840 /* If second operand is a register equivalent to a negative
3841 CONST_INT, see if we can find a register equivalent to the
3842 positive constant. Make a MINUS if so. Don't do this for
3843 a non-negative constant since we might then alternate between
choosing positive and negative constants. Having the positive
3845 constant previously-used is the more common case. Be sure
3846 the resulting constant is non-negative; if const_arg1 were
3847 the smallest negative number this would overflow: depending
3848 on the mode, this would either just be the same value (and
3849 hence not save anything) or be incorrect. */
3850 if (const_arg1 != 0 && GET_CODE (const_arg1) == CONST_INT
3851 && INTVAL (const_arg1) < 0
3852 /* This used to test
3854 - INTVAL (const_arg1) >= 0
But the Sun V5.0 compilers mis-compiled that test. So
3857 instead we test for the problematic value in a more direct
3858 manner and hope the Sun compilers get it correct. */
3859 && INTVAL (const_arg1) !=
3860 ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1))
3861 && GET_CODE (folded_arg1) == REG)
3863 rtx new_const = GEN_INT (- INTVAL (const_arg1));
3864 struct table_elt *p
3865 = lookup (new_const, safe_hash (new_const, mode) & HASH_MASK,
3866 mode);
3868 if (p)
3869 for (p = p->first_same_value; p; p = p->next_same_value)
3870 if (GET_CODE (p->exp) == REG)
3871 return simplify_gen_binary (MINUS, mode, folded_arg0,
3872 canon_reg (p->exp, NULL_RTX));
3874 goto from_plus;
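/* Standalone illustration of the guard above: the most negative value
   of the type must be rejected, because negating it overflows and, on
   a two's-complement machine, wraps back to the same value.  */
#if 0
#include <stdio.h>
#include <limits.h>

static int
safe_to_negate (long c)
{
  /* Mirrors the test above: only strictly negative values other than
     LONG_MIN have a representable positive negation.  */
  return c < 0 && c != LONG_MIN;
}

int
main (void)
{
  printf ("%d %d\n", safe_to_negate (-5L), safe_to_negate (LONG_MIN));
  return 0;
}
#endif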
3876 case MINUS:
3877 /* If we have (MINUS Y C), see if Y is known to be (PLUS Z C2).
3878 If so, produce (PLUS Z C2-C). */
3879 if (const_arg1 != 0 && GET_CODE (const_arg1) == CONST_INT)
3881 rtx y = lookup_as_function (XEXP (x, 0), PLUS);
3882 if (y && GET_CODE (XEXP (y, 1)) == CONST_INT)
3883 return fold_rtx (plus_constant (copy_rtx (y),
3884 -INTVAL (const_arg1)),
3885 NULL_RTX);
3888 /* ... fall through ... */
3890 from_plus:
3891 case SMIN: case SMAX: case UMIN: case UMAX:
3892 case IOR: case AND: case XOR:
3893 case MULT: case DIV: case UDIV:
3894 case ASHIFT: case LSHIFTRT: case ASHIFTRT:
3895 /* If we have (<op> <reg> <const_int>) for an associative OP and REG
3896 is known to be of similar form, we may be able to replace the
3897 operation with a combined operation. This may eliminate the
3898 intermediate operation if every use is simplified in this way.
3899 Note that the similar optimization done by combine.c only works
3900 if the intermediate operation's result has only one reference. */
3902 if (GET_CODE (folded_arg0) == REG
3903 && const_arg1 && GET_CODE (const_arg1) == CONST_INT)
3905 int is_shift
3906 = (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT);
3907 rtx y = lookup_as_function (folded_arg0, code);
3908 rtx inner_const;
3909 enum rtx_code associate_code;
3910 rtx new_const;
3912 if (y == 0
3913 || 0 == (inner_const
3914 = equiv_constant (fold_rtx (XEXP (y, 1), 0)))
3915 || GET_CODE (inner_const) != CONST_INT
3916 /* If we have compiled a statement like
3917 "if (x == (x & mask1))", and now are looking at
3918 "x & mask2", we will have a case where the first operand
3919 of Y is the same as our first operand. Unless we detect
3920 this case, an infinite loop will result. */
3921 || XEXP (y, 0) == folded_arg0)
3922 break;
3924 /* Don't associate these operations if they are a PLUS with the
3925 same constant and it is a power of two. These might be doable
3926 with a pre- or post-increment. Similarly for two subtracts of
identical powers of two with post-decrement. */
3929 if (code == PLUS && INTVAL (const_arg1) == INTVAL (inner_const)
3930 && ((HAVE_PRE_INCREMENT
3931 && exact_log2 (INTVAL (const_arg1)) >= 0)
3932 || (HAVE_POST_INCREMENT
3933 && exact_log2 (INTVAL (const_arg1)) >= 0)
3934 || (HAVE_PRE_DECREMENT
3935 && exact_log2 (- INTVAL (const_arg1)) >= 0)
3936 || (HAVE_POST_DECREMENT
3937 && exact_log2 (- INTVAL (const_arg1)) >= 0)))
3938 break;
3940 /* Compute the code used to compose the constants. For example,
3941 A/C1/C2 is A/(C1 * C2), so if CODE == DIV, we want MULT. */
3943 associate_code
3944 = (code == MULT || code == DIV || code == UDIV ? MULT
3945 : is_shift || code == PLUS || code == MINUS ? PLUS : code);
3947 new_const = simplify_binary_operation (associate_code, mode,
3948 const_arg1, inner_const);
3950 if (new_const == 0)
3951 break;
3953 /* If we are associating shift operations, don't let this
3954 produce a shift of the size of the object or larger.
3955 This could occur when we follow a sign-extend by a right
3956 shift on a machine that does a sign-extend as a pair
3957 of shifts. */
3959 if (is_shift && GET_CODE (new_const) == CONST_INT
3960 && INTVAL (new_const) >= GET_MODE_BITSIZE (mode))
3962 /* As an exception, we can turn an ASHIFTRT of this
3963 form into a shift of the number of bits - 1. */
3964 if (code == ASHIFTRT)
3965 new_const = GEN_INT (GET_MODE_BITSIZE (mode) - 1);
3966 else
3967 break;
3970 y = copy_rtx (XEXP (y, 0));
/* If Y contains our first operand (the most common way this
   can happen is if Y is a MEM), we would go into an infinite
   loop if we tried to fold it. So don't fold it in that case. */
3976 if (! reg_mentioned_p (folded_arg0, y))
3977 y = fold_rtx (y, insn);
3979 return simplify_gen_binary (code, mode, y, new_const);
3981 break;
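/* Standalone sketch of the constant composition above, on 32-bit
   unsigned values: two shifts compose by adding the counts, and two
   divisions compose by multiplying the divisors (the code above also
   keeps a composed shift count below the width of the mode).  */
#if 0
#include <stdio.h>
#include <stdint.h>

int
main (void)
{
  uint32_t x = 0xDEADBEEF;

  /* (x >> 3) >> 4 == x >> (3 + 4): the PLUS case for shifts.  */
  printf ("%d\n", ((x >> 3) >> 4) == (x >> (3 + 4)));

  /* (x / 3) / 4 == x / (3 * 4): the MULT case for DIV.  */
  printf ("%d\n", ((x / 3) / 4) == (x / (3 * 4)));
  return 0;
}
#endif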
3983 default:
3984 break;
3987 new = simplify_binary_operation (code, mode,
3988 const_arg0 ? const_arg0 : folded_arg0,
3989 const_arg1 ? const_arg1 : folded_arg1);
3990 break;
3992 case 'o':
3993 /* (lo_sum (high X) X) is simply X. */
3994 if (code == LO_SUM && const_arg0 != 0
3995 && GET_CODE (const_arg0) == HIGH
3996 && rtx_equal_p (XEXP (const_arg0, 0), const_arg1))
3997 return const_arg1;
3998 break;
4000 case '3':
4001 case 'b':
4002 new = simplify_ternary_operation (code, mode, mode_arg0,
4003 const_arg0 ? const_arg0 : folded_arg0,
4004 const_arg1 ? const_arg1 : folded_arg1,
4005 const_arg2 ? const_arg2 : XEXP (x, 2));
4006 break;
4008 case 'x':
4009 /* Always eliminate CONSTANT_P_RTX at this stage. */
4010 if (code == CONSTANT_P_RTX)
4011 return (const_arg0 ? const1_rtx : const0_rtx);
4012 break;
4015 return new ? new : x;
4018 /* Return a constant value currently equivalent to X.
4019 Return 0 if we don't know one. */
4021 static rtx
4022 equiv_constant (x)
4023 rtx x;
4025 if (GET_CODE (x) == REG
4026 && REGNO_QTY_VALID_P (REGNO (x)))
4028 int x_q = REG_QTY (REGNO (x));
4029 struct qty_table_elem *x_ent = &qty_table[x_q];
4031 if (x_ent->const_rtx)
4032 x = gen_lowpart_if_possible (GET_MODE (x), x_ent->const_rtx);
4035 if (x == 0 || CONSTANT_P (x))
4036 return x;
4038 /* If X is a MEM, try to fold it outside the context of any insn to see if
4039 it might be equivalent to a constant. That handles the case where it
4040 is a constant-pool reference. Then try to look it up in the hash table
4041 in case it is something whose value we have seen before. */
4043 if (GET_CODE (x) == MEM)
4045 struct table_elt *elt;
4047 x = fold_rtx (x, NULL_RTX);
4048 if (CONSTANT_P (x))
4049 return x;
4051 elt = lookup (x, safe_hash (x, GET_MODE (x)) & HASH_MASK, GET_MODE (x));
4052 if (elt == 0)
4053 return 0;
4055 for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
4056 if (elt->is_const && CONSTANT_P (elt->exp))
4057 return elt->exp;
4060 return 0;
4063 /* Assuming that X is an rtx (e.g., MEM, REG or SUBREG) for a fixed-point
4064 number, return an rtx (MEM, SUBREG, or CONST_INT) that refers to the
4065 least-significant part of X.
4066 MODE specifies how big a part of X to return.
4068 If the requested operation cannot be done, 0 is returned.
4070 This is similar to gen_lowpart in emit-rtl.c. */
rtx
gen_lowpart_if_possible (mode, x)
4074 enum machine_mode mode;
4075 register rtx x;
4077 rtx result = gen_lowpart_common (mode, x);
4079 if (result)
4080 return result;
4081 else if (GET_CODE (x) == MEM)
4083 /* This is the only other case we handle. */
4084 register int offset = 0;
4085 rtx new;
4087 if (WORDS_BIG_ENDIAN)
4088 offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
4089 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
4090 if (BYTES_BIG_ENDIAN)
4091 /* Adjust the address so that the address-after-the-data is
4092 unchanged. */
4093 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
4094 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
4095 new = gen_rtx_MEM (mode, plus_constant (XEXP (x, 0), offset));
4096 if (! memory_address_p (mode, XEXP (new, 0)))
4097 return 0;
4098 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
4099 MEM_COPY_ATTRIBUTES (new, x);
4100 return new;
4102 else
4103 return 0;
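/* Standalone sketch of the offset computation above: the byte offset
   of the low part of a wider memory value, with all sizes in bytes
   and WORD standing for UNITS_PER_WORD.  */
#if 0
#include <stdio.h>

#define MAX(a, b) ((a) > (b) ? (a) : (b))
#define MIN(a, b) ((a) < (b) ? (a) : (b))

static int
lowpart_offset (int inner_size, int part_size, int word,
                int words_big_endian, int bytes_big_endian)
{
  int offset = 0;
  if (words_big_endian)
    offset = MAX (inner_size, word) - MAX (part_size, word);
  if (bytes_big_endian)
    /* Keep the address-after-the-data unchanged within a word.  */
    offset -= MIN (word, part_size) - MIN (word, inner_size);
  return offset;
}

int
main (void)
{
  /* Low 2 bytes of an 8-byte value with 4-byte words: offset 6 on a
     fully big-endian machine, 0 on a little-endian one.  */
  printf ("%d %d\n", lowpart_offset (8, 2, 4, 1, 1),
          lowpart_offset (8, 2, 4, 0, 0));
  return 0;
}
#endif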
4106 /* Given INSN, a jump insn, TAKEN indicates if we are following the "taken"
4107 branch. It will be zero if not.
4109 In certain cases, this can cause us to add an equivalence. For example,
4110 if we are following the taken case of
4111 if (i == 2)
we can add the fact that `i' and `2' are now equivalent.
4114 In any case, we can record that this comparison was passed. If the same
4115 comparison is seen later, we will know its value. */
4117 static void
4118 record_jump_equiv (insn, taken)
4119 rtx insn;
4120 int taken;
4122 int cond_known_true;
4123 rtx op0, op1;
4124 enum machine_mode mode, mode0, mode1;
4125 int reversed_nonequality = 0;
4126 enum rtx_code code;
4128 /* Ensure this is the right kind of insn. */
4129 if (! condjump_p (insn) || simplejump_p (insn))
4130 return;
4132 /* See if this jump condition is known true or false. */
4133 if (taken)
4134 cond_known_true = (XEXP (SET_SRC (PATTERN (insn)), 2) == pc_rtx);
4135 else
4136 cond_known_true = (XEXP (SET_SRC (PATTERN (insn)), 1) == pc_rtx);
4138 /* Get the type of comparison being done and the operands being compared.
4139 If we had to reverse a non-equality condition, record that fact so we
4140 know that it isn't valid for floating-point. */
4141 code = GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 0));
4142 op0 = fold_rtx (XEXP (XEXP (SET_SRC (PATTERN (insn)), 0), 0), insn);
4143 op1 = fold_rtx (XEXP (XEXP (SET_SRC (PATTERN (insn)), 0), 1), insn);
4145 code = find_comparison_args (code, &op0, &op1, &mode0, &mode1);
4146 if (! cond_known_true)
4148 reversed_nonequality = (code != EQ && code != NE);
4149 code = reverse_condition (code);
4152 /* The mode is the mode of the non-constant. */
4153 mode = mode0;
4154 if (mode1 != VOIDmode)
4155 mode = mode1;
4157 record_jump_cond (code, mode, op0, op1, reversed_nonequality);
4160 /* We know that comparison CODE applied to OP0 and OP1 in MODE is true.
4161 REVERSED_NONEQUALITY is nonzero if CODE had to be swapped.
Make any useful entries we can with that information. Called from
the function above, and called recursively. */
4165 static void
4166 record_jump_cond (code, mode, op0, op1, reversed_nonequality)
4167 enum rtx_code code;
4168 enum machine_mode mode;
4169 rtx op0, op1;
4170 int reversed_nonequality;
4172 unsigned op0_hash, op1_hash;
4173 int op0_in_memory, op1_in_memory;
4174 struct table_elt *op0_elt, *op1_elt;
4176 /* If OP0 and OP1 are known equal, and either is a paradoxical SUBREG,
4177 we know that they are also equal in the smaller mode (this is also
true for all smaller modes whether or not there is a SUBREG, but
it is not worth testing for when there is no SUBREG). */
4181 /* Note that GET_MODE (op0) may not equal MODE. */
4182 if (code == EQ && GET_CODE (op0) == SUBREG
4183 && (GET_MODE_SIZE (GET_MODE (op0))
4184 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
4186 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
4187 rtx tem = gen_lowpart_if_possible (inner_mode, op1);
4189 record_jump_cond (code, mode, SUBREG_REG (op0),
4190 tem ? tem : gen_rtx_SUBREG (inner_mode, op1, 0),
4191 reversed_nonequality);
4194 if (code == EQ && GET_CODE (op1) == SUBREG
4195 && (GET_MODE_SIZE (GET_MODE (op1))
4196 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
4198 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
4199 rtx tem = gen_lowpart_if_possible (inner_mode, op0);
4201 record_jump_cond (code, mode, SUBREG_REG (op1),
4202 tem ? tem : gen_rtx_SUBREG (inner_mode, op0, 0),
4203 reversed_nonequality);
4206 /* Similarly, if this is an NE comparison, and either is a SUBREG
4207 making a smaller mode, we know the whole thing is also NE. */
4209 /* Note that GET_MODE (op0) may not equal MODE;
4210 if we test MODE instead, we can get an infinite recursion
4211 alternating between two modes each wider than MODE. */
4213 if (code == NE && GET_CODE (op0) == SUBREG
4214 && subreg_lowpart_p (op0)
4215 && (GET_MODE_SIZE (GET_MODE (op0))
4216 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
4218 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
4219 rtx tem = gen_lowpart_if_possible (inner_mode, op1);
4221 record_jump_cond (code, mode, SUBREG_REG (op0),
4222 tem ? tem : gen_rtx_SUBREG (inner_mode, op1, 0),
4223 reversed_nonequality);
4226 if (code == NE && GET_CODE (op1) == SUBREG
4227 && subreg_lowpart_p (op1)
4228 && (GET_MODE_SIZE (GET_MODE (op1))
4229 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
4231 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
4232 rtx tem = gen_lowpart_if_possible (inner_mode, op0);
4234 record_jump_cond (code, mode, SUBREG_REG (op1),
4235 tem ? tem : gen_rtx_SUBREG (inner_mode, op0, 0),
4236 reversed_nonequality);
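/* Standalone illustration of the two recursions above, using casts in
   place of SUBREGs: equality of wide values implies equality of their
   low parts, and inequality of low parts implies inequality of the
   wide values.  (For 0/1 ints, p <= q reads as "p implies q".)  */
#if 0
#include <stdio.h>
#include <stdint.h>

int
main (void)
{
  uint32_t a = 0x12340005, b = 0x56780009;

  /* NE on the low 16 bits implies NE on the full 32 bits.  */
  printf ("%d\n", ((uint16_t) a != (uint16_t) b) <= (a != b));  /* 1 */

  /* EQ on the full 32 bits implies EQ on the low 16 bits.  */
  b = a;
  printf ("%d\n", (a == b) <= ((uint16_t) a == (uint16_t) b));  /* 1 */
  return 0;
}
#endif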
4239 /* Hash both operands. */
4241 do_not_record = 0;
4242 hash_arg_in_memory = 0;
4243 op0_hash = HASH (op0, mode);
4244 op0_in_memory = hash_arg_in_memory;
4246 if (do_not_record)
4247 return;
4249 do_not_record = 0;
4250 hash_arg_in_memory = 0;
4251 op1_hash = HASH (op1, mode);
4252 op1_in_memory = hash_arg_in_memory;
4254 if (do_not_record)
4255 return;
4257 /* Look up both operands. */
4258 op0_elt = lookup (op0, op0_hash, mode);
4259 op1_elt = lookup (op1, op1_hash, mode);
4261 /* If both operands are already equivalent or if they are not in the
4262 table but are identical, do nothing. */
4263 if ((op0_elt != 0 && op1_elt != 0
4264 && op0_elt->first_same_value == op1_elt->first_same_value)
4265 || op0 == op1 || rtx_equal_p (op0, op1))
4266 return;
/* If we aren't setting two things equal, all we can do is save this
4269 comparison. Similarly if this is floating-point. In the latter
4270 case, OP1 might be zero and both -0.0 and 0.0 are equal to it.
4271 If we record the equality, we might inadvertently delete code
4272 whose intent was to change -0 to +0. */
4274 if (code != EQ || FLOAT_MODE_P (GET_MODE (op0)))
4276 struct qty_table_elem *ent;
4277 int qty;
/* If we reversed a floating-point comparison, if OP0 is not a
   register, or if OP1 is neither a register nor a constant, we can't
   do anything. */
4283 if (GET_CODE (op1) != REG)
4284 op1 = equiv_constant (op1);
4286 if ((reversed_nonequality && FLOAT_MODE_P (mode))
4287 || GET_CODE (op0) != REG || op1 == 0)
4288 return;
4290 /* Put OP0 in the hash table if it isn't already. This gives it a
4291 new quantity number. */
4292 if (op0_elt == 0)
4294 if (insert_regs (op0, NULL_PTR, 0))
4296 rehash_using_reg (op0);
4297 op0_hash = HASH (op0, mode);
4299 /* If OP0 is contained in OP1, this changes its hash code
4300 as well. Faster to rehash than to check, except
4301 for the simple case of a constant. */
4302 if (! CONSTANT_P (op1))
op1_hash = HASH (op1, mode);
4306 op0_elt = insert (op0, NULL_PTR, op0_hash, mode);
4307 op0_elt->in_memory = op0_in_memory;
4310 qty = REG_QTY (REGNO (op0));
4311 ent = &qty_table[qty];
4313 ent->comparison_code = code;
4314 if (GET_CODE (op1) == REG)
4316 /* Look it up again--in case op0 and op1 are the same. */
4317 op1_elt = lookup (op1, op1_hash, mode);
4319 /* Put OP1 in the hash table so it gets a new quantity number. */
4320 if (op1_elt == 0)
4322 if (insert_regs (op1, NULL_PTR, 0))
4324 rehash_using_reg (op1);
4325 op1_hash = HASH (op1, mode);
4328 op1_elt = insert (op1, NULL_PTR, op1_hash, mode);
4329 op1_elt->in_memory = op1_in_memory;
4332 ent->comparison_const = NULL_RTX;
4333 ent->comparison_qty = REG_QTY (REGNO (op1));
4335 else
4337 ent->comparison_const = op1;
4338 ent->comparison_qty = -1;
4341 return;
/* If either side is still missing an equivalence, make one now,
4345 then merge the equivalences. */
4347 if (op0_elt == 0)
4349 if (insert_regs (op0, NULL_PTR, 0))
4351 rehash_using_reg (op0);
4352 op0_hash = HASH (op0, mode);
4355 op0_elt = insert (op0, NULL_PTR, op0_hash, mode);
4356 op0_elt->in_memory = op0_in_memory;
4359 if (op1_elt == 0)
4361 if (insert_regs (op1, NULL_PTR, 0))
4363 rehash_using_reg (op1);
4364 op1_hash = HASH (op1, mode);
4367 op1_elt = insert (op1, NULL_PTR, op1_hash, mode);
4368 op1_elt->in_memory = op1_in_memory;
4371 merge_equiv_classes (op0_elt, op1_elt);
4372 last_jump_equiv_class = op0_elt;
4375 /* CSE processing for one instruction.
4376 First simplify sources and addresses of all assignments
in the instruction, using previously-computed equivalent values.
4378 Then install the new sources and destinations in the table
4379 of available values.
4381 If LIBCALL_INSN is nonzero, don't record any equivalence made in
the insn. It means that INSN is inside a libcall block. In this
4383 case LIBCALL_INSN is the corresponding insn with REG_LIBCALL. */
4385 /* Data on one SET contained in the instruction. */
4387 struct set
4389 /* The SET rtx itself. */
4390 rtx rtl;
4391 /* The SET_SRC of the rtx (the original value, if it is changing). */
4392 rtx src;
4393 /* The hash-table element for the SET_SRC of the SET. */
4394 struct table_elt *src_elt;
4395 /* Hash value for the SET_SRC. */
4396 unsigned src_hash;
4397 /* Hash value for the SET_DEST. */
4398 unsigned dest_hash;
4399 /* The SET_DEST, with SUBREG, etc., stripped. */
4400 rtx inner_dest;
4401 /* Nonzero if the SET_SRC is in memory. */
4402 char src_in_memory;
4403 /* Nonzero if the SET_SRC contains something
4404 whose value cannot be predicted and understood. */
4405 char src_volatile;
4406 /* Original machine mode, in case it becomes a CONST_INT. */
4407 enum machine_mode mode;
4408 /* A constant equivalent for SET_SRC, if any. */
4409 rtx src_const;
4410 /* Original SET_SRC value used for libcall notes. */
4411 rtx orig_src;
4412 /* Hash value of constant equivalent for SET_SRC. */
4413 unsigned src_const_hash;
4414 /* Table entry for constant equivalent for SET_SRC, if any. */
4415 struct table_elt *src_const_elt;
4418 static void
4419 cse_insn (insn, libcall_insn)
4420 rtx insn;
4421 rtx libcall_insn;
4423 register rtx x = PATTERN (insn);
4424 register int i;
4425 rtx tem;
4426 register int n_sets = 0;
4428 #ifdef HAVE_cc0
4429 /* Records what this insn does to set CC0. */
4430 rtx this_insn_cc0 = 0;
4431 enum machine_mode this_insn_cc0_mode = VOIDmode;
4432 #endif
4434 rtx src_eqv = 0;
4435 struct table_elt *src_eqv_elt = 0;
4436 int src_eqv_volatile = 0;
4437 int src_eqv_in_memory = 0;
4438 unsigned src_eqv_hash = 0;
4440 struct set *sets = (struct set *) NULL_PTR;
4442 this_insn = insn;
4444 /* Find all the SETs and CLOBBERs in this instruction.
Record all the SETs in the array `sets' and count them.
4446 Also determine whether there is a CLOBBER that invalidates
4447 all memory references, or all references at varying addresses. */
4449 if (GET_CODE (insn) == CALL_INSN)
4451 for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
4452 if (GET_CODE (XEXP (tem, 0)) == CLOBBER)
4453 invalidate (SET_DEST (XEXP (tem, 0)), VOIDmode);
4456 if (GET_CODE (x) == SET)
4458 sets = (struct set *) alloca (sizeof (struct set));
4459 sets[0].rtl = x;
4461 /* Ignore SETs that are unconditional jumps.
4462 They never need cse processing, so this does not hurt.
4463 The reason is not efficiency but rather
4464 so that we can test at the end for instructions
4465 that have been simplified to unconditional jumps
4466 and not be misled by unchanged instructions
4467 that were unconditional jumps to begin with. */
4468 if (SET_DEST (x) == pc_rtx
4469 && GET_CODE (SET_SRC (x)) == LABEL_REF)
4472 /* Don't count call-insns, (set (reg 0) (call ...)), as a set.
4473 The hard function value register is used only once, to copy to
4474 someplace else, so it isn't worth cse'ing (and on 80386 is unsafe)!
4475 Ensure we invalidate the destination register. On the 80386 no
4476 other code would invalidate it since it is a fixed_reg.
4477 We need not check the return of apply_change_group; see canon_reg. */
4479 else if (GET_CODE (SET_SRC (x)) == CALL)
4481 canon_reg (SET_SRC (x), insn);
4482 apply_change_group ();
4483 fold_rtx (SET_SRC (x), insn);
4484 invalidate (SET_DEST (x), VOIDmode);
4486 else
4487 n_sets = 1;
4489 else if (GET_CODE (x) == PARALLEL)
4491 register int lim = XVECLEN (x, 0);
4493 sets = (struct set *) alloca (lim * sizeof (struct set));
4495 /* Find all regs explicitly clobbered in this insn,
4496 and ensure they are not replaced with any other regs
4497 elsewhere in this insn.
4498 When a reg that is clobbered is also used for input,
4499 we should presume that that is for a reason,
4500 and we should not substitute some other register
4501 which is not supposed to be clobbered.
4502 Therefore, this loop cannot be merged into the one below
4503 because a CALL may precede a CLOBBER and refer to the
4504 value clobbered. We must not let a canonicalization do
4505 anything in that case. */
4506 for (i = 0; i < lim; i++)
4508 register rtx y = XVECEXP (x, 0, i);
4509 if (GET_CODE (y) == CLOBBER)
4511 rtx clobbered = XEXP (y, 0);
4513 if (GET_CODE (clobbered) == REG
4514 || GET_CODE (clobbered) == SUBREG)
4515 invalidate (clobbered, VOIDmode);
4516 else if (GET_CODE (clobbered) == STRICT_LOW_PART
4517 || GET_CODE (clobbered) == ZERO_EXTRACT)
4518 invalidate (XEXP (clobbered, 0), GET_MODE (clobbered));
4522 for (i = 0; i < lim; i++)
4524 register rtx y = XVECEXP (x, 0, i);
4525 if (GET_CODE (y) == SET)
4527 /* As above, we ignore unconditional jumps and call-insns and
4528 ignore the result of apply_change_group. */
4529 if (GET_CODE (SET_SRC (y)) == CALL)
4531 canon_reg (SET_SRC (y), insn);
4532 apply_change_group ();
4533 fold_rtx (SET_SRC (y), insn);
4534 invalidate (SET_DEST (y), VOIDmode);
4536 else if (SET_DEST (y) == pc_rtx
4537 && GET_CODE (SET_SRC (y)) == LABEL_REF)
4539 else
4540 sets[n_sets++].rtl = y;
4542 else if (GET_CODE (y) == CLOBBER)
4544 /* If we clobber memory, canon the address.
4545 This does nothing when a register is clobbered
4546 because we have already invalidated the reg. */
4547 if (GET_CODE (XEXP (y, 0)) == MEM)
4548 canon_reg (XEXP (y, 0), NULL_RTX);
4550 else if (GET_CODE (y) == USE
4551 && ! (GET_CODE (XEXP (y, 0)) == REG
4552 && REGNO (XEXP (y, 0)) < FIRST_PSEUDO_REGISTER))
4553 canon_reg (y, NULL_RTX);
4554 else if (GET_CODE (y) == CALL)
4556 /* The result of apply_change_group can be ignored; see
4557 canon_reg. */
4558 canon_reg (y, insn);
4559 apply_change_group ();
4560 fold_rtx (y, insn);
4564 else if (GET_CODE (x) == CLOBBER)
4566 if (GET_CODE (XEXP (x, 0)) == MEM)
4567 canon_reg (XEXP (x, 0), NULL_RTX);
4570 /* Canonicalize a USE of a pseudo register or memory location. */
4571 else if (GET_CODE (x) == USE
4572 && ! (GET_CODE (XEXP (x, 0)) == REG
4573 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER))
4574 canon_reg (XEXP (x, 0), NULL_RTX);
4575 else if (GET_CODE (x) == CALL)
4577 /* The result of apply_change_group can be ignored; see canon_reg. */
4578 canon_reg (x, insn);
4579 apply_change_group ();
4580 fold_rtx (x, insn);
4583 /* Store the equivalent value in SRC_EQV, if different, or if the DEST
4584 is a STRICT_LOW_PART. The latter condition is necessary because SRC_EQV
4585 is handled specially for this case, and if it isn't set, then there will
4586 be no equivalence for the destination. */
4587 if (n_sets == 1 && REG_NOTES (insn) != 0
4588 && (tem = find_reg_note (insn, REG_EQUAL, NULL_RTX)) != 0
4589 && (! rtx_equal_p (XEXP (tem, 0), SET_SRC (sets[0].rtl))
4590 || GET_CODE (SET_DEST (sets[0].rtl)) == STRICT_LOW_PART))
4591 src_eqv = canon_reg (XEXP (tem, 0), NULL_RTX);
4593 /* Canonicalize sources and addresses of destinations.
4594 We do this in a separate pass to avoid problems when a MATCH_DUP is
4595 present in the insn pattern. In that case, we want to ensure that
4596 we don't break the duplicate nature of the pattern. So we will replace
4597 both operands at the same time. Otherwise, we would fail to find an
4598 equivalent substitution in the loop calling validate_change below.
4600 We used to suppress canonicalization of DEST if it appears in SRC,
4601 but we don't do this any more. */
4603 for (i = 0; i < n_sets; i++)
4605 rtx dest = SET_DEST (sets[i].rtl);
4606 rtx src = SET_SRC (sets[i].rtl);
4607 rtx new = canon_reg (src, insn);
4608 int insn_code;
4610 sets[i].orig_src = src;
4611 if ((GET_CODE (new) == REG && GET_CODE (src) == REG
4612 && ((REGNO (new) < FIRST_PSEUDO_REGISTER)
4613 != (REGNO (src) < FIRST_PSEUDO_REGISTER)))
4614 || (insn_code = recog_memoized (insn)) < 0
4615 || insn_data[insn_code].n_dups > 0)
4616 validate_change (insn, &SET_SRC (sets[i].rtl), new, 1);
4617 else
4618 SET_SRC (sets[i].rtl) = new;
4620 if (GET_CODE (dest) == ZERO_EXTRACT || GET_CODE (dest) == SIGN_EXTRACT)
4622 validate_change (insn, &XEXP (dest, 1),
4623 canon_reg (XEXP (dest, 1), insn), 1);
4624 validate_change (insn, &XEXP (dest, 2),
4625 canon_reg (XEXP (dest, 2), insn), 1);
4628 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
4629 || GET_CODE (dest) == ZERO_EXTRACT
4630 || GET_CODE (dest) == SIGN_EXTRACT)
4631 dest = XEXP (dest, 0);
4633 if (GET_CODE (dest) == MEM)
4634 canon_reg (dest, insn);
4637 /* Now that we have done all the replacements, we can apply the change
4638 group and see if they all work. Note that this will cause some
4639 canonicalizations that would have worked individually not to be applied
4640 because some other canonicalization didn't work, but this should not
4641 occur often.
4643 The result of apply_change_group can be ignored; see canon_reg. */
4645 apply_change_group ();
4647 /* Set sets[i].src_elt to the class each source belongs to.
4648 Detect assignments from or to volatile things
and set sets[i] to zero so they will be ignored
4650 in the rest of this function.
4652 Nothing in this loop changes the hash table or the register chains. */
4654 for (i = 0; i < n_sets; i++)
4656 register rtx src, dest;
4657 register rtx src_folded;
4658 register struct table_elt *elt = 0, *p;
4659 enum machine_mode mode;
4660 rtx src_eqv_here;
4661 rtx src_const = 0;
4662 rtx src_related = 0;
4663 struct table_elt *src_const_elt = 0;
4664 int src_cost = 10000, src_eqv_cost = 10000, src_folded_cost = 10000;
4665 int src_related_cost = 10000, src_elt_cost = 10000;
/* Set non-zero if we need to call force_const_mem on the
   contents of src_folded before using it. */
4668 int src_folded_force_flag = 0;
4670 dest = SET_DEST (sets[i].rtl);
4671 src = SET_SRC (sets[i].rtl);
4673 /* If SRC is a constant that has no machine mode,
4674 hash it with the destination's machine mode.
4675 This way we can keep different modes separate. */
4677 mode = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
4678 sets[i].mode = mode;
4680 if (src_eqv)
4682 enum machine_mode eqvmode = mode;
4683 if (GET_CODE (dest) == STRICT_LOW_PART)
4684 eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
4685 do_not_record = 0;
4686 hash_arg_in_memory = 0;
4687 src_eqv = fold_rtx (src_eqv, insn);
4688 src_eqv_hash = HASH (src_eqv, eqvmode);
4690 /* Find the equivalence class for the equivalent expression. */
4692 if (!do_not_record)
4693 src_eqv_elt = lookup (src_eqv, src_eqv_hash, eqvmode);
4695 src_eqv_volatile = do_not_record;
4696 src_eqv_in_memory = hash_arg_in_memory;
4699 /* If this is a STRICT_LOW_PART assignment, src_eqv corresponds to the
4700 value of the INNER register, not the destination. So it is not
4701 a valid substitution for the source. But save it for later. */
4702 if (GET_CODE (dest) == STRICT_LOW_PART)
4703 src_eqv_here = 0;
4704 else
4705 src_eqv_here = src_eqv;
/* Simplify any foldable subexpressions in SRC. Then get the fully-
   simplified result, which may not necessarily be valid. */
4709 src_folded = fold_rtx (src, insn);
4711 #if 0
4712 /* ??? This caused bad code to be generated for the m68k port with -O2.
4713 Suppose src is (CONST_INT -1), and that after truncation src_folded
4714 is (CONST_INT 3). Suppose src_folded is then used for src_const.
4715 At the end we will add src and src_const to the same equivalence
4716 class. We now have 3 and -1 on the same equivalence class. This
4717 causes later instructions to be mis-optimized. */
4718 /* If storing a constant in a bitfield, pre-truncate the constant
4719 so we will be able to record it later. */
4720 if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
4721 || GET_CODE (SET_DEST (sets[i].rtl)) == SIGN_EXTRACT)
4723 rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
4725 if (GET_CODE (src) == CONST_INT
4726 && GET_CODE (width) == CONST_INT
4727 && INTVAL (width) < HOST_BITS_PER_WIDE_INT
4728 && (INTVAL (src) & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
4729 src_folded
4730 = GEN_INT (INTVAL (src) & (((HOST_WIDE_INT) 1
4731 << INTVAL (width)) - 1));
4733 #endif
4735 /* Compute SRC's hash code, and also notice if it
4736 should not be recorded at all. In that case,
4737 prevent any further processing of this assignment. */
4738 do_not_record = 0;
4739 hash_arg_in_memory = 0;
4741 sets[i].src = src;
4742 sets[i].src_hash = HASH (src, mode);
4743 sets[i].src_volatile = do_not_record;
4744 sets[i].src_in_memory = hash_arg_in_memory;
4746 /* If SRC is a MEM, there is a REG_EQUIV note for SRC, and DEST is
4747 a pseudo that is set more than once, do not record SRC. Using
4748 SRC as a replacement for anything else will be incorrect in that
4749 situation. Note that this usually occurs only for stack slots,
4750 in which case all the RTL would be referring to SRC, so we don't
4751 lose any optimization opportunities by not having SRC in the
4752 hash table. */
4754 if (GET_CODE (src) == MEM
4755 && find_reg_note (insn, REG_EQUIV, src) != 0
4756 && GET_CODE (dest) == REG
4757 && REGNO (dest) >= FIRST_PSEUDO_REGISTER
4758 && REG_N_SETS (REGNO (dest)) != 1)
4759 sets[i].src_volatile = 1;
4761 #if 0
4762 /* It is no longer clear why we used to do this, but it doesn't
4763 appear to still be needed. So let's try without it since this
4764 code hurts cse'ing widened ops. */
4765 /* If source is a perverse subreg (such as QI treated as an SI),
4766 treat it as volatile. It may do the work of an SI in one context
4767 where the extra bits are not being used, but cannot replace an SI
4768 in general. */
4769 if (GET_CODE (src) == SUBREG
4770 && (GET_MODE_SIZE (GET_MODE (src))
4771 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))))
4772 sets[i].src_volatile = 1;
4773 #endif
4775 /* Locate all possible equivalent forms for SRC. Try to replace
4776 SRC in the insn with each cheaper equivalent.
4778 We have the following types of equivalents: SRC itself, a folded
4779 version, a value given in a REG_EQUAL note, or a value related
4780 to a constant.
4782 Each of these equivalents may be part of an additional class
4783 of equivalents (if more than one is in the table, they must be in
4784 the same class; we check for this).
4786 If the source is volatile, we don't do any table lookups.
4788 We note any constant equivalent for possible later use in a
4789 REG_NOTE. */
4791 if (!sets[i].src_volatile)
4792 elt = lookup (src, sets[i].src_hash, mode);
4794 sets[i].src_elt = elt;
4796 if (elt && src_eqv_here && src_eqv_elt)
4798 if (elt->first_same_value != src_eqv_elt->first_same_value)
/* The REG_EQUAL note indicates that two formerly distinct
   classes are now equivalent. So merge them. */
4802 merge_equiv_classes (elt, src_eqv_elt);
4803 src_eqv_hash = HASH (src_eqv, elt->mode);
4804 src_eqv_elt = lookup (src_eqv, src_eqv_hash, elt->mode);
4807 src_eqv_here = 0;
4810 else if (src_eqv_elt)
4811 elt = src_eqv_elt;
4813 /* Try to find a constant somewhere and record it in `src_const'.
4814 Record its table element, if any, in `src_const_elt'. Look in
4815 any known equivalences first. (If the constant is not in the
4816 table, also set `sets[i].src_const_hash'). */
4817 if (elt)
4818 for (p = elt->first_same_value; p; p = p->next_same_value)
4819 if (p->is_const)
4821 src_const = p->exp;
4822 src_const_elt = elt;
4823 break;
4826 if (src_const == 0
4827 && (CONSTANT_P (src_folded)
4828 /* Consider (minus (label_ref L1) (label_ref L2)) as
4829 "constant" here so we will record it. This allows us
4830 to fold switch statements when an ADDR_DIFF_VEC is used. */
4831 || (GET_CODE (src_folded) == MINUS
4832 && GET_CODE (XEXP (src_folded, 0)) == LABEL_REF
4833 && GET_CODE (XEXP (src_folded, 1)) == LABEL_REF)))
4834 src_const = src_folded, src_const_elt = elt;
4835 else if (src_const == 0 && src_eqv_here && CONSTANT_P (src_eqv_here))
4836 src_const = src_eqv_here, src_const_elt = src_eqv_elt;
4838 /* If we don't know if the constant is in the table, get its
4839 hash code and look it up. */
4840 if (src_const && src_const_elt == 0)
4842 sets[i].src_const_hash = HASH (src_const, mode);
4843 src_const_elt = lookup (src_const, sets[i].src_const_hash, mode);
4846 sets[i].src_const = src_const;
4847 sets[i].src_const_elt = src_const_elt;
4849 /* If the constant and our source are both in the table, mark them as
4850 equivalent. Otherwise, if a constant is in the table but the source
4851 isn't, set ELT to it. */
4852 if (src_const_elt && elt
4853 && src_const_elt->first_same_value != elt->first_same_value)
4854 merge_equiv_classes (elt, src_const_elt);
4855 else if (src_const_elt && elt == 0)
4856 elt = src_const_elt;
4858 /* See if there is a register linearly related to a constant
4859 equivalent of SRC. */
4860 if (src_const
4861 && (GET_CODE (src_const) == CONST
4862 || (src_const_elt && src_const_elt->related_value != 0)))
4864 src_related = use_related_value (src_const, src_const_elt);
4865 if (src_related)
4867 struct table_elt *src_related_elt
4868 = lookup (src_related, HASH (src_related, mode), mode);
4869 if (src_related_elt && elt)
4871 if (elt->first_same_value
4872 != src_related_elt->first_same_value)
4873 /* This can occur when we previously saw a CONST
4874 involving a SYMBOL_REF and then see the SYMBOL_REF
4875 twice. Merge the involved classes. */
4876 merge_equiv_classes (elt, src_related_elt);
4878 src_related = 0;
4879 src_related_elt = 0;
4881 else if (src_related_elt && elt == 0)
4882 elt = src_related_elt;
4886 /* See if we have a CONST_INT that is already in a register in a
4887 wider mode. */
4889 if (src_const && src_related == 0 && GET_CODE (src_const) == CONST_INT
4890 && GET_MODE_CLASS (mode) == MODE_INT
4891 && GET_MODE_BITSIZE (mode) < BITS_PER_WORD)
4893 enum machine_mode wider_mode;
4895 for (wider_mode = GET_MODE_WIDER_MODE (mode);
4896 GET_MODE_BITSIZE (wider_mode) <= BITS_PER_WORD
4897 && src_related == 0;
4898 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
4900 struct table_elt *const_elt
4901 = lookup (src_const, HASH (src_const, wider_mode), wider_mode);
4903 if (const_elt == 0)
4904 continue;
4906 for (const_elt = const_elt->first_same_value;
4907 const_elt; const_elt = const_elt->next_same_value)
4908 if (GET_CODE (const_elt->exp) == REG)
4910 src_related = gen_lowpart_if_possible (mode,
4911 const_elt->exp);
4912 break;
4917 /* Another possibility is that we have an AND with a constant in
4918 a mode narrower than a word. If so, it might have been generated
4919 as part of an "if" which would narrow the AND. If we already
4920 have done the AND in a wider mode, we can use a SUBREG of that
4921 value. */
4923 if (flag_expensive_optimizations && ! src_related
4924 && GET_CODE (src) == AND && GET_CODE (XEXP (src, 1)) == CONST_INT
4925 && GET_MODE_SIZE (mode) < UNITS_PER_WORD)
4927 enum machine_mode tmode;
4928 rtx new_and = gen_rtx_AND (VOIDmode, NULL_RTX, XEXP (src, 1));
4930 for (tmode = GET_MODE_WIDER_MODE (mode);
4931 GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
4932 tmode = GET_MODE_WIDER_MODE (tmode))
4934 rtx inner = gen_lowpart_if_possible (tmode, XEXP (src, 0));
4935 struct table_elt *larger_elt;
4937 if (inner)
4939 PUT_MODE (new_and, tmode);
4940 XEXP (new_and, 0) = inner;
4941 larger_elt = lookup (new_and, HASH (new_and, tmode), tmode);
4942 if (larger_elt == 0)
4943 continue;
4945 for (larger_elt = larger_elt->first_same_value;
4946 larger_elt; larger_elt = larger_elt->next_same_value)
4947 if (GET_CODE (larger_elt->exp) == REG)
4949 src_related
4950 = gen_lowpart_if_possible (mode, larger_elt->exp);
4951 break;
4954 if (src_related)
4955 break;
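/* Standalone illustration of the reuse above, using casts in place of
   SUBREGs: an AND with a constant computed in a wider mode already
   contains the narrow-mode result in its low part.  */
#if 0
#include <stdio.h>
#include <stdint.h>

int
main (void)
{
  uint32_t x = 0xABCD12EF;
  uint32_t wide = x & 0x3F;                  /* the SImode AND */
  uint8_t narrow = (uint8_t) x & 0x3F;       /* the QImode AND */
  printf ("%d\n", (uint8_t) wide == narrow); /* 1: low part matches */
  return 0;
}
#endif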
4960 #ifdef LOAD_EXTEND_OP
4961 /* See if a MEM has already been loaded with a widening operation;
if it has, we can use a subreg of that. Many CISC machines
   also have such operations, but this is only likely to be
   beneficial on these machines. */
4966 if (flag_expensive_optimizations && src_related == 0
4967 && (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
4968 && GET_MODE_CLASS (mode) == MODE_INT
4969 && GET_CODE (src) == MEM && ! do_not_record
4970 && LOAD_EXTEND_OP (mode) != NIL)
4972 enum machine_mode tmode;
4974 /* Set what we are trying to extend and the operation it might
4975 have been extended with. */
4976 PUT_CODE (memory_extend_rtx, LOAD_EXTEND_OP (mode));
4977 XEXP (memory_extend_rtx, 0) = src;
4979 for (tmode = GET_MODE_WIDER_MODE (mode);
4980 GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
4981 tmode = GET_MODE_WIDER_MODE (tmode))
4983 struct table_elt *larger_elt;
4985 PUT_MODE (memory_extend_rtx, tmode);
4986 larger_elt = lookup (memory_extend_rtx,
4987 HASH (memory_extend_rtx, tmode), tmode);
4988 if (larger_elt == 0)
4989 continue;
4991 for (larger_elt = larger_elt->first_same_value;
4992 larger_elt; larger_elt = larger_elt->next_same_value)
4993 if (GET_CODE (larger_elt->exp) == REG)
4995 src_related = gen_lowpart_if_possible (mode,
4996 larger_elt->exp);
4997 break;
5000 if (src_related)
5001 break;
5004 #endif /* LOAD_EXTEND_OP */
5006 if (src == src_folded)
5007 src_folded = 0;
5009 /* At this point, ELT, if non-zero, points to a class of expressions
5010 equivalent to the source of this SET and SRC, SRC_EQV, SRC_FOLDED,
5011 and SRC_RELATED, if non-zero, each contain additional equivalent
5012 expressions. Prune these latter expressions by deleting expressions
5013 already in the equivalence class.
5015 Check for an equivalent identical to the destination. If found,
5016 this is the preferred equivalent since it will likely lead to
5017 elimination of the insn. Indicate this by placing it in
5018 `src_related'. */
5020 if (elt) elt = elt->first_same_value;
5021 for (p = elt; p; p = p->next_same_value)
5023 enum rtx_code code = GET_CODE (p->exp);
5025 /* If the expression is not valid, ignore it. Then we do not
5026 have to check for validity below. In most cases, we can use
5027 `rtx_equal_p', since canonicalization has already been done. */
5028 if (code != REG && ! exp_equiv_p (p->exp, p->exp, 1, 0))
5029 continue;
5031 /* Also skip paradoxical subregs, unless that's what we're
5032 looking for. */
5033 if (code == SUBREG
5034 && (GET_MODE_SIZE (GET_MODE (p->exp))
5035 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))
5036 && ! (src != 0
5037 && GET_CODE (src) == SUBREG
5038 && GET_MODE (src) == GET_MODE (p->exp)
5039 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
5040 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))))
5041 continue;
5043 if (src && GET_CODE (src) == code && rtx_equal_p (src, p->exp))
5044 src = 0;
5045 else if (src_folded && GET_CODE (src_folded) == code
5046 && rtx_equal_p (src_folded, p->exp))
5047 src_folded = 0;
5048 else if (src_eqv_here && GET_CODE (src_eqv_here) == code
5049 && rtx_equal_p (src_eqv_here, p->exp))
5050 src_eqv_here = 0;
5051 else if (src_related && GET_CODE (src_related) == code
5052 && rtx_equal_p (src_related, p->exp))
5053 src_related = 0;
/* If this is the same as the destination of the insn, we want
   to prefer it. Copy it to src_related. The code below will
   then give it a negative cost. */
5058 if (GET_CODE (dest) == code && rtx_equal_p (p->exp, dest))
5059 src_related = dest;
5063 /* Find the cheapest valid equivalent, trying all the available
5064 possibilities. Prefer items not in the hash table to ones
5065 that are when they are equal cost. Note that we can never
5066 worsen an insn as the current contents will also succeed.
5067 If we find an equivalent identical to the destination, use it as best,
5068 since this insn will probably be eliminated in that case. */
5069 if (src)
5071 if (rtx_equal_p (src, dest))
5072 src_cost = -1;
5073 else
5074 src_cost = COST (src);
5077 if (src_eqv_here)
5079 if (rtx_equal_p (src_eqv_here, dest))
5080 src_eqv_cost = -1;
5081 else
5082 src_eqv_cost = COST (src_eqv_here);
5085 if (src_folded)
5087 if (rtx_equal_p (src_folded, dest))
5088 src_folded_cost = -1;
5089 else
5090 src_folded_cost = COST (src_folded);
5093 if (src_related)
5095 if (rtx_equal_p (src_related, dest))
5096 src_related_cost = -1;
5097 else
5098 src_related_cost = COST (src_related);
5101 /* If this was an indirect jump insn, a known label will really be
5102 cheaper even though it looks more expensive. */
5103 if (dest == pc_rtx && src_const && GET_CODE (src_const) == LABEL_REF)
5104 src_folded = src_const, src_folded_cost = -1;
5106 /* Terminate loop when replacement made. This must terminate since
5107 the current contents will be tested and will always be valid. */
5108 while (1)
5110 rtx trial;
5112 /* Skip invalid entries. */
5113 while (elt && GET_CODE (elt->exp) != REG
5114 && ! exp_equiv_p (elt->exp, elt->exp, 1, 0))
5115 elt = elt->next_same_value;
5117 /* A paradoxical subreg would be bad here: it'll be the right
5118 size, but later may be adjusted so that the upper bits aren't
5119 what we want. So reject it. */
5120 if (elt != 0
5121 && GET_CODE (elt->exp) == SUBREG
5122 && (GET_MODE_SIZE (GET_MODE (elt->exp))
5123 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))
5124 /* It is okay, though, if the rtx we're trying to match
5125 will ignore any of the bits we can't predict. */
5126 && ! (src != 0
5127 && GET_CODE (src) == SUBREG
5128 && GET_MODE (src) == GET_MODE (elt->exp)
5129 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
5130 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))))
5132 elt = elt->next_same_value;
5133 continue;
5136 if (elt) src_elt_cost = elt->cost;
5138 /* Find cheapest and skip it for the next time. For items
5139 of equal cost, use this order:
src_folded, src, src_eqv, src_related, and the hash table entry. */
5141 if (src_folded_cost <= src_cost
5142 && src_folded_cost <= src_eqv_cost
5143 && src_folded_cost <= src_related_cost
5144 && src_folded_cost <= src_elt_cost)
5146 trial = src_folded, src_folded_cost = 10000;
5147 if (src_folded_force_flag)
5148 trial = force_const_mem (mode, trial);
5150 else if (src_cost <= src_eqv_cost
5151 && src_cost <= src_related_cost
5152 && src_cost <= src_elt_cost)
5153 trial = src, src_cost = 10000;
5154 else if (src_eqv_cost <= src_related_cost
5155 && src_eqv_cost <= src_elt_cost)
5156 trial = copy_rtx (src_eqv_here), src_eqv_cost = 10000;
5157 else if (src_related_cost <= src_elt_cost)
5158 trial = copy_rtx (src_related), src_related_cost = 10000;
5159 else
5161 trial = copy_rtx (elt->exp);
5162 elt = elt->next_same_value;
5163 src_elt_cost = 10000;
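/* Standalone sketch of the selection just performed: repeatedly pick
   the cheapest remaining candidate, breaking ties in the fixed order
   above; a chosen candidate's cost is reset to 10000 so the next
   iteration skips it.  The costs here are made-up examples.  */
#if 0
#include <stdio.h>

#define N_CANDIDATES 5
#define EXHAUSTED 10000

int
main (void)
{
  /* Candidates indexed in tie-break order.  */
  int cost[N_CANDIDATES] = { 7, 3, 3, 9, 2 };
  const char *name[N_CANDIDATES]
    = { "src_folded", "src", "src_eqv", "src_related", "elt" };
  int round;

  for (round = 0; round < N_CANDIDATES; round++)
    {
      int best = 0, i;
      for (i = 1; i < N_CANDIDATES; i++)
        if (cost[i] < cost[best])
          best = i;          /* strict <: the earlier index wins ties */
      printf ("try %s (cost %d)\n", name[best], cost[best]);
      cost[best] = EXHAUSTED;
    }
  return 0;
}
#endif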
5166 /* We don't normally have an insn matching (set (pc) (pc)), so
5167 check for this separately here. We will delete such an
5168 insn below.
5170 Tablejump insns contain a USE of the table, so simply replacing
5171 the operand with the constant won't match. This is simply an
5172 unconditional branch, however, and is therefore valid. Just
5173 insert the substitution here and we will delete and re-emit
5174 the insn later. */
5176 if (n_sets == 1 && dest == pc_rtx
5177 && (trial == pc_rtx
5178 || (GET_CODE (trial) == LABEL_REF
5179 && ! condjump_p (insn))))
5181 /* If TRIAL is a label in front of a jump table, we are
5182 really falling through the switch (this is how casesi
5183 insns work), so we must branch around the table. */
5184 if (GET_CODE (trial) == CODE_LABEL
5185 && NEXT_INSN (trial) != 0
5186 && GET_CODE (NEXT_INSN (trial)) == JUMP_INSN
5187 && (GET_CODE (PATTERN (NEXT_INSN (trial))) == ADDR_DIFF_VEC
5188 || GET_CODE (PATTERN (NEXT_INSN (trial))) == ADDR_VEC))
5190 trial = gen_rtx_LABEL_REF (Pmode, get_label_after (trial));
5192 SET_SRC (sets[i].rtl) = trial;
5193 cse_jumps_altered = 1;
5194 break;
5197 /* Look for a substitution that makes a valid insn. */
5198 else if (validate_change (insn, &SET_SRC (sets[i].rtl), trial, 0))
5200 /* If we just made a substitution inside a libcall, then we
5201 need to make the same substitution in any notes attached
5202 to the RETVAL insn. */
5203 if (libcall_insn
5204 && (GET_CODE (sets[i].orig_src) == REG
5205 || GET_CODE (sets[i].orig_src) == SUBREG
5206 || GET_CODE (sets[i].orig_src) == MEM))
5207 replace_rtx (REG_NOTES (libcall_insn), sets[i].orig_src,
5208 canon_reg (SET_SRC (sets[i].rtl), insn));
5210 /* The result of apply_change_group can be ignored; see
5211 canon_reg. */
5213 validate_change (insn, &SET_SRC (sets[i].rtl),
5214 canon_reg (SET_SRC (sets[i].rtl), insn),
5216 apply_change_group ();
5217 break;
5220 /* If we previously found constant pool entries for
5221 constants and this is a constant, try making a
pool entry. Put it in src_folded unless we have already done
   this, since that is where it likely came from. */
5225 else if (constant_pool_entries_cost
5226 && CONSTANT_P (trial)
5227 && ! (GET_CODE (trial) == CONST
5228 && GET_CODE (XEXP (trial, 0)) == TRUNCATE)
5229 && (src_folded == 0
5230 || (GET_CODE (src_folded) != MEM
5231 && ! src_folded_force_flag))
5232 && GET_MODE_CLASS (mode) != MODE_CC
5233 && mode != VOIDmode)
5235 src_folded_force_flag = 1;
5236 src_folded = trial;
5237 src_folded_cost = constant_pool_entries_cost;
5241 src = SET_SRC (sets[i].rtl);
5243 /* In general, it is good to have a SET with SET_SRC == SET_DEST.
5244 However, there is an important exception: If both are registers
5245 that are not the head of their equivalence class, replace SET_SRC
5246 with the head of the class. If we do not do this, we will have
5247 both registers live over a portion of the basic block. This way,
5248 their lifetimes will likely abut instead of overlapping. */
5249 if (GET_CODE (dest) == REG
5250 && REGNO_QTY_VALID_P (REGNO (dest)))
5252 int dest_q = REG_QTY (REGNO (dest));
5253 struct qty_table_elem *dest_ent = &qty_table[dest_q];
5255 if (dest_ent->mode == GET_MODE (dest)
5256 && dest_ent->first_reg != REGNO (dest)
5257 && GET_CODE (src) == REG && REGNO (src) == REGNO (dest)
5258 /* Don't do this if the original insn had a hard reg as
5259 SET_SRC or SET_DEST. */
5260 && (GET_CODE (sets[i].src) != REG
5261 || REGNO (sets[i].src) >= FIRST_PSEUDO_REGISTER)
5262 && (GET_CODE (dest) != REG || REGNO (dest) >= FIRST_PSEUDO_REGISTER))
5263 /* We can't call canon_reg here because it won't do anything if
5264 SRC is a hard register. */
5266 int src_q = REG_QTY (REGNO (src));
5267 struct qty_table_elem *src_ent = &qty_table[src_q];
5268 int first = src_ent->first_reg;
5269 rtx new_src
5270 = (first >= FIRST_PSEUDO_REGISTER
5271 ? regno_reg_rtx[first] : gen_rtx_REG (GET_MODE (src), first));
/* We must use validate_change even for this, because this
5274 might be a special no-op instruction, suitable only to
5275 tag notes onto. */
5276 if (validate_change (insn, &SET_SRC (sets[i].rtl), new_src, 0))
5278 src = new_src;
5279 /* If we had a constant that is cheaper than what we are now
5280 setting SRC to, use that constant. We ignored it when we
5281 thought we could make this into a no-op. */
5282 if (src_const && COST (src_const) < COST (src)
5283 && validate_change (insn, &SET_SRC (sets[i].rtl), src_const,
5285 src = src_const;
5290 /* If we made a change, recompute SRC values. */
5291 if (src != sets[i].src)
5293 do_not_record = 0;
5294 hash_arg_in_memory = 0;
5295 sets[i].src = src;
5296 sets[i].src_hash = HASH (src, mode);
5297 sets[i].src_volatile = do_not_record;
5298 sets[i].src_in_memory = hash_arg_in_memory;
5299 sets[i].src_elt = lookup (src, sets[i].src_hash, mode);
5302 /* If this is a single SET, we are setting a register, and we have an
5303 equivalent constant, we want to add a REG_NOTE. We don't want
5304 to write a REG_EQUAL note for a constant pseudo since verifying that
5305 that pseudo hasn't been eliminated is a pain. Such a note also
5306 won't help anything.
5308 Avoid a REG_EQUAL note for (CONST (MINUS (LABEL_REF) (LABEL_REF)))
5309 which can be created for a reference to a compile time computable
5310 entry in a jump table. */
5312 if (n_sets == 1 && src_const && GET_CODE (dest) == REG
5313 && GET_CODE (src_const) != REG
5314 && ! (GET_CODE (src_const) == CONST
5315 && GET_CODE (XEXP (src_const, 0)) == MINUS
5316 && GET_CODE (XEXP (XEXP (src_const, 0), 0)) == LABEL_REF
5317 && GET_CODE (XEXP (XEXP (src_const, 0), 1)) == LABEL_REF))
5319 tem = find_reg_note (insn, REG_EQUAL, NULL_RTX);
5321 /* Make sure that the rtx is not shared with any other insn. */
5322 src_const = copy_rtx (src_const);
5324 /* Record the actual constant value in a REG_EQUAL note, making
5325 a new one if one does not already exist. */
5326 if (tem)
5327 XEXP (tem, 0) = src_const;
5328 else
5329 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_EQUAL,
5330 src_const, REG_NOTES (insn));
5332 /* If storing a constant value in a register that
5333 previously held the constant value 0,
5334 record this fact with a REG_WAS_0 note on this insn.
Note that the *register* is required to have previously held 0,
   not just any register in the quantity, and we must point to the
5338 insn that set that register to zero.
5340 Rather than track each register individually, we just see if
5341 the last set for this quantity was for this register. */
5343 if (REGNO_QTY_VALID_P (REGNO (dest)))
5345 int dest_q = REG_QTY (REGNO (dest));
5346 struct qty_table_elem *dest_ent = &qty_table[dest_q];
5348 if (dest_ent->const_rtx == const0_rtx)
5350 /* See if we previously had a REG_WAS_0 note. */
5351 rtx note = find_reg_note (insn, REG_WAS_0, NULL_RTX);
5352 rtx const_insn = dest_ent->const_insn;
5354 if ((tem = single_set (const_insn)) != 0
5355 && rtx_equal_p (SET_DEST (tem), dest))
5357 if (note)
5358 XEXP (note, 0) = const_insn;
5359 else
5360 REG_NOTES (insn)
5361 = gen_rtx_INSN_LIST (REG_WAS_0, const_insn,
5362 REG_NOTES (insn));
5368 /* Now deal with the destination. */
5369 do_not_record = 0;
5371 /* Look within any SIGN_EXTRACT or ZERO_EXTRACT
5372 to the MEM or REG within it. */
5373 while (GET_CODE (dest) == SIGN_EXTRACT
5374 || GET_CODE (dest) == ZERO_EXTRACT
5375 || GET_CODE (dest) == SUBREG
5376 || GET_CODE (dest) == STRICT_LOW_PART)
5377 dest = XEXP (dest, 0);
5379 sets[i].inner_dest = dest;
5381 if (GET_CODE (dest) == MEM)
5383 #ifdef PUSH_ROUNDING
5384 /* Stack pushes invalidate the stack pointer. */
5385 rtx addr = XEXP (dest, 0);
5386 if ((GET_CODE (addr) == PRE_DEC || GET_CODE (addr) == PRE_INC
5387 || GET_CODE (addr) == POST_DEC || GET_CODE (addr) == POST_INC)
5388 && XEXP (addr, 0) == stack_pointer_rtx)
5389 invalidate (stack_pointer_rtx, Pmode);
5390 #endif
5391 dest = fold_rtx (dest, insn);
5394 /* Compute the hash code of the destination now,
5395 before the effects of this instruction are recorded,
5396 since the register values used in the address computation
5397 are those before this instruction. */
5398 sets[i].dest_hash = HASH (dest, mode);
5400 /* Don't enter a bit-field in the hash table
5401 because the value in it after the store
5402 may not equal what was stored, due to truncation. */
5404 if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
5405 || GET_CODE (SET_DEST (sets[i].rtl)) == SIGN_EXTRACT)
5407 rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
5409 if (src_const != 0 && GET_CODE (src_const) == CONST_INT
5410 && GET_CODE (width) == CONST_INT
5411 && INTVAL (width) < HOST_BITS_PER_WIDE_INT
5412 && ! (INTVAL (src_const)
5413 & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
5414 /* Exception: if the value is constant,
5415 and it won't be truncated, record it. */
5417 else
5419 /* This is chosen so that the destination will be invalidated
5420 but no new value will be recorded.
5421 We must invalidate because sometimes constant
5422 values can be recorded for bitfields. */
5423 sets[i].src_elt = 0;
5424 sets[i].src_volatile = 1;
5425 src_eqv = 0;
5426 src_eqv_elt = 0;
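/* A sketch of the width test above: for a 3-bit field the mask
   ((HOST_WIDE_INT) (-1) << 3) is ~7.  Storing (const_int 5) gives
   5 & ~7 == 0, so the constant survives the store and the
   equivalence is kept; storing (const_int 9) gives 9 & ~7 != 0
   (the field would really hold 1), so the equivalence is
   discarded.  */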
5430 /* If only one set in a JUMP_INSN and it is now a no-op, we can delete
5431 the insn. */
5432 else if (n_sets == 1 && dest == pc_rtx && src == pc_rtx)
5434 /* One less use of the label this insn used to jump to. */
5435 if (JUMP_LABEL (insn) != 0)
5436 --LABEL_NUSES (JUMP_LABEL (insn));
5437 PUT_CODE (insn, NOTE);
5438 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
5439 NOTE_SOURCE_FILE (insn) = 0;
5440 cse_jumps_altered = 1;
5441 /* No more processing for this set. */
5442 sets[i].rtl = 0;
5445 /* If this SET is now setting PC to a label, we know it used to
5446 be a conditional or computed branch. So we see if we can follow
5447 it. If it was a computed branch, delete it and re-emit. */
5448 else if (dest == pc_rtx && GET_CODE (src) == LABEL_REF)
5450 /* If this is not in the format for a simple branch and
5451 this is the only SET in the insn, re-emit it. */
5452 if (! simplejump_p (insn) && n_sets == 1)
5454 rtx new = emit_jump_insn_before (gen_jump (XEXP (src, 0)), insn);
5455 JUMP_LABEL (new) = XEXP (src, 0);
5456 LABEL_NUSES (XEXP (src, 0))++;
5457 insn = new;
5459 else
5460 /* Otherwise, force rerecognition, since it probably had
5461 a different pattern before.
5462 This shouldn't really be necessary, since whatever
5463 changed the source value above should have done this.
5464 Until the right place is found, might as well do this here. */
5465 INSN_CODE (insn) = -1;
5467 never_reached_warning (insn);
5469 /* Now emit a BARRIER after the unconditional jump. Do not bother
5470 deleting any unreachable code; let jump/flow do that. */
5471 if (NEXT_INSN (insn) != 0
5472 && GET_CODE (NEXT_INSN (insn)) != BARRIER)
5473 emit_barrier_after (insn);
5475 cse_jumps_altered = 1;
5476 sets[i].rtl = 0;
5479 /* If destination is volatile, invalidate it and then do no further
5480 processing for this assignment. */
5482 else if (do_not_record)
5484 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG
5485 || GET_CODE (dest) == MEM)
5486 invalidate (dest, VOIDmode);
5487 else if (GET_CODE (dest) == STRICT_LOW_PART
5488 || GET_CODE (dest) == ZERO_EXTRACT)
5489 invalidate (XEXP (dest, 0), GET_MODE (dest));
5490 sets[i].rtl = 0;
5493 if (sets[i].rtl != 0 && dest != SET_DEST (sets[i].rtl))
5494 sets[i].dest_hash = HASH (SET_DEST (sets[i].rtl), mode);
5496 #ifdef HAVE_cc0
5497 /* If setting CC0, record what it was set to, or a constant, if it
5498 is equivalent to a constant. If it is being set to a floating-point
5499 value, make a COMPARE with the appropriate constant of 0. If we
5500 don't do this, later code can interpret this as a test against
5501 const0_rtx, which can cause problems if we try to put it into an
5502 insn as a floating-point operand. */
5503 if (dest == cc0_rtx)
5505 this_insn_cc0 = src_const && mode != VOIDmode ? src_const : src;
5506 this_insn_cc0_mode = mode;
5507 if (FLOAT_MODE_P (mode))
5508 this_insn_cc0 = gen_rtx_COMPARE (VOIDmode, this_insn_cc0,
5509 CONST0_RTX (mode));
5511 #endif
5514 /* Now enter all non-volatile source expressions in the hash table
5515 if they are not already present.
5516 Record their equivalence classes in src_elt.
5517 This way we can insert the corresponding destinations into
5518 the same classes even if the actual sources are no longer in them
5519 (having been invalidated). */
5521 if (src_eqv && src_eqv_elt == 0 && sets[0].rtl != 0 && ! src_eqv_volatile
5522 && ! rtx_equal_p (src_eqv, SET_DEST (sets[0].rtl)))
5524 register struct table_elt *elt;
5525 register struct table_elt *classp = sets[0].src_elt;
5526 rtx dest = SET_DEST (sets[0].rtl);
5527 enum machine_mode eqvmode = GET_MODE (dest);
5529 if (GET_CODE (dest) == STRICT_LOW_PART)
5531 eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
5532 classp = 0;
5534 if (insert_regs (src_eqv, classp, 0))
5536 rehash_using_reg (src_eqv);
5537 src_eqv_hash = HASH (src_eqv, eqvmode);
5539 elt = insert (src_eqv, classp, src_eqv_hash, eqvmode);
5540 elt->in_memory = src_eqv_in_memory;
5541 src_eqv_elt = elt;
5543 /* Check to see if src_eqv_elt is the same as a set source which
5544 does not yet have an elt, and if so set the elt of the set source
5545 to src_eqv_elt. */
5546 for (i = 0; i < n_sets; i++)
5547 if (sets[i].rtl && sets[i].src_elt == 0
5548 && rtx_equal_p (SET_SRC (sets[i].rtl), src_eqv))
5549 sets[i].src_elt = src_eqv_elt;
5552 for (i = 0; i < n_sets; i++)
5553 if (sets[i].rtl && ! sets[i].src_volatile
5554 && ! rtx_equal_p (SET_SRC (sets[i].rtl), SET_DEST (sets[i].rtl)))
5556 if (GET_CODE (SET_DEST (sets[i].rtl)) == STRICT_LOW_PART)
5558 /* REG_EQUAL in setting a STRICT_LOW_PART
5559 gives an equivalent for the entire destination register,
5560 not just for the subreg being stored in now.
5561 This is a more interesting equivalence, so we arrange later
5562 to treat the entire reg as the destination. */
5563 sets[i].src_elt = src_eqv_elt;
5564 sets[i].src_hash = src_eqv_hash;
5566 else
5568 /* Insert source and constant equivalent into hash table, if not
5569 already present. */
5570 register struct table_elt *classp = src_eqv_elt;
5571 register rtx src = sets[i].src;
5572 register rtx dest = SET_DEST (sets[i].rtl);
5573 enum machine_mode mode
5574 = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
5576 if (sets[i].src_elt == 0)
5578 /* Don't put a hard register source into the table if this is
5579 the last insn of a libcall. In this case, we only need
5580 to put src_eqv_elt in src_elt. */
5581 if (GET_CODE (src) != REG
5582 || REGNO (src) >= FIRST_PSEUDO_REGISTER
5583 || ! find_reg_note (insn, REG_RETVAL, NULL_RTX))
5585 register struct table_elt *elt;
5587 /* Note that these insert_regs calls cannot remove
5588 any of the src_elt's, because they would have failed to
5589 match if not still valid. */
5590 if (insert_regs (src, classp, 0))
5592 rehash_using_reg (src);
5593 sets[i].src_hash = HASH (src, mode);
5595 elt = insert (src, classp, sets[i].src_hash, mode);
5596 elt->in_memory = sets[i].src_in_memory;
5597 sets[i].src_elt = classp = elt;
5599 else
5600 sets[i].src_elt = classp;
5602 if (sets[i].src_const && sets[i].src_const_elt == 0
5603 && src != sets[i].src_const
5604 && ! rtx_equal_p (sets[i].src_const, src))
5605 sets[i].src_elt = insert (sets[i].src_const, classp,
5606 sets[i].src_const_hash, mode);
5609 else if (sets[i].src_elt == 0)
5610 /* If we did not insert the source into the hash table (e.g., it was
5611 volatile), note the equivalence class for the REG_EQUAL value, if any,
5612 so that the destination goes into that class. */
5613 sets[i].src_elt = src_eqv_elt;
5615 invalidate_from_clobbers (x);
5617 /* Some registers are invalidated by subroutine calls. Memory is
5618 invalidated by non-constant calls. */
5620 if (GET_CODE (insn) == CALL_INSN)
5622 if (! CONST_CALL_P (insn))
5623 invalidate_memory ();
5624 invalidate_for_call ();
5627 /* Now invalidate everything set by this instruction.
5628 If a SUBREG or other funny destination is being set,
5629 sets[i].rtl is still nonzero, so here we invalidate the reg
5630 a part of which is being set. */
5632 for (i = 0; i < n_sets; i++)
5633 if (sets[i].rtl)
5635 /* We can't use the inner dest, because the mode associated with
5636 a ZERO_EXTRACT is significant. */
5637 register rtx dest = SET_DEST (sets[i].rtl);
5639 /* Needed for registers to remove the register from its
5640 previous quantity's chain.
5641 Needed for memory if this is a nonvarying address, unless
5642 we have just done an invalidate_memory that covers even those. */
5643 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG
5644 || GET_CODE (dest) == MEM)
5645 invalidate (dest, VOIDmode);
5646 else if (GET_CODE (dest) == STRICT_LOW_PART
5647 || GET_CODE (dest) == ZERO_EXTRACT)
5648 invalidate (XEXP (dest, 0), GET_MODE (dest));
5651 /* A volatile ASM invalidates everything. */
5652 if (GET_CODE (insn) == INSN
5653 && GET_CODE (PATTERN (insn)) == ASM_OPERANDS
5654 && MEM_VOLATILE_P (PATTERN (insn)))
5655 flush_hash_table ();
5657 /* Make sure registers mentioned in destinations
5658 are safe for use in an expression to be inserted.
5659 This removes from the hash table
5660 any invalid entry that refers to one of these registers.
5662 We don't care about the return value from mention_regs because
5663 we are going to hash the SET_DEST values unconditionally. */
5665 for (i = 0; i < n_sets; i++)
5667 if (sets[i].rtl)
5669 rtx x = SET_DEST (sets[i].rtl);
5671 if (GET_CODE (x) != REG)
5672 mention_regs (x);
5673 else
5675 /* We used to rely on all references to a register becoming
5676 inaccessible when a register changes to a new quantity,
5677 since that changes the hash code. However, that is not
5678 safe, since after HASH_SIZE new quantities we get a
5679 hash 'collision' of a register with its own invalid
5680 entries. And since SUBREGs have been changed not to
5681 change their hash code with the hash code of the register,
5682 it wouldn't work any longer at all. So we have to check
5683 for any invalid references lying around now.
5684 This code is similar to the REG case in mention_regs,
5685 but it knows that reg_tick has been incremented, and
5686 it leaves reg_in_table as -1. */
5687 register int regno = REGNO (x);
5688 register int endregno
5689 = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
5690 : HARD_REGNO_NREGS (regno, GET_MODE (x)));
5691 int i;
5693 for (i = regno; i < endregno; i++)
5695 if (REG_IN_TABLE (i) >= 0)
5697 remove_invalid_refs (i);
5698 REG_IN_TABLE (i) = -1;
5705 /* We may have just removed some of the src_elt's from the hash table.
5706 So replace each one with the current head of the same class. */
5708 for (i = 0; i < n_sets; i++)
5709 if (sets[i].rtl)
5711 if (sets[i].src_elt && sets[i].src_elt->first_same_value == 0)
5712 /* If elt was removed, find current head of same class,
5713 or 0 if nothing remains of that class. */
5715 register struct table_elt *elt = sets[i].src_elt;
5717 while (elt && elt->prev_same_value)
5718 elt = elt->prev_same_value;
5720 while (elt && elt->first_same_value == 0)
5721 elt = elt->next_same_value;
5722 sets[i].src_elt = elt ? elt->first_same_value : 0;
5726 /* Now insert the destinations into their equivalence classes. */
5728 for (i = 0; i < n_sets; i++)
5729 if (sets[i].rtl)
5731 register rtx dest = SET_DEST (sets[i].rtl);
5732 rtx inner_dest = sets[i].inner_dest;
5733 register struct table_elt *elt;
5735 /* Don't record value if we are not supposed to risk allocating
5736 floating-point values in registers that might be wider than
5737 memory. */
5738 if ((flag_float_store
5739 && GET_CODE (dest) == MEM
5740 && FLOAT_MODE_P (GET_MODE (dest)))
5741 /* Don't record BLKmode values, because we don't know their
5742 size, and can't be sure that other BLKmode values have the
5743 same or smaller size. */
5744 || GET_MODE (dest) == BLKmode
5745 /* Don't record values of destinations set inside a libcall block
5746 since we might delete the libcall. Things should have been set
5747 up so we won't want to reuse such a value, but we play it safe
5748 here. */
5749 || libcall_insn
5750 /* If we didn't put a REG_EQUAL value or a source into the hash
5751 table, there is no point in recording DEST. */
5752 || sets[i].src_elt == 0
5753 /* If DEST is a paradoxical SUBREG and SRC is a ZERO_EXTEND
5754 or SIGN_EXTEND, don't record DEST since it can cause
5755 some tracking to be wrong.
5757 ??? Think about this more later. */
5758 || (GET_CODE (dest) == SUBREG
5759 && (GET_MODE_SIZE (GET_MODE (dest))
5760 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
5761 && (GET_CODE (sets[i].src) == SIGN_EXTEND
5762 || GET_CODE (sets[i].src) == ZERO_EXTEND)))
5763 continue;
5765 /* STRICT_LOW_PART isn't part of the value BEING set,
5766 and neither is the SUBREG inside it.
5767 Note that in this case SETS[I].SRC_ELT is really SRC_EQV_ELT. */
5768 if (GET_CODE (dest) == STRICT_LOW_PART)
5769 dest = SUBREG_REG (XEXP (dest, 0));
5771 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG)
5772 /* Registers must also be inserted into chains for quantities. */
5773 if (insert_regs (dest, sets[i].src_elt, 1))
5775 /* If `insert_regs' changes something, the hash code must be
5776 recalculated. */
5777 rehash_using_reg (dest);
5778 sets[i].dest_hash = HASH (dest, GET_MODE (dest));
5781 if (GET_CODE (inner_dest) == MEM
5782 && GET_CODE (XEXP (inner_dest, 0)) == ADDRESSOF)
5783 /* Given (SET (MEM (ADDRESSOF (X))) Y) we don't want to say
5784 that (MEM (ADDRESSOF (X))) is equivalent to Y.
5785 Consider the case in which the address of the MEM is
5786 passed to a function, which alters the MEM. Then, if we
5787 later use Y instead of the MEM we'll miss the update. */
5788 elt = insert (dest, 0, sets[i].dest_hash, GET_MODE (dest));
5789 else
5790 elt = insert (dest, sets[i].src_elt,
5791 sets[i].dest_hash, GET_MODE (dest));
5793 elt->in_memory = (GET_CODE (sets[i].inner_dest) == MEM
5794 && (! RTX_UNCHANGING_P (sets[i].inner_dest)
5795 || FIXED_BASE_PLUS_P (XEXP (sets[i].inner_dest,
5796 0))));
5798 /* If we have (set (subreg:m1 (reg:m2 foo) 0) (bar:m1)), M1 is no
5799 narrower than M2, and both M1 and M2 are the same number of words,
5800 we are also doing (set (reg:m2 foo) (subreg:m2 (bar:m1) 0)) so
5801 make that equivalence as well.
5803 However, BAR may have equivalences for which gen_lowpart_if_possible
5804 will produce a simpler value than it produces for BAR itself
5805 (e.g., if BAR was ZERO_EXTENDed from M2), so we will scan all
5806 of BAR's equivalences. If we don't get a simplified form, make
5807 the SUBREG. It will not be used in an equivalence, but will
5808 cause two similar assignments to be detected.
5810 Note the loop below will find SUBREG_REG (DEST) since we have
5811 already entered SRC and DEST of the SET in the table. */
5813 if (GET_CODE (dest) == SUBREG
5814 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) - 1)
5815 / UNITS_PER_WORD)
5816 == (GET_MODE_SIZE (GET_MODE (dest)) - 1) / UNITS_PER_WORD)
5817 && (GET_MODE_SIZE (GET_MODE (dest))
5818 >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
5819 && sets[i].src_elt != 0)
5821 enum machine_mode new_mode = GET_MODE (SUBREG_REG (dest));
5822 struct table_elt *elt, *classp = 0;
5824 for (elt = sets[i].src_elt->first_same_value; elt;
5825 elt = elt->next_same_value)
5827 rtx new_src = 0;
5828 unsigned src_hash;
5829 struct table_elt *src_elt;
5831 /* Ignore invalid entries. */
5832 if (GET_CODE (elt->exp) != REG
5833 && ! exp_equiv_p (elt->exp, elt->exp, 1, 0))
5834 continue;
5836 new_src = gen_lowpart_if_possible (new_mode, elt->exp);
5837 if (new_src == 0)
5838 new_src = gen_rtx_SUBREG (new_mode, elt->exp, 0);
5840 src_hash = HASH (new_src, new_mode);
5841 src_elt = lookup (new_src, src_hash, new_mode);
5843 /* Put the new source in the hash table if it isn't
5844 already there. */
5845 if (src_elt == 0)
5847 if (insert_regs (new_src, classp, 0))
5849 rehash_using_reg (new_src);
5850 src_hash = HASH (new_src, new_mode);
5852 src_elt = insert (new_src, classp, src_hash, new_mode);
5853 src_elt->in_memory = elt->in_memory;
5855 else if (classp && classp != src_elt->first_same_value)
5856 /* Show that two things that we've seen before are
5857 actually the same. */
5858 merge_equiv_classes (src_elt, classp);
5860 classp = src_elt->first_same_value;
5861 /* Ignore invalid entries. */
5862 while (classp
5863 && GET_CODE (classp->exp) != REG
5864 && ! exp_equiv_p (classp->exp, classp->exp, 1, 0))
5865 classp = classp->next_same_value;
5870 /* Special handling for (set REG0 REG1)
5871 where REG0 is the "cheapest", cheaper than REG1.
5872 After cse, REG1 will probably not be used in the sequel,
5873 so (if easily done) change this insn to (set REG1 REG0) and
5874 replace REG1 with REG0 in the previous insn that computed their value.
5875 Then REG1 will become a dead store and won't cloud the situation
5876 for later optimizations.
5878 Do not make this change if REG1 is a hard register, because it will
5879 then be used in the sequel and we may be changing a two-operand insn
5880 into a three-operand insn.
5882 Also do not do this if we are operating on a copy of INSN.
5884 Also don't do this if INSN ends a libcall; this would cause an unrelated
5885 register to be set in the middle of a libcall, and we then get bad code
5886 if the libcall is deleted. */
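/* A sketch of the transformation (register numbers hypothetical),
   with REG0 == reg 100 the cheaper register and REG1 == reg 101:

       prev: (set (reg 101) (plus (reg 102) (reg 103)))
       insn: (set (reg 100) (reg 101))

   becomes

       prev: (set (reg 100) (plus (reg 102) (reg 103)))
       insn: (set (reg 101) (reg 100))

   leaving insn a dead store if reg 101 is otherwise unused.  */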
5888 if (n_sets == 1 && sets[0].rtl && GET_CODE (SET_DEST (sets[0].rtl)) == REG
5889 && NEXT_INSN (PREV_INSN (insn)) == insn
5890 && GET_CODE (SET_SRC (sets[0].rtl)) == REG
5891 && REGNO (SET_SRC (sets[0].rtl)) >= FIRST_PSEUDO_REGISTER
5892 && REGNO_QTY_VALID_P (REGNO (SET_SRC (sets[0].rtl))))
5894 int src_q = REG_QTY (REGNO (SET_SRC (sets[0].rtl)));
5895 struct qty_table_elem *src_ent = &qty_table[src_q];
5897 if ((src_ent->first_reg == REGNO (SET_DEST (sets[0].rtl)))
5898 && ! find_reg_note (insn, REG_RETVAL, NULL_RTX))
5900 rtx prev = PREV_INSN (insn);
5901 while (prev && GET_CODE (prev) == NOTE)
5902 prev = PREV_INSN (prev);
5904 if (prev && GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SET
5905 && SET_DEST (PATTERN (prev)) == SET_SRC (sets[0].rtl))
5907 rtx dest = SET_DEST (sets[0].rtl);
5908 rtx note = find_reg_note (prev, REG_EQUIV, NULL_RTX);
5910 validate_change (prev, & SET_DEST (PATTERN (prev)), dest, 1);
5911 validate_change (insn, & SET_DEST (sets[0].rtl),
5912 SET_SRC (sets[0].rtl), 1);
5913 validate_change (insn, & SET_SRC (sets[0].rtl), dest, 1);
5914 apply_change_group ();
5916 /* If REG1 was equivalent to a constant, REG0 is not. */
5917 if (note)
5918 PUT_REG_NOTE_KIND (note, REG_EQUAL);
5920 /* If there was a REG_WAS_0 note on PREV, remove it. Move
5921 any REG_WAS_0 note on INSN to PREV. */
5922 note = find_reg_note (prev, REG_WAS_0, NULL_RTX);
5923 if (note)
5924 remove_note (prev, note);
5926 note = find_reg_note (insn, REG_WAS_0, NULL_RTX);
5927 if (note)
5929 remove_note (insn, note);
5930 XEXP (note, 1) = REG_NOTES (prev);
5931 REG_NOTES (prev) = note;
5934 /* If INSN has a REG_EQUAL note, and this note mentions REG0,
5935 then we must delete it, because the value in REG0 has changed. */
5936 note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
5937 if (note && reg_mentioned_p (dest, XEXP (note, 0)))
5938 remove_note (insn, note);
5943 /* If this is a conditional jump insn, record any known equivalences due to
5944 the condition being tested. */
5946 last_jump_equiv_class = 0;
5947 if (GET_CODE (insn) == JUMP_INSN
5948 && n_sets == 1 && GET_CODE (x) == SET
5949 && GET_CODE (SET_SRC (x)) == IF_THEN_ELSE)
5950 record_jump_equiv (insn, 0);
5952 #ifdef HAVE_cc0
5953 /* If the previous insn set CC0 and this insn no longer references CC0,
5954 delete the previous insn. Here we use the fact that nothing expects CC0
5955 to be valid over an insn, which is true until the final pass. */
5956 if (prev_insn && GET_CODE (prev_insn) == INSN
5957 && (tem = single_set (prev_insn)) != 0
5958 && SET_DEST (tem) == cc0_rtx
5959 && ! reg_mentioned_p (cc0_rtx, x))
5961 PUT_CODE (prev_insn, NOTE);
5962 NOTE_LINE_NUMBER (prev_insn) = NOTE_INSN_DELETED;
5963 NOTE_SOURCE_FILE (prev_insn) = 0;
5966 prev_insn_cc0 = this_insn_cc0;
5967 prev_insn_cc0_mode = this_insn_cc0_mode;
5968 #endif
5970 prev_insn = insn;
5973 /* Remove from the hash table all expressions that reference memory. */
5975 static void
5976 invalidate_memory ()
5978 register int i;
5979 register struct table_elt *p, *next;
5981 for (i = 0; i < HASH_SIZE; i++)
5982 for (p = table[i]; p; p = next)
5984 next = p->next_same_hash;
5985 if (p->in_memory)
5986 remove_from_table (p, i);
5990 /* If ADDR is an address that implicitly affects the stack pointer, return
5991 1 and update the register tables to show the effect. Else, return 0. */
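/* Illustrative cases (modes hypothetical): a push address such as
   (pre_dec:SI (reg:SI sp)) bumps REG_TICK (STACK_POINTER_REGNUM)
   and returns 1; a plain (plus:SI (reg:SI sp) (const_int 8))
   returns 0 and leaves the tables alone.  */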
5993 static int
5994 addr_affects_sp_p (addr)
5995 register rtx addr;
5997 if ((GET_CODE (addr) == PRE_DEC || GET_CODE (addr) == PRE_INC
5998 || GET_CODE (addr) == POST_DEC || GET_CODE (addr) == POST_INC)
5999 && GET_CODE (XEXP (addr, 0)) == REG
6000 && REGNO (XEXP (addr, 0)) == STACK_POINTER_REGNUM)
6002 if (REG_TICK (STACK_POINTER_REGNUM) >= 0)
6003 REG_TICK (STACK_POINTER_REGNUM)++;
6005 /* This should be *very* rare. */
6006 if (TEST_HARD_REG_BIT (hard_regs_in_table, STACK_POINTER_REGNUM))
6007 invalidate (stack_pointer_rtx, VOIDmode);
6009 return 1;
6012 return 0;
6015 /* Perform invalidation on the basis of everything about an insn
6016 except for invalidating the actual places that are SET in it.
6017 This includes the places CLOBBERed, and anything that might
6018 alias with something that is SET or CLOBBERed.
6020 X is the pattern of the insn. */
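/* For example (register numbers hypothetical), given

       (parallel [(set (reg:SI 100) (plus:SI (reg:SI 101) (reg:SI 102)))
                  (clobber (reg:CC 17))])

   the PARALLEL arm below invalidates any table entries that
   mention hard register 17.  */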
6022 static void
6023 invalidate_from_clobbers (x)
6024 rtx x;
6026 if (GET_CODE (x) == CLOBBER)
6028 rtx ref = XEXP (x, 0);
6029 if (ref)
6031 if (GET_CODE (ref) == REG || GET_CODE (ref) == SUBREG
6032 || GET_CODE (ref) == MEM)
6033 invalidate (ref, VOIDmode);
6034 else if (GET_CODE (ref) == STRICT_LOW_PART
6035 || GET_CODE (ref) == ZERO_EXTRACT)
6036 invalidate (XEXP (ref, 0), GET_MODE (ref));
6039 else if (GET_CODE (x) == PARALLEL)
6041 register int i;
6042 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
6044 register rtx y = XVECEXP (x, 0, i);
6045 if (GET_CODE (y) == CLOBBER)
6047 rtx ref = XEXP (y, 0);
6048 if (GET_CODE (ref) == REG || GET_CODE (ref) == SUBREG
6049 || GET_CODE (ref) == MEM)
6050 invalidate (ref, VOIDmode);
6051 else if (GET_CODE (ref) == STRICT_LOW_PART
6052 || GET_CODE (ref) == ZERO_EXTRACT)
6053 invalidate (XEXP (ref, 0), GET_MODE (ref));
6059 /* Process X, part of the REG_NOTES of an insn. Look at any REG_EQUAL notes
6060 and replace any registers in them with either an equivalent constant
6061 or the canonical form of the register. If we are inside an address,
6062 only do this if the address remains valid.
6064 OBJECT is 0 except when within a MEM in which case it is the MEM.
6066 Return the replacement for X. */
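/* For instance (register numbers hypothetical): given the note
   (expr_list:REG_EQUAL (plus:SI (reg:SI 100) (const_int 4)) ...)
   while reg 100 is known to hold (const_int 8), the REG case below
   substitutes the constant, leaving (plus:SI (const_int 8)
   (const_int 4)) in the note for later folding.  */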
6068 static rtx
6069 cse_process_notes (x, object)
6070 rtx x;
6071 rtx object;
6073 enum rtx_code code = GET_CODE (x);
6074 const char *fmt = GET_RTX_FORMAT (code);
6075 int i;
6077 switch (code)
6079 case CONST_INT:
6080 case CONST:
6081 case SYMBOL_REF:
6082 case LABEL_REF:
6083 case CONST_DOUBLE:
6084 case PC:
6085 case CC0:
6086 case LO_SUM:
6087 return x;
6089 case MEM:
6090 XEXP (x, 0) = cse_process_notes (XEXP (x, 0), x);
6091 return x;
6093 case EXPR_LIST:
6094 case INSN_LIST:
6095 if (REG_NOTE_KIND (x) == REG_EQUAL)
6096 XEXP (x, 0) = cse_process_notes (XEXP (x, 0), NULL_RTX);
6097 if (XEXP (x, 1))
6098 XEXP (x, 1) = cse_process_notes (XEXP (x, 1), NULL_RTX);
6099 return x;
6101 case SIGN_EXTEND:
6102 case ZERO_EXTEND:
6103 case SUBREG:
6105 rtx new = cse_process_notes (XEXP (x, 0), object);
6106 /* We don't substitute VOIDmode constants into these rtx,
6107 since they would impede folding. */
6108 if (GET_MODE (new) != VOIDmode)
6109 validate_change (object, &XEXP (x, 0), new, 0);
6110 return x;
6113 case REG:
6114 i = REG_QTY (REGNO (x));
6116 /* Return a constant or a constant register. */
6117 if (REGNO_QTY_VALID_P (REGNO (x)))
6119 struct qty_table_elem *ent = &qty_table[i];
6121 if (ent->const_rtx != NULL_RTX
6122 && (CONSTANT_P (ent->const_rtx)
6123 || GET_CODE (ent->const_rtx) == REG))
6125 rtx new = gen_lowpart_if_possible (GET_MODE (x), ent->const_rtx);
6126 if (new)
6127 return new;
6131 /* Otherwise, canonicalize this register. */
6132 return canon_reg (x, NULL_RTX);
6134 default:
6135 break;
6138 for (i = 0; i < GET_RTX_LENGTH (code); i++)
6139 if (fmt[i] == 'e')
6140 validate_change (object, &XEXP (x, i),
6141 cse_process_notes (XEXP (x, i), object), 0);
6143 return x;
6146 /* Find common subexpressions between the end test of a loop and the beginning
6147 of the loop. LOOP_START is the CODE_LABEL at the start of a loop.
6149 Often we have a loop where an expression in the exit test is used
6150 in the body of the loop. For example "while (*p) *q++ = *p++;".
6151 Because of the way we duplicate the loop exit test in front of the loop,
6152 however, we don't detect that common subexpression. This will be caught
6153 when global cse is implemented, but this is a quite common case.
6155 This function handles the most common cases of these common expressions.
6156 It is called after we have processed the basic block ending with the
6157 NOTE_INSN_LOOP_END note that ends a loop and the previous JUMP_INSN
6158 jumps to a label used only once. */
6160 static void
6161 cse_around_loop (loop_start)
6162 rtx loop_start;
6164 rtx insn;
6165 int i;
6166 struct table_elt *p;
6168 /* If the jump at the end of the loop doesn't go to the start, we don't
6169 do anything. */
6170 for (insn = PREV_INSN (loop_start);
6171 insn && (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) >= 0);
6172 insn = PREV_INSN (insn))
6175 if (insn == 0
6176 || GET_CODE (insn) != NOTE
6177 || NOTE_LINE_NUMBER (insn) != NOTE_INSN_LOOP_BEG)
6178 return;
6180 /* If the last insn of the loop (the end test) was an NE comparison,
6181 we will interpret it as an EQ comparison, since we fell through
6182 the loop. Any equivalences resulting from that comparison are
6183 therefore not valid and must be invalidated. */
6184 if (last_jump_equiv_class)
6185 for (p = last_jump_equiv_class->first_same_value; p;
6186 p = p->next_same_value)
6188 if (GET_CODE (p->exp) == MEM || GET_CODE (p->exp) == REG
6189 || (GET_CODE (p->exp) == SUBREG
6190 && GET_CODE (SUBREG_REG (p->exp)) == REG))
6191 invalidate (p->exp, VOIDmode);
6192 else if (GET_CODE (p->exp) == STRICT_LOW_PART
6193 || GET_CODE (p->exp) == ZERO_EXTRACT)
6194 invalidate (XEXP (p->exp, 0), GET_MODE (p->exp));
6197 /* Process insns starting after LOOP_START until we hit a CALL_INSN or
6198 a CODE_LABEL (we could handle a CALL_INSN, but it isn't worth it).
6200 The only thing we do with SET_DEST is invalidate entries, so we
6201 can safely process each SET in order. It is slightly less efficient
6202 to do so, but we only want to handle the most common cases.
6204 The gen_move_insn call in cse_set_around_loop may create new pseudos.
6205 These pseudos won't have valid entries in any of the tables indexed
6206 by register number, such as reg_qty. We avoid out-of-range array
6207 accesses by not processing any instructions created after cse started. */
6209 for (insn = NEXT_INSN (loop_start);
6210 GET_CODE (insn) != CALL_INSN && GET_CODE (insn) != CODE_LABEL
6211 && INSN_UID (insn) < max_insn_uid
6212 && ! (GET_CODE (insn) == NOTE
6213 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END);
6214 insn = NEXT_INSN (insn))
6216 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
6217 && (GET_CODE (PATTERN (insn)) == SET
6218 || GET_CODE (PATTERN (insn)) == CLOBBER))
6219 cse_set_around_loop (PATTERN (insn), insn, loop_start);
6220 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
6221 && GET_CODE (PATTERN (insn)) == PARALLEL)
6222 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
6223 if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET
6224 || GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == CLOBBER)
6225 cse_set_around_loop (XVECEXP (PATTERN (insn), 0, i), insn,
6226 loop_start);
6230 /* Process one SET of an insn that was skipped. We ignore CLOBBERs
6231 since they are done elsewhere. This function is called via note_stores. */
6233 static void
6234 invalidate_skipped_set (dest, set, data)
6235 rtx set;
6236 rtx dest;
6237 void *data ATTRIBUTE_UNUSED;
6239 enum rtx_code code = GET_CODE (dest);
6241 if (code == MEM
6242 && ! addr_affects_sp_p (dest) /* If this is not a stack push ... */
6243 /* There are times when an address can appear varying and be a PLUS
6244 during this scan when it would be a fixed address were we to know
6245 the proper equivalences. So invalidate all memory if there is
6246 a BLKmode or nonscalar memory reference or a reference to a
6247 variable address. */
6248 && (MEM_IN_STRUCT_P (dest) || GET_MODE (dest) == BLKmode
6249 || cse_rtx_varies_p (XEXP (dest, 0))))
6251 invalidate_memory ();
6252 return;
6255 if (GET_CODE (set) == CLOBBER
6256 #ifdef HAVE_cc0
6257 || dest == cc0_rtx
6258 #endif
6259 || dest == pc_rtx)
6260 return;
6262 if (code == STRICT_LOW_PART || code == ZERO_EXTRACT)
6263 invalidate (XEXP (dest, 0), GET_MODE (dest));
6264 else if (code == REG || code == SUBREG || code == MEM)
6265 invalidate (dest, VOIDmode);
6268 /* Invalidate all insns from START up to the end of the function or the
6269 next label. This is called when we wish to CSE around a block that is
6270 conditionally executed. */
6272 static void
6273 invalidate_skipped_block (start)
6274 rtx start;
6276 rtx insn;
6278 for (insn = start; insn && GET_CODE (insn) != CODE_LABEL;
6279 insn = NEXT_INSN (insn))
6281 if (GET_RTX_CLASS (GET_CODE (insn)) != 'i')
6282 continue;
6284 if (GET_CODE (insn) == CALL_INSN)
6286 if (! CONST_CALL_P (insn))
6287 invalidate_memory ();
6288 invalidate_for_call ();
6291 invalidate_from_clobbers (PATTERN (insn));
6292 note_stores (PATTERN (insn), invalidate_skipped_set, NULL);
6296 /* If modifying X will modify the value in *DATA (which is really an
6297 `rtx *'), indicate that fact by setting the pointed to value to
6298 NULL_RTX. */
6300 static void
6301 cse_check_loop_start (x, set, data)
6302 rtx x;
6303 rtx set ATTRIBUTE_UNUSED;
6304 void *data;
6306 rtx *cse_check_loop_start_value = (rtx *) data;
6308 if (*cse_check_loop_start_value == NULL_RTX
6309 || GET_CODE (x) == CC0 || GET_CODE (x) == PC)
6310 return;
6312 if ((GET_CODE (x) == MEM && GET_CODE (*cse_check_loop_start_value) == MEM)
6313 || reg_overlap_mentioned_p (x, *cse_check_loop_start_value))
6314 *cse_check_loop_start_value = NULL_RTX;
6317 /* X is a SET or CLOBBER contained in INSN that was found near the start of
6318 a loop that starts with the label at LOOP_START.
6320 If X is a SET, we see if its SET_SRC is currently in our hash table.
6321 If so, we see if it has a value equal to some register used only in the
6322 loop exit code (as marked by jump.c).
6324 If those two conditions are true, we search backwards from the start of
6325 the loop to see if that same value was loaded into a register that still
6326 retains its value at the start of the loop.
6328 If so, we insert an insn after the load to copy the destination of that
6329 load into the equivalent register and (try to) replace our SET_SRC with that
6330 register.
6332 In any event, we invalidate whatever this SET or CLOBBER modifies. */
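/* A sketch of the transformation (all registers hypothetical).
   Suppose reg 107 is marked REG_LOOP_TEST_P and is known equal to
   (mem:SI (reg 100)):

       p:          (set (reg 105) (mem:SI (reg 100)))
       loop_start: CODE_LABEL
       insn:       (set (reg 106) (mem:SI (reg 100)))

   If nothing between P and LOOP_START stores into what the MEM
   references, we emit (set (reg 107) (reg 105)) after P and turn
   INSN into (set (reg 106) (reg 107)).  */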
6334 static void
6335 cse_set_around_loop (x, insn, loop_start)
6336 rtx x;
6337 rtx insn;
6338 rtx loop_start;
6340 struct table_elt *src_elt;
6342 /* If this is a SET, see if we can replace SET_SRC, but ignore SETs that
6343 are setting PC or CC0 or whose SET_SRC is already a register. */
6344 if (GET_CODE (x) == SET
6345 && GET_CODE (SET_DEST (x)) != PC && GET_CODE (SET_DEST (x)) != CC0
6346 && GET_CODE (SET_SRC (x)) != REG)
6348 src_elt = lookup (SET_SRC (x),
6349 HASH (SET_SRC (x), GET_MODE (SET_DEST (x))),
6350 GET_MODE (SET_DEST (x)));
6352 if (src_elt)
6353 for (src_elt = src_elt->first_same_value; src_elt;
6354 src_elt = src_elt->next_same_value)
6355 if (GET_CODE (src_elt->exp) == REG && REG_LOOP_TEST_P (src_elt->exp)
6356 && COST (src_elt->exp) < COST (SET_SRC (x)))
6358 rtx p, set;
6360 /* Look for an insn in front of LOOP_START that sets
6361 something in the desired mode to SET_SRC (x) before we hit
6362 a label or CALL_INSN. */
6364 for (p = prev_nonnote_insn (loop_start);
6365 p && GET_CODE (p) != CALL_INSN
6366 && GET_CODE (p) != CODE_LABEL;
6367 p = prev_nonnote_insn (p))
6368 if ((set = single_set (p)) != 0
6369 && GET_CODE (SET_DEST (set)) == REG
6370 && GET_MODE (SET_DEST (set)) == src_elt->mode
6371 && rtx_equal_p (SET_SRC (set), SET_SRC (x)))
6373 /* We now have to ensure that nothing between P
6374 and LOOP_START modified anything referenced in
6375 SET_SRC (x). We know that nothing within the loop
6376 can modify it, or we would have invalidated it in
6377 the hash table. */
6378 rtx q;
6379 rtx cse_check_loop_start_value = SET_SRC (x);
6380 for (q = p; q != loop_start; q = NEXT_INSN (q))
6381 if (GET_RTX_CLASS (GET_CODE (q)) == 'i')
6382 note_stores (PATTERN (q),
6383 cse_check_loop_start,
6384 &cse_check_loop_start_value);
6386 /* If nothing was changed and we can replace our
6387 SET_SRC, add an insn after P to copy its destination
6388 to what we will be replacing SET_SRC with. */
6389 if (cse_check_loop_start_value
6390 && validate_change (insn, &SET_SRC (x),
6391 src_elt->exp, 0))
6393 /* If this creates new pseudos, this is unsafe,
6394 because the regno of a new pseudo is unsuitable
6395 to index into reg_qty when cse_insn processes
6396 the new insn. Therefore, if a new pseudo was
6397 created, discard this optimization. */
6398 int nregs = max_reg_num ();
6399 rtx move
6400 = gen_move_insn (src_elt->exp, SET_DEST (set));
6401 if (nregs != max_reg_num ())
6403 if (! validate_change (insn, &SET_SRC (x),
6404 SET_SRC (set), 0))
6405 abort ();
6407 else
6408 emit_insn_after (move, p);
6410 break;
6415 /* Deal with the destination of X affecting the stack pointer. */
6416 addr_affects_sp_p (SET_DEST (x));
6418 /* See comment on similar code in cse_insn for explanation of these
6419 tests. */
6420 if (GET_CODE (SET_DEST (x)) == REG || GET_CODE (SET_DEST (x)) == SUBREG
6421 || GET_CODE (SET_DEST (x)) == MEM)
6422 invalidate (SET_DEST (x), VOIDmode);
6423 else if (GET_CODE (SET_DEST (x)) == STRICT_LOW_PART
6424 || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
6425 invalidate (XEXP (SET_DEST (x), 0), GET_MODE (SET_DEST (x)));
6428 /* Find the end of INSN's basic block and return its range,
6429 the total number of SETs in all the insns of the block, the last insn of the
6430 block, and the branch path.
6432 The branch path indicates which branches should be followed. If a non-zero
6433 path size is specified, the block should be rescanned and a different set
6434 of branches will be taken. The branch path is only used if
6435 FLAG_CSE_FOLLOW_JUMPS or FLAG_CSE_SKIP_BLOCKS is non-zero.
6437 DATA is a pointer to a struct cse_basic_block_data, defined below, that is
6438 used to describe the block. It is filled in with the information about
6439 the current block. The incoming structure's branch path, if any, is used
6440 to construct the output branch path. */
6442 void
6443 cse_end_of_basic_block (insn, data, follow_jumps, after_loop, skip_blocks)
6444 rtx insn;
6445 struct cse_basic_block_data *data;
6446 int follow_jumps;
6447 int after_loop;
6448 int skip_blocks;
6450 rtx p = insn, q;
6451 int nsets = 0;
6452 int low_cuid = INSN_CUID (insn), high_cuid = INSN_CUID (insn);
6453 rtx next = GET_RTX_CLASS (GET_CODE (insn)) == 'i' ? insn : next_real_insn (insn);
6454 int path_size = data->path_size;
6455 int path_entry = 0;
6456 int i;
6458 /* Update the previous branch path, if any. If the last branch was
6459 previously TAKEN, mark it NOT_TAKEN. If it was previously NOT_TAKEN,
6460 shorten the path by one and look at the previous branch. We know that
6461 at least one branch must have been taken if PATH_SIZE is non-zero. */
6462 while (path_size > 0)
6464 if (data->path[path_size - 1].status != NOT_TAKEN)
6466 data->path[path_size - 1].status = NOT_TAKEN;
6467 break;
6469 else
6470 path_size--;
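/* E.g., a previous path [TAKEN, TAKEN, NOT_TAKEN] becomes
   [TAKEN, NOT_TAKEN]: the trailing NOT_TAKEN entry is popped and
   the last taken branch is flipped, so successive rescans work
   back through the previously taken branches.  */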
6473 /* If the first instruction is marked with QImode, that means we've
6474 already processed this block. Our caller will look at DATA->LAST
6475 to figure out where to go next. We want to return the next block
6476 in the instruction stream, not some branched-to block somewhere
6477 else. We accomplish this by pretending our caller forbade us to
6478 follow jumps or skip blocks.
6479 if (GET_MODE (insn) == QImode)
6480 follow_jumps = skip_blocks = 0;
6482 /* Scan to end of this basic block. */
6483 while (p && GET_CODE (p) != CODE_LABEL)
6485 /* Don't cse out the end of a loop. This makes a difference
6486 only for the unusual loops that always execute at least once;
6487 all other loops have labels there so we will stop in any case.
6488 Cse'ing out the end of the loop is dangerous because it
6489 might cause an invariant expression inside the loop
6490 to be reused after the end of the loop. This would make it
6491 hard to move the expression out of the loop in loop.c,
6492 especially if it is one of several equivalent expressions
6493 and loop.c would like to eliminate it.
6495 If we are running after loop.c has finished, we can ignore
6496 the NOTE_INSN_LOOP_END. */
6498 if (! after_loop && GET_CODE (p) == NOTE
6499 && NOTE_LINE_NUMBER (p) == NOTE_INSN_LOOP_END)
6500 break;
6502 /* Don't cse over a call to setjmp; on some machines (e.g., the VAX)
6503 the regs restored by the longjmp come from
6504 a later time than the setjmp. */
6505 if (GET_CODE (p) == NOTE
6506 && NOTE_LINE_NUMBER (p) == NOTE_INSN_SETJMP)
6507 break;
6509 /* A PARALLEL can have lots of SETs in it,
6510 especially if it is really an ASM_OPERANDS. */
6511 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
6512 && GET_CODE (PATTERN (p)) == PARALLEL)
6513 nsets += XVECLEN (PATTERN (p), 0);
6514 else if (GET_CODE (p) != NOTE)
6515 nsets += 1;
6517 /* Ignore insns made by CSE; they cannot affect the boundaries of
6518 the basic block. */
6520 if (INSN_UID (p) <= max_uid && INSN_CUID (p) > high_cuid)
6521 high_cuid = INSN_CUID (p);
6522 if (INSN_UID (p) <= max_uid && INSN_CUID (p) < low_cuid)
6523 low_cuid = INSN_CUID (p);
6525 /* See if this insn is in our branch path. If it is and we are to
6526 take it, do so. */
6527 if (path_entry < path_size && data->path[path_entry].branch == p)
6529 if (data->path[path_entry].status != NOT_TAKEN)
6530 p = JUMP_LABEL (p);
6532 /* Point to next entry in path, if any. */
6533 path_entry++;
6536 /* If this is a conditional jump, we can follow it if -fcse-follow-jumps
6537 was specified, we haven't reached our maximum path length, there are
6538 insns following the target of the jump, this is the only use of the
6539 jump label, and the target label is preceded by a BARRIER.
6541 Alternatively, we can follow the jump if it branches around a
6542 block of code and there are no other branches into the block.
6543 In this case invalidate_skipped_block will be called to invalidate any
6544 registers set in the block when following the jump. */
6546 else if ((follow_jumps || skip_blocks) && path_size < PATHLENGTH - 1
6547 && GET_CODE (p) == JUMP_INSN
6548 && GET_CODE (PATTERN (p)) == SET
6549 && GET_CODE (SET_SRC (PATTERN (p))) == IF_THEN_ELSE
6550 && JUMP_LABEL (p) != 0
6551 && LABEL_NUSES (JUMP_LABEL (p)) == 1
6552 && NEXT_INSN (JUMP_LABEL (p)) != 0)
6554 for (q = PREV_INSN (JUMP_LABEL (p)); q; q = PREV_INSN (q))
6555 if ((GET_CODE (q) != NOTE
6556 || NOTE_LINE_NUMBER (q) == NOTE_INSN_LOOP_END
6557 || NOTE_LINE_NUMBER (q) == NOTE_INSN_SETJMP)
6558 && (GET_CODE (q) != CODE_LABEL || LABEL_NUSES (q) != 0))
6559 break;
6561 /* If we ran into a BARRIER, this code is an extension of the
6562 basic block when the branch is taken. */
6563 if (follow_jumps && q != 0 && GET_CODE (q) == BARRIER)
6565 /* Don't allow ourselves to keep walking around an
6566 always-executed loop. */
6567 if (next_real_insn (q) == next)
6569 p = NEXT_INSN (p);
6570 continue;
6573 /* Similarly, don't put a branch in our path more than once. */
6574 for (i = 0; i < path_entry; i++)
6575 if (data->path[i].branch == p)
6576 break;
6578 if (i != path_entry)
6579 break;
6581 data->path[path_entry].branch = p;
6582 data->path[path_entry++].status = TAKEN;
6584 /* This branch now ends our path. It was possible that we
6585 didn't see this branch the last time around (when the
6586 insn in front of the target was a JUMP_INSN that was
6587 turned into a no-op). */
6588 path_size = path_entry;
6590 p = JUMP_LABEL (p);
6591 /* Mark block so we won't scan it again later. */
6592 PUT_MODE (NEXT_INSN (p), QImode);
6594 /* Detect a branch around a block of code. */
6595 else if (skip_blocks && q != 0 && GET_CODE (q) != CODE_LABEL)
6597 register rtx tmp;
6599 if (next_real_insn (q) == next)
6601 p = NEXT_INSN (p);
6602 continue;
6605 for (i = 0; i < path_entry; i++)
6606 if (data->path[i].branch == p)
6607 break;
6609 if (i != path_entry)
6610 break;
6612 /* This is no_labels_between_p (p, q) with an added check for
6613 reaching the end of a function (in case Q precedes P). */
6614 for (tmp = NEXT_INSN (p); tmp && tmp != q; tmp = NEXT_INSN (tmp))
6615 if (GET_CODE (tmp) == CODE_LABEL)
6616 break;
6618 if (tmp == q)
6620 data->path[path_entry].branch = p;
6621 data->path[path_entry++].status = AROUND;
6623 path_size = path_entry;
6625 p = JUMP_LABEL (p);
6626 /* Mark block so we won't scan it again later. */
6627 PUT_MODE (NEXT_INSN (p), QImode);
6631 p = NEXT_INSN (p);
6634 data->low_cuid = low_cuid;
6635 data->high_cuid = high_cuid;
6636 data->nsets = nsets;
6637 data->last = p;
6639 /* If all jumps in the path are not taken, set our path length to zero
6640 so a rescan won't be done. */
6641 for (i = path_size - 1; i >= 0; i--)
6642 if (data->path[i].status != NOT_TAKEN)
6643 break;
6645 if (i == -1)
6646 data->path_size = 0;
6647 else
6648 data->path_size = path_size;
6650 /* End the current branch path. */
6651 data->path[path_size].branch = 0;
6654 /* Perform cse on the instructions of a function.
6655 F is the first instruction.
6656 NREGS is one plus the highest pseudo-reg number used in the function.
6658 AFTER_LOOP is 1 if this is the cse call done after loop optimization
6659 (only if -frerun-cse-after-loop).
6661 Returns 1 if jump_optimize should be redone due to simplifications
6662 in conditional jump instructions. */
6664 int
6665 cse_main (f, nregs, after_loop, file)
6666 rtx f;
6667 int nregs;
6668 int after_loop;
6669 FILE *file;
6671 struct cse_basic_block_data val;
6672 register rtx insn = f;
6673 register int i;
6675 cse_jumps_altered = 0;
6676 recorded_label_ref = 0;
6677 constant_pool_entries_cost = 0;
6678 val.path_size = 0;
6680 init_recog ();
6681 init_alias_analysis ();
6683 max_reg = nregs;
6685 max_insn_uid = get_max_uid ();
6687 reg_eqv_table = (struct reg_eqv_elem *)
6688 xmalloc (nregs * sizeof (struct reg_eqv_elem));
6690 #ifdef LOAD_EXTEND_OP
6692 /* Allocate scratch rtl here. cse_insn will fill in the memory reference
6693 and change the code and mode as appropriate. */
6694 memory_extend_rtx = gen_rtx_ZERO_EXTEND (VOIDmode, NULL_RTX);
6695 #endif
6697 /* Discard all the free elements of the previous function
6698 since they are allocated in the temporary obstack.
6699 bzero ((char *) table, sizeof table);
6700 free_element_chain = 0;
6701 n_elements_made = 0;
6703 /* Find the largest uid. */
6705 max_uid = get_max_uid ();
6706 uid_cuid = (int *) xcalloc (max_uid + 1, sizeof (int));
6708 /* Compute the mapping from uids to cuids.
6709 CUIDs are numbers assigned to insns, like uids,
6710 except that cuids increase monotonically through the code.
6711 Don't assign cuids to line-number NOTEs, so that the distance in cuids
6712 between two insns is not affected by -g. */
6714 for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
6716 if (GET_CODE (insn) != NOTE
6717 || NOTE_LINE_NUMBER (insn) < 0)
6718 INSN_CUID (insn) = ++i;
6719 else
6720 /* Give a line number note the same cuid as preceding insn. */
6721 INSN_CUID (insn) = i;
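/* For instance (uids hypothetical): non-note insns with uids 7, 3
   and 9, in that order, get cuids 1, 2 and 3; a line-number note
   between the last two would share cuid 2, so cuid distances are
   the same with or without -g.  */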
6724 /* Initialize which registers are clobbered by calls. */
6726 CLEAR_HARD_REG_SET (regs_invalidated_by_call);
6728 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
6729 if ((call_used_regs[i]
6730 /* Used to check !fixed_regs[i] here, but that isn't safe;
6731 fixed regs are still call-clobbered, and sched can get
6732 confused if they can "live across calls".
6734 The frame pointer is always preserved across calls. The arg
6735 pointer is preserved if it is fixed. The stack pointer usually is, unless
6736 RETURN_POPS_ARGS, in which case an explicit CLOBBER
6737 will be present. If we are generating PIC code, the PIC offset
6738 table register is preserved across calls. */
6740 && i != STACK_POINTER_REGNUM
6741 && i != FRAME_POINTER_REGNUM
6742 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
6743 && i != HARD_FRAME_POINTER_REGNUM
6744 #endif
6745 #if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
6746 && ! (i == ARG_POINTER_REGNUM && fixed_regs[i])
6747 #endif
6748 #if defined (PIC_OFFSET_TABLE_REGNUM) && !defined (PIC_OFFSET_TABLE_REG_CALL_CLOBBERED)
6749 && ! (i == PIC_OFFSET_TABLE_REGNUM && flag_pic)
6750 #endif
6752 || global_regs[i])
6753 SET_HARD_REG_BIT (regs_invalidated_by_call, i);
6755 if (ggc_p)
6756 ggc_push_context ();
6758 /* Loop over basic blocks.
6759 Compute the maximum number of qty's needed for each basic block
6760 (which is 2 for each SET). */
6761 insn = f;
6762 while (insn)
6764 cse_end_of_basic_block (insn, &val, flag_cse_follow_jumps, after_loop,
6765 flag_cse_skip_blocks);
6767 /* If this basic block was already processed or has no sets, skip it. */
6768 if (val.nsets == 0 || GET_MODE (insn) == QImode)
6770 PUT_MODE (insn, VOIDmode);
6771 insn = (val.last ? NEXT_INSN (val.last) : 0);
6772 val.path_size = 0;
6773 continue;
6776 cse_basic_block_start = val.low_cuid;
6777 cse_basic_block_end = val.high_cuid;
6778 max_qty = val.nsets * 2;
6780 if (file)
6781 fnotice (file, ";; Processing block from %d to %d, %d sets.\n",
6782 INSN_UID (insn), val.last ? INSN_UID (val.last) : 0,
6783 val.nsets);
6785 /* Make MAX_QTY bigger to give us room to optimize
6786 past the end of this basic block, if that should prove useful. */
6787 if (max_qty < 500)
6788 max_qty = 500;
6790 max_qty += max_reg;
6792 /* If this basic block is being extended by following certain jumps,
6793 (see `cse_end_of_basic_block'), we reprocess the code from the start.
6794 Otherwise, we start after this basic block. */
6795 if (val.path_size > 0)
6796 cse_basic_block (insn, val.last, val.path, 0);
6797 else
6799 int old_cse_jumps_altered = cse_jumps_altered;
6800 rtx temp;
6802 /* When cse changes a conditional jump to an unconditional
6803 jump, we want to reprocess the block, since it will give
6804 us a new branch path to investigate. */
6805 cse_jumps_altered = 0;
6806 temp = cse_basic_block (insn, val.last, val.path, ! after_loop);
6807 if (cse_jumps_altered == 0
6808 || (flag_cse_follow_jumps == 0 && flag_cse_skip_blocks == 0))
6809 insn = temp;
6811 cse_jumps_altered |= old_cse_jumps_altered;
6814 if (ggc_p)
6815 ggc_collect ();
6817 #ifdef USE_C_ALLOCA
6818 alloca (0);
6819 #endif
6822 if (ggc_p)
6823 ggc_pop_context ();
6825 if (max_elements_made < n_elements_made)
6826 max_elements_made = n_elements_made;
6828 /* Clean up. */
6829 end_alias_analysis ();
6830 free (uid_cuid);
6831 free (reg_eqv_table);
6833 return cse_jumps_altered || recorded_label_ref;
6836 /* Process a single basic block. FROM and TO are the limits of the basic
6837 block. NEXT_BRANCH points to the branch path when following jumps or
6838 a null path when not following jumps.
6840 AROUND_LOOP is non-zero if we are to try to cse around to the start of a
6841 loop. This is true when we are being called for the last time on a
6842 block and this CSE pass is before loop.c. */
6844 static rtx
6845 cse_basic_block (from, to, next_branch, around_loop)
6846 register rtx from, to;
6847 struct branch_path *next_branch;
6848 int around_loop;
6850 register rtx insn;
6851 int to_usage = 0;
6852 rtx libcall_insn = NULL_RTX;
6853 int num_insns = 0;
6855 /* This array is undefined before max_reg, so only allocate
6856 the space actually needed and adjust the start. */
6858 qty_table
6859 = (struct qty_table_elem *) xmalloc ((max_qty - max_reg)
6860 * sizeof (struct qty_table_elem));
6861 qty_table -= max_reg;
6863 new_basic_block ();
6865 /* TO might be a label. If so, protect it from being deleted. */
6866 if (to != 0 && GET_CODE (to) == CODE_LABEL)
6867 ++LABEL_NUSES (to);
6869 for (insn = from; insn != to; insn = NEXT_INSN (insn))
6871 register enum rtx_code code = GET_CODE (insn);
6873 /* If we have processed 1,000 insns, flush the hash table to
6874 avoid extreme quadratic behavior. We must not include NOTEs
6875 in the count since there may be more of them when generating
6876 debugging information. If we clear the table at different
6877 times, code generated with -g -O might be different than code
6878 generated with -O but not -g.
6880 ??? This is a real kludge and needs to be done some other way.
6881 Perhaps for 2.9. */
6882 if (code != NOTE && num_insns++ > 1000)
6884 flush_hash_table ();
6885 num_insns = 0;
6888 /* See if this is a branch that is part of the path. If so, and it is
6889 to be taken, do so. */
6890 if (next_branch->branch == insn)
6892 enum taken status = next_branch++->status;
6893 if (status != NOT_TAKEN)
6895 if (status == TAKEN)
6896 record_jump_equiv (insn, 1);
6897 else
6898 invalidate_skipped_block (NEXT_INSN (insn));
6900 /* Set the last insn as the jump insn; it doesn't affect cc0.
6901 Then follow this branch. */
6902 #ifdef HAVE_cc0
6903 prev_insn_cc0 = 0;
6904 #endif
6905 prev_insn = insn;
6906 insn = JUMP_LABEL (insn);
6907 continue;
6911 if (GET_MODE (insn) == QImode)
6912 PUT_MODE (insn, VOIDmode);
6914 if (GET_RTX_CLASS (code) == 'i')
6916 rtx p;
6918 /* Process notes first so we have all notes in canonical forms when
6919 looking for duplicate operations. */
6921 if (REG_NOTES (insn))
6922 REG_NOTES (insn) = cse_process_notes (REG_NOTES (insn), NULL_RTX);
6924 /* Track when we are inside a LIBCALL block. Inside such a block,
6925 we do not want to record destinations. The last insn of a
6926 LIBCALL block is not considered to be part of the block, since
6927 its destination is the result of the block and hence should be
6928 recorded. */
6930 if ((p = find_reg_note (insn, REG_LIBCALL, NULL_RTX)))
6931 libcall_insn = XEXP (p, 0);
6932 else if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
6933 libcall_insn = NULL_RTX;
6935 cse_insn (insn, libcall_insn);
6938 /* If INSN is now an unconditional jump, skip to the end of our
6939 basic block by pretending that we just did the last insn in the
6940 basic block. If we are jumping to the end of our block, show
6941 that we can have one usage of TO. */
6943 if (simplejump_p (insn))
6945 if (to == 0)
6947 free (qty_table + max_reg);
6948 return 0;
6951 if (JUMP_LABEL (insn) == to)
6952 to_usage = 1;
6954 /* Maybe TO was deleted because the jump is unconditional.
6955 If so, there is nothing left in this basic block. */
6956 /* ??? Perhaps it would be smarter to set TO
6957 to whatever follows this insn,
6958 and pretend the basic block had always ended here. */
6959 if (INSN_DELETED_P (to))
6960 break;
6962 insn = PREV_INSN (to);
6965 /* See if it is ok to keep on going past the label
6966 which used to end our basic block. Remember that we incremented
6967 the count of that label, so we decrement it here. If we made
6968 a jump unconditional, TO_USAGE will be one; in that case, we don't
6969 want to count the use in that jump. */
6971 if (to != 0 && NEXT_INSN (insn) == to
6972 && GET_CODE (to) == CODE_LABEL && --LABEL_NUSES (to) == to_usage)
6974 struct cse_basic_block_data val;
6975 rtx prev;
6977 insn = NEXT_INSN (to);
6979 /* If TO was the last insn in the function, we are done. */
6980 if (insn == 0)
6982 free (qty_table + max_reg);
6983 return 0;
6986 /* If TO was preceded by a BARRIER we are done with this block
6987 because it has no continuation. */
6988 prev = prev_nonnote_insn (to);
6989 if (prev && GET_CODE (prev) == BARRIER)
6991 free (qty_table + max_reg);
6992 return insn;
6995 /* Find the end of the following block. Note that we won't be
6996 following branches in this case. */
6997 to_usage = 0;
6998 val.path_size = 0;
6999 cse_end_of_basic_block (insn, &val, 0, 0, 0);
7001 /* If the tables we allocated have enough space left
7002 to handle all the SETs in the next basic block,
7003 continue through it. Otherwise, return,
7004 and that block will be scanned individually. */
7005 if (val.nsets * 2 + next_qty > max_qty)
7006 break;
7008 cse_basic_block_start = val.low_cuid;
7009 cse_basic_block_end = val.high_cuid;
7010 to = val.last;
7012 /* Prevent TO from being deleted if it is a label. */
7013 if (to != 0 && GET_CODE (to) == CODE_LABEL)
7014 ++LABEL_NUSES (to);
7016 /* Back up so we process the first insn in the extension. */
7017 insn = PREV_INSN (insn);
7021 if (next_qty > max_qty)
7022 abort ();
7024 /* If we are running before loop.c, we stopped on a NOTE_INSN_LOOP_END, and
7025 the previous insn is the only insn that branches to the head of a loop,
7026 we can cse into the loop. Don't do this if we changed the jump
7027 structure of a loop unless we aren't going to be following jumps. */
7029 if ((cse_jumps_altered == 0
7030 || (flag_cse_follow_jumps == 0 && flag_cse_skip_blocks == 0))
7031 && around_loop && to != 0
7032 && GET_CODE (to) == NOTE && NOTE_LINE_NUMBER (to) == NOTE_INSN_LOOP_END
7033 && GET_CODE (PREV_INSN (to)) == JUMP_INSN
7034 && JUMP_LABEL (PREV_INSN (to)) != 0
7035 && LABEL_NUSES (JUMP_LABEL (PREV_INSN (to))) == 1)
7036 cse_around_loop (JUMP_LABEL (PREV_INSN (to)));
7038 free (qty_table + max_reg);
7040 return to ? NEXT_INSN (to) : 0;
7043 /* Count the number of times registers are used (not set) in X.
7044 COUNTS is an array in which we accumulate the count, INCR is how much
7045 we count each register usage.
7047 Don't count a usage of DEST, which is the SET_DEST of a SET which
7048 contains X in its SET_SRC. This is because such a SET does not
7049 modify the liveness of DEST. */
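/* Example (register numbers hypothetical): for

       (set (reg 100) (plus (reg 100) (reg 101)))

   the SET_SRC is counted with DEST == (reg 100), so only reg 101
   is incremented; a count of zero for reg 100 then lets
   delete_trivially_dead_insns remove the insn even though it
   mentions its own destination.  */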
7051 static void
7052 count_reg_usage (x, counts, dest, incr)
7053 rtx x;
7054 int *counts;
7055 rtx dest;
7056 int incr;
7058 enum rtx_code code;
7059 const char *fmt;
7060 int i, j;
7062 if (x == 0)
7063 return;
7065 switch (code = GET_CODE (x))
7067 case REG:
7068 if (x != dest)
7069 counts[REGNO (x)] += incr;
7070 return;
7072 case PC:
7073 case CC0:
7074 case CONST:
7075 case CONST_INT:
7076 case CONST_DOUBLE:
7077 case SYMBOL_REF:
7078 case LABEL_REF:
7079 return;
7081 case CLOBBER:
7082 /* If we are clobbering a MEM, mark any registers inside the address
7083 as being used. */
7084 if (GET_CODE (XEXP (x, 0)) == MEM)
7085 count_reg_usage (XEXP (XEXP (x, 0), 0), counts, NULL_RTX, incr);
7086 return;
7088 case SET:
7089 /* Unless we are setting a REG, count everything in SET_DEST. */
7090 if (GET_CODE (SET_DEST (x)) != REG)
7091 count_reg_usage (SET_DEST (x), counts, NULL_RTX, incr);
7093 /* If SRC has side-effects, then we can't delete this insn, so the
7094 usage of SET_DEST inside SRC counts.
7096 ??? Strictly-speaking, we might be preserving this insn
7097 because some other SET has side-effects, but that's hard
7098 to do and can't happen now. */
7099 count_reg_usage (SET_SRC (x), counts,
7100 side_effects_p (SET_SRC (x)) ? NULL_RTX : SET_DEST (x),
7101 incr);
7102 return;
7104 case CALL_INSN:
7105 count_reg_usage (CALL_INSN_FUNCTION_USAGE (x), counts, NULL_RTX, incr);
7107 /* ... falls through ... */
7108 case INSN:
7109 case JUMP_INSN:
7110 count_reg_usage (PATTERN (x), counts, NULL_RTX, incr);
7112 /* Things used in a REG_EQUAL note aren't dead since loop may try to
7113 use them. */
7115 count_reg_usage (REG_NOTES (x), counts, NULL_RTX, incr);
7116 return;
7118 case EXPR_LIST:
7119 case INSN_LIST:
7120 if (REG_NOTE_KIND (x) == REG_EQUAL
7121 || (REG_NOTE_KIND (x) != REG_NONNEG && GET_CODE (XEXP (x,0)) == USE))
7122 count_reg_usage (XEXP (x, 0), counts, NULL_RTX, incr);
7123 count_reg_usage (XEXP (x, 1), counts, NULL_RTX, incr);
7124 return;
7126 default:
7127 break;
7130 fmt = GET_RTX_FORMAT (code);
7131 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7133 if (fmt[i] == 'e')
7134 count_reg_usage (XEXP (x, i), counts, dest, incr);
7135 else if (fmt[i] == 'E')
7136 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
7137 count_reg_usage (XVECEXP (x, i, j), counts, dest, incr);
7141 /* Scan all the insns and delete any that are dead; i.e., they store a register
7142 that is never used or they copy a register to itself.
7144 This is used to remove insns made obviously dead by cse, loop or other
7145 optimizations. It improves the heuristics in loop since it won't try to
7146 move dead invariants out of loops or make givs for dead quantities. The
7147 remaining passes of the compilation are also sped up. */
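/* Two typical deletions (register numbers hypothetical):

       (set (reg 100) (reg 100))                   a copy to itself
       (set (reg 105) (plus (reg 106) (reg 107)))  when counts[105] == 0

   assuming in each case that the source has no side effects and
   the destination is a pseudo other than the internal arg
   pointer.  */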
7149 void
7150 delete_trivially_dead_insns (insns, nreg)
7151 rtx insns;
7152 int nreg;
7154 int *counts;
7155 rtx insn, prev;
7156 #ifdef HAVE_cc0
7157 rtx tem;
7158 #endif
7159 int i;
7160 int in_libcall = 0, dead_libcall = 0;
7162 /* First count the number of times each register is used. */
7163 counts = (int *) xcalloc (nreg, sizeof (int));
7164 for (insn = next_real_insn (insns); insn; insn = next_real_insn (insn))
7165 count_reg_usage (insn, counts, NULL_RTX, 1);
7167 /* Go from the last insn to the first and delete insns that only set unused
7168 registers or copy a register to itself. As we delete an insn, we decrement
7169 the usage counts of the registers it uses.
7171 The first jump optimization pass may leave a real insn as the last
7172 insn in the function. We must not skip that insn or we may end
7173 up deleting code that is not really dead. */
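/* Illustrative example (editor's addition) of the cascade this
   ordering enables:
     i1: (set (reg 8) ...)
     i2: (set (reg 7) (reg 8))
   If (reg 7) is otherwise unused, the backward scan reaches i2
   first, deletes it, and decrements counts[8]; if that drops
   counts[8] to zero, i1 is deleted too when the scan reaches it.  */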
7174 insn = get_last_insn ();
7175 if (GET_RTX_CLASS (GET_CODE (insn)) != 'i')
7176 insn = prev_real_insn (insn);
7178 for ( ; insn; insn = prev)
7179 {
7180 int live_insn = 0;
7181 rtx note;
7183 prev = prev_real_insn (insn);
7185 /* Don't delete any insns that are part of a libcall block unless
7186 we can delete the whole libcall block.
7188 The flow or loop passes might get confused if we did that. Remember
7189 that we are scanning backwards. */
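/* Illustrative sketch (editor's addition) of a libcall block:
     insn A: carries a REG_LIBCALL note pointing at insn C
     insn B: intermediate computation
     insn C: carries a REG_RETVAL note pointing back at insn A
   Deleting only B would leave A and C bracketing a block that no
   longer computes the noted value, so an insn inside the block
   stays live unless the whole block can be removed.  */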
7190 if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
7191 {
7192 in_libcall = 1;
7193 live_insn = 1;
7194 dead_libcall = 0;
7196 /* See if there's a REG_EQUAL note on this insn and try to
7197 replace the source with the REG_EQUAL expression.
7199 We assume that insns with REG_RETVALs can only be reg->reg
7200 copies at this point. */
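/* Illustrative example (editor's addition): the REG_RETVAL insn of
   a libcall block might be (set (reg 10) (reg 9)) with a REG_EQUAL
   note of (const_int 20).  If validate_change can substitute
   (const_int 20) for the source, (reg 10) no longer depends on the
   block's body, the REG_RETVAL note is removed, and the rest of the
   block can then be deleted as the backward scan proceeds.  */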
7201 note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
7202 if (note)
7203 {
7204 rtx set = single_set (insn);
7205 rtx new = simplify_rtx (XEXP (note, 0));
7207 if (!new)
7208 new = XEXP (note, 0);
7210 if (set && validate_change (insn, &SET_SRC (set), new, 0))
7211 {
7212 remove_note (insn,
7213 find_reg_note (insn, REG_RETVAL, NULL_RTX));
7214 dead_libcall = 1;
7215 }
7216 }
7217 }
7218 else if (in_libcall)
7219 live_insn = ! dead_libcall;
7220 else if (GET_CODE (PATTERN (insn)) == SET)
7221 {
7222 if ((GET_CODE (SET_DEST (PATTERN (insn))) == REG
7223 || GET_CODE (SET_DEST (PATTERN (insn))) == SUBREG)
7224 && rtx_equal_p (SET_DEST (PATTERN (insn)),
7225 SET_SRC (PATTERN (insn))))
7226 ;
7228 #ifdef HAVE_cc0
7229 else if (GET_CODE (SET_DEST (PATTERN (insn))) == CC0
7230 && ! side_effects_p (SET_SRC (PATTERN (insn)))
7231 && ((tem = next_nonnote_insn (insn)) == 0
7232 || GET_RTX_CLASS (GET_CODE (tem)) != 'i'
7233 || ! reg_referenced_p (cc0_rtx, PATTERN (tem))))
7234 ;
7235 #endif
7236 else if (GET_CODE (SET_DEST (PATTERN (insn))) != REG
7237 || REGNO (SET_DEST (PATTERN (insn))) < FIRST_PSEUDO_REGISTER
7238 || counts[REGNO (SET_DEST (PATTERN (insn)))] != 0
7239 || side_effects_p (SET_SRC (PATTERN (insn)))
7240 /* An ADDRESSOF expression can turn into a use of the
7241 internal arg pointer, so always consider the
7242 internal arg pointer live. If it is truly dead,
7243 flow will delete the initializing insn. */
7244 || (SET_DEST (PATTERN (insn))
7245 == current_function_internal_arg_pointer))
7246 live_insn = 1;
7247 }
7248 else if (GET_CODE (PATTERN (insn)) == PARALLEL)
7249 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
7250 {
7251 rtx elt = XVECEXP (PATTERN (insn), 0, i);
7253 if (GET_CODE (elt) == SET)
7254 {
7255 if ((GET_CODE (SET_DEST (elt)) == REG
7256 || GET_CODE (SET_DEST (elt)) == SUBREG)
7257 && rtx_equal_p (SET_DEST (elt), SET_SRC (elt)))
7258 ;
7260 #ifdef HAVE_cc0
7261 else if (GET_CODE (SET_DEST (elt)) == CC0
7262 && ! side_effects_p (SET_SRC (elt))
7263 && ((tem = next_nonnote_insn (insn)) == 0
7264 || GET_RTX_CLASS (GET_CODE (tem)) != 'i'
7265 || ! reg_referenced_p (cc0_rtx, PATTERN (tem))))
7266 ;
7267 #endif
7268 else if (GET_CODE (SET_DEST (elt)) != REG
7269 || REGNO (SET_DEST (elt)) < FIRST_PSEUDO_REGISTER
7270 || counts[REGNO (SET_DEST (elt))] != 0
7271 || side_effects_p (SET_SRC (elt))
7272 /* An ADDRESSOF expression can turn into a use of the
7273 internal arg pointer, so always consider the
7274 internal arg pointer live. If it is truly dead,
7275 flow will delete the initializing insn. */
7276 || (SET_DEST (elt)
7277 == current_function_internal_arg_pointer))
7278 live_insn = 1;
7279 }
7280 else if (GET_CODE (elt) != CLOBBER && GET_CODE (elt) != USE)
7281 live_insn = 1;
7282 }
7283 else
7284 live_insn = 1;
7286 /* If this is a dead insn, delete it and decrement the usage counts
7287 of the registers it uses. */
7289 if (! live_insn)
7290 {
7291 count_reg_usage (insn, counts, NULL_RTX, -1);
7292 delete_insn (insn);
7293 }
7295 if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
7296 {
7297 in_libcall = 0;
7298 dead_libcall = 0;
7299 }
7300 }
7302 /* Clean up. */
7303 free (counts);
7304 }
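
/* Editor's sketch, not part of cse.c: the two-pass shape of
   delete_trivially_dead_insns on a toy insn list, with hypothetical
   types standing in for rtx and libcalls, cc0, and hard registers
   left out -- count every use, then walk backward deleting sets of
   unused registers while decrementing the counts of their operands,
   so that chains of copies die in a single pass.  */

#include <stdio.h>

struct toy_insn
{
  int dest;        /* register set by this insn, or -1 */
  int src;         /* register read by this insn, or -1 */
  int deleted;
};

static void
delete_dead (struct toy_insn *insns, int n, int *counts)
{
  int i;

  /* Pass 1: count every register use.  */
  for (i = 0; i < n; i++)
    if (insns[i].src >= 0)
      counts[insns[i].src]++;

  /* Pass 2: scan backwards; a set of an unused register is dead.
     Decrementing the count of its source may make an earlier
     setter dead too, and the backward order catches that.  */
  for (i = n - 1; i >= 0; i--)
    if (insns[i].dest >= 0 && counts[insns[i].dest] == 0)
      {
        insns[i].deleted = 1;
        if (insns[i].src >= 0)
          counts[insns[i].src]--;
      }
}

int
main (void)
{
  /* i0: r1 = ...;  i1: r2 = r1;  nothing reads r2.  */
  struct toy_insn insns[] = { { 1, -1, 0 }, { 2, 1, 0 } };
  int counts[4] = { 0 };
  int i;

  delete_dead (insns, 2, counts);
  for (i = 0; i < 2; i++)
    printf ("insn %d: %s\n", i, insns[i].deleted ? "deleted" : "kept");
  return 0;   /* both insns are deleted */
}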